-rw-r--r--.gitignore12
-rw-r--r--.travis.yml2
-rw-r--r--CONTRIBUTING.md74
-rw-r--r--Gemfile2
-rw-r--r--README.md206
-rw-r--r--bincompat-backward.whitelist.conf27
-rw-r--r--bincompat-forward.whitelist.conf93
-rw-r--r--build-ant-macros.xml45
-rw-r--r--build.number6
-rw-r--r--build.sbt443
-rwxr-xr-xbuild.xml182
-rw-r--r--compare-build-dirs-ignore-patterns8
-rwxr-xr-xcompare-build-dirs.sh5
-rw-r--r--docs/development/scala.tools.nsc/zipfile-bug.txt93
-rw-r--r--project/ScalaTool.scala44
-rw-r--r--project/build.properties1
-rw-r--r--project/plugins.sbt1
-rw-r--r--scripts/common153
-rwxr-xr-xscripts/jobs/integrate/bootstrap579
-rwxr-xr-xscripts/jobs/integrate/ide32
-rwxr-xr-xscripts/jobs/validate/publish-core44
-rwxr-xr-xscripts/jobs/validate/test17
-rw-r--r--scripts/readproperties.awk39
-rw-r--r--spec/01-lexical-syntax.md416
-rw-r--r--spec/02-identifiers-names-and-scopes.md17
-rw-r--r--spec/03-types.md350
-rw-r--r--spec/04-basic-declarations-and-definitions.md137
-rw-r--r--spec/05-classes-and-objects.md160
-rw-r--r--spec/06-expressions.md320
-rw-r--r--spec/07-implicits.md (renamed from spec/07-implicit-parameters-and-views.md)73
-rw-r--r--spec/08-pattern-matching.md134
-rw-r--r--spec/09-top-level-definitions.md16
-rw-r--r--spec/10-xml-expressions-and-patterns.md57
-rw-r--r--spec/11-annotations.md (renamed from spec/11-user-defined-annotations.md)34
-rw-r--r--spec/12-the-scala-standard-library.md137
-rw-r--r--spec/13-syntax-summary.md46
-rw-r--r--spec/14-references.md8
-rw-r--r--spec/15-changelog.md823
-rw-r--r--spec/README.md7
-rw-r--r--spec/_includes/numbering.css8
-rw-r--r--spec/_layouts/default.yml32
-rw-r--r--spec/_layouts/toc.yml23
-rw-r--r--spec/index.md19
-rw-r--r--spec/public/favicon.ico bin 0 -> 6518 bytes
-rw-r--r--spec/public/fonts/Heuristica-Bold.woff bin 0 -> 106188 bytes
-rw-r--r--spec/public/fonts/Heuristica-BoldItalic.woff bin 0 -> 104316 bytes
-rw-r--r--spec/public/fonts/Heuristica-Regular.woff bin 0 -> 141416 bytes
-rw-r--r--spec/public/fonts/Heuristica-RegularItalic.woff bin 0 -> 104700 bytes
-rw-r--r--spec/public/fonts/LuxiMono-Bold.woff bin 0 -> 26560 bytes
-rw-r--r--spec/public/fonts/LuxiMono-BoldOblique.woff bin 0 -> 29480 bytes
-rw-r--r--spec/public/fonts/LuxiMono-Regular.woff bin 0 -> 26432 bytes
-rw-r--r--spec/public/fonts/LuxiMono-RegularOblique.woff bin 0 -> 29300 bytes
-rw-r--r--spec/public/fonts/LuxiSans-Bold.woff bin 0 -> 13592 bytes
-rw-r--r--spec/public/fonts/LuxiSans-Regular.woff bin 0 -> 13568 bytes
-rw-r--r--spec/public/images/github-logo@2x.png bin 0 -> 1753 bytes
-rw-r--r--spec/public/images/scala-spiral-white.png bin 0 -> 1442 bytes
-rw-r--r--spec/public/octicons/LICENSE.txt9
-rw-r--r--spec/public/octicons/octicons.css235
-rw-r--r--spec/public/octicons/octicons.eot bin 0 -> 31440 bytes
-rw-r--r--spec/public/octicons/octicons.svg198
-rw-r--r--spec/public/octicons/octicons.ttf bin 0 -> 31272 bytes
-rw-r--r--spec/public/octicons/octicons.woff bin 0 -> 17492 bytes
-rw-r--r--spec/public/scripts/LICENSE-highlight24
-rw-r--r--spec/public/scripts/LICENSE-toc18
-rw-r--r--spec/public/scripts/highlight.pack.js1
-rw-r--r--spec/public/scripts/main.js57
-rw-r--r--spec/public/scripts/toc.js128
-rw-r--r--spec/public/stylesheets/fonts.css73
-rw-r--r--spec/public/stylesheets/print.css15
-rw-r--r--spec/public/stylesheets/screen-small.css57
-rw-r--r--spec/public/stylesheets/screen-toc.css37
-rw-r--r--spec/public/stylesheets/screen.css227
-rw-r--r--src/actors/scala/actors/Actor.scala2
-rw-r--r--src/actors/scala/actors/LinkedQueue.java2
-rw-r--r--src/actors/scala/actors/remote/Proxy.scala2
-rw-r--r--src/actors/scala/actors/threadpool/AbstractCollection.java2
-rw-r--r--src/actors/scala/actors/threadpool/ExecutorCompletionService.java2
-rw-r--r--src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java4
-rw-r--r--src/asm/README37
-rw-r--r--src/asm/scala/tools/asm/AnnotationVisitor.java8
-rw-r--r--src/asm/scala/tools/asm/AnnotationWriter.java55
-rw-r--r--src/asm/scala/tools/asm/ByteVector.java99
-rw-r--r--src/asm/scala/tools/asm/ClassReader.java366
-rw-r--r--src/asm/scala/tools/asm/ClassVisitor.java46
-rw-r--r--src/asm/scala/tools/asm/ClassWriter.java124
-rw-r--r--src/asm/scala/tools/asm/Context.java35
-rw-r--r--src/asm/scala/tools/asm/CustomAttr.java2
-rw-r--r--src/asm/scala/tools/asm/FieldVisitor.java41
-rw-r--r--src/asm/scala/tools/asm/FieldWriter.java58
-rw-r--r--src/asm/scala/tools/asm/Frame.java23
-rw-r--r--src/asm/scala/tools/asm/Handle.java13
-rw-r--r--src/asm/scala/tools/asm/Item.java3
-rw-r--r--src/asm/scala/tools/asm/Label.java4
-rw-r--r--src/asm/scala/tools/asm/MethodVisitor.java254
-rw-r--r--src/asm/scala/tools/asm/MethodWriter.java376
-rw-r--r--src/asm/scala/tools/asm/Opcodes.java7
-rw-r--r--src/asm/scala/tools/asm/Type.java17
-rw-r--r--src/asm/scala/tools/asm/TypePath.java193
-rw-r--r--src/asm/scala/tools/asm/TypeReference.java452
-rw-r--r--src/asm/scala/tools/asm/commons/CodeSizeEvaluator.java238
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureVisitor.java7
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureWriter.java2
-rw-r--r--src/asm/scala/tools/asm/tree/AbstractInsnNode.java78
-rw-r--r--src/asm/scala/tools/asm/tree/AnnotationNode.java14
-rw-r--r--src/asm/scala/tools/asm/tree/ClassNode.java80
-rw-r--r--src/asm/scala/tools/asm/tree/FieldInsnNode.java8
-rw-r--r--src/asm/scala/tools/asm/tree/FieldNode.java76
-rw-r--r--src/asm/scala/tools/asm/tree/IincInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/InsnList.java20
-rw-r--r--src/asm/scala/tools/asm/tree/InsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/IntInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java4
-rw-r--r--src/asm/scala/tools/asm/tree/JumpInsnNode.java4
-rw-r--r--src/asm/scala/tools/asm/tree/LdcInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java157
-rw-r--r--src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/MethodInsnNode.java36
-rw-r--r--src/asm/scala/tools/asm/tree/MethodNode.java260
-rw-r--r--src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/ParameterNode.java76
-rw-r--r--src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/TryCatchBlockNode.java59
-rw-r--r--src/asm/scala/tools/asm/tree/TypeAnnotationNode.java100
-rw-r--r--src/asm/scala/tools/asm/tree/TypeInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/VarInsnNode.java3
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Analyzer.java4
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java1
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java2
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java2
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Frame.java19
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Interpreter.java14
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java2
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java2
-rw-r--r--src/asm/scala/tools/asm/util/ASMifier.java141
-rw-r--r--src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java4
-rw-r--r--src/asm/scala/tools/asm/util/CheckClassAdapter.java135
-rw-r--r--src/asm/scala/tools/asm/util/CheckFieldAdapter.java26
-rw-r--r--src/asm/scala/tools/asm/util/CheckMethodAdapter.java159
-rw-r--r--src/asm/scala/tools/asm/util/CheckSignatureAdapter.java4
-rw-r--r--src/asm/scala/tools/asm/util/Printer.java95
-rw-r--r--src/asm/scala/tools/asm/util/Textifier.java279
-rw-r--r--src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java2
-rw-r--r--src/asm/scala/tools/asm/util/TraceClassVisitor.java13
-rw-r--r--src/asm/scala/tools/asm/util/TraceFieldVisitor.java13
-rw-r--r--src/asm/scala/tools/asm/util/TraceMethodVisitor.java77
-rw-r--r--src/asm/scala/tools/asm/util/TraceSignatureVisitor.java4
-rw-r--r--src/build/bnd/scala-actors.bnd2
-rw-r--r--src/build/bnd/scala-compiler-doc.bnd3
-rw-r--r--src/build/bnd/scala-compiler-interactive.bnd3
-rw-r--r--src/build/bnd/scala-compiler.bnd4
-rw-r--r--src/build/bnd/scala-continuations-library.bnd4
-rw-r--r--src/build/bnd/scala-continuations-plugin.bnd4
-rw-r--r--src/build/bnd/scala-library.bnd1
-rw-r--r--src/build/bnd/scala-parser-combinators.bnd4
-rw-r--r--src/build/bnd/scala-reflect.bnd5
-rw-r--r--src/build/bnd/scala-swing.bnd4
-rw-r--r--src/build/bnd/scala-xml.bnd4
-rw-r--r--src/build/maven/scala-dist-pom.xml5
-rw-r--r--src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala4
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Errors.scala1
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Resolvers.scala6
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Validators.scala2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Infrastructure.scala2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Parsers.scala17
-rw-r--r--src/compiler/scala/reflect/macros/util/Helpers.scala6
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Holes.scala (renamed from src/compiler/scala/tools/reflect/quasiquotes/Holes.scala)6
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Parsers.scala (renamed from src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala)17
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Placeholders.scala (renamed from src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala)2
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala (renamed from src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala)2
-rw-r--r--src/compiler/scala/reflect/quasiquotes/Reifiers.scala (renamed from src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala)7
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala1
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala2
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala4
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala12
-rw-r--r--src/compiler/scala/tools/ant/FastScalac.scala2
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala4
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compiler.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala2
-rwxr-xr-xsrc/compiler/scala/tools/ant/templates/tool-unix.tmpl61
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl2
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala77
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala42
-rw-r--r--src/compiler/scala/tools/nsc/CompileClient.scala4
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala44
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala33
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala21
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala21
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala3
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerCommand.scala30
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala5
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala479
-rw-r--r--src/compiler/scala/tools/nsc/MainBench.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ObjectRunner.scala6
-rw-r--r--src/compiler/scala/tools/nsc/Parsing.scala35
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala6
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala5
-rw-r--r--src/compiler/scala/tools/nsc/Reporting.scala107
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala39
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala1
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala10
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala4
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala143
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala106
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala243
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala28
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala19
-rw-r--r--src/compiler/scala/tools/nsc/backend/Platform.scala14
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala55
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala3
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala123
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala401
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala288
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala716
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala551
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala25
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala204
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala114
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala880
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala1167
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala571
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala279
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala24
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala292
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala300
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala79
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala83
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala173
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala340
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala195
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala148
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala681
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala584
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala7
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala13
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala8
-rw-r--r--src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala125
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala55
-rw-r--r--src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala162
-rw-r--r--src/compiler/scala/tools/nsc/classpath/FileUtils.scala68
-rw-r--r--src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala101
-rw-r--r--src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala38
-rw-r--r--src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala26
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala180
-rw-r--r--src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala67
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala6
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala4
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala4
-rw-r--r--src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala15
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala5
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala100
-rw-r--r--src/compiler/scala/tools/nsc/reporters/StoreReporter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala22
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/settings/FscSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala282
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala165
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaVersion.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/Warnings.scala124
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala86
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala72
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala52
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala13
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala45
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala12
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala9
-rw-r--r--src/compiler/scala/tools/nsc/transform/Delambdafy.scala282
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala86
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala16
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala46
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala112
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala57
-rw-r--r--src/compiler/scala/tools/nsc/transform/Statics.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala54
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala1
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala76
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala307
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala299
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala24
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala41
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala49
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala37
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala600
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Adaptations.scala5
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala22
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala54
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala142
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala462
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala150
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala164
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala33
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala59
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala67
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala124
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala82
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala51
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala484
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala26
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassFileLookup.scala57
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala138
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/StatisticsInfo.scala4
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala11
-rw-r--r--src/compiler/scala/tools/reflect/FormatInterpolator.scala20
-rw-r--r--src/compiler/scala/tools/reflect/ReflectMain.scala8
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala5
-rw-r--r--src/compiler/scala/tools/util/Javap.scala32
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala102
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala4
-rw-r--r--src/eclipse/README.md23
-rw-r--r--src/eclipse/partest/.classpath6
-rw-r--r--src/eclipse/repl/.classpath16
-rw-r--r--src/intellij/README12
-rw-r--r--src/intellij/actors.iml.SAMPLE14
-rw-r--r--src/intellij/asm.iml.SAMPLE3
-rw-r--r--src/intellij/compiler.iml.SAMPLE19
-rw-r--r--src/intellij/forkjoin.iml.SAMPLE3
-rw-r--r--src/intellij/interactive.iml.SAMPLE16
-rw-r--r--src/intellij/library.iml.SAMPLE16
-rw-r--r--src/intellij/manual.iml.SAMPLE16
-rw-r--r--src/intellij/partest-extras.iml.SAMPLE18
-rw-r--r--src/intellij/partest-javaagent.iml.SAMPLE12
-rw-r--r--src/intellij/reflect.iml.SAMPLE16
-rw-r--r--src/intellij/repl.iml.SAMPLE16
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE278
-rw-r--r--src/intellij/scala.iml.SAMPLE3
-rw-r--r--src/intellij/scala.ipr.SAMPLE121
-rw-r--r--src/intellij/scaladoc.iml.SAMPLE21
-rw-r--r--src/intellij/scalap.iml.SAMPLE16
-rwxr-xr-xsrc/intellij/setup.sh18
-rw-r--r--src/intellij/test-junit.iml.SAMPLE23
-rw-r--r--src/intellij/test.iml.SAMPLE23
-rwxr-xr-xsrc/intellij/update.sh22
-rw-r--r--src/interactive/scala/tools/nsc/interactive/CompilerControl.scala2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/ContextTrees.scala6
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala36
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Lexer.scala2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Main.scala7
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Pickler.scala6
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala2
-rw-r--r--src/jline/LICENSE.txt33
-rw-r--r--src/jline/README.md24
-rw-r--r--src/jline/build.sbt49
-rwxr-xr-xsrc/jline/manual-test.sh9
-rw-r--r--src/jline/project/build.properties1
-rw-r--r--src/jline/project/plugins.sbt3
-rw-r--r--src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java90
-rw-r--r--src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java44
-rw-r--r--src/jline/src/main/java/scala/tools/jline/Terminal.java59
-rw-r--r--src/jline/src/main/java/scala/tools/jline/TerminalFactory.java173
-rw-r--r--src/jline/src/main/java/scala/tools/jline/TerminalSupport.java179
-rw-r--r--src/jline/src/main/java/scala/tools/jline/UnixTerminal.java248
-rw-r--r--src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java25
-rw-r--r--src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java468
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java2185
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java106
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/Key.java82
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/Operation.java291
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java108
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java398
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java193
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java37
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java25
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java35
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java133
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java39
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java79
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java22
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java106
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/history/History.java71
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java318
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java34
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/history/package-info.java22
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/package-info.java22
-rw-r--r--src/jline/src/main/java/scala/tools/jline/internal/Configuration.java127
-rw-r--r--src/jline/src/main/java/scala/tools/jline/internal/Log.java112
-rw-r--r--src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java95
-rw-r--r--src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java217
-rw-r--r--src/jline/src/main/java/scala/tools/jline/internal/package-info.java22
-rw-r--r--src/jline/src/main/java/scala/tools/jline/package-info.java22
-rw-r--r--src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties4
-rw-r--r--src/jline/src/main/resources/scala/tools/jline/keybindings.properties71
-rw-r--r--src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties71
-rw-r--r--src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java34
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java261
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java143
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java208
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java46
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java39
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java40
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java79
-rw-r--r--src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java99
-rw-r--r--src/jline/src/test/java/scala/tools/jline/example/Example.java107
-rw-r--r--src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java146
-rw-r--r--src/library/rootdoc.txt2
-rw-r--r--src/library/scala/Enumeration.scala4
-rw-r--r--src/library/scala/Mutable.scala2
-rw-r--r--src/library/scala/Option.scala31
-rw-r--r--src/library/scala/PartialFunction.scala5
-rw-r--r--src/library/scala/Predef.scala12
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/StringContext.scala128
-rw-r--r--src/library/scala/annotation/switch.scala3
-rw-r--r--src/library/scala/collection/GenMapLike.scala2
-rw-r--r--src/library/scala/collection/GenSeqLike.scala4
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala6
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala6
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala6
-rw-r--r--src/library/scala/collection/Iterable.scala2
-rw-r--r--src/library/scala/collection/IterableLike.scala15
-rw-r--r--src/library/scala/collection/IterableProxy.scala1
-rw-r--r--src/library/scala/collection/IterableViewLike.scala4
-rw-r--r--src/library/scala/collection/Iterator.scala72
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala4
-rw-r--r--src/library/scala/collection/LinearSeq.scala9
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala30
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala41
-rw-r--r--src/library/scala/collection/MapLike.scala2
-rw-r--r--src/library/scala/collection/MapProxy.scala1
-rw-r--r--src/library/scala/collection/Searching.scala12
-rw-r--r--src/library/scala/collection/SeqLike.scala87
-rw-r--r--src/library/scala/collection/SeqViewLike.scala26
-rw-r--r--src/library/scala/collection/SetLike.scala37
-rw-r--r--src/library/scala/collection/SetProxy.scala1
-rw-r--r--src/library/scala/collection/Traversable.scala2
-rw-r--r--src/library/scala/collection/TraversableLike.scala14
-rw-r--r--src/library/scala/collection/TraversableOnce.scala28
-rw-r--r--src/library/scala/collection/TraversableProxy.scala1
-rw-r--r--src/library/scala/collection/concurrent/Map.scala4
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala38
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala6
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala8
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala13
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala8
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala13
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala1
-rw-r--r--src/library/scala/collection/immutable/List.scala13
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala13
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala8
-rw-r--r--src/library/scala/collection/immutable/Queue.scala6
-rw-r--r--src/library/scala/collection/immutable/Range.scala27
-rw-r--r--src/library/scala/collection/immutable/Set.scala16
-rw-r--r--src/library/scala/collection/immutable/Stack.scala4
-rw-r--r--src/library/scala/collection/immutable/Stream.scala150
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala65
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala2
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala4
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala4
-rw-r--r--src/library/scala/collection/immutable/Vector.scala2
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala6
-rw-r--r--src/library/scala/collection/mutable/AnyRefMap.scala4
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala21
-rw-r--r--src/library/scala/collection/mutable/BitSet.scala15
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala2
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala4
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala2
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala1
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala1
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala6
-rw-r--r--src/library/scala/collection/mutable/LongMap.scala44
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala8
-rw-r--r--src/library/scala/collection/mutable/MultiMap.scala7
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala19
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala2
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala14
-rw-r--r--src/library/scala/collection/mutable/Queue.scala2
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala19
-rw-r--r--src/library/scala/collection/mutable/Stack.scala4
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala2
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala56
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala2
-rw-r--r--src/library/scala/collection/package.scala2
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala3
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala5
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala2
-rw-r--r--src/library/scala/collection/parallel/ParSetLike.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala2
-rw-r--r--src/library/scala/collection/parallel/package.scala2
-rw-r--r--src/library/scala/compat/Platform.scala14
-rw-r--r--src/library/scala/concurrent/Channel.scala17
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala74
-rw-r--r--src/library/scala/concurrent/Future.scala6
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala2
-rw-r--r--src/library/scala/concurrent/Lock.scala2
-rw-r--r--src/library/scala/concurrent/Promise.scala5
-rw-r--r--src/library/scala/concurrent/SyncVar.scala20
-rw-r--r--src/library/scala/concurrent/duration/Deadline.scala6
-rw-r--r--src/library/scala/concurrent/duration/Duration.scala7
-rw-r--r--src/library/scala/concurrent/package.scala83
-rw-r--r--src/library/scala/io/BufferedSource.scala2
-rw-r--r--src/library/scala/io/Source.scala17
-rw-r--r--src/library/scala/io/StdIn.scala2
-rw-r--r--src/library/scala/language.scala10
-rw-r--r--src/library/scala/languageFeature.scala10
-rw-r--r--src/library/scala/math/BigDecimal.scala14
-rw-r--r--src/library/scala/math/BigInt.scala6
-rw-r--r--src/library/scala/math/Ordering.scala7
-rw-r--r--src/library/scala/math/PartialOrdering.scala17
-rw-r--r--src/library/scala/reflect/ClassTag.scala50
-rw-r--r--src/library/scala/reflect/Manifest.scala3
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java6
-rw-r--r--src/library/scala/runtime/MethodCache.scala2
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala4
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala7
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala4
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala7
-rw-r--r--src/library/scala/sys/Prop.scala2
-rw-r--r--src/library/scala/sys/SystemProperties.scala2
-rw-r--r--src/library/scala/sys/package.scala9
-rw-r--r--src/library/scala/sys/process/BasicIO.scala2
-rw-r--r--src/library/scala/sys/process/ProcessLogger.scala2
-rw-r--r--src/library/scala/sys/process/package.scala2
-rw-r--r--src/library/scala/util/Either.scala8
-rw-r--r--src/library/scala/util/Properties.scala15
-rw-r--r--src/library/scala/util/Random.scala10
-rw-r--r--src/library/scala/util/Try.scala4
-rw-r--r--src/library/scala/util/control/Exception.scala2
-rw-r--r--src/library/scala/util/hashing/MurmurHash3.scala2
-rw-r--r--src/library/scala/util/matching/Regex.scala418
-rw-r--r--src/manual/scala/man1/Command.scala2
-rw-r--r--src/manual/scala/man1/scalac.scala2
-rw-r--r--src/partest-extras/scala/tools/partest/ASMConverters.scala246
-rw-r--r--src/partest-extras/scala/tools/partest/BytecodeTest.scala40
-rw-r--r--src/partest-extras/scala/tools/partest/ParserTest.scala21
-rw-r--r--src/partest-extras/scala/tools/partest/ReplTest.scala37
-rw-r--r--src/partest-extras/scala/tools/partest/instrumented/Profiler.java2
-rw-r--r--src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java2
-rw-r--r--src/reflect/scala/reflect/api/Constants.scala4
-rw-r--r--src/reflect/scala/reflect/api/Exprs.scala25
-rw-r--r--src/reflect/scala/reflect/api/FlagSets.scala6
-rw-r--r--src/reflect/scala/reflect/api/Liftables.scala2
-rw-r--r--src/reflect/scala/reflect/api/Mirror.scala2
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala6
-rw-r--r--src/reflect/scala/reflect/api/Names.scala16
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala30
-rw-r--r--src/reflect/scala/reflect/api/Quasiquotes.scala4
-rw-r--r--src/reflect/scala/reflect/api/StandardDefinitions.scala6
-rw-r--r--src/reflect/scala/reflect/api/StandardLiftables.scala2
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala7
-rw-r--r--src/reflect/scala/reflect/api/TreeCreator.scala6
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala14
-rw-r--r--src/reflect/scala/reflect/api/TypeCreator.scala2
-rw-r--r--src/reflect/scala/reflect/api/TypeTags.scala39
-rw-r--r--src/reflect/scala/reflect/api/Types.scala2
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala7
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala4
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala60
-rw-r--r--src/reflect/scala/reflect/internal/Depth.scala16
-rw-r--r--src/reflect/scala/reflect/internal/FreshNames.scala24
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Internals.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala110
-rw-r--r--src/reflect/scala/reflect/internal/Positions.scala13
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala50
-rw-r--r--src/reflect/scala/reflect/internal/ReificationSupport.scala9
-rw-r--r--src/reflect/scala/reflect/internal/Reporting.scala116
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala24
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala4
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala47
-rw-r--r--src/reflect/scala/reflect/internal/SymbolPairs.scala1
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala18
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala251
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala12
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala16
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala31
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala36
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala4
-rw-r--r--src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala2
-rw-r--r--src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala4
-rw-r--r--src/reflect/scala/reflect/internal/pickling/Translations.scala31
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala84
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala3
-rw-r--r--src/reflect/scala/reflect/internal/tpe/FindMembers.scala4
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala4
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala15
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala16
-rw-r--r--src/reflect/scala/reflect/internal/transform/PostErasure.scala1
-rw-r--r--src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala66
-rw-r--r--src/reflect/scala/reflect/internal/util/Collections.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala6
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/WeakHashSet.scala8
-rw-r--r--src/reflect/scala/reflect/io/AbstractFile.scala16
-rw-r--r--src/reflect/scala/reflect/io/VirtualFile.scala4
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala34
-rw-r--r--src/reflect/scala/reflect/macros/Attachments.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Enclosures.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Typers.scala8
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala64
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala20
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverseForce.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala16
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolTable.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala11
-rw-r--r--src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala8
-rw-r--r--src/reflect/scala/reflect/runtime/package.scala5
-rw-r--r--src/repl/scala/tools/nsc/MainGenericRunner.scala128
-rw-r--r--src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala15
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ILoop.scala316
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala97
-rw-r--r--src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala1
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JavapClass.scala420
-rw-r--r--src/repl/scala/tools/nsc/interpreter/LoopCommands.scala6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala25
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Power.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplProps.scala3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplReporter.scala37
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplStrings.scala5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/SimpleReader.scala29
-rw-r--r--src/repl/scala/tools/nsc/interpreter/package.scala11
-rw-r--r--src/scaladoc/scala/tools/ant/Scaladoc.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/ScalaDoc.scala20
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocFactory.scala6
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocParser.scala3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Index.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala15
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala1
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Settings.scala4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Universe.scala3
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala141
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala27
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala102
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala34
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala58
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala7
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala127
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala24
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala26
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css1
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js54
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png bin 0 -> 943 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css45
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js94
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Entity.scala2
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala8
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala12
-rw-r--r--src/scaladoc/scala/tools/partest/ScaladocModelTest.scala8
-rw-r--r--src/scalap/scala/tools/scalap/Arguments.scala22
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala198
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala22
-rw-r--r--test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala2
-rw-r--r--test/disabled/run/t8946.scala29
-rw-r--r--test/files/jvm/beanInfo.check6
-rw-r--r--test/files/jvm/beanInfo/C_1.scala9
-rw-r--r--test/files/jvm/beanInfo/Test_2.scala17
-rw-r--r--test/files/jvm/deprecation.check2
-rw-r--r--test/files/jvm/duration-tck.scala5
-rw-r--r--test/files/jvm/future-spec.check2
-rw-r--r--test/files/jvm/future-spec/PromiseTests.scala75
-rw-r--r--test/files/jvm/inner.scala2
-rw-r--r--test/files/jvm/innerClassAttribute.check54
-rw-r--r--test/files/jvm/innerClassAttribute/Classes_1.scala297
-rw-r--r--test/files/jvm/innerClassAttribute/JavaAnnot_1.java3
-rw-r--r--test/files/jvm/innerClassAttribute/Java_A_1.java10
-rw-r--r--test/files/jvm/innerClassAttribute/Test.scala592
-rw-r--r--test/files/jvm/innerClassEnclMethodJavaReflection.scala65
-rw-r--r--test/files/jvm/interpreter.check6
-rw-r--r--test/files/jvm/javaReflection.check259
-rw-r--r--test/files/jvm/javaReflection/Classes_1.scala84
-rw-r--r--test/files/jvm/javaReflection/Test.scala137
-rw-r--r--test/files/jvm/serialization-new.check2
-rw-r--r--test/files/jvm/serialization.check2
-rw-r--r--test/files/jvm/t6941/test.scala4
-rw-r--r--test/files/jvm/t7253/test.scala6
-rw-r--r--test/files/jvm/t8582.check44
-rw-r--r--test/files/jvm/t8582.scala81
-rw-r--r--test/files/jvm/t8689.check1
-rw-r--r--test/files/jvm/t8689.scala18
-rw-r--r--test/files/jvm/t9044.scala6
-rw-r--r--test/files/jvm/t9105.check18
-rw-r--r--test/files/jvm/t9105.scala22
-rw-r--r--test/files/jvm/throws-annot-from-java.check2
-rw-r--r--test/files/jvm/varargs.check3
-rw-r--r--test/files/jvm/varargs/JavaClass.java1
-rw-r--r--test/files/jvm/varargs/VaClass.scala2
-rw-r--r--test/files/jvm/xml05.check2
-rw-r--r--test/files/neg/aladdin1055.check7
-rw-r--r--test/files/neg/aladdin1055.flags1
-rw-r--r--test/files/neg/aladdin1055/A.scala6
-rw-r--r--test/files/neg/aladdin1055/Test_1.scala5
-rw-r--r--test/files/neg/case-collision2.flags2
-rw-r--r--test/files/neg/checksensible.check3
-rw-r--r--test/files/neg/compile-time-only-a.check5
-rw-r--r--test/files/neg/double-def-top-level.check7
-rw-r--r--test/files/neg/double-def-top-level/A_1.scala4
-rw-r--r--test/files/neg/double-def-top-level/B_2.scala2
-rw-r--r--test/files/neg/double-def-top-level/C_3.scala2
-rw-r--r--test/files/neg/double-def-top-level/D_3.scala2
-rw-r--r--test/files/neg/forgot-interpolator.check16
-rw-r--r--test/files/neg/forgot-interpolator.scala4
-rw-r--r--test/files/neg/inlineMaxSize.check9
-rw-r--r--test/files/neg/inlineMaxSize.flags1
-rw-r--r--test/files/neg/inlineMaxSize.scala8
-rw-r--r--test/files/neg/literals.check40
-rw-r--r--test/files/neg/literals.scala36
-rw-r--r--test/files/neg/literate_existentials.scala2
-rw-r--r--test/files/neg/macro-basic-mamdmi.check10
-rw-r--r--test/files/neg/macro-invalidret.check3
-rw-r--r--test/files/neg/macro-invalidret/Impls_1.scala2
-rw-r--r--test/files/neg/macro-invalidusage-badargs.check3
-rw-r--r--test/files/neg/names-defaults-neg.check12
-rw-r--r--test/files/neg/overloaded-implicit.check3
-rw-r--r--test/files/neg/patmatexhaust-huge.check7
-rw-r--r--test/files/neg/patmatexhaust-huge.flags1
-rw-r--r--test/files/neg/patmatexhaust-huge.scala806
-rw-r--r--test/files/neg/patmatexhaust.check2
-rw-r--r--test/files/neg/patmatexhaust.flags2
-rw-r--r--test/files/neg/patmatexhaust.scala2
-rw-r--r--test/files/neg/reflection-names-neg.check13
-rw-r--r--test/files/neg/reflection-names-neg.scala6
-rw-r--r--test/files/neg/sammy_error_exist_no_crash.check6
-rw-r--r--test/files/neg/sammy_error_exist_no_crash.flags (renamed from test/files/run/t5530.flags)0
-rw-r--r--test/files/neg/sammy_error_exist_no_crash.scala6
-rw-r--r--test/files/neg/sammy_restrictions.scala28
-rw-r--r--test/files/neg/structural.scala10
-rw-r--r--test/files/neg/t0899.check6
-rw-r--r--test/files/neg/t1909-object.check4
-rw-r--r--test/files/neg/t2866.check17
-rw-r--r--test/files/neg/t2866.scala59
-rw-r--r--test/files/neg/t3240.check4
-rw-r--r--test/files/neg/t3909.check1
-rw-r--r--test/files/neg/t4851.check4
-rw-r--r--test/files/neg/t4851.flags2
-rw-r--r--test/files/neg/t4851/J2.java4
-rw-r--r--test/files/neg/t5044.check4
-rw-r--r--test/files/neg/t5091.check9
-rw-r--r--test/files/neg/t5091.scala (renamed from test/pending/pos/t5091.scala)0
-rw-r--r--test/files/neg/t5148.check16
-rw-r--r--test/files/neg/t562.check2
-rw-r--r--test/files/neg/t5639b.check4
-rw-r--r--test/files/neg/t5639b/A_1.scala17
-rw-r--r--test/files/neg/t5639b/A_2.scala11
-rw-r--r--test/files/neg/t5675.check4
-rw-r--r--test/files/neg/t5691.check24
-rw-r--r--test/files/neg/t5691.flags1
-rw-r--r--test/files/neg/t5691.scala27
-rw-r--r--test/files/neg/t6162-inheritance.check8
-rw-r--r--test/files/neg/t6289.check2
-rw-r--r--test/files/neg/t6567.check3
-rw-r--r--test/files/neg/t6582_exhaust_big.check7
-rw-r--r--test/files/neg/t6582_exhaust_big.flags1
-rw-r--r--test/files/neg/t6582_exhaust_big.scala32
-rw-r--r--test/files/neg/t6675b.scala2
-rw-r--r--test/files/neg/t6771b.check2
-rw-r--r--test/files/neg/t6771b.scala2
-rw-r--r--test/files/neg/t6902.scala2
-rw-r--r--test/files/neg/t6988.check7
-rw-r--r--test/files/neg/t6988.scala10
-rw-r--r--test/files/neg/t7157.check36
-rw-r--r--test/files/neg/t7602.check5
-rw-r--r--test/files/neg/t7602.scala26
-rw-r--r--test/files/neg/t7623.check21
-rw-r--r--test/files/neg/t7623.flags1
-rw-r--r--test/files/neg/t7623.scala38
-rw-r--r--test/files/neg/t7636.check2
-rw-r--r--test/files/neg/t7848-interp-warn.check11
-rw-r--r--test/files/neg/t7848-interp-warn.flags2
-rw-r--r--test/files/neg/t7848-interp-warn.scala2
-rw-r--r--test/files/neg/t8035-no-adapted-args.check21
-rw-r--r--test/files/neg/t8035-no-adapted-args.flags1
-rw-r--r--test/files/neg/t8035-no-adapted-args.scala6
-rw-r--r--test/files/neg/t8217-local-alias-requires-rhs.check10
-rw-r--r--test/files/neg/t8217-local-alias-requires-rhs.scala15
-rw-r--r--test/files/neg/t8266-invalid-interp.check4
-rw-r--r--test/files/neg/t8291.check7
-rw-r--r--test/files/neg/t8291.scala7
-rw-r--r--test/files/neg/t8325-b.check10
-rw-r--r--test/files/neg/t8325-b.scala4
-rw-r--r--test/files/neg/t8325-c.check7
-rw-r--r--test/files/neg/t8325-c.scala4
-rw-r--r--test/files/neg/t8325.check15
-rw-r--r--test/files/neg/t8325.scala11
-rw-r--r--test/files/neg/t8430.check27
-rw-r--r--test/files/neg/t8430.flags1
-rw-r--r--test/files/neg/t8430.scala32
-rw-r--r--test/files/neg/t845.check4
-rw-r--r--test/files/neg/t8450.check6
-rw-r--r--test/files/neg/t8450.flags1
-rw-r--r--test/files/neg/t8450.scala12
-rw-r--r--test/files/neg/t8463.check27
-rw-r--r--test/files/neg/t8463.scala38
-rw-r--r--test/files/neg/t8525.check15
-rw-r--r--test/files/neg/t8525.flags1
-rw-r--r--test/files/neg/t8525.scala10
-rw-r--r--test/files/neg/t8534.check4
-rw-r--r--test/files/neg/t8534.scala7
-rw-r--r--test/files/neg/t8534b.check4
-rw-r--r--test/files/neg/t8534b.scala4
-rw-r--r--test/files/neg/t8597.check21
-rw-r--r--test/files/neg/t8597.flags (renamed from test/files/pos/switch-small.flags)0
-rw-r--r--test/files/neg/t8597.scala27
-rw-r--r--test/files/neg/t8597b.check6
-rw-r--r--test/files/neg/t8597b.flags1
-rw-r--r--test/files/neg/t8597b.scala21
-rw-r--r--test/files/neg/t8610-arg.check6
-rw-r--r--test/files/neg/t8610-arg.flags1
-rw-r--r--test/files/neg/t8610-arg.scala10
-rw-r--r--test/files/neg/t8610.check18
-rw-r--r--test/files/neg/t8610.flags1
-rw-r--r--test/files/neg/t8610.scala10
-rw-r--r--test/files/neg/t8630.check7
-rw-r--r--test/files/neg/t8630.scala1
-rw-r--r--test/files/neg/t8675.check11
-rw-r--r--test/files/neg/t8675.scala24
-rw-r--r--test/files/neg/t8675b.check6
-rw-r--r--test/files/neg/t8675b.scala22
-rw-r--r--test/files/neg/t8731.check6
-rw-r--r--test/files/neg/t8731.flags1
-rw-r--r--test/files/neg/t8731.scala15
-rw-r--r--test/files/neg/t8736-c.check11
-rw-r--r--test/files/neg/t8736-c.flags1
-rw-r--r--test/files/neg/t8736-c.scala7
-rw-r--r--test/files/neg/t8764.check6
-rw-r--r--test/files/neg/t8764.flags (renamed from test/files/run/t5614.flags)0
-rw-r--r--test/files/neg/t8764.scala9
-rw-r--r--test/files/neg/t8841.check9
-rw-r--r--test/files/neg/t8841.scala15
-rw-r--r--test/files/neg/t8869.check7
-rw-r--r--test/files/neg/t8869.scala10
-rw-r--r--test/files/neg/t8890.check4
-rw-r--r--test/files/neg/t8890.scala11
-rw-r--r--test/files/neg/t9008.check4
-rw-r--r--test/files/neg/t9008.scala3
-rw-r--r--test/files/neg/t9008b.check4
-rw-r--r--test/files/neg/t9008b.scala3
-rw-r--r--test/files/neg/t9041.check4
-rw-r--r--test/files/neg/t9041.scala17
-rw-r--r--test/files/neg/t9093.check6
-rw-r--r--test/files/neg/t9093.scala5
-rw-r--r--test/files/neg/t9127.check12
-rw-r--r--test/files/neg/t9127.flags1
-rw-r--r--test/files/neg/t9127.scala7
-rw-r--r--test/files/neg/t9231.check4
-rw-r--r--test/files/neg/t9231.scala9
-rw-r--r--test/files/neg/t9273.check10
-rw-r--r--test/files/neg/t9273.scala9
-rw-r--r--test/files/neg/t963.check8
-rw-r--r--test/files/neg/tailrec-4.check16
-rw-r--r--test/files/neg/tailrec-4.scala35
-rw-r--r--test/files/neg/unchecked-abstract.check14
-rw-r--r--test/files/neg/unchecked-refinement.check3
-rw-r--r--test/files/neg/virtpatmat_exhaust_big.check7
-rw-r--r--test/files/neg/virtpatmat_exhaust_big.flags1
-rw-r--r--test/files/neg/virtpatmat_exhaust_big.scala32
-rw-r--r--test/files/neg/virtpatmat_exhaust_compound.check15
-rw-r--r--test/files/neg/virtpatmat_exhaust_compound.flags1
-rw-r--r--test/files/neg/virtpatmat_exhaust_compound.scala29
-rw-r--r--test/files/neg/warn-inferred-any.check5
-rw-r--r--test/files/neg/warn-inferred-any.flags2
-rw-r--r--test/files/neg/warn-inferred-any.scala8
-rw-r--r--test/files/neg/warn-unused-privates.check39
-rw-r--r--test/files/neg/warn-unused-privates.scala1
-rw-r--r--test/files/pos/dotless-targs.flags1
-rw-r--r--test/files/pos/five-dot-f.flags1
-rw-r--r--test/files/pos/jesper.scala30
-rw-r--r--test/files/pos/macro-attachments/Macros_1.scala19
-rw-r--r--test/files/pos/macro-attachments/Test_2.scala3
-rw-r--r--test/files/pos/patmat-suppress.flags1
-rw-r--r--test/files/pos/patmat-suppress.scala159
-rw-r--r--test/files/pos/sammy_exist.flags1
-rw-r--r--test/files/pos/sammy_exist.scala17
-rw-r--r--test/files/pos/sammy_overload.flags1
-rw-r--r--test/files/pos/sammy_overload.scala9
-rw-r--r--test/files/pos/sammy_override.flags1
-rw-r--r--test/files/pos/sammy_override.scala8
-rw-r--r--test/files/pos/switch-small.scala8
-rw-r--r--test/files/pos/t3240.scala (renamed from test/files/neg/t3240.scala)0
-rw-r--r--test/files/pos/t3368.flags1
-rw-r--r--test/files/pos/t3368.scala5
-rw-r--r--test/files/pos/t3439.scala26
-rw-r--r--test/files/pos/t4070.scala2
-rw-r--r--test/files/pos/t5154.scala9
-rw-r--r--test/files/pos/t5217.scala17
-rw-r--r--test/files/pos/t5413.scala9
-rw-r--r--test/files/pos/t5454.scala10
-rw-r--r--test/files/pos/t5639.flags1
-rw-r--r--test/files/pos/t5639/A_1.scala17
-rw-r--r--test/files/pos/t5639/A_2.scala11
-rw-r--r--test/files/pos/t5639/Bar.scala7
-rw-r--r--test/files/pos/t5639/Foo.scala7
-rw-r--r--test/files/pos/t6051.scala19
-rw-r--r--test/files/pos/t6582_exhaust_big.scala33
-rw-r--r--test/files/pos/t6942.flags2
-rw-r--r--test/files/pos/t7459a.scala18
-rw-r--r--test/files/pos/t7459b.scala12
-rw-r--r--test/files/pos/t7459c.scala18
-rw-r--r--test/files/pos/t7459d.scala8
-rw-r--r--test/files/pos/t7596/A_1.scala10
-rw-r--r--test/files/pos/t7596/B_2.scala19
-rw-r--r--test/files/pos/t7596b/A.scala10
-rw-r--r--test/files/pos/t7596b/B.scala6
-rw-r--r--test/files/pos/t7596c/A_1.scala11
-rw-r--r--test/files/pos/t7596c/B_2.scala9
-rw-r--r--test/files/pos/t7683-stop-after-parser/ThePlugin.scala31
-rw-r--r--test/files/pos/t7683-stop-after-parser/sample_2.flags1
-rw-r--r--test/files/pos/t7683-stop-after-parser/sample_2.scala6
-rw-r--r--test/files/pos/t7683-stop-after-parser/scalac-plugin.xml5
-rw-r--r--test/files/pos/t7704.scala10
-rw-r--r--test/files/pos/t7750.flags1
-rw-r--r--test/files/pos/t7750.scala8
-rw-r--r--test/files/pos/t7815.scala2
-rw-r--r--test/files/pos/t8013.flags2
-rw-r--r--test/files/pos/t8157-2.10.flags1
-rw-r--r--test/files/pos/t8157-2.10.scala5
-rw-r--r--test/files/pos/t8267.scala33
-rw-r--r--test/files/pos/t8310.flags1
-rw-r--r--test/files/pos/t8310.scala22
-rw-r--r--test/files/pos/t8325.scala9
-rw-r--r--test/files/pos/t8329.scala29
-rw-r--r--test/files/pos/t8359-closelim-crash.flags1
-rw-r--r--test/files/pos/t8359-closelim-crash.scala23
-rw-r--r--test/files/pos/t8410.flags1
-rw-r--r--test/files/pos/t8410.scala15
-rw-r--r--test/files/pos/t845.scala (renamed from test/files/neg/t845.scala)0
-rw-r--r--test/files/pos/t8497/A_1.scala13
-rw-r--r--test/files/pos/t8497/B_2.scala1
-rw-r--r--test/files/pos/t8498.scala6
-rw-r--r--test/files/pos/t8523.flags1
-rw-r--r--test/files/pos/t8523.scala10
-rw-r--r--test/files/pos/t8531/MyEnum.java5
-rw-r--r--test/files/pos/t8531/Test.scala24
-rw-r--r--test/files/pos/t8546.flags1
-rw-r--r--test/files/pos/t8546.scala49
-rw-r--r--test/files/pos/t8578.flags1
-rw-r--r--test/files/pos/t8578.scala18
-rw-r--r--test/files/pos/t8596.flags1
-rw-r--r--test/files/pos/t8596.scala7
-rw-r--r--test/files/pos/t8617.flags1
-rw-r--r--test/files/pos/t8617.scala10
-rw-r--r--test/files/pos/t8625.scala5
-rw-r--r--test/files/pos/t8708/Either_1.scala6
-rw-r--r--test/files/pos/t8708/Test_2.scala13
-rw-r--r--test/files/pos/t8719.check0
-rw-r--r--test/files/pos/t8719/Macros_1.scala21
-rw-r--r--test/files/pos/t8719/Test_2.scala10
-rw-r--r--test/files/pos/t8736-b.flags1
-rw-r--r--test/files/pos/t8736-b.scala7
-rw-r--r--test/files/pos/t8736.flags1
-rw-r--r--test/files/pos/t8736.scala7
-rw-r--r--test/files/pos/t8743.scala15
-rw-r--r--test/files/pos/t8781/Macro_1.scala13
-rw-r--r--test/files/pos/t8781/Test_2.flags1
-rw-r--r--test/files/pos/t8781/Test_2.scala5
-rw-r--r--test/files/pos/t8793.scala15
-rw-r--r--test/files/pos/t8801.scala21
-rw-r--r--test/files/pos/t8828.flags1
-rw-r--r--test/files/pos/t8828.scala20
-rw-r--r--test/files/pos/t8844.scala4
-rw-r--r--test/files/pos/t8861.flags1
-rw-r--r--test/files/pos/t8861.scala11
-rw-r--r--test/files/pos/t8868a/Sub_2.scala1
-rw-r--r--test/files/pos/t8868a/T_1.scala6
-rw-r--r--test/files/pos/t8868b/Sub_2.scala2
-rw-r--r--test/files/pos/t8868b/T_1.scala4
-rw-r--r--test/files/pos/t8868c/Sub_2.scala2
-rw-r--r--test/files/pos/t8868c/T_1.scala9
-rw-r--r--test/files/pos/t8893.scala129
-rw-r--r--test/files/pos/t8894.scala12
-rw-r--r--test/files/pos/t8900.scala11
-rw-r--r--test/files/pos/t8934a/A_1.scala18
-rw-r--r--test/files/pos/t8934a/Test_2.flags1
-rw-r--r--test/files/pos/t8934a/Test_2.scala12
-rw-r--r--test/files/pos/t8947/Client_2.scala1
-rw-r--r--test/files/pos/t8947/Macro_1.scala41
-rw-r--r--test/files/pos/t8954.flags1
-rw-r--r--test/files/pos/t8954/t1.scala13
-rw-r--r--test/files/pos/t8954/t2.scala39
-rw-r--r--test/files/pos/t8962.scala31
-rw-r--r--test/files/pos/t8965.flags1
-rw-r--r--test/files/pos/t8965.scala7
-rw-r--r--test/files/pos/t8999.flags1
-rw-r--r--test/files/pos/t8999.scala271
-rw-r--r--test/files/pos/t9008.scala5
-rw-r--r--test/files/pos/t9018.scala16
-rw-r--r--test/files/pos/t9020.flags1
-rw-r--r--test/files/pos/t9020.scala10
-rw-r--r--test/files/pos/t9050.scala13
-rw-r--r--test/files/pos/t9086.scala8
-rw-r--r--test/files/pos/t9111-inliner-workaround.flags1
-rw-r--r--test/files/pos/t9111-inliner-workaround/A_1.java13
-rw-r--r--test/files/pos/t9111-inliner-workaround/Test_1.scala10
-rw-r--r--test/files/pos/t9116.scala7
-rw-r--r--test/files/pos/t9123.flags1
-rw-r--r--test/files/pos/t9123.scala10
-rw-r--r--test/files/pos/t9135.scala16
-rw-r--r--test/files/pos/t9157.scala13
-rw-r--r--test/files/pos/t9181.flags1
-rw-r--r--test/files/pos/t9181.scala806
-rw-r--r--test/files/pos/t9239/Declaration.scala3
-rw-r--r--test/files/pos/t9239/Usage.java15
-rw-r--r--test/files/pos/t9285.flags1
-rw-r--r--test/files/pos/t9285.scala1
-rw-r--r--test/files/pos/virtpatmat_exhaust_big.scala33
-rw-r--r--test/files/presentation/doc.check (renamed from test/disabled/presentation/doc.check)0
-rwxr-xr-xtest/files/presentation/doc/doc.scala (renamed from test/disabled/presentation/doc/doc.scala)0
-rwxr-xr-xtest/files/presentation/doc/src/Class.scala (renamed from test/disabled/presentation/doc/src/Class.scala)0
-rwxr-xr-xtest/files/presentation/doc/src/p/Base.scala (renamed from test/disabled/presentation/doc/src/p/Base.scala)0
-rwxr-xr-xtest/files/presentation/doc/src/p/Derived.scala (renamed from test/disabled/presentation/doc/src/p/Derived.scala)0
-rw-r--r--test/files/presentation/ide-bug-1000531.check111
-rw-r--r--test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala15
-rw-r--r--test/files/presentation/ide-bug-1000531/src/TestIterable.java7
-rw-r--r--test/files/presentation/infix-completion.check193
-rw-r--r--test/files/presentation/infix-completion/Runner.scala3
-rw-r--r--test/files/presentation/infix-completion/src/Snippet.scala1
-rw-r--r--test/files/presentation/infix-completion2.check211
-rw-r--r--test/files/presentation/infix-completion2/Runner.scala3
-rw-r--r--test/files/presentation/infix-completion2/src/Snippet.scala1
-rw-r--r--test/files/presentation/private-case-class-members.check1
-rw-r--r--test/files/presentation/private-case-class-members/Test.scala34
-rw-r--r--test/files/presentation/private-case-class-members/src/State.scala5
-rw-r--r--test/files/presentation/quasiquotes.flags0
-rw-r--r--test/files/presentation/t7915.check20
-rw-r--r--test/files/presentation/t7915/src/Foo.scala4
-rw-r--r--test/files/presentation/t8459.check14
-rw-r--r--test/files/presentation/t8459/Test.scala3
-rw-r--r--test/files/presentation/t8459/src/IncompleteDynamicSelect.scala14
-rw-r--r--test/files/presentation/t8934.check2
-rw-r--r--test/files/presentation/t8934/Runner.scala27
-rw-r--r--test/files/presentation/t8934/src/Source.scala10
-rw-r--r--test/files/presentation/t8941.check7
-rw-r--r--test/files/presentation/t8941/Runner.scala11
-rw-r--r--test/files/presentation/t8941/src/Source.scala8
-rw-r--r--test/files/presentation/t8941b/IdempotencyTest.scala73
-rw-r--r--test/files/presentation/t8941b/Test.scala53
-rw-r--r--test/files/presentation/visibility/src/Completions.scala4
-rw-r--r--test/files/res/t6613.check5
-rw-r--r--test/files/res/t6613.res3
-rw-r--r--test/files/res/t6613/Broken.scala1
-rw-r--r--test/files/res/t6613/Enummy.java1
-rw-r--r--test/files/res/t8871.check5
-rw-r--r--test/files/res/t8871.res4
-rw-r--r--test/files/res/t8871/tag.scala3
-rw-r--r--test/files/res/t8871/usetag.scala6
-rw-r--r--test/files/res/t9089.check4
-rw-r--r--test/files/res/t9089.res2
-rw-r--r--test/files/res/t9089/A.scala1
-rw-r--r--test/files/res/t9170.check7
-rw-r--r--test/files/res/t9170.res2
-rw-r--r--test/files/res/t9170/A.scala4
-rw-r--r--test/files/run/abstypetags_serialize.check4
-rw-r--r--test/files/run/analyzerPlugins.check13
-rw-r--r--test/files/run/applydynamic_sip.flags1
-rw-r--r--test/files/run/applydynamic_sip.scala2
-rw-r--r--test/files/run/bcodeInlinerMixed.flags1
-rw-r--r--test/files/run/bcodeInlinerMixed/A_1.java3
-rw-r--r--test/files/run/bcodeInlinerMixed/B_1.scala20
-rw-r--r--test/files/run/bcodeInlinerMixed/Test.scala16
-rw-r--r--test/files/run/bitsets.check1
-rw-r--r--test/files/run/class-symbol-contravariant.check2
-rw-r--r--test/files/run/classfile-format-51.scala16
-rw-r--r--test/files/run/classfile-format-52.scala2
-rw-r--r--test/files/run/collection-stacks.check2
-rw-r--r--test/files/run/colltest.check2
-rw-r--r--test/files/run/colltest1.scala2
-rw-r--r--test/files/run/compiler-asSeenFrom.scala2
-rw-r--r--test/files/run/constant-type.check2
-rw-r--r--test/files/run/constrained-types.check10
-rw-r--r--test/files/run/delambdafy-specialized.check1
-rw-r--r--test/files/run/delambdafy-specialized.flags1
-rw-r--r--test/files/run/delambdafy-specialized.scala6
-rw-r--r--test/files/run/delambdafyLambdaClassNames.check1
-rw-r--r--test/files/run/delambdafyLambdaClassNames.flags1
-rw-r--r--test/files/run/delambdafyLambdaClassNames/A_1.scala5
-rw-r--r--test/files/run/delambdafyLambdaClassNames/Test.scala4
-rw-r--r--test/files/run/delambdafy_t6028.check53
-rw-r--r--test/files/run/delambdafy_t6028.scala10
-rw-r--r--test/files/run/delambdafy_t6555.check8
-rw-r--r--test/files/run/delambdafy_t6555.scala2
-rw-r--r--test/files/run/delambdafy_uncurry_byname_inline.check2
-rw-r--r--test/files/run/delambdafy_uncurry_byname_method.check6
-rw-r--r--test/files/run/delambdafy_uncurry_byname_method.scala4
-rw-r--r--test/files/run/delambdafy_uncurry_inline.check2
-rw-r--r--test/files/run/delay-bad.check2
-rw-r--r--test/files/run/eta-expand-star2.check2
-rw-r--r--test/files/run/existentials-in-compiler.scala2
-rw-r--r--test/files/run/exprs_serialize.check21
-rw-r--r--test/files/run/exprs_serialize.scala12
-rw-r--r--test/files/run/global-showdef.check8
-rw-r--r--test/files/run/global-showdef.scala52
-rw-r--r--test/files/run/icode-reader-dead-code.check27
-rw-r--r--test/files/run/icode-reader-dead-code.scala82
-rw-r--r--test/files/run/inferred-type-constructors.check2
-rw-r--r--test/files/run/interop_typetags_are_manifests.flags1
-rw-r--r--test/files/run/is-valid-num.scala2
-rw-r--r--test/files/run/iterator-concat.check4
-rw-r--r--test/files/run/iterator-concat.scala15
-rw-r--r--test/files/run/iterator-from.scala2
-rw-r--r--test/files/run/iterator-iterate-lazy.scala5
-rw-r--r--test/files/run/iterators.check13
-rw-r--r--test/files/run/iterators.scala136
-rw-r--r--test/files/run/kind-repl-command.check2
-rw-r--r--test/files/run/large_class.check3
-rw-r--r--test/files/run/large_class.scala27
-rw-r--r--test/files/run/literals.check69
-rw-r--r--test/files/run/literals.flags1
-rw-r--r--test/files/run/literals.scala39
-rw-r--r--test/files/run/lub-visibility.check2
-rw-r--r--test/files/run/macro-bundle-repl.check2
-rw-r--r--test/files/run/macro-openmacros.flags3
-rw-r--r--test/files/run/macro-parse-position.flags1
-rw-r--r--test/files/run/macro-rangepos-args.check1
-rw-r--r--test/files/run/macro-rangepos-args.flags1
-rw-r--r--test/files/run/macro-rangepos-args/Macros_1.scala10
-rw-r--r--test/files/run/macro-rangepos-args/Test_2.scala4
-rw-r--r--test/files/run/macro-rangepos-subpatterns.check1
-rw-r--r--test/files/run/macro-rangepos-subpatterns.flags1
-rw-r--r--test/files/run/macro-rangepos-subpatterns/Macros_1.scala18
-rw-r--r--test/files/run/macro-rangepos-subpatterns/Test_2.scala5
-rw-r--r--test/files/run/macro-repl-basic.check2
-rw-r--r--test/files/run/macro-repl-dontexpand.check2
-rw-r--r--test/files/run/macro-system-properties.check2
-rw-r--r--test/files/run/macroPlugins-enterStats.check30
-rw-r--r--test/files/run/macroPlugins-enterStats.scala50
-rw-r--r--test/files/run/macroPlugins-isBlackbox/Macros_2.scala11
-rw-r--r--test/files/run/macroPlugins-isBlackbox/Plugin_1.scala21
-rw-r--r--test/files/run/macroPlugins-isBlackbox/Test_3.flags1
-rw-r--r--test/files/run/macroPlugins-isBlackbox/Test_3.scala3
-rw-r--r--test/files/run/macroPlugins-isBlackbox/scalac-plugin.xml4
-rw-r--r--test/files/run/macroPlugins-macroExpand.flags1
-rw-r--r--test/files/run/macroPlugins-typedMacroBody.flags1
-rw-r--r--test/files/run/mapConserve.scala2
-rw-r--r--test/files/run/names-defaults.check3
-rw-r--r--test/files/run/names-defaults.scala4
-rw-r--r--test/files/run/nothingTypeDce.flags1
-rw-r--r--test/files/run/nothingTypeDce.scala63
-rw-r--r--test/files/run/nothingTypeNoFramesNoDce.check1
-rw-r--r--test/files/run/nothingTypeNoFramesNoDce.flags1
-rw-r--r--test/files/run/nothingTypeNoFramesNoDce.scala61
-rw-r--r--test/files/run/nothingTypeNoOpt.flags1
-rw-r--r--test/files/run/nothingTypeNoOpt.scala61
-rw-r--r--test/files/run/pc-conversions.scala2
-rw-r--r--test/files/run/priorityQueue.scala373
-rw-r--r--test/files/run/private-override.check1
-rw-r--r--test/files/run/reflection-attachments.check0
-rw-r--r--test/files/run/reflection-equality.check2
-rw-r--r--test/files/run/reflection-java-annotations.check2
-rw-r--r--test/files/run/reflection-magicsymbols-repl.check4
-rw-r--r--test/files/run/reflection-repl-classes.check2
-rw-r--r--test/files/run/reflection-repl-elementary.check2
-rw-r--r--test/files/run/reify-repl-fail-gracefully.check2
-rw-r--r--test/files/run/reify_newimpl_22.check2
-rw-r--r--test/files/run/reify_newimpl_23.check2
-rw-r--r--test/files/run/reify_newimpl_25.check2
-rw-r--r--test/files/run/reify_newimpl_26.check2
-rw-r--r--test/files/run/reify_newimpl_35.check2
-rw-r--r--test/files/run/repl-assign.check2
-rw-r--r--test/files/run/repl-bare-expr.check2
-rw-r--r--test/files/run/repl-colon-type.check2
-rw-r--r--test/files/run/repl-empty-package.check2
-rw-r--r--test/files/run/repl-javap-app.check43
-rw-r--r--test/files/run/repl-javap-app.scala11
-rw-r--r--test/files/run/repl-javap-lambdas.scala23
-rw-r--r--test/files/run/repl-javap-memfun.scala4
-rw-r--r--test/files/run/repl-javap-outdir-funs.flags1
-rw-r--r--test/files/run/repl-javap-outdir-funs/run-repl_7.scala5
-rw-r--r--test/files/run/repl-out-dir.check2
-rw-r--r--test/files/run/repl-parens.check2
-rw-r--r--test/files/run/repl-paste-2.check2
-rw-r--r--test/files/run/repl-paste-3.check2
-rw-r--r--test/files/run/repl-paste-4.scala2
-rw-r--r--test/files/run/repl-paste-raw.scala2
-rw-r--r--test/files/run/repl-paste.check2
-rw-r--r--test/files/run/repl-power.check6
-rw-r--r--test/files/run/repl-reset.check2
-rw-r--r--test/files/run/repl-save.scala2
-rw-r--r--test/files/run/repl-term-macros.check2
-rw-r--r--test/files/run/repl-transcript.check2
-rw-r--r--test/files/run/repl-trim-stack-trace.scala2
-rw-r--r--test/files/run/repl-type-verbose.check2
-rw-r--r--test/files/run/richs.check2
-rw-r--r--test/files/run/sammy_repeated.check1
-rw-r--r--test/files/run/sammy_repeated.flags (renamed from test/files/run/t5532.flags)0
-rw-r--r--test/files/run/sammy_repeated.scala8
-rw-r--r--test/files/run/search.check4
-rw-r--r--test/files/run/settings-parse.scala5
-rw-r--r--test/files/run/stringinterpolation_macro-run.check4
-rw-r--r--test/files/run/stringinterpolation_macro-run.scala3
-rw-r--r--test/files/run/synchronized.check6
-rw-r--r--test/files/run/t1994.scala20
-rw-r--r--test/files/run/t2212.check2
-rw-r--r--test/files/run/t2318.scala1
-rw-r--r--test/files/run/t2866.check3
-rw-r--r--test/files/run/t2866.scala44
-rw-r--r--test/files/run/t3361.check2
-rw-r--r--test/files/run/t3368-b.check89
-rw-r--r--test/files/run/t3368-b.scala26
-rw-r--r--test/files/run/t3368-c.check85
-rw-r--r--test/files/run/t3368-c.scala26
-rw-r--r--test/files/run/t3368-d.check89
-rw-r--r--test/files/run/t3368-d.scala26
-rw-r--r--test/files/run/t3368.check85
-rw-r--r--test/files/run/t3368.scala26
-rw-r--r--test/files/run/t3376.check2
-rw-r--r--test/files/run/t3516.check3
-rw-r--r--test/files/run/t3516.scala13
-rw-r--r--test/files/run/t3569.scala3
-rw-r--r--test/files/run/t3888.check2
-rw-r--r--test/files/run/t3970.check2
-rw-r--r--test/files/run/t3996.check2
-rw-r--r--test/files/run/t4025.check2
-rw-r--r--test/files/run/t4080.check2
-rw-r--r--test/files/run/t4172.check4
-rw-r--r--test/files/run/t4216.check2
-rw-r--r--test/files/run/t4285.check2
-rw-r--r--test/files/run/t4396.check2
-rw-r--r--test/files/run/t4461.check2
-rw-r--r--test/files/run/t4542.check2
-rw-r--r--test/files/run/t4594-repl-settings.scala6
-rw-r--r--test/files/run/t4671.check2
-rw-r--r--test/files/run/t4680.check2
-rw-r--r--test/files/run/t4710.check4
-rw-r--r--test/files/run/t4788-separate-compilation.check5
-rw-r--r--test/files/run/t4788-separate-compilation/CAnnotation_1.java5
-rw-r--r--test/files/run/t4788-separate-compilation/C_1.scala2
-rw-r--r--test/files/run/t4788-separate-compilation/D_1.scala5
-rw-r--r--test/files/run/t4788-separate-compilation/RAnnotation_1.java5
-rw-r--r--test/files/run/t4788-separate-compilation/R_1.scala2
-rw-r--r--test/files/run/t4788-separate-compilation/SAnnotation_1.java5
-rw-r--r--test/files/run/t4788-separate-compilation/S_1.scala2
-rw-r--r--test/files/run/t4788-separate-compilation/Test_2.scala35
-rw-r--r--test/files/run/t4788.check5
-rw-r--r--test/files/run/t4788/C.scala2
-rw-r--r--test/files/run/t4788/CAnnotation.java5
-rw-r--r--test/files/run/t4788/D.scala5
-rw-r--r--test/files/run/t4788/R.scala2
-rw-r--r--test/files/run/t4788/RAnnotation.java5
-rw-r--r--test/files/run/t4788/S.scala2
-rw-r--r--test/files/run/t4788/SAnnotation.java5
-rw-r--r--test/files/run/t4788/Test.scala35
-rw-r--r--test/files/run/t4813.check2
-rw-r--r--test/files/run/t4950.check9
-rw-r--r--test/files/run/t4950.scala12
-rw-r--r--test/files/run/t5072.check2
-rw-r--r--test/files/run/t5256c.check2
-rw-r--r--test/files/run/t5256d.check2
-rw-r--r--test/files/run/t5256h.scala3
-rw-r--r--test/files/run/t5313.scala2
-rw-r--r--test/files/run/t5428.check2
-rw-r--r--test/files/run/t5535.check2
-rw-r--r--test/files/run/t5537.check2
-rw-r--r--test/files/run/t5583.check2
-rw-r--r--test/files/run/t5655.check2
-rw-r--r--test/files/run/t5665.scala13
-rwxr-xr-xtest/files/run/t5699.scala14
-rw-r--r--test/files/run/t576.check2
-rw-r--r--test/files/run/t5789.check2
-rw-r--r--test/files/run/t5830.check1
-rw-r--r--test/files/run/t5830.scala13
-rw-r--r--test/files/run/t5905-features.flags1
-rw-r--r--test/files/run/t5905-features.scala31
-rw-r--r--test/files/run/t5905b-features.check1
-rw-r--r--test/files/run/t5905b-features.scala15
-rw-r--r--test/files/run/t5938.scala35
-rw-r--r--test/files/run/t6011c.scala2
-rw-r--r--test/files/run/t6028.check6
-rw-r--r--test/files/run/t6086-repl.check2
-rw-r--r--test/files/run/t6111.check2
-rw-r--r--test/files/run/t6114.scala2
-rw-r--r--test/files/run/t6146b.check2
-rw-r--r--test/files/run/t6187.check2
-rw-r--r--test/files/run/t6260c.check4
-rw-r--r--test/files/run/t6273.check2
-rw-r--r--test/files/run/t6292.check2
-rw-r--r--test/files/run/t6318_primitives.check54
-rw-r--r--test/files/run/t6318_primitives.scala40
-rw-r--r--test/files/run/t6320.check2
-rw-r--r--test/files/run/t6327.flags1
-rw-r--r--test/files/run/t6329_repl.check10
-rw-r--r--test/files/run/t6329_repl_bug.check4
-rw-r--r--test/files/run/t6329_vanilla_bug.check2
-rw-r--r--test/files/run/t6381.check2
-rw-r--r--test/files/run/t6434.check2
-rw-r--r--test/files/run/t6439.check2
-rw-r--r--test/files/run/t6440.check8
-rw-r--r--test/files/run/t6440.scala2
-rw-r--r--test/files/run/t6440b.check9
-rw-r--r--test/files/run/t6481.check2
-rw-r--r--test/files/run/t6502.scala146
-rw-r--r--test/files/run/t6507.check2
-rw-r--r--test/files/run/t6541-option.scala19
-rw-r--r--test/files/run/t6541.flags1
-rw-r--r--test/files/run/t6541.scala25
-rw-r--r--test/files/run/t6549.check2
-rw-r--r--test/files/run/t6555.check2
-rw-r--r--test/files/run/t6622.check10
-rw-r--r--test/files/run/t6622.scala50
-rw-r--r--test/files/run/t6631.scala18
-rw-r--r--test/files/run/t6663.flags1
-rw-r--r--test/files/run/t6669.scala7
-rw-r--r--test/files/run/t6690.check2
-rw-r--r--test/files/run/t6731.flags1
-rw-r--r--test/files/run/t6863.check2
-rw-r--r--test/files/run/t6935.check2
-rw-r--r--test/files/run/t6937.check2
-rw-r--r--test/files/run/t6988.check2
-rw-r--r--test/files/run/t6988.scala9
-rw-r--r--test/files/run/t7019.scala10
-rw-r--r--test/files/run/t7096.scala2
-rw-r--r--test/files/run/t7185.check2
-rw-r--r--test/files/run/t7319.check8
-rw-r--r--test/files/run/t7407.flags2
-rw-r--r--test/files/run/t7407b.flags2
-rw-r--r--test/files/run/t7459a.scala14
-rw-r--r--test/files/run/t7459b-optimize.flags1
-rw-r--r--test/files/run/t7459b-optimize.scala21
-rw-r--r--test/files/run/t7459b.scala21
-rw-r--r--test/files/run/t7459c.scala16
-rw-r--r--test/files/run/t7459d.scala15
-rw-r--r--test/files/run/t7459f.scala12
-rw-r--r--test/files/run/t7482a.check2
-rw-r--r--test/files/run/t7582.check6
-rw-r--r--test/files/run/t7582b.check6
-rw-r--r--test/files/run/t7634.check2
-rw-r--r--test/files/run/t7741a/GroovyInterface$1Dump.java222
-rw-r--r--test/files/run/t7741a/GroovyInterfaceDump.java51
-rw-r--r--test/files/run/t7741a/Test.scala47
-rw-r--r--test/files/run/t7741b.check3
-rw-r--r--test/files/run/t7741b/HasInner.java3
-rw-r--r--test/files/run/t7741b/Test.scala29
-rw-r--r--test/files/run/t7747-repl.check2
-rw-r--r--test/files/run/t7801.check2
-rw-r--r--test/files/run/t7805-repl-i.check2
-rw-r--r--test/files/run/t7852.scala2
-rw-r--r--test/files/run/t7932.check2
-rw-r--r--test/files/run/t7965.scala54
-rw-r--r--test/files/run/t7974.check52
-rw-r--r--test/files/run/t7974.flags1
-rw-r--r--test/files/run/t7974/Test.scala16
-rw-r--r--test/files/run/t7992.scala20
-rw-r--r--test/files/run/t7992b.scala18
-rw-r--r--test/files/run/t8087.scala12
-rw-r--r--test/files/run/t8196.check7
-rw-r--r--test/files/run/t8196.scala51
-rw-r--r--test/files/run/t8253.check40
-rw-r--r--test/files/run/t8253.scala14
-rw-r--r--test/files/run/t8346.check6
-rw-r--r--test/files/run/t8346.scala34
-rw-r--r--test/files/run/t8442.check1
-rw-r--r--test/files/run/t8442/A_1.java4
-rw-r--r--test/files/run/t8442/B_1.java3
-rw-r--r--test/files/run/t8442/C_2.scala5
-rw-r--r--test/files/run/t8442/Test.scala29
-rw-r--r--test/files/run/t8445.check1
-rw-r--r--test/files/run/t8445.scala11
-rw-r--r--test/files/run/t8502.scala41
-rw-r--r--test/files/run/t8549.check1
-rw-r--r--test/files/run/t8549.scala189
-rw-r--r--test/files/run/t8549b.scala16
-rw-r--r--test/files/run/t8570.flags1
-rw-r--r--test/files/run/t8570.scala10
-rw-r--r--test/files/run/t8570a.check1
-rw-r--r--test/files/run/t8570a.flags1
-rw-r--r--test/files/run/t8570a.scala14
-rw-r--r--test/files/run/t8574.scala27
-rw-r--r--test/files/run/t8601-closure-elim.flags1
-rw-r--r--test/files/run/t8601-closure-elim.scala26
-rw-r--r--test/files/run/t8601.flags1
-rw-r--r--test/files/run/t8601.scala15
-rw-r--r--test/files/run/t8601b.flags1
-rw-r--r--test/files/run/t8601b.scala14
-rw-r--r--test/files/run/t8601c.flags1
-rw-r--r--test/files/run/t8601c.scala12
-rw-r--r--test/files/run/t8601d.flags1
-rw-r--r--test/files/run/t8601d.scala8
-rw-r--r--test/files/run/t8601e.flags1
-rw-r--r--test/files/run/t8601e/StaticInit.classbin0 -> 417 bytes
-rw-r--r--test/files/run/t8601e/StaticInit.java8
-rw-r--r--test/files/run/t8601e/Test.scala12
-rw-r--r--test/files/run/t8607.scala36
-rw-r--r--test/files/run/t8608-no-format.scala15
-rw-r--r--test/files/run/t8610.check7
-rw-r--r--test/files/run/t8610.flags1
-rw-r--r--test/files/run/t8610.scala13
-rw-r--r--test/files/run/t8611a.flags1
-rw-r--r--test/files/run/t8611a.scala16
-rw-r--r--test/files/run/t8611b.flags1
-rw-r--r--test/files/run/t8611b.scala54
-rw-r--r--test/files/run/t8611c.flags1
-rw-r--r--test/files/run/t8611c.scala21
-rw-r--r--test/files/run/t8637.check0
-rw-r--r--test/files/run/t8637.scala9
-rw-r--r--test/files/run/t8680.scala53
-rw-r--r--test/files/run/t8690.check2
-rw-r--r--test/files/run/t8690.scala12
-rw-r--r--test/files/run/t8708_b.check8
-rw-r--r--test/files/run/t8708_b/A_1.scala8
-rw-r--r--test/files/run/t8708_b/Test_2.scala21
-rw-r--r--test/files/run/t8738.scala16
-rw-r--r--test/files/run/t8764.check5
-rw-r--r--test/files/run/t8764.flags1
-rw-r--r--test/files/run/t8764.scala16
-rw-r--r--test/files/run/t8803.check16
-rw-r--r--test/files/run/t8803.scala57
-rw-r--r--test/files/run/t8823.scala10
-rw-r--r--test/files/run/t8843-repl-xlat.scala33
-rw-r--r--test/files/run/t8845.flags1
-rw-r--r--test/files/run/t8845.scala17
-rw-r--r--test/files/run/t8852a.scala34
-rw-r--r--test/files/run/t8888.flags1
-rw-r--r--test/files/run/t8888.scala12
-rw-r--r--test/files/run/t8893.scala40
-rw-r--r--test/files/run/t8893b.scala15
-rw-r--r--test/files/run/t8907.scala39
-rw-r--r--test/files/run/t8925.check2
-rw-r--r--test/files/run/t8925.flags1
-rw-r--r--test/files/run/t8925.scala31
-rw-r--r--test/files/run/t8931.check1
-rw-r--r--test/files/run/t8931.scala15
-rw-r--r--test/files/run/t8933.check1
-rw-r--r--test/files/run/t8933/A_1.scala6
-rw-r--r--test/files/run/t8933/Test_2.scala10
-rw-r--r--test/files/run/t8933b/A.scala4
-rw-r--r--test/files/run/t8933b/Test.scala9
-rw-r--r--test/files/run/t8933c.scala14
-rw-r--r--test/files/run/t8960.scala72
-rw-r--r--test/files/run/t9003.flags1
-rw-r--r--test/files/run/t9003.scala71
-rw-r--r--test/files/run/t9027.check19
-rw-r--r--test/files/run/t9027.scala15
-rw-r--r--test/files/run/t9030.scala19
-rw-r--r--test/files/run/t9097.scala34
-rw-r--r--test/files/run/t9102.scala81
-rw-r--r--test/files/run/t9170.scala58
-rw-r--r--test/files/run/t9182.check3
-rw-r--r--test/files/run/t9182.scala12
-rw-r--r--test/files/run/t9219.check3
-rw-r--r--test/files/run/t9219.scala11
-rw-r--r--test/files/run/t9223.scala8
-rw-r--r--test/files/run/t9223b.scala8
-rw-r--r--test/files/run/t9252.check1
-rw-r--r--test/files/run/t9252.scala5
-rw-r--r--test/files/run/t9268.check5
-rw-r--r--test/files/run/t9268/Java.java12
-rw-r--r--test/files/run/t9268/Test.scala40
-rw-r--r--test/files/run/tailcalls.check8
-rw-r--r--test/files/run/tailcalls.scala35
-rw-r--r--test/files/run/tpeCache-tyconCache.check2
-rw-r--r--test/files/run/typetags_serialize.check5
-rw-r--r--test/files/run/typetags_serialize.scala5
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_lookup.scala4
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala4
-rw-r--r--test/files/run/unittest_collection.check2
-rw-r--r--test/files/run/valueClassSelfType.scala52
-rw-r--r--test/files/run/various-flat-classpath-types.check12
-rw-r--r--test/files/run/various-flat-classpath-types.scala214
-rw-r--r--test/files/run/virtpatmat_nested_lists.flags1
-rw-r--r--test/files/run/virtpatmat_opt_sharing.flags1
-rw-r--r--test/files/run/virtpatmat_staging.flags1
-rw-r--r--test/files/run/virtpatmat_typetag.check4
-rw-r--r--test/files/run/xMigration.check49
-rw-r--r--test/files/run/xMigration.scala19
-rw-r--r--test/files/scalacheck/Ctrie.scala19
-rw-r--r--test/files/scalacheck/nan-ordering.scala16
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala2
-rw-r--r--test/files/scalacheck/quasiquotes/TermConstructionProps.scala12
-rw-r--r--test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala7
-rw-r--r--test/files/scalacheck/quasiquotes/TypecheckedProps.scala9
-rw-r--r--test/files/scalap/t8679.check3503
-rw-r--r--test/files/scalap/t8679.scala3502
-rw-r--r--test/files/t8449/Client.scala3
-rw-r--r--test/files/t8449/Test.java10
-rw-r--r--test/instrumented/library/scala/runtime/ScalaRunTime.scala2
-rw-r--r--test/junit/scala/StringContextTest.scala87
-rw-r--r--test/junit/scala/collection/IndexedSeqOptimizedTest.scala29
-rw-r--r--test/junit/scala/collection/IterableViewLikeTest.scala22
-rw-r--r--test/junit/scala/collection/IteratorTest.scala157
-rw-r--r--test/junit/scala/collection/PagedSeq.scala16
-rw-r--r--test/junit/scala/collection/ParallelConsistencyTest.scala44
-rw-r--r--test/junit/scala/collection/SetMapConsistencyTest.scala15
-rw-r--r--test/junit/scala/collection/TraversableOnceTest.scala8
-rw-r--r--test/junit/scala/collection/convert/MapWrapperTest.scala10
-rw-r--r--test/junit/scala/collection/immutable/ListTest.scala49
-rw-r--r--test/junit/scala/collection/immutable/PagedSeqTest.scala28
-rw-r--r--test/junit/scala/collection/immutable/QueueTest.scala (renamed from test/junit/scala/collection/QueueTest.scala)0
-rw-r--r--test/junit/scala/collection/immutable/RangeConsistencyTest.scala (renamed from test/junit/scala/collection/NumericRangeTest.scala)0
-rw-r--r--test/junit/scala/collection/immutable/StringLikeTest.scala37
-rw-r--r--test/junit/scala/collection/immutable/TreeMapTest.scala20
-rw-r--r--test/junit/scala/collection/immutable/TreeSetTest.scala20
-rw-r--r--test/junit/scala/collection/mutable/ArrayBufferTest.scala36
-rw-r--r--test/junit/scala/collection/mutable/ArraySortingTest.scala (renamed from test/junit/scala/collection/ArraySortingTest.scala)0
-rw-r--r--test/junit/scala/collection/mutable/BitSetTest.scala31
-rw-r--r--test/junit/scala/collection/mutable/LinkedHashMapTest.scala25
-rw-r--r--test/junit/scala/collection/mutable/LinkedHashSetTest.scala25
-rw-r--r--test/junit/scala/collection/mutable/MutableListTest.scala37
-rw-r--r--test/junit/scala/collection/mutable/PriorityQueueTest.scala (renamed from test/junit/scala/collection/PriorityQueueTest.scala)0
-rw-r--r--test/junit/scala/collection/mutable/UnrolledBufferTest.scala25
-rw-r--r--test/junit/scala/collection/mutable/VectorTest.scala (renamed from test/junit/scala/collection/VectorTest.scala)1
-rw-r--r--test/junit/scala/concurrent/duration/SerializationTest.scala24
-rw-r--r--test/junit/scala/io/SourceTest.scala86
-rw-r--r--test/junit/scala/issues/BytecodeTests.scala80
-rw-r--r--test/junit/scala/math/BigDecimalTest.scala6
-rw-r--r--test/junit/scala/math/NumericTest.scala2
-rw-r--r--test/junit/scala/math/OrderingTest.scala61
-rw-r--r--test/junit/scala/reflect/ClassTag.scala29
-rw-r--r--test/junit/scala/reflect/QTest.scala23
-rw-r--r--test/junit/scala/reflect/internal/NamesTest.scala95
-rw-r--r--test/junit/scala/reflect/internal/PrintersTest.scala92
-rw-r--r--test/junit/scala/reflect/internal/ScopeTest.scala54
-rw-r--r--test/junit/scala/reflect/internal/TypesTest.scala35
-rw-r--r--test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala138
-rw-r--r--test/junit/scala/reflect/internal/util/SourceFileTest.scala5
-rw-r--r--test/junit/scala/tools/nsc/SampleTest.scala3
-rw-r--r--test/junit/scala/tools/nsc/ScriptRunnerTest.scala23
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala96
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala166
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala98
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala103
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala152
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala80
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala99
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala99
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala67
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala194
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala198
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala115
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala978
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala92
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala85
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala221
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala228
-rw-r--r--test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala95
-rw-r--r--test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala208
-rw-r--r--test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala159
-rw-r--r--test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala22
-rw-r--r--test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala20
-rw-r--r--test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala18
-rw-r--r--test/junit/scala/tools/nsc/settings/SettingsTest.scala147
-rw-r--r--test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala16
-rw-r--r--test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala12
-rw-r--r--test/junit/scala/tools/nsc/symtab/StdNamesTest.scala8
-rw-r--r--test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala44
-rw-r--r--test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala9
-rw-r--r--test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala610
-rw-r--r--test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala143
-rw-r--r--test/junit/scala/tools/testing/AssertThrowsTest.scala11
-rw-r--r--test/junit/scala/tools/testing/AssertUtil.scala93
-rw-r--r--test/junit/scala/tools/testing/AssertUtilTest.scala21
-rw-r--r--test/junit/scala/tools/testing/ClearAfterClass.java20
-rw-r--r--test/junit/scala/tools/testing/TempDir.scala18
-rw-r--r--test/junit/scala/util/RandomTest.scala15
-rw-r--r--test/junit/scala/util/SpecVersionTest.scala (renamed from test/junit/scala/util/t7265.scala)4
-rw-r--r--test/junit/scala/util/matching/CharRegexTest.scala (renamed from test/junit/scala/util/matching/regextract-char.scala)0
-rw-r--r--test/junit/scala/util/matching/RegexTest.scala17
-rw-r--r--test/osgi/src/BasicLibrary.scala15
-rw-r--r--test/osgi/src/BasicReflection.scala15
-rw-r--r--test/osgi/src/BasicTest.scala15
-rw-r--r--test/osgi/src/ReflectionToolboxTest.scala15
-rw-r--r--test/osgi/src/ScalaOsgiHelper.scala6
-rw-r--r--test/pending/jvm/cf-attributes.scala2
-rw-r--r--test/pending/jvm/javasigs.scala2
-rw-r--r--test/pending/jvm/timeout.scala2
-rw-r--r--test/pending/pos/t3439.scala2
-rw-r--r--test/pending/run/delambdafy-lambdametafactory.scala50
-rw-r--r--test/scaladoc/filters8
-rw-r--r--test/scaladoc/resources/SI-3314-diagrams.scala2
-rw-r--r--test/scaladoc/resources/SI-4476.scala9
-rw-r--r--test/scaladoc/resources/SI-8144.scala17
-rw-r--r--test/scaladoc/resources/SI-8514.scala10
-rw-r--r--test/scaladoc/resources/Trac4420.scala2
-rw-r--r--test/scaladoc/resources/code-indent.scala6
-rw-r--r--test/scaladoc/resources/implicit-inheritance-override.scala2
-rw-r--r--test/scaladoc/resources/implicits-ambiguating-res.scala2
-rw-r--r--test/scaladoc/resources/implicits-shadowing-res.scala2
-rw-r--r--test/scaladoc/run/SI-8479.check1
-rwxr-xr-xtest/scaladoc/run/SI-8479.scala32
-rw-r--r--test/scaladoc/run/t5730.check1
-rw-r--r--test/scaladoc/run/t5730.scala36
-rw-r--r--test/scaladoc/run/t5795.check4
-rw-r--r--test/scaladoc/run/t5795.scala63
-rw-r--r--test/scaladoc/run/t6626.check7
-rw-r--r--test/scaladoc/run/t6626.scala42
-rw-r--r--test/scaladoc/run/t8113.check1
-rw-r--r--test/scaladoc/run/t8113.scala36
-rw-r--r--test/scaladoc/run/t8314.check3
-rw-r--r--test/scaladoc/run/t8314.scala16
-rw-r--r--test/scaladoc/run/t8557.check1
-rw-r--r--test/scaladoc/run/t8557.scala32
-rw-r--r--test/scaladoc/run/t8672.check4
-rw-r--r--test/scaladoc/run/t8672.scala32
-rw-r--r--test/scaladoc/scalacheck/CommentFactoryTest.scala20
-rw-r--r--test/scaladoc/scalacheck/DeprecatedIndexTest.scala50
-rw-r--r--test/scaladoc/scalacheck/HtmlFactoryTest.scala93
-rw-r--r--test/scaladoc/scalacheck/IndexTest.scala8
-rwxr-xr-xtest/script-tests/README7
-rwxr-xr-xtools/binary-repo-lib.sh7
-rwxr-xr-xtools/scaladoc-compare2
-rw-r--r--versions.properties23
1628 files changed, 50658 insertions, 18767 deletions
diff --git a/.gitignore b/.gitignore
index 32a1665721..d6571a377f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,12 +39,18 @@
# eclipse, intellij
/.classpath
/.project
-/src/intellij/*.iml
-/src/intellij/*.ipr
-/src/intellij/*.iws
+/src/intellij*/*.iml
+/src/intellij*/*.ipr
+/src/intellij*/*.iws
**/.cache
/.idea
/.settings
# Standard symbolic link to build/quick/bin
/qbin
+
+# Sbt's target directories
+/target/
+/project/target/
+/project/project/target
+/build-sbt/
diff --git a/.travis.yml b/.travis.yml
index e90fc35267..6a7ac45e3d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@
# based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html
language: ruby
rvm:
- - 1.9.3
+ - 2.2
script: bundle exec jekyll build -s spec/ -d build/spec
install: bundle install
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 1c05b4fd6b..e9505c26df 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,66 +1,52 @@
-# Scala Project & Developer Guidelines
+# Welcome! Thank you for contributing to Scala!
+We follow the standard GitHub [fork & pull](https://help.github.com/articles/using-pull-requests/#fork--pull) approach to pull requests. Just fork the official repo, develop in a branch, and submit a PR!
-These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that make it easier to achieve our goals in an efficient way.
+You're always welcome to submit your PR straight away and start the discussion (without reading the rest of this wonderful doc, or the `READMEnot^H^H^H.md`). The goal of these notes is to make your experience contributing to Scala as smooth and pleasant as possible. We're happy to guide you through the process once you've submitted your PR.
-## General Workflow
+## The Scala Community
+Last year, you -- the Scala community -- matched the core team at EPFL in number of commits contributed to Scala 2.11, doubling the percentage of commits from outside EPFL/Typesafe since 2.10. Excellent work! (The split is roughly 25/25/50 for you/epfl/typesafe. By the way, the team at Typesafe is: @adriaanm, @gkossakowski, @lrytz and @retronym.)
-This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc.
+We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)!
-1. Make sure you have signed the [Scala CLA](http://typesafe.com/contribute/cla/scala), if not, sign it.
-2. Before starting to work on a feature or a fix, it's good practice to ensure that:
- 1. There is a ticket for your work in the project's issue tracker. If not, create it first (perhaps given a thumbs up from the scala-internals mailing list first).
- 2. The ticket has been discussed and prioritized by the team.
-3. You should always perform your work in its own Git branch. The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. (See below for more details on branch naming.)
-4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub.
-5. The Pull Request should be reviewed by other maintainers (as many as feasible/practical). Note that a reviewer can also be an outside contributor-- members of Typesafe and independent contributors are encouraged to participate in the review process. It is not a closed process. Please try to avoid conflict of interest -- the spirit of the review process is to evenly distribute the understanding of our code base across its maintainers as well as to load balance quality assurance. Assigning a review to a "sure win" reviewer is not a good long-term solution.
-6. After the review, you should resolve issues brought up by the reviewers as needed (pushing a new commit to address reviewers' comments), iterating until the reviewers give their thumbs up, the "LGTM" (acronym for "Looks Good To Me").
-7. Once the code has passed review the Pull Request can be merged into the distribution.
+This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to scala-internals, or tweet about it to @adriaanm.)
-## Pull Request Requirements
+## What kind of PR are you submitting?
-First, please have a look at and follow the [Pull Request Policy](https://github.com/scala/scala/wiki/Pull-Request-Policy) for guidelines on submitting a pull request to the Scala project.
+Regardless of the nature of your Pull Request, we have to ask you to sign the [Scala CLA](http://typesafe.com/contribute/cla/scala), to protect the OSS nature of the code base.
-In order for a Pull Request to be considered, it has to meet these requirements:
+### Documentation
+Whether you finally decided you couldn't stand that annoying typo anymore, you fixed the outdated code sample in some comment, or you wrote a nice, comprehensive overview for an under-documented package, some docs for a class or the specifics about a method, your documentation improvement is very much appreciated, and we will do our best to fast-track it.
-1. Live up to the current code standard:
- - Not violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself).
- - [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule) should be applied.
-2. Tests are of paramount importance.
-3. The code must be well documented in the project's standard documentation format (see the ‘Documentation’ section below).
+You can make these changes directly in your browser in GitHub, or follow the same process as for code. Up to you!
-If *all* of these requirements are not met then the code should **not** be merged into the distribution, and need not even be reviewed.
+For bigger documentation changes, you may want to poll the (scala-internals) mailing list first, to quickly gauge whether others support the direction you're taking, so there won't be any surprises when it comes to reviewing your PR.
-## Documentation
+### Code
+For bigger changes, we do recommend announcing your intentions on scala-internals first, to avoid duplicated effort, or spending a lot of time reworking something we are not able to change at this time in the release cycle, for example.
-All contributed code should come accompanied with documentation. Pull requests containing undocumented code will not be accepted. Both user-facing Scaladoc comments, as well as committer-facing internal documentation (i.e. important design decisions that other maintainers should know about should be placed inline with line comments `//`) should be accompanying all contributed code where possible.
+The kind of code we can accept depends on the life cycle for the release you're targeting. The current maintenance release (2.11.x) cannot break source/binary compatibility, which means public APIs cannot change. It also means we are reluctant to change, e.g., type inference or implicit search, as this can have unforeseen consequences for source compatibility.
+#### Bug Fix
-## Work In Progress
+Prefix your commit title with "SI-NNNN", where https://issues.scala-lang.org/browse/SI-NNNN tracks the bug you're fixing. We also recommend naming your branch after the Jira ticket number.
-It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so, then it is preferable to name the branch accordingly. This can be done by either prefixing the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower standard. However, to be merged into master it will have to go through the regular process outlined above, with Pull Request, review etc..
+Please make sure the Jira ticket's fix version corresponds to the upcoming milestone for the branch your PR targets (the CI automation will automatically assign the milestone after you open the PR).
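+
+For illustration only (the ticket number and message below are made up), a bug-fix branch and commit might look like:
+
+```
+$ git checkout -b ticket/SI-1234
+# ...fix the bug, add a regression test...
+$ git commit -m "SI-1234 One-line description of what the fix does"
+```
+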
-Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. Any branch labelled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature.
+#### Enhancement or New Feature
-## Creating Commits And Writing Commit Messages
+For longer-running development, likely required for this category of code contributions, we suggest you include "topic" or "wip" in your branch name, to indicate that this is work in progress, and that others should be prepared to rebase if they branch off your branch.
-Follow these guidelines when creating public commits and writing commit messages.
+Any language change (including bug fixes) must be accompanied by the relevant updates to the spec, which lives in the same repository for this reason.
-1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into one large commit which is accompanied by a detailed commit message for (as discussed in the following sections). For more info, see the article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Additionally, every commit should be able to be used in isolation-- that is, each commit must build and pass all tests.
-2. The first line should be a descriptive sentence about what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. It is **not ok** to only list the ticket number, type "minor fix" or similar. If the commit has a corresponding ticket, include a reference to the ticket number, prefixed with "SI-", at the beginning of the first line followed by the title of the ticket, assuming that it aptly and concisely summarizes the commit in a single line. If the commit is a small fix, then you are done. If not, go to 3.
-3. Following the single line description (ideally no more than 70 characters long) should be a blank line followed by an enumerated list with the details of the commit.
-4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time):
- * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone is encouraged to give feedback, however. (Remember that @-mentions will result in notifications also when pushing to a WIP branch, so please only include this in your commit message when you're ready for your pull request to be reviewed. Alternatively, you may request a review in the pull request's description.)
- * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this).
- * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc)
+A new language feature requires a SIP (Scala Improvement Process) proposal. For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html).
-Example:
+#### Summary
- SI-4032 Implicit conversion visibility affected by presence of "this"
+1. We require regression tests for bug fixes. New features and enhancements must be supported by a respectable test suite.
+2. Documentation. Yep! Also required :-)
+3. Please follow these code standards, though in moderation (scouts quickly learn to let sleeping dogs lie):
+ - Don't violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself).
+ - [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule) should be applied.
- - Details 1
- - Details 2
- - Details 3
+Please also have a look at our [Pull Request Policy](https://github.com/scala/scala/wiki/Pull-Request-Policy), as well as the [Scala Hacker Guide](http://www.scala-lang.org/contribute/hacker-guide.html) by @xeno-by.
-## The Scala Improvement Process
-A new language feature requires a SIP (Scala Improvement Process) proposal. Note that significant additions to the standard library are also considered candidates for a SIP proposal.
-For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html).
diff --git a/Gemfile b/Gemfile
index 53924a4381..6921f792c3 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,7 +1,7 @@
# To build the spec on Travis CI
source "https://rubygems.org"
-gem "jekyll", "2.0.0.alpha.2"
+gem "jekyll", "2.5.3"
gem "rouge"
# gem 's3_website'
# gem 'redcarpet'
diff --git a/README.md b/README.md
index fdc989228c..3c7e4d1ed8 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,198 @@
-This is the repository for the [Scala Programming Language](http://www.scala-lang.org).
-
- - [Report an issue](https://issues.scala-lang.org);
- - [Read about the development of the compiler and the standard library](http://docs.scala-lang.org/scala/);
- - [Check our Jenkins status](https://scala-webapps.epfl.ch/jenkins/);
- - [Download the latest nightly](https://scala-webapps.epfl.ch/jenkins/job/scala-nightly-main-master/ws/dists/latest/*zip*/latest.zip);
- - ... and contribute right here! Please, first read our [policy](http://docs.scala-lang.org/scala/pull-request-policy.html), our [development guidelines](CONTRIBUTING.md),
-and [sign the contributor's license agreement](http://typesafe.com/contribute/cla/scala).
+# Welcome!
+This is the official repository for the [Scala Programming Language](http://www.scala-lang.org).
+
+To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://typesafe.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature.
+
+For more information on building and developing the core of Scala, read on! Please also check out our [guidelines for contributing](CONTRIBUTING.md).
+
+We're still using Jira for issue reporting, so please [report any issues](https://issues.scala-lang.org) over there.
+(We would love to start using GitHub Issues, but we're too resource-constrained to take on this migration right now.)
+
+# Get in touch!
+If you need some help with your PR at any time, please feel free to @-mention anyone from the list below (or simply `@scala/team-core-scala`), and we will do our best to help you out:
+
+ |   | username | talk to me about... |
+--------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------|
+ <img src="https://avatars.githubusercontent.com/adriaanm" height="50px" title="Adriaan Moors"/> | [`@adriaanm`](https://github.com/adriaanm) | anything (type checker, pattern matcher, CI,...) |
+ <img src="https://avatars.githubusercontent.com/gkossakowski" height="50px" title="Grzegorz Kossakowski"/> | [`@gkossakowski`](https://github.com/gkossakowski) | infrastructure, incremental compilation, back-end |
+ <img src="https://avatars.githubusercontent.com/retronym" height="50px" title="Jason Zaugg"/> | [`@retronym`](https://github.com/retronym) | Java 8 lambdas, tricky bug detective work |
+ <img src="https://avatars.githubusercontent.com/Ichoran" height="50px" title="Rex Kerr"/> | [`@Ichoran`](https://github.com/Ichoran) | the collections library, performance |
+ <img src="https://avatars.githubusercontent.com/lrytz" height="50px" title="Lukas Rytz"/> | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments |
+ <img src="https://avatars.githubusercontent.com/dickwall" height="50px" title="Dick Wall"/> | [`@dickwall`](https://github.com/dickwall) | process & documentation |
+ <img src="https://avatars.githubusercontent.com/VladUreche" height="50px" title="Vlad Ureche"/> | [`@VladUreche`](https://github.com/VladUreche) | specialization & the scaladoc tool |
+ <img src="https://avatars.githubusercontent.com/densh" height="50px" title="Denys Shabalin"/> | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library |
+ <img src="https://avatars.githubusercontent.com/xeno-by" height="50px" title="Eugene Burmako"/> | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection |
+
+
+PS: If you have some spare time to help out around here, we would be delighted to add your name to this list!
+
+# Handy Links
+ - [A wealth of documentation](http://docs.scala-lang.org)
+ - [Scala CI](https://scala-ci.typesafe.com/)
+ - [Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins/)
+ - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/);
+ - Scala mailing lists:
+ - [Compiler and standard library development](https://groups.google.com/group/scala-internals)
+ - [Users of Scala](https://groups.google.com/group/scala-user)
+ - [Scala language discussion](https://groups.google.com/group/scala-language)
+ - [Scala Improvement Process](https://groups.google.com/group/scala-sips)
+ - [Debate](https://groups.google.com/group/scala-debate)
+ - [Announcements](https://groups.google.com/group/scala-announce)
+
+# Repository structure
+
+```
+scala/
++--build.xml The main Ant build script, see also under src/build.
++--pull-binary-libs.sh Pulls binary artifacts from remote repository.
++--lib/ Pre-compiled libraries for the build.
++--src/ All sources.
+ +---/library Scala Standard Library.
+ +---/reflect Scala Reflection.
+ +---/compiler Scala Compiler.
+ +---/eclipse Eclipse project files.
+ +---/intellij IntelliJ project templates.
++--scripts/ Scripts for the CI jobs (including building releases)
++--test/ The Scala test suite.
++--build/ [Generated] Build products output directory for ant.
++--dist/ [Generated] The destination folder for Scala distributions.
+```
+
+# How we roll
+
+## Requirements
+
+You'll need a Java SDK (6 or newer), Apache Ant (version 1.8.0 or above), and curl (for `./pull-binary-libs.sh`).
+
+## Git Hygiene
+
+As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 50-60 characters for the first line, wrapping subsequent ones at 80 (at most).
+
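+A made-up example of this shape (the ticket number and wording are purely illustrative):
+
+```
+SI-1234 Fix off-by-one in the widget counter
+
+  - Start counting widgets at zero instead of one
+  - Add a regression test under test/files/run
+```
+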
+When not sure how to formulate your commit message, imagine you're writing a bullet item for the next release notes, or describing what the commit does to the code base (use active verbs in the present tense). When your commit title is featured in the next release notes, it will be read by a lot of curious Scala users, looking for the latest improvements. Satisfy their thirst for information with as few words as possible! Also, a commit should convey clearly to your (future) fellow contributors what it does to the code base.
+
+Writing the commit message is a great sanity check that the commit is of the right size. If it does too many things, the description will be unwieldy and tedious to write. Chop it up (`git add -u --patch` and `git rebase` are your friends) and simplify!
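+
+A minimal sketch of that workflow (assuming a messy series of WIP commits on your branch; the target branch name is a placeholder):
+
+```
+$ git add -u --patch             # stage only the hunks that belong together
+$ git commit
+$ git rebase -i <target-branch>  # squash, reorder, and reword the remaining commits
+```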
+
+To pinpoint bugs, we often use git bisect, which is only effective when we can count on each commit building (and passing the test suite). Thus, the CI bot enforces this. Please rebase your development history into a sensible list of self-contained commits that tell the story of your bug fix or improvement. Carve them up so that the riskier bits can be reverted independently. Keep changes focussed by splitting out cleanups from refactorings from actual changes to the logic.
+
+This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one).
+
+Please do not @mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)).
+
+
+## Reviews
+
+Please consider nominating a reviewer for your PR in the PR's description or a comment. If unsure, not to worry -- the core team will assign one for you.
+
+Your reviewer is also your mentor, who will help you rework your PR so that it meets our requirements. We strive to give timely feedback, and apologize for those times when we are overwhelmed by the volume of contributions. Please feel free to ping us. You are entitled to regular progress updates and at least a quick assessment of feasibility of a bigger PR.
+
+To help you plan your contributions, we communicate our plans on a regular basis on scala-internals, and deadlines are tracked as due dates for [GitHub milestones](https://github.com/scala/scala/milestones).
+
+## Reviewing
+
+Once you've gained some experience with the code base and the process, the logical next step is to offer reviews for others' contributions. The main goal of this whole process, in the end, is to ensure the health of the Scala project by improving the quality of the code base and the documentation, as well as this process itself. Thank you for doing your part!
+
+### Tips & Tricks
+Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows:
+
+```
+$ sbt
+
+> set resolvers += "pr" at "http://private-repo.typesafe.com/typesafe/scala-pr-validation-snapshots/"
+> set scalaVersion := "<milestone>-<sha7>-SNAPSHOT"
+> console
+```
+
+Here, `<milestone>` is the milestone targeted by the PR (e.g., 2.11.6), and `<sha7>` is the 7-character sha (the format used by GitHub on the web).
+
+## IDE Setup
+### Eclipse
+Download the [Scala IDE bundle](http://scala-ide.org/download/sdk.html). It comes preconfigured for optimal performance.
+
+ - Run `ant init` to download some necessary jars.
+ - Import the project (in `src/eclipse`) via `File` → `Import Existing Projects into Workspace`. Check all projects and click ok.
+
+For important details on building, debugging and file encodings, please see [the excellent tutorial on scala-ide.org](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html) and the included README.md in src/eclipse.
+
+### IntelliJ 14
+Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
+
+The following steps are required to use IntelliJ IDEA on Scala trunk (a command-line sketch follows the list):
+ - Run `ant init`. This will download some JARs to `./build/deps`, which are included in IntelliJ's classpath.
+ - Run `src/intellij/setup.sh`
+ - Open `./src/intellij/scala.ipr` in IntelliJ
+ - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the Java 1.6 SDK.
+ (You may use a later SDK for local development, but the CI will verify against Java 6.)
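+
+A command-line sketch of the steps above (the exact invocations may vary; the SDK setup still happens inside IntelliJ):
+
+```
+$ ant init                 # downloads the JARs that IntelliJ's classpath refers to
+$ ./src/intellij/setup.sh  # generates the IntelliJ project files
+# then open ./src/intellij/scala.ipr from within IntelliJ and create the "1.6" SDK entry
+```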
+
+Compilation within IDEA is performed in "-Dlocker.skip=1" mode: the sources are built
+directly using the STARR compiler (which is downloaded from maven, according to `starr.version` in `versions.properties`).
+
+
+## Building with Ant
+
+NOTE: we are working on migrating the build to sbt.
+
+Run `ant build-opt` to build an optimized version of the compiler.
+Verify your build using `ant test-opt`.
+
+The Scala build system is based on Apache Ant. Most required pre-compiled
+libraries are part of the repository (in 'lib/'). The remaining tools that must
+be installed on the build machine are listed in the Requirements section above.
+
+## Building with Sbt (EXPERIMENTAL)
+
+The experimental sbt-based build definition has arrived! Run `sbt package`
+to build the compiler. You can run `sbt test` to run unit (JUnit) tests.
+Use `sbt test/it:test` to run integration (partest) tests.
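+
+For reference, the same three commands can be run from a single interactive sbt session in the repository root (a quick sketch of the commands listed above):
+
+```
+$ sbt
+> package
+> test
+> test/it:test
+```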
+
+We would like to migrate to the sbt build as quickly as possible. If you would
+like to help, please contact the scala-internals mailing list to discuss your
+ideas and coordinate your effort with others.
+
+### Tips and tricks
+
+Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the optimized variants).
+
+ - `./pull-binary-libs.sh` [downloads](http://typesafe.artifactoryonline.com/typesafe) all binary artifacts associated with this commit.
+ - `ant -p` prints out information about the commonly used ant targets.
+ - `ant` or `ant build`: a quick compilation (to `build/quick`) of your changes using the locker compiler.
+
+A typical debug cycle incrementally builds quick, then uses it to compile and run the file
+`sandbox/test.scala` as follows:
+
+ - `ant && build/quick/bin/scalac -d sandbox sandbox/test.scala && build/quick/bin/scala -cp sandbox Test`
+
+We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick/bin/scala -cp sandbox` to `qs` in our shell.
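+
+A minimal sketch of those aliases, assuming a bash-compatible shell and that you run them from the root of your checkout:
+
+```
+alias qsc="build/quick/bin/scalac -d sandbox"
+alias qs="build/quick/bin/scala -cp sandbox"
+```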
+
+`ant test-opt` tests that your code is working and fit to be committed:
+
+ - Runs the test suite and bootstrapping test on quick.
+ - You can run the suite only (skipping strap) with `ant test.suite`.
+
+`ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick.
+Note: on most machines this requires more heap than is allocated by default. You can adjust the parameters with `ANT_OPTS`. Example command line:
+
+```
+ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
+```
+
+ - `ant dist` builds a distribution in `dists/latest`.
+ - `ant all.clean` removes all build files and all distributions.
+
+### Bootstrapping concepts
+NOTE: This is somewhat outdated, but the ideas still hold.
+
+In order to guarantee the bootstrapping of the Scala compiler, the ant build
+compiles Scala in layers. Each layer is a complete compiled Scala compiler and library.
+A superior layer is always compiled by the layer just below it. Here is a short
+description of the four layers that the build uses, from bottom to top:
+
+ - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from maven central.
+ - `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`).
+ - `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code.
+ - `strap`: a test layer used to check stability of the build.
+
+For each layer, the Scala library is compiled first and the compiler next.
+That means that any changes in the library can immediately be used in the
+compiler without an intermediate build. On the other hand, if building the
+library requires changes in the compiler, a new locker must be built if
+bootstrapping is still possible, or a new starr if it is not.
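+
+Putting these pieces together, here is a minimal sketch of how to control locker during day-to-day work (assuming you run the commands from the root of your checkout and use the Ant build described above):
+
+```
+# skip building locker entirely (handy when you are not changing code generation)
+echo "locker.skip=true" >> build.properties
+
+# or, when locker is frozen and you want it refreshed to match the current sources:
+ant locker.unlock
+ant build
+```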
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 703c5add42..a1706d103d 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -181,6 +181,33 @@ filter {
{
matchName="scala.reflect.api.Internals#ReificationSupportApi.SyntacticExistentialType"
problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope"
+ problemName=MissingMethodProblem
+ },
+ // https://github.com/scala/scala/pull/3848 -- SI-8680
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
+ problemName=MissingMethodProblem
+ },
+ // SI-8946
+ {
+ matchName="scala.reflect.runtime.ThreadLocalStorage#MyThreadLocalStorage.values"
+ problemName=MissingMethodProblem
+ },
+ // the method below was an unused private (sic!) method, but the compatibility checker was complaining about it
+ {
+ matchName="scala.reflect.io.ZipArchive.scala$reflect$io$ZipArchive$$walkIterator"
+ problemName=MissingMethodProblem
}
]
}
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 5e869e3215..3808083dd3 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -226,6 +226,99 @@ filter {
{
matchName="scala.reflect.api.Internals$ReificationSupportApi$SyntacticTypeProjectionExtractor"
problemName=MissingClassProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$followStatic"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.SynchronizedOps.newNestedScope"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse"
+ problemName=MissingTypesProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse.reporter"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse$PerRunReporting"
+ problemName=MissingClassProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse.currentRun"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse.PerRunReporting"
+ problemName=MissingMethodProblem
+ },
+ // see SI-5919
+ {
+ matchName="scala.reflect.api.TypeTags$PredefTypeCreator"
+ problemName=MissingTypesProblem
+ },
+ {
+ matchName="scala.reflect.api.TreeCreator"
+ problemName=MissingTypesProblem
+ },
+ {
+ matchName="scala.reflect.api.TypeCreator"
+ problemName=MissingTypesProblem
+ },
+ {
+ matchName="scala.reflect.api.PredefTypeCreator"
+ problemName=MissingClassProblem
+ },
+ // https://github.com/scala/scala/pull/3848 -- SI-8680
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$2"
+ problemName=MissingMethodProblem
+ },
+ // changes needed by ZipArchiveFileLookup (the flat classpath representation)
+ {
+ matchName="scala.reflect.io.FileZipArchive.allDirs"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.io.FileZipArchive.root"
+ problemName=MissingMethodProblem
+ },
+ // introduced a harmless method (replacing code repeated in several places)
+ {
+ matchName="scala.reflect.runtime.Settings#MultiStringSetting.valueSetByUser"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.Settings#BooleanSetting.valueSetByUser"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.Settings#IntSetting.valueSetByUser"
+ problemName=MissingMethodProblem
+ },
+ // SI-9059
+ {
+ matchName="scala.util.Random.scala$util$Random$$nextAlphaNum$1"
+ problemName=MissingMethodProblem
}
]
}
diff --git a/build-ant-macros.xml b/build-ant-macros.xml
index 816a18b13f..259d6a6eb6 100644
--- a/build-ant-macros.xml
+++ b/build-ant-macros.xml
@@ -408,12 +408,44 @@
<if>
<equals arg1="${@{project}.docroot}" arg2="NOT SET"/>
<then>
- <scaladoc destdir="${build-docs.dir}/@{project}" doctitle="${@{project}.description}" docfooter="epfl" docversion="${version.number}" sourcepath="${src.dir}" classpathref="docs.@{project}.build.path" srcdir="${src.dir}/${@{project}.srcdir}" addparams="${scalac.args.all}" implicits="on" diagrams="on" groups="on" rawOutput="${scaladoc.raw.output}" noPrefixes="${scaladoc.no.prefixes}" docUncompilable="${src.dir}/library-aux" skipPackages="${@{project}.skipPackages}">
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="${@{project}.description}"
+ docfooter="epfl"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="docs.@{project}.build.path"
+ srcdir="${src.dir}/${@{project}.srcdir}"
+ addparams="${scalac.args.all}"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}"
+ docUncompilable="${src.dir}/library-aux"
+ skipPackages="${@{project}.skipPackages}">
<includes/>
</scaladoc>
</then>
<else>
- <scaladoc destdir="${build-docs.dir}/@{project}" doctitle="${@{project}.description}" docfooter="epfl" docversion="${version.number}" sourcepath="${src.dir}" classpathref="docs.@{project}.build.path" srcdir="${src.dir}/${@{project}.srcdir}" docRootContent="${src.dir}/@{project}/${@{project}.docroot}" addparams="${scalac.args.all}" implicits="on" diagrams="on" groups="on" rawOutput="${scaladoc.raw.output}" noPrefixes="${scaladoc.no.prefixes}" docUncompilable="${src.dir}/library-aux" skipPackages="${@{project}.skipPackages}">
+ <scaladoc docRootContent="${src.dir}/@{project}/${@{project}.docroot}"
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="${@{project}.description}"
+ docfooter="epfl"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="docs.@{project}.build.path"
+ srcdir="${src.dir}/${@{project}.srcdir}"
+ addparams="${scalac.args.all}"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}"
+ docUncompilable="${src.dir}/library-aux"
+ skipPackages="${@{project}.skipPackages}">
<includes/>
</scaladoc>
</else>
@@ -435,6 +467,11 @@
<filter token="SCALA_FULL_VERSION" value="${scala.full.version}"/>
<filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}"/>
<filter token="SCALA_COMPILER_INTERACTIVE_VERSION" value="${scala-compiler-interactive.version.number}"/>
+ <filter token="XML_VERSION" value="${scala-xml.version.number}" />
+ <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
+ <filter token="CONTINUATIONS_PLUGIN_VERSION" value="${scala-continuations-plugin.version.number}" />
+ <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" />
+ <filter token="SCALA_SWING_VERSION" value="${scala-swing.version.number}" />
</filterset>
</copy>
<bnd classpath="${@{project}.jar}" eclipse="false" failok="false" exceptions="true" files="${build-osgi.dir}/${@{project}.name}.bnd" output="${build-osgi.dir}"/>
@@ -713,7 +750,8 @@
<attribute name="dir" default="${partest.dir}"/>
<attribute name="srcdir" default="files"/> <!-- TODO: make targets for `pending` and other subdirs -->
<attribute name="colors" default="${partest.colors}"/>
- <attribute name="scalacOpts" default="${scalac.args.optimise}"/>
+ <attribute name="scalacOpts" default="${partest.scalac_opts} ${scalac.args.optimise}"/>
+ <attribute name="javaOpts" default="${env.ANT_OPTS}"/>
<attribute name="pcp" default="${toString:partest.compilation.path}"/>
<attribute name="kinds"/>
<sequential>
@@ -722,6 +760,7 @@
kinds="@{kinds}"
colors="@{colors}"
scalacOpts="@{scalacOpts}"
+ javaOpts="@{javaOpts}"
compilationpath="@{pcp}"/>
</sequential>
</macrodef>
diff --git a/build.number b/build.number
index 51674b6915..27d2d23041 100644
--- a/build.number
+++ b/build.number
@@ -1,9 +1,9 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
version.minor=11
-version.patch=0
+version.patch=7
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
-# Note: To build a release run ant with -Dbuild.release=true
-# To build an RC, run ant with -Dmaven.version.suffix=-RCN
+# To build a release, see scripts/jobs/scala-release-2.11.x-build
+# (normally run by the eponymous job on scala-ci.typesafe.com). \ No newline at end of file
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000000..0df2e6a800
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,443 @@
+/*
+ * The new, sbt-based build definition for Scala.
+ *
+ * What you see below is very much a work in progress. Basics like compiling and packaging jars
+ * (into the right location) work. Everything else is missing:
+ * building docs, placing shell scripts in right locations (so you can run compiler easily),
+ * running partest test, compiling and running JUnit test, and many, many other things.
+ *
+ * You'll notice that this build definition is much more complicated than your typical sbt build.
+ * The main reason is that we are not benefiting from sbt's conventions when it comes to project
+ * layout. For that reason we have to configure a lot more explicitly. I've tried to explain
+ * the less obvious settings in comments.
+ *
+ * This nicely leads me to explaining the goals and non-goals of this build definition. Goals are:
+ *
+ * - to be easy to tweak in case a bug or small inconsistency is found
+ * - to mimic Ant's behavior as closely as possible
+ * - to be super explicit about any departure from standard sbt settings
+ * - to achieve functional parity with Ant build as quickly as possible
+ * - to be readable and not necessarily succinct
+ * - to provide the nicest development experience for people hacking on Scala
+ *
+ * Non-goals are:
+ *
+ * - to have the shortest sbt build definition possible; we'll beat the Ant definition
+ *   easily and that will thrill us already
+ * - to remove irregularities from our build process right away
+ * - to modularize the Scala compiler or library further
+ *
+ * It boils down to simple rules:
+ *
+ * - project layout is set in stone for now
+ * - if you need to work on convincing sbt to follow non-standard layout then
+ * explain everything you did in comments
+ * - constantly check where the Ant build produces class files and artifacts, what other kinds of
+ *   files it generates, and port all of that here
+ *
+ * Note on bootstrapping:
+ *
+ * Let's start with a reminder of what bootstrapping means in our context. It's an answer
+ * to this question: which version of Scala are we using to compile Scala? The fact that
+ * the question sounds circular suggests trickiness. Indeed, bootstrapping the Scala
+ * compiler is a tricky process.
+ *
+ * The Ant build used to have an involved system of bootstrapping Scala. It would consist of
+ * three layers: starr, locker and quick. The sbt build for Scala ditches layering
+ * and strives to be as standard an sbt project as possible. This means that we are simply
+ * building Scala with the latest stable release of Scala.
+ * See this discussion for more details behind this decision:
+ * https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion
+ */
+
+val bootstrapScalaVersion = "2.11.5"
+
+def withoutScalaLang(moduleId: ModuleID): ModuleID = moduleId exclude("org.scala-lang", "*")
+
+// exclusion of the scala-library transitive dependency avoids eviction warnings during `update`.
+val scalaParserCombinatorsDep = withoutScalaLang("org.scala-lang.modules" %% "scala-parser-combinators" % versionNumber("scala-parser-combinators"))
+val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % versionNumber("scala-xml"))
+val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest"))
+val partestInterfaceDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest-interface" % "0.5.0")
+val junitDep = "junit" % "junit" % "4.11"
+val junitIntefaceDep = "com.novocode" % "junit-interface" % "0.11" % "test"
+val jlineDep = "jline" % "jline" % versionProps("jline.version")
+val antDep = "org.apache.ant" % "ant" % "1.9.4"
+val scalacheckDep = withoutScalaLang("org.scalacheck" %% "scalacheck" % "1.11.4")
+
+lazy val commonSettings = clearSourceAndResourceDirectories ++ Seq[Setting[_]](
+ organization := "org.scala-lang",
+ version := "2.11.6-SNAPSHOT",
+ scalaVersion := bootstrapScalaVersion,
+ // we don't cross build Scala itself
+ crossPaths := false,
+ // do not add Scala library jar as a dependency automatically
+ autoScalaLibrary := false,
+ // we also do not add the scala instance automatically because it introduces
+ // a circular dependency, see: https://github.com/sbt/sbt/issues/1872
+ managedScalaInstance := false,
+ // this is a way to workaround issue described in https://github.com/sbt/sbt/issues/1872
+ // check it out for more details
+ scalaInstance := ScalaInstance(scalaVersion.value, appConfiguration.value.provider.scalaProvider.launcher getScala scalaVersion.value),
+ // we always assume that Java classes are standalone and do not have any dependency
+ // on Scala classes
+ compileOrder := CompileOrder.JavaThenScala,
+ javacOptions in Compile ++= Seq("-g", "-source", "1.5", "-target", "1.6"),
+ // we don't want any unmanaged jars; as a reminder: an unmanaged jar is a jar stored
+ // directly on the file system and not resolved through Ivy.
+ // Ant's build stored unmanaged jars in the `lib/` directory.
+ unmanagedJars in Compile := Seq.empty,
+ sourceDirectory in Compile := baseDirectory.value,
+ unmanagedSourceDirectories in Compile := List(baseDirectory.value),
+ scalaSource in Compile := (sourceDirectory in Compile).value,
+ javaSource in Compile := (sourceDirectory in Compile).value,
+ // resources are stored along source files in our current layout
+ resourceDirectory in Compile := (sourceDirectory in Compile).value,
+ // each subproject has to ask specifically for files they want to include
+ includeFilter in unmanagedResources in Compile := NothingFilter,
+ target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id,
+ target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id,
+ classDirectory in Compile := buildDirectory.value / "quick/classes" / thisProject.value.id,
+ // given that classDirectory is overridden to be _outside_ of the target directory, we have
+ // to make sure it's being cleaned properly
+ cleanFiles += (classDirectory in Compile).value,
+ fork in run := true
+)
+
+// disable various tasks that are not needed for projects that are used
+// only for compiling code and not publishing it as a standalone artifact
+// we disable those tasks by overriding them and returning bogus files when
+// needed. This is a bit sketchy but I haven't found any better way.
+val disableDocsAndPublishingTasks = Seq[Setting[_]](
+ doc := file("!!! NO DOCS !!!"),
+ publishLocal := {},
+ publish := {},
+ packageBin in Compile := file("!!! NO PACKAGING !!!")
+)
+
+lazy val setJarLocation: Setting[_] =
+ artifactPath in packageBin in Compile := {
+ // two lines below are copied over from sbt's sources:
+ // https://github.com/sbt/sbt/blob/0.13/main/src/main/scala/sbt/Defaults.scala#L628
+ //val resolvedScalaVersion = ScalaVersion((scalaVersion in artifactName).value, (scalaBinaryVersion in artifactName).value)
+ //val resolvedArtifactName = artifactName.value(resolvedScalaVersion, projectID.value, artifact.value)
+ // if you would like to get a jar with version number embedded in it (as normally sbt does)
+ // uncomment the other definition of the `resolvedArtifactName`
+ val resolvedArtifact = artifact.value
+ val resolvedArtifactName = s"${resolvedArtifact.name}.${resolvedArtifact.extension}"
+ buildDirectory.value / "pack/lib" / resolvedArtifactName
+ }
+lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation
+
+lazy val generatePropertiesFileSettings = Seq[Setting[_]](
+ copyrightString := "Copyright 2002-2013, LAMP/EPFL",
+ resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue,
+ generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value
+)
+
+val libIncludes: FileFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt"
+
+lazy val library = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(
+ name := "scala-library",
+ scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString),
+ // Workaround for a bug in `scaladoc`: it seems not to respect the `-sourcepath` option.
+ // As a result of this bug, the compiler cannot even initialize Definitions without
+ // binaries of the library on the classpath. Specifically, we get this error:
+ // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int
+ // The Ant build always does the same thing: it puts binaries for documented classes on the classpath.
+ // sbt never does this by default (which seems like a good default).
+ dependencyClasspath in Compile in doc += (classDirectory in Compile).value,
+ scalacOptions in Compile in doc ++= {
+ val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux"
+ Seq("-doc-no-compile", libraryAuxDir.toString)
+ },
+ includeFilter in unmanagedResources in Compile := libIncludes)
+ .dependsOn (forkjoin)
+
+lazy val reflect = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(name := "scala-reflect")
+ .dependsOn(library)
+
+val compilerIncludes: FileFilter =
+ "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" |
+ "*.png" | "*.gif" | "*.gif" | "*.txt"
+
+lazy val compiler = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(
+ name := "scala-compiler",
+ libraryDependencies += antDep,
+ // this is a way to make sure that classes from the interactive and scaladoc projects
+ // end up in the compiler jar (that's what the Ant build does)
+ // we need to use LocalProject references (with strings) to deal with mutual recursion
+ mappings in Compile in packageBin :=
+ (mappings in Compile in packageBin).value ++
+ (mappings in Compile in packageBin in LocalProject("interactive")).value ++
+ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++
+ (mappings in Compile in packageBin in LocalProject("repl")).value,
+ includeFilter in unmanagedResources in Compile := compilerIncludes)
+ .dependsOn(library, reflect, asm)
+
+lazy val interactive = configureAsSubproject(project)
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val repl = configureAsSubproject(project)
+ .settings(libraryDependencies += jlineDep)
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val scaladoc = configureAsSubproject(project)
+ .settings(
+ libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep)
+ )
+ .settings(disableDocsAndPublishingTasks: _*)
+ .dependsOn(compiler)
+
+lazy val scalap = configureAsSubproject(project).
+ dependsOn(compiler)
+
+// deprecated Scala Actors project
+// TODO: it packages into actors.jar but it should be scala-actors.jar
+lazy val actors = configureAsSubproject(project)
+ .settings(generatePropertiesFileSettings: _*)
+ .settings(name := "scala-actors")
+ .dependsOn(library)
+
+lazy val forkjoin = configureAsForkOfJavaProject(project)
+
+lazy val asm = configureAsForkOfJavaProject(project)
+
+lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".") / "src" / "partest-extras"))
+ .dependsOn(repl)
+ .settings(clearSourceAndResourceDirectories: _*)
+ .settings(
+ libraryDependencies += partestDep,
+ unmanagedSourceDirectories in Compile := List(baseDirectory.value)
+ )
+
+lazy val junit = project.in(file("test") / "junit")
+ .dependsOn(library, reflect, compiler, partestExtras, scaladoc)
+ .settings(clearSourceAndResourceDirectories: _*)
+ .settings(commonSettings: _*)
+ .settings(
+ fork in Test := true,
+ libraryDependencies ++= Seq(junitDep, junitIntefaceDep),
+ testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"),
+ unmanagedSourceDirectories in Test := List(baseDirectory.value)
+ )
+
+lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent").
+ dependsOn(asm).
+ settings(commonSettings: _*).
+ settings(
+ doc := file("!!! NO DOCS !!!"),
+ publishLocal := {},
+ publish := {},
+ // Setting name to "scala-partest-javaagent" so that the jar file gets that name, which the Runner relies on
+ name := "scala-partest-javaagent",
+ // writing jar file to $buildDirectory/pack/lib because that's where it's expected to be found
+ setJarLocation,
+ // add required manifest entry - previously included from file
+ packageOptions in (Compile, packageBin) +=
+ Package.ManifestAttributes( "Premain-Class" -> "scala.tools.partest.javaagent.ProfilingAgent" ),
+ // we need to build this to a JAR
+ exportJars := true
+ )
+
+lazy val test = project.
+ dependsOn(compiler, interactive, actors, repl, scalap, partestExtras, partestJavaAgent, asm, scaladoc).
+ configs(IntegrationTest).
+ settings(disableDocsAndPublishingTasks: _*).
+ settings(commonSettings: _*).
+ settings(Defaults.itSettings: _*).
+ settings(
+ libraryDependencies ++= Seq(partestDep, scalaXmlDep, partestInterfaceDep, scalacheckDep),
+ unmanagedBase in Test := baseDirectory.value / "files" / "lib",
+ unmanagedJars in Test <+= (unmanagedBase) (j => Attributed.blank(j)) map(identity),
+ // no main sources
+ sources in Compile := Seq.empty,
+ // test sources are compiled in partest run, not here
+ sources in IntegrationTest := Seq.empty,
+ fork in IntegrationTest := true,
+ javaOptions in IntegrationTest += "-Xmx1G",
+ testFrameworks += new TestFramework("scala.tools.partest.Framework"),
+ testOptions in IntegrationTest += Tests.Setup( () => root.base.getAbsolutePath + "/pull-binary-libs.sh" ! ),
+ definedTests in IntegrationTest += (
+ new sbt.TestDefinition(
+ "partest",
+ // marker fingerprint since there are no test classes
+ // to be discovered by sbt:
+ new sbt.testing.AnnotatedFingerprint {
+ def isModule = true
+ def annotationName = "partest"
+ }, true, Array())
+ )
+ )
+
+lazy val root = (project in file(".")).
+ aggregate(library, forkjoin, reflect, compiler, asm, interactive, repl,
+ scaladoc, scalap, actors, partestExtras, junit).settings(
+ sources in Compile := Seq.empty,
+ onLoadMessage := """|*** Welcome to the sbt build definition for Scala! ***
+ |This build definition has an EXPERIMENTAL status. If you are not
+ |interested in testing or working on the build itself, please use
+ |the Ant build definition for now. Check README.md for more information.""".stripMargin
+ )
+
+lazy val dist = (project in file("dist")).settings(
+ mkBin := mkBinImpl.value
+)
+
+/**
+ * Configures passed project as a subproject (e.g. compiler or repl)
+ * with common settings attached to it.
+ *
+ * Typical usage is:
+ *
+ * lazy val mySubproject = configureAsSubproject(project)
+ *
+ * We pass `project` as an argument which is in fact a macro call. This macro determines
+ * project.id based on the name of the lazy val on the left-hand side.
+ */
+def configureAsSubproject(project: Project): Project = {
+ val base = file(".") / "src" / project.id
+ (project in base).settings(scalaSubprojectSettings: _*)
+}
+
+/**
+ * Configuration for subprojects that are forks of some Java projects
+ * we depend on. At the moment there are just two: asm and forkjoin.
+ *
+ * We do not publish artifacts for those projects but we package their
+ * binaries in a jar of other project (compiler or library).
+ *
+ * For that reason we disable docs generation, packaging and publishing.
+ */
+def configureAsForkOfJavaProject(project: Project): Project = {
+ val base = file(".") / "src" / project.id
+ (project in base).
+ settings(commonSettings: _*).
+ settings(disableDocsAndPublishingTasks: _*).
+ settings(
+ sourceDirectory in Compile := baseDirectory.value,
+ javaSource in Compile := (sourceDirectory in Compile).value,
+ sources in Compile in doc := Seq.empty,
+ classDirectory in Compile := buildDirectory.value / "libs/classes" / thisProject.value.id
+ )
+}
+
+lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build")
+lazy val copyrightString = settingKey[String]("Copyright string.")
+lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.")
+lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).")
+
+lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
+ val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties"
+ val props = new java.util.Properties
+
+ /**
+ * Regexp that splits a version number into two parts: version and suffix.
+ * Examples of how the split is performed:
+ *
+ * "2.11.5": ("2.11.5", null)
+ * "2.11.5-acda7a": ("2.11.5", "-acda7a")
+ * "2.11.5-SNAPSHOT": ("2.11.5", "-SNAPSHOT")
+ *
+ */
+ val versionSplitted = """([\w+\.]+)(-[\w+\.]+)??""".r
+
+ val versionSplitted(ver, suffixOrNull) = version.value
+ val osgiSuffix = suffixOrNull match {
+ case null => "-VFINAL"
+ case "-SNAPSHOT" => ""
+ case suffixStr => suffixStr
+ }
+
+ def executeTool(tool: String) = {
+ val cmd =
+ if (System.getProperty("os.name").toLowerCase.contains("windows"))
+ s"cmd.exe /c tools\\$tool.bat -p"
+ else s"tools/$tool"
+ Process(cmd).lines.head
+ }
+
+ val commitDate = executeTool("get-scala-commit-date")
+ val commitSha = executeTool("get-scala-commit-sha")
+
+ props.put("version.number", s"${version.value}-$commitDate-$commitSha")
+ props.put("maven.version.number", s"${version.value}")
+ props.put("osgi.version.number", s"$ver.v$commitDate$osgiSuffix-$commitSha")
+ props.put("copyright.string", copyrightString.value)
+
+ // unfortunately, this will write properties in arbitrary order
+ // this makes it harder to test for stability of generated artifacts
+ // consider using https://github.com/etiennestuder/java-ordered-properties
+ // instead of java.util.Properties
+ IO.write(props, null, propFile)
+
+ propFile
+}
+
+// Defining these settings is somewhat redundant as we also redefine settings that depend on them.
+// However, IntelliJ's project import works better when these are set correctly.
+def clearSourceAndResourceDirectories = Seq(Compile, Test).flatMap(config => inConfig(config)(Seq(
+ unmanagedSourceDirectories := Nil,
+ managedSourceDirectories := Nil,
+ unmanagedResourceDirectories := Nil,
+ managedResourceDirectories := Nil
+)))
+
+lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task {
+ def mkScalaTool(mainCls: String, classpath: Seq[Attributed[File]]): ScalaTool =
+ ScalaTool(mainClass = mainCls,
+ classpath = classpath.toList.map(_.data.getAbsolutePath),
+ properties = Map.empty,
+ javaOpts = "-Xmx256M -Xms32M",
+ toolFlags = "")
+ val rootDir = (classDirectory in Compile in compiler).value
+ def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] =
+ Seq(
+ scalaTool.writeScript(file, "unix", rootDir, outDir),
+ scalaTool.writeScript(file, "windows", rootDir, outDir)
+ )
+ def mkQuickBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = {
+ val scalaTool = mkScalaTool(mainCls, classpath)
+ val outDir = buildDirectory.value / "quick/bin"
+ writeScripts(scalaTool, file, outDir)
+ }
+
+ def mkPackBin(file: String, mainCls: String): Seq[File] = {
+ val scalaTool = mkScalaTool(mainCls, classpath = Nil)
+ val outDir = buildDirectory.value / "pack/bin"
+ writeScripts(scalaTool, file, outDir)
+ }
+
+ def mkBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] =
+ mkQuickBin(file, mainCls, classpath) ++ mkPackBin(file, mainCls)
+
+ mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in repl).value) ++
+ mkBin("scalac" , "scala.tools.nsc.Main", (fullClasspath in Compile in compiler).value) ++
+ mkBin("fsc" , "scala.tools.nsc.CompileClient", (fullClasspath in Compile in compiler).value) ++
+ mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (fullClasspath in Compile in scaladoc).value) ++
+ mkBin("scalap" , "scala.tools.scalap.Main", (fullClasspath in Compile in scalap).value)
+}
+
+buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build-sbt"
+
+lazy val versionProps: Map[String, String] = {
+ import java.io.FileInputStream
+ import java.util.Properties
+ val props = new Properties()
+ val in = new FileInputStream(file("versions.properties"))
+ try props.load(in)
+ finally in.close()
+ import scala.collection.JavaConverters._
+ props.asScala.toMap
+}
+
+def versionNumber(name: String): String =
+ versionProps(s"$name.version.number")
diff --git a/build.xml b/build.xml
index fec1d947c4..fa6ff72baa 100755
--- a/build.xml
+++ b/build.xml
@@ -165,7 +165,7 @@ TODO:
<property name="build.dir" value="${basedir}/build"/>
<property name="build-deps.dir" value="${build.dir}/deps"/>
<property name="build-libs.dir" value="${build.dir}/libs"/>
- <property name="build-asm.dir" value="${build.dir}/asm"/>
+ <property name="build-asm.dir" value="${build-libs.dir}"/>
<property name="build-forkjoin.dir" value="${build-libs.dir}"/>
<property name="build-locker.dir" value="${build.dir}/locker"/>
<property name="build-quick.dir" value="${build.dir}/quick"/>
@@ -187,8 +187,6 @@ TODO:
<property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
- <property name="jline.version" value="2.11"/>
-
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
<property name="java.flags" value="-Xmx256M -Xms32M"/>
@@ -268,42 +266,87 @@ TODO:
-->
<if><not><isset property="maven-deps-done"></isset></not><then>
<mkdir dir="${user.home}/.m2/repository"/>
+
+ <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
+ <artifact:remoteRepository id="sonatype-snapshots" url="https://oss.sonatype.org/content/repositories/snapshots"/>
+ <artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
+
<!-- This task has an issue where if the user directory does not exist, so we create it above. UGH. -->
<artifact:dependencies pathId="extra.tasks.classpath" filesetId="extra.tasks.fileset">
<dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
</artifact:dependencies>
<!-- JUnit -->
- <property name="junit.version" value="4.10"/>
+ <property name="junit.version" value="4.11"/>
<artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
<dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
</artifact:dependencies>
<copy-deps project="junit"/>
<!-- Pax runner -->
- <property name="pax.exam.version" value="2.6.0"/>
+ <property name="pax.exam.version" value="3.5.0"/><!-- Last version which supports Java 6 -->
+ <property name="osgi.felix.version" value="4.4.0"/>
+ <property name="osgi.equinox.version" value="3.7.1"/>
<artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}">
+ <exclusion groupId="org.osgi" artifactId="org.osgi.core"/><!-- Avoid dragging in a dependency which requires Java >6 -->
+ </dependency>
<dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-junit4" version="${pax.exam.version}"/>
<dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-link-assembly" version="${pax.exam.version}"/>
- <!-- upgraded to 1.6.0 to get fix for https://ops4j1.jira.com/browse/PAXURL-217
- https://ops4j1.jira.com/browse/PAXURL-138 is still unresolved... -->
- <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="1.6.0"/>
- <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-framework" version="1.5.1"/>
- <dependency groupId="ch.qos.logback" artifactId="logback-core" version="0.9.20"/>
- <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="0.9.20"/>
+ <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="2.2.0"/>
+ <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-tracker" version="1.8.0"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-core" version="1.1.2"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="1.1.2"/>
<dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
- <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="3.2.2"/>
</artifact:dependencies>
+ <copy-deps project="pax.exam"/>
+ <artifact:dependencies pathId="osgi.framework.felix">
+ <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="${osgi.felix.version}"/>
+ </artifact:dependencies>
+
+ <artifact:dependencies pathId="osgi.framework.equinox">
+ <dependency groupId="org.eclipse.osgi" artifactId="org.eclipse.osgi" version="${osgi.equinox.version}"/>
+ </artifact:dependencies>
<artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
<artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
+ <!-- scala-java8-compat, used by the experimental -target jvm-1.8 support. -->
+ <if><isset property="scala-java8-compat.package"/><then>
+ <property name="scala-java8-compat.version" value="0.2.0"/>
+ <property name="scala-java8-compat.binary.version" value="2.11"/>
+ <artifact:dependencies pathId="scala-java8-compat.classpath" filesetId="scala-java8-compat.fileset">
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-java8-compat_${scala-java8-compat.binary.version}" version="${scala-java8-compat.version}">
+ <exclusion groupId="org.scala-lang" artifactId="scala-library"/>
+ </dependency>
+ </artifact:dependencies>
+ <property name="scala-java8-compat-classes" value="${build-quick.dir}/scala-java8-compat"/>
+ <delete dir="${scala-java8-compat-classes}"/>
+ <unzip dest="${scala-java8-compat-classes}">
+ <fileset refid="scala-java8-compat.fileset"/>
+ <patternset>
+ <include name="**/*.class"/>
+ </patternset>
+ </unzip>
+ <path id="scala-java8-compat.libs">
+ <pathelement location="${scala-java8-compat-classes}"/>
+ </path>
+ <fileset id="scala-java8-compat.fileset" dir="${scala-java8-compat-classes}">
+ <include name="**/*"/>
+ </fileset>
+ </then>
+ <else>
+ <path id="scala-java8-compat.libs"/>
+ <fileset id="scala-java8-compat.fileset" dir="." excludes="**"/>
+ </else>
+ </if>
+
<!-- prepare, for each of the names below, the property "@{name}.cross", set to the
necessary cross suffix (usually something like "_2.11.0-M6". -->
<prepareCross name="scala-xml" />
<prepareCross name="scala-parser-combinators" />
+ <property name="scala-continuations-plugin.cross.suffix" value="_${scala.full.version}"/>
<prepareCross name="scala-continuations-plugin" />
<prepareCross name="scala-continuations-library"/>
<prepareCross name="scala-swing"/>
@@ -367,12 +410,13 @@ TODO:
<typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
<echo message="Using Scala ${starr.version} for STARR."/>
- <artifact:dependencies pathId="starr.compiler.path">
+ <artifact:dependencies pathId="starr.compiler.path" filesetId="starr.fileset">
<artifact:remoteRepository refid="extra-repo"/>
<dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
<dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
<dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
</artifact:dependencies>
+ <copy-deps project="starr"/>
<property name="maven-deps-done" value="yep!"/>
</then></if>
@@ -558,6 +602,7 @@ TODO:
<echo message="scala-swing.version.number = ${scala-swing.version.number}"/>
<echo message="akka-actor.version.number = ${akka-actor.version.number}"/>
<echo message="actors-migration.version.number = ${actors-migration.version.number}"/>
+ <echo message="jline.version = ${jline.version}"/>
<echo message="partest.version.number = ${partest.version.number}"/>
<echo message="scalacheck.version.number = ${scalacheck.version.number}"/>
@@ -572,13 +617,14 @@ TODO:
<entry key="scala-swing.version.number" value="${scala-swing.version.number}"/>
<entry key="akka-actor.version.number" value="${akka-actor.version.number}"/>
<entry key="actors-migration.version.number" value="${actors-migration.version.number}"/>
+ <entry key="jline.version" value="${jline.version}"/>
<entry key="partest.version.number" value="${partest.version.number}"/>
<entry key="scalacheck.version.number" value="${scalacheck.version.number}"/>
</propertyfile>
</then></if>
- <path id="forkjoin.classpath" path="${build-libs.dir}/classes/forkjoin"/>
- <path id="asm.classpath" path="${build-asm.dir}/classes"/>
+ <path id="forkjoin.classpath" path="${build-forkjoin.dir}/classes/forkjoin"/>
+ <path id="asm.classpath" path="${build-asm.dir}/classes/asm"/>
<property name="forkjoin-classes" refid="forkjoin.classpath"/>
<property name="asm-classes" refid="asm.classpath"/>
@@ -707,6 +753,7 @@ TODO:
<pathelement location="${build-locker.dir}/classes/library"/>
<path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
+ <path refid="scala-java8-compat.libs"/>
</path>
<path id="locker.reflect.build.path">
@@ -728,6 +775,7 @@ TODO:
<pathelement location="${build-quick.dir}/classes/library"/>
<path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
+ <path refid="scala-java8-compat.libs"/>
</path>
<path id="quick.actors.build.path">
@@ -816,6 +864,7 @@ TODO:
<path id="pack.library.files">
<fileset dir="${build-quick.dir}/classes/library"/>
<fileset dir="${forkjoin-classes}"/>
+ <fileset refid="scala-java8-compat.fileset"/>
</path>
<path id="pack.actors.files">
@@ -838,8 +887,7 @@ TODO:
-->
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
- <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
- <fileset file="${src.dir}/scalap/decoder.properties"/> </path>
+ <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/> </path>
<path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
<path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
@@ -968,6 +1016,8 @@ TODO:
<pathelement location="${test.junit.classes}"/>
<path refid="quick.compiler.build.path"/>
<path refid="quick.repl.build.path"/>
+ <path refid="quick.scaladoc.build.path"/>
+ <path refid="quick.partest-extras.build.path"/>
<path refid="junit.classpath"/>
</path>
@@ -981,6 +1031,16 @@ TODO:
<path refid="forkjoin.classpath"/>
</path>
+ <path id="test.osgi.compiler.build.path.felix">
+ <path refid="test.osgi.compiler.build.path"/>
+ <path refid="osgi.framework.felix"/>
+ </path>
+
+ <path id="test.osgi.compiler.build.path.equinox">
+ <path refid="test.osgi.compiler.build.path"/>
+ <path refid="osgi.framework.equinox"/>
+ </path>
+
<path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
<!-- TODO: consolidate *.includes -->
@@ -1039,7 +1099,7 @@ TODO:
============================================================================ -->
<target name="asm.done" depends="init"> <simple-javac project="asm" jar="no"/> </target>
- <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
+ <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file" jar="no"/></target>
<!-- For local development only. We only allow released versions of Scala for STARR.
This builds quick (core only) and publishes it with a generated version number,
@@ -1348,27 +1408,45 @@ TODO:
srcdir="${test.osgi.src}"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath refid="test.osgi.compiler.build.path"/>
+ <compilationpath refid="test.osgi.compiler.build.path.felix"/>
</scalacfork>
<touch file="${build-osgi.dir}/test-compile.complete" verbose="no"/>
<stopwatch name="test.osgi.compiler.timer" action="total"/>
</target>
<target name="test.osgi" depends="test.osgi.comp">
- <stopwatch name="test.osgi.timer"/>
- <mkdir dir="${test.osgi.classes}"/>
-
- <echo message="Running OSGi JUnit tests. Output in ${build-osgi.dir}"/>
- <junit fork="yes" haltonfailure="yes">
- <classpath refid="test.osgi.compiler.build.path"/>
- <batchtest fork="yes" todir="${build-osgi.dir}">
- <fileset dir="${test.osgi.classes}">
- <include name="**/*Test.class"/>
- </fileset>
- </batchtest>
- <formatter type="xml" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
- </junit>
- <stopwatch name="test.osgi.timer" action="total"/>
+ <if><isset property="test.osgi.skip"/><then>
+ <echo message="Skipping OSGi JUnit tests"/>
+ </then><else>
+ <echo message="Running OSGi JUnit tests. Output in ${build-osgi.dir}"/>
+ <stopwatch name="test.osgi.timer"/>
+ <mkdir dir="${test.osgi.classes}"/>
+
+ <echo message="Test pass 1 of 2 using Apache Felix ${osgi.felix.version}"/>
+ <junit fork="yes" haltonfailure="yes">
+ <classpath refid="test.osgi.compiler.build.path.felix"/>
+ <jvmarg value="-Duser.home=${user.home}"/>
+ <batchtest fork="yes" todir="${build-osgi.dir}">
+ <fileset dir="${test.osgi.classes}">
+ <include name="**/*Test.class"/>
+ </fileset>
+ </batchtest>
+ <formatter type="xml" />
+ </junit>
+
+ <echo message="Test pass 2 of 2 using Eclipse Equinox ${osgi.equinox.version}"/>
+ <junit fork="yes" haltonfailure="yes">
+ <classpath refid="test.osgi.compiler.build.path.equinox"/>
+ <jvmarg value="-Duser.home=${user.home}"/>
+ <batchtest fork="yes" todir="${build-osgi.dir}">
+ <fileset dir="${test.osgi.classes}">
+ <include name="**/*Test.class"/>
+ </fileset>
+ </batchtest>
+ <formatter type="xml" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
+ </junit>
+ <stopwatch name="test.osgi.timer" action="total"/>
+ </else></if>
</target>
@@ -1406,17 +1484,18 @@ TODO:
<stopwatch name="quick.sbt-interface.timer" action="total"/>
</target>
- <target name="test.junit.init" depends="quick.done">
- <uptodate property="test.junit.available" targetfile="${build-junit.dir}/test-compile.complete">
- <srcfiles dir="${test.junit.src}">
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
-
- <target name="test.junit.comp" depends="test.junit.init, quick.done" unless="test.junit.available">
+ <target name="test.junit.comp" depends="pack.done">
<stopwatch name="test.junit.compiler.timer"/>
<mkdir dir="${test.junit.classes}"/>
+ <javac
+ debug="true"
+ srcdir="${test.junit.src}"
+ destdir="${test.junit.classes}"
+ classpathref="test.junit.compiler.build.path"
+ target="1.6"
+ source="1.5"
+ compiler="javac1.6"
+ includes="**/*.java"/>
<scalacfork
destdir="${test.junit.classes}"
compilerpathref="quick.compiler.path"
@@ -1434,9 +1513,13 @@ TODO:
<stopwatch name="test.junit.timer"/>
<mkdir dir="${test.junit.classes}"/>
<echo message="Note: details of failed tests will be output to ${build-junit.dir}"/>
+
+ <if><isset property="test.method" /><then><property name="test.methods" value="${test.method}" /></then></if>
<junit fork="yes" haltonfailure="yes" printsummary="on">
<classpath refid="test.junit.compiler.build.path"/>
- <batchtest fork="yes" todir="${build-junit.dir}">
+ <test fork="yes" todir="${build-junit.dir}" if="test.class" unless="test.methods" name="${test.class}" />
+ <test fork="yes" todir="${build-junit.dir}" if="test.methods" name="${test.class}" methods="${test.methods}" />
+ <batchtest fork="yes" todir="${build-junit.dir}" unless="test.class">
<fileset dir="${test.junit.classes}">
<include name="**/*Test.class"/>
</fileset>
@@ -1502,8 +1585,13 @@ TODO:
<!-- ===========================================================================
BINARY COMPATIBILITY TESTING
============================================================================ -->
- <target name="bc.init" depends="init" unless="maven-deps-done-mima">
- <property name="bc-reference-version" value="2.11.0-RC1"/>
+ <target name="bc.init" depends="init" if="test.bc.skip">
+ <!-- if test.bc.skip is set, make sure that pc.prepare is not executed either -->
+ <property name="maven-deps-done-mima" value="true"/>
+ </target>
+
+ <target name="bc.prepare" depends="bc.init" unless="maven-deps-done-mima">
+ <property name="bc-reference-version" value="2.11.0"/>
<property name="bc-build.dir" value="${build.dir}/bc"/>
<!-- Obtain mima -->
@@ -1520,7 +1608,7 @@ TODO:
</target>
<target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
- <target name="test.bc" depends="bc.init, pack.lib, pack.reflect">
+ <target name="test.bc" depends="bc.prepare, pack.lib, pack.reflect" unless="test.bc.skip">
<bc.check project="library"/>
<bc.check project="reflect"/>
</target>
diff --git a/compare-build-dirs-ignore-patterns b/compare-build-dirs-ignore-patterns
new file mode 100644
index 0000000000..8c8160ba15
--- /dev/null
+++ b/compare-build-dirs-ignore-patterns
@@ -0,0 +1,8 @@
+.DS_Store
+*.complete
+locker
+deps
+scala-continuations-*.jar
+scala-parser-combinators*.jar
+scala-swing*.jar
+scala-xml*.jar
diff --git a/compare-build-dirs.sh b/compare-build-dirs.sh
new file mode 100755
index 0000000000..f6806dd422
--- /dev/null
+++ b/compare-build-dirs.sh
@@ -0,0 +1,5 @@
+# Compares build directories generated by the Ant and sbt build definitions.
+# This lets us see how far we are from achieving perfect parity
+# between the builds.
+
+diff -X compare-build-dirs-ignore-patterns -qr build/ build-sbt/
diff --git a/docs/development/scala.tools.nsc/zipfile-bug.txt b/docs/development/scala.tools.nsc/zipfile-bug.txt
deleted file mode 100644
index 3838318564..0000000000
--- a/docs/development/scala.tools.nsc/zipfile-bug.txt
+++ /dev/null
@@ -1,93 +0,0 @@
-// Some stack traces of a bug which has been hitting me regularly
-// for over a year (as of oct 2010.) Manifestation: partest hangs.
-// These are some of the regulars among the thread dumps.
-
-"main" prio=5 tid=101801000 nid=0x100501000 in Object.wait() [1004ff000]
- java.lang.Thread.State: WAITING (on object monitor)
- at java.lang.Object.wait(Native Method)
- - waiting on <112bcc7c0> (a scala.actors.ActorProxy)
- at java.lang.Object.wait(Object.java:485)
- at scala.actors.Actor$class.liftedTree1$1(Actor.scala:644)
- at scala.actors.Actor$class.scala$actors$Actor$$suspendActor(Actor.scala:643)
- - locked <112bcc7c0> (a scala.actors.ActorProxy)
- at scala.actors.Actor$blocker$.block(Actor.scala:634)
- at scala.actors.scheduler.ForkJoinScheduler$$anon$2.block(ForkJoinScheduler.scala:145)
- at scala.concurrent.forkjoin.ForkJoinPool.awaitBlocker(ForkJoinPool.java:1791)
- at scala.concurrent.forkjoin.ForkJoinPool.managedBlock(ForkJoinPool.java:1781)
- at scala.actors.scheduler.ForkJoinScheduler.managedBlock(ForkJoinScheduler.scala:144)
- at scala.actors.scheduler.DelegatingScheduler$class.managedBlock(DelegatingScheduler.scala:73)
- at scala.actors.Scheduler$.managedBlock(Scheduler.scala:21)
- at scala.actors.Actor$class.receiveWithin(Actor.scala:576)
- - locked <112bcc7c0> (a scala.actors.ActorProxy)
- at scala.actors.ActorProxy.receiveWithin(ActorProxy.scala:20)
- at scala.actors.Actor$.receiveWithin(Actor.scala:204)
- at scala.tools.partest.nest.DirectRunner$$anonfun$runTestsForFiles$1.apply(DirectRunner.scala:65)
- at scala.tools.partest.nest.DirectRunner$$anonfun$runTestsForFiles$1.apply(DirectRunner.scala:64)
-
-
-"ForkJoinPool-4-worker-11" daemon prio=5 tid=19b680000 nid=0x19d50d000 runnable [19d50b000]
- java.lang.Thread.State: RUNNABLE
- at java.util.zip.ZipFile.getNextEntry(Native Method)
- at java.util.zip.ZipFile.access$400(ZipFile.java:29)
- at java.util.zip.ZipFile$2.nextElement(ZipFile.java:313)
- - locked <12581d1e0> (a java.util.zip.ZipFile)
- at java.util.zip.ZipFile$2.nextElement(ZipFile.java:299)
- at scala.collection.JavaConversions$JEnumerationWrapper.next(JavaConversions.scala:573)
- at scala.collection.Iterator$class.foreach(Iterator.scala:631)
- at scala.collection.JavaConversions$JEnumerationWrapper.foreach(JavaConversions.scala:571)
- at scala.collection.IterableLike$class.foreach(IterableLike.scala:79)
- at scala.tools.nsc.io.ZipArchive$$anon$1.foreach(ZipArchive.scala:246)
- at scala.tools.nsc.io.ZipContainer$ZipRootCreator.apply(ZipArchive.scala:143)
- at scala.tools.nsc.io.ZipArchive.root(ZipArchive.scala:204)
- - locked <12581d240> (a scala.tools.nsc.io.ZipArchive)
- at scala.tools.nsc.io.ZipContainer$class.iterator(ZipArchive.scala:170)
- at scala.tools.nsc.io.ZipArchive.iterator(ZipArchive.scala:197)
- at scala.collection.IterableLike$class.foreach(IterableLike.scala:79)
- at scala.tools.nsc.io.AbstractFile.foreach(AbstractFile.scala:84)
- at scala.collection.TraversableLike$class.collect(TraversableLike.scala:271)
- at scala.tools.nsc.io.AbstractFile.collect(AbstractFile.scala:84)
- at scala.tools.nsc.util.DirectoryClassPath.classes(ClassPath.scala:315)
- - locked <12581d2c8> (a scala.tools.nsc.util.DirectoryClassPath)
- at scala.tools.nsc.util.MergedClassPath$$anonfun$classes$3.apply(ClassPath.scala:342)
- at scala.tools.nsc.util.MergedClassPath$$anonfun$classes$3.apply(ClassPath.scala:342)
- at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:61)
- at scala.collection.immutable.List.foreach(List.scala:45)
- at scala.tools.nsc.util.MergedClassPath.classes(ClassPath.scala:342)
- - locked <12581d390> (a scala.tools.nsc.util.JavaClassPath)
- at scala.tools.nsc.symtab.SymbolLoaders$PackageLoader.doComplete(SymbolLoaders.scala:150)
- at scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.complete(SymbolLoaders.scala:58)
- at scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.complete(SymbolLoaders.scala:32)
- at scala.tools.nsc.symtab.Symbols$Symbol.info(Symbols.scala:730)
- at scala.tools.nsc.symtab.Definitions$definitions$.init(Definitions.scala:827)
- at scala.tools.nsc.Global$Run.<init>(Global.scala:626)
-
-
-at java.util.zip.ZipFile.getNextEntry(Native Method)
-at java.util.zip.ZipFile.access$400(ZipFile.java:29)
-at java.util.zip.ZipFile$2.nextElement(ZipFile.java:313)
-- locked <113014f40> (a java.util.zip.ZipFile)
-at java.util.zip.ZipFile$2.nextElement(ZipFile.java:299)
-at scala.collection.JavaConversions$JEnumerationWrapper.next(JavaConversions.scala:556)
-at scala.collection.Iterator$class.foreach(Iterator.scala:631)
-at scala.collection.JavaConversions$JEnumerationWrapper.foreach(JavaConversions.scala:554)
-at scala.collection.IterableLike$class.foreach(IterableLike.scala:79)
-at scala.tools.nsc.io.ZipArchive$$anon$1.foreach(ZipArchive.scala:246)
-at scala.tools.nsc.io.ZipContainer$ZipRootCreator.apply(ZipArchive.scala:143)
- at scala.tools.nsc.io.ZipArchive.root(ZipArchive.scala:204)
-- locked <113018658> (a scala.tools.nsc.io.ZipArchive)
-at scala.tools.nsc.io.ZipContainer$class.iterator(ZipArchive.scala:170)
-at scala.tools.nsc.io.ZipArchive.iterator(ZipArchive.scala:197)
-at scala.collection.IterableLike$class.foreach(IterableLike.scala:79)
-at scala.tools.nsc.io.AbstractFile.foreach(AbstractFile.scala:84)
-at scala.collection.TraversableLike$class.collect(TraversableLike.scala:271)
-at scala.tools.nsc.io.AbstractFile.collect(AbstractFile.scala:84)
-at scala.tools.nsc.util.DirectoryClassPath.classes(ClassPath.scala:315)
-- locked <1130186e0> (a scala.tools.nsc.util.DirectoryClassPath)
-at scala.tools.nsc.util.MergedClassPath$$anonfun$classes$3.apply(ClassPath.scala:342)
-at scala.tools.nsc.util.MergedClassPath$$anonfun$classes$3.apply(ClassPath.scala:342)
-at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:61)
-at scala.collection.immutable.List.foreach(List.scala:45)
-at scala.tools.nsc.util.MergedClassPath.classes(ClassPath.scala:342)
-- locked <1130187a8> (a scala.tools.nsc.util.JavaClassPath)
-at scala.tools.nsc.symtab.SymbolLoaders$PackageLoader.doComplete(SymbolLoaders.scala:150)
- +
diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala
new file mode 100644
index 0000000000..559b215c18
--- /dev/null
+++ b/project/ScalaTool.scala
@@ -0,0 +1,44 @@
+import sbt._
+import org.apache.commons.lang3.StringUtils.replaceEach
+
+/**
+ * A class that generates a shell or batch script to execute a Scala program.
+ *
+ * This is a simplified copy of the Ant task (see scala.tools.ant.ScalaTool).
+ */
+case class ScalaTool(mainClass: String,
+ classpath: List[String],
+ properties: Map[String, String],
+ javaOpts: String,
+ toolFlags: String) {
+  // For the classpath, the platform-specific
+  // demarcation of any script variables (e.g. `${SCALA_HOME}` or
+  // `%SCALA_HOME%`) can be specified in a platform-independent way (e.g.
+  // `@SCALA_HOME@`) and is automatically translated for you.
+ def patchedToolScript(template: String, platform: String) = {
+ val varRegex = """@(\w+)@""" // the group should be able to capture each of the keys of the map below
+
+ val variables = Map(
+ ("@@" -> "@"), // for backwards compatibility
+ ("@class@" -> mainClass),
+ ("@properties@" -> (properties map { case (k, v) => s"""-D$k="$v""""} mkString " ")),
+ ("@javaflags@" -> javaOpts),
+ ("@toolflags@" -> toolFlags),
+ ("@classpath@" -> (platform match {
+ case "unix" => classpath.mkString(":").replace('\\', '/').replaceAll(varRegex, """\${$1}""")
+ case "windows" => classpath.mkString(";").replace('/', '\\').replaceAll(varRegex, "%$1%")
+ }))
+ )
+
+ val (from, to) = variables.unzip
+ replaceEach(template, from.toArray, to.toArray)
+ }
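+  // Illustrative example (assumed inputs, not taken from the build): with
+  //   classpath = List("@SCALA_HOME@/lib/scala-library.jar", "@SCALA_HOME@/lib/scala-compiler.jar")
+  // the "unix" case yields "${SCALA_HOME}/lib/scala-library.jar:${SCALA_HOME}/lib/scala-compiler.jar"
+  // while the "windows" case yields "%SCALA_HOME%\lib\scala-library.jar;%SCALA_HOME%\lib\scala-compiler.jar".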
+
+ def writeScript(file: String, platform: String, rootDir: File, outDir: File): File = {
+ val templatePath = s"scala/tools/ant/templates/tool-$platform.tmpl"
+ val suffix = platform match { case "windows" => ".bat" case _ => "" }
+ val scriptFile = outDir / s"$file$suffix"
+ IO.write(scriptFile, patchedToolScript(IO.read(rootDir / templatePath), platform))
+ scriptFile
+ }
+}
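+// Sketch of intended use from the sbt build (main class, paths and values below are
+// illustrative assumptions, not the actual build.sbt wiring):
+//   ScalaTool("scala.tools.nsc.MainGenericRunner",
+//             List("@SCALA_HOME@/lib/scala-library.jar"),
+//             Map(), javaOpts = "-Xmx256M", toolFlags = "")
+//     .writeScript("scala", "unix", rootDir = file("src"), outDir = file("build/bin"))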
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000000..748703f770
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000000..dc266a8db1
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1 @@
+libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2"
\ No newline at end of file
diff --git a/scripts/common b/scripts/common
new file mode 100644
index 0000000000..b075469379
--- /dev/null
+++ b/scripts/common
@@ -0,0 +1,153 @@
+# This is for forcibly stopping the job from a subshell (see test
+# below).
+trap "exit 1" TERM
+export TOP_PID=$$
+set -e
+
+# Known problems: does not fare well with interrupted, partial
+# compilations. We should perhaps have a multi-dependency version
+# of do_i_have below.
+
+LOGGINGDIR="$WORKSPACE/logs"
+mkdir -p $LOGGINGDIR
+
+unset SBT_HOME
+SBT_HOME="$WORKSPACE/.sbt"
+mkdir -p $SBT_HOME
+IVY_CACHE="$WORKSPACE/.ivy2"
+mkdir -p $IVY_CACHE
+rm -rf $IVY_CACHE/cache/org.scala-lang
+
+# temp dir where all 'non-build' operations are performed
+TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX)
+TMP_DIR="${TMP_ROOT_DIR}/tmp"
+mkdir "${TMP_DIR}"
+
+
+# detect sed version and how to enable extended regexes
+SEDARGS="-n$(if (echo "a" | sed -nE "s/a/b/" &> /dev/null); then echo E; else echo r; fi)"
+
+
+
+# :docstring test:
+# Usage: test <argument ..>
+# Executes <argument ..>, logging the launch of the command to the
+# main log file, and kills global script execution with the TERM
+# signal if the command ends up failing.
+# DO NOT USE ON FUNCTIONS THAT DECLARE VARIABLES,
+# AS YOU'LL BE RUNNING IN A SUBSHELL AND VARIABLE DECLARATIONS WILL BE LOST
+# :end docstring:
+
+function test() {
+ echo "### $@"
+ "$@"
+ status=$?
+ if [ $status -ne 0 ]; then
+ say "### ERROR with $1"
+ kill -s TERM $TOP_PID
+ fi
+}
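+# Illustrative usage (the command shown is hypothetical):
+#   test ant -Ddocs.skip=1 pack
+# logs "### ant -Ddocs.skip=1 pack" and, if ant fails, aborts the whole script via TERM.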
+
+# :docstring say:
+# Usage: say <argument ..>
+# Prints <argument ..> to both console and the main log file.
+# :end docstring:
+
+function say(){
+ (echo "$@") | tee -a $LOGGINGDIR/compilation-$SCALADATE-$SCALAHASH.log
+}
+
+# General debug logging
+# $* - message
+function debug () {
+ echo "----- $*"
+}
+
+function parseScalaProperties(){
+ propFile="$baseDir/$1"
+ if [ ! -f $propFile ]; then
+ echo "Property file $propFile not found."
+ exit 1
+ else
+ awk -f "$scriptsDir/readproperties.awk" "$propFile" > "$propFile.sh"
+ . "$propFile.sh" # yeah yeah, not that secure, improvements welcome (I tried, but bash made me cry again)
+ fi
+}
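+# Illustrative: a line such as "starr.version=2.11.5" in the property file is turned into
+#   export starr_version="2.11.5"
+# by readproperties.awk (dots become underscores), so $starr_version is set after the call.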
+
+
+## TAKEN FROM UBER-BUILD, except that it "returns" (via $RES) true/false
+# Check if an artifact is available
+# $1 - groupId
+# $2 - artifactId
+# $3 - version
+# $4 - extra repository to look in (optional)
+# return value in $RES
+function checkAvailability () {
+ pushd "${TMP_DIR}"
+ rm -rf *
+
+# pom file for the test project
+ cat > pom.xml << EOF
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.typesafe</groupId>
+ <artifactId>typesafeDummy</artifactId>
+ <packaging>war</packaging>
+ <version>1.0-SNAPSHOT</version>
+ <name>Dummy</name>
+ <url>http://127.0.0.1</url>
+ <dependencies>
+ <dependency>
+ <groupId>$1</groupId>
+ <artifactId>$2</artifactId>
+ <version>$3</version>
+ </dependency>
+ </dependencies>
+ <repositories>
+ <repository>
+ <id>sonatype.snapshot</id>
+ <name>Sonatype maven snapshot repository</name>
+ <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+ <snapshots>
+ <updatePolicy>daily</updatePolicy>
+ </snapshots>
+ </repository>
+EOF
+
+ if [ -n "$4" ]
+ then
+# adds the extra repository
+ cat >> pom.xml << EOF
+ <repository>
+ <id>extrarepo</id>
+ <name>extra repository</name>
+ <url>$4</url>
+ </repository>
+EOF
+ fi
+
+ cat >> pom.xml << EOF
+ </repositories>
+</project>
+EOF
+
+ set +e
+ mvn "${MAVEN_ARGS[@]}" compile &> "${TMP_DIR}/mvn.log"
+ RES=$?
+  # Keep Maven quiet, but allow diagnosing problems.
+ grep -i downloading "${TMP_DIR}/mvn.log"
+ grep -i exception "${TMP_DIR}/mvn.log"
+ grep -i error "${TMP_DIR}/mvn.log"
+ set -e
+
+# log the result
+ if [ ${RES} == 0 ]
+ then
+ debug "$1:$2:jar:$3 found !"
+ RES=true
+ else
+ debug "$1:$2:jar:$3 not found !"
+ RES=false
+ fi
+ popd
+}
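+# Illustrative usage (coordinates and repo URL are hypothetical):
+#   checkAvailability "org.scala-lang" "scala-library" "2.11.6" "https://example.org/extra-repo"
+#   if $RES; then echo "scala-library 2.11.6 already published"; fi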
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
new file mode 100755
index 0000000000..7944ab3fd3
--- /dev/null
+++ b/scripts/jobs/integrate/bootstrap
@@ -0,0 +1,579 @@
+#!/bin/bash -e
+# TODO: different scripts for the different phases -- usually we don't need to bootstrap the modules,
+# since we can use the previous version of scala for STARR as well as for compiling the modules (assuming it's binary compatible)
+# We should move away from the complicated bootstrap and set up our release schedule so we always have a previous build that satisfies these criteria.
+# (Potentially trivially, by splitting up this script, and publishing locker as if it were a real release.)
+
+# requirements:
+# - sbtCmd must point to sbt from sbt-extras (this is the standard on the Scala Jenkins, so we only support that one)
+# - ~/.sonatype-curl that consists of user = USER:PASS
+# - ~/.m2/settings.xml with credentials for sonatype
+ # <server>
+ # <id>private-repo</id>
+ # <username>jenkinside</username>
+ # <password></password>
+ # </server>
+# - ~/.credentials (for sonatype)
+ # realm=Sonatype Nexus Repository Manager
+ # host=oss.sonatype.org
+ # user=lamp
+ # password=
+# - ~/.credentials-private-repo for private-repo.typesafe.com, as follows:
+ # realm=Artifactory Realm
+ # host=private-repo.typesafe.com
+ # user=jenkinside
+ # password=
+# - ~/.sbt/0.13/plugins/gpg.sbt with:
+# addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.1")
+
+# Modus operandi:
+#
+# Determine Scala version as:
+#
+# $SCALA_VER_BASE$SCALA_VER_SUFFIX (if former variable is set)
+# By parsing the tag (if HEAD is tagged as v$base$suffix)
+# By parsing build.number for the base version, suffixing with -$sha-nightly
+# Serialize these versions to jenkins.properties, which are passed downstream to scala-release-2.11.x-dist.
+# This also removes the need to tag scala/scala-dist (not possible for nightlies, still encouraged for releases, but not a hard requirement).
+#
+# Determine Module Versions
+#
+# When running in "versions.properties" mode (the default), derive tags from these versions and build, publishing only those modules that are not available yet.
+# Otherwise, build HEAD for all modules, derive a -nightly version for them.
+# Bootstrap:
+#
+# Build minimal core of Scala as this version (aka locker), publish to private-repo
+# Build modules required to bootstrap, publish to private-repo
+# Build Scala using the previously built core and bootstrap modules, publish to private-repo. This overwrites the minimal core on private-repo.
+# Stage to sonatype (unless building a -nightly release):
+#
+# Stage this Scala build on sonatype
+# Rebuild modules with this Scala build, and stage them on sonatype as well
+# This script can be run in multiple modes. It is designed to work without any input,
+# so that it can be run in Travis CI. In that mode, it'll build a release when
+# the current HEAD of the checkout in $WORKSPACE is tagged, and stage to sonatype. Otherwise,
+# it'll build a nightly.
+#
+# Since the nightlies are intended to be a drop-in replacement, all modules are built with the
+# full Scala version as their binary version, so that you can just set scalaVersion to the
+# nightly's sha-derived version and be good to go.
+#
+# The other way to trigger a release is by setting the SCALA_VER_BASE env var.
+#
+# By default, we build the versions of the modules as specified by versions.properties
+# (as specified in the HEAD commit). Set moduleVersioning to something random
+# to trigger building HEAD of each module, generating a fresh -$(git describe)-nightly version for each.
+#
+# PS: set publishToSonatype to anything but "yes" to avoid publishing to sonatype
+# (publishing only done when $WORKSPACE checkout's HEAD is tagged / SCALA_VER_BASE is set.)
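+#
+# Illustrative outcome of the above: if HEAD is tagged v2.11.6, SCALA_VER becomes "2.11.6" and
+# artifacts are staged to sonatype; if HEAD is untagged and build.number says 2.11.7, SCALA_VER
+# becomes something like "2.11.7-abc1234-nightly" (short sha) and nothing is published to sonatype.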
+
+
+# set to something besides the default to build nightly snapshots of the modules instead of some tagged version
+moduleVersioning=${moduleVersioning-"versions.properties"}
+
+publishPrivateTask=${publishPrivateTask-"publish"}
+publishSonatypeTaskCore=${publishSonatypeTaskCore-"publish-signed"}
+publishSonatypeTaskModules=${publishSonatypeTaskModules-"publish-signed"}
+publishLockerPrivateTask=${publishLockerPrivateTask-$publishPrivateTask} # set to "init" to speed up testing of the script (if you already built locker before)
+
+sbtCmd=${sbtCmd-sbt} # TESTING (this is a marker for defaults to change when testing locally: should be sbtx on my mac)
+
+# 0.13.5 does not respect "set every scalaVersion", see
+# https://github.com/scala/scala-parser-combinators/pull/27
+sbtCmd="$sbtCmd -sbt-version 0.13.2"
+
+forceRebuild=${forceRebuild-no}
+
+# publishToSonatype
+# set to anything but "yes" to avoid publishing to sonatype
+# overridden to "no" when no SCALA_VER_BASE is passed and HEAD is not tagged with a valid version tag
+#
+
+antBuildTask="${antBuildTask-nightly}" # TESTING leave empty to avoid the sanity check (don't set it to "init" because ant will croak)
+clean="clean" # TESTING leave empty to speed up testing
+
+
+
+baseDir=${WORKSPACE-`pwd`}
+scriptsDir="$baseDir/scripts"
+. $scriptsDir/common
+
+# we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala
+# rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)...
+# we don't nuke the whole ws since that clobbers the git clones needlessly
+[[ -d $baseDir/ivy2-shadow ]] || rm -rf $baseDir/ivy2
+mkdir -p $baseDir/ivy2
+
+rm -rf $baseDir/resolutionScratch_
+mkdir -p $baseDir/resolutionScratch_
+
+# repo used to publish "locker" scala to (to start the bootstrap)
+releaseTempRepoCred="private-repo"
+releaseTempRepoUrl=${releaseTempRepoUrl-"http://private-repo.typesafe.com/typesafe/scala-release-temp/"}
+
+# Used below in sbtArgs since we use a dedicated repository to share artifacts between jobs,
+# so we need to configure sbt to use it rather than its default, Maven Central.
+# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html
+sbtRepositoryConfig="$scriptsDir/repositories-scala-release"
+cat > "$sbtRepositoryConfig" << EOF
+[repositories]
+ plugins: http://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
+ private-repo: $releaseTempRepoUrl
+ typesafe-ivy-releases: http://repo.typesafe.com/typesafe/ivy-releases/, [organization]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
+ sbt-plugin-releases: http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
+ maven-central
+ local
+EOF
+
+##### git
+gfxd() {
+ git clean -fxd # TESTING
+}
+
+update() {
+ [[ -d $baseDir ]] || mkdir -p $baseDir
+ cd $baseDir
+
+ if [ ! -d $baseDir/$2 ]; then git clone "https://github.com/$1/$2.git"; fi
+
+ cd $2
+
+ git fetch --tags "https://github.com/$1/$2.git"
+ (git fetch "https://github.com/$1/$2.git" $3 && git checkout -q FETCH_HEAD) #|| git checkout -q $3 # || fallback is for local testing on tag
+ git reset --hard
+}
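+# Illustrative: `update scala scala-xml v1.0.3` clones https://github.com/scala/scala-xml.git
+# into $baseDir/scala-xml if needed, then fetches and checks out the v1.0.3 ref (via FETCH_HEAD).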
+
+##### sonatype interface
+
+stApi="https://oss.sonatype.org/service/local"
+
+function st_curl(){
+ curl -H "Content-Type: application/json" -H "accept: application/json,application/vnd.siesta-error-v1+json,application/vnd.siesta-validation-errors-v1+json" -K ~/.sonatype-curl -s -o - $@
+}
+
+function st_stagingReposOpen() {
+ st_curl "$stApi/staging/profile_repositories" | jq '.data[] | select(.profileName == "org.scala-lang") | select(.type == "open")'
+}
+
+function st_stagingRepoDrop() {
+ repo=$1
+ message=$2
+ echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/drop"
+}
+
+function st_stagingRepoClose() {
+ repo=$1
+ message=$2
+ echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/close"
+}
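+# Illustrative (the repo id is hypothetical): `st_stagingRepoClose orgscala-lang-1234 "closing 2.11.6"`
+# posts {"data":{"description":"closing 2.11.6","stagedRepositoryIds":["orgscala-lang-1234"]}}
+# to $stApi/staging/bulk/close.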
+
+
+# ARGH trying to get this to work on multiple versions of sbt-extras...
+# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir
+# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base
+# need to set sbt-dir to one that has the gpg.sbt plugin config
+sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
+
+sbtBuild() {
+ echo "### sbtBuild: "$sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
+ $sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1
+}
+
+sbtResolve() {
+ cd $baseDir/resolutionScratch_
+ touch build.sbt
+ cross=${4-binary} # Disabled / binary / full
+ echo "### sbtResolve: $sbtCmd $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross"
+ $sbtCmd $sbtArgs "${scalaVersionTasks[@]}" \
+ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \
+ 'show update' >> $baseDir/logs/resolution 2>&1
+}
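+# Illustrative: `sbtResolve "org.scala-lang.modules" "scala-xml" "$XML_VER"` exits 0 iff that
+# artifact (cross-versioned against the Scala version set above) resolves from the configured repositories.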
+
+# Oh boy... can't use scaladoc to document scala-xml/scala-parser-combinators
+# if scaladoc depends on the same version of scala-xml/scala-parser-combinators.
+# Even if that version is available through the project's resolvers, sbt won't look past this project.
+# SOOOOO, we set the version to a dummy (-DOC), generate documentation,
+# then set the version to the right one and publish (which won't re-gen the docs).
+# Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice.
+
+# Each buildModule() function is invoked twice: first to build against locker and publish to private-repo, then
+# to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes").
+# In the second round, sbtResolve is always true: the module will be found in the private-repo!
+# Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the
+# module again.
+#
+# Note: we tried an alternative solution in which sbtResolve would not look at private-repo, but that fails. For example,
+# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building,
+# which exists only in private-repo.
+
+buildXML() {
+ if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER )
+ then echo "Found scala-xml $XML_VER; not building."
+ else
+ update scala scala-xml "$XML_REF" && gfxd
+ sbtBuild 'set version := "'$XML_VER'-DOC"' $clean doc 'set version := "'$XML_VER'"' test "${buildTasks[@]}"
+ XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above
+ fi
+}
+
+buildParsers() {
+ if [ "$PARSERS_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-parser-combinators" $PARSERS_VER )
+ then echo "Found scala-parser-combinators $PARSERS_VER; not building."
+ else
+ update scala scala-parser-combinators "$PARSERS_REF" && gfxd
+ sbtBuild 'set version := "'$PARSERS_VER'-DOC"' $clean doc 'set version := "'$PARSERS_VER'"' test "${buildTasks[@]}"
+ PARSERS_BUILT="yes"
+ fi
+}
+
+buildPartest() {
+ if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER )
+ then echo "Found scala-partest $PARTEST_VER; not building."
+ else
+ update scala scala-partest "$PARTEST_REF" && gfxd
+ sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' 'set VersionKeys.scalaCheckVersion := "'$SCALACHECK_VER'"' $clean test "${buildTasks[@]}"
+ PARTEST_BUILT="yes"
+ fi
+}
+
+# buildPartestIface() {
+# if [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest-interface" $PARTEST_IFACE_VER )
+# then echo "Found scala-partest-interface $PARTEST_IFACE_VER; not building."
+# else
+# update scala scala-partest-interface "$PARTEST_IFACE_REF" && gfxd
+# sbtBuild 'set version :="'$PARTEST_IFACE_VER'"' $clean "${buildTasks[@]}"
+# fi
+# }
+
+buildContinuations() {
+ if [ "$CONT_PLUG_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.plugins" "scala-continuations-plugin" $CONTINUATIONS_VER full )
+ then echo "Found scala-continuations-plugin $CONTINUATIONS_VER; not building."
+ else
+ update scala scala-continuations $CONTINUATIONS_REF && gfxd
+
+ $sbtCmd $sbtArgs 'project plugin' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
+ 'set version := "'$CONTINUATIONS_VER'"' $clean "compile:package" test "${buildTasks[@]}" # https://github.com/scala/scala-continuations/pull/4
+ CONT_PLUG_BUILT="yes"
+ fi
+
+ if [ "$CONT_LIB_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.plugins" "scala-continuations-library" $CONTINUATIONS_VER )
+ then echo "Found scala-continuations-library $CONTINUATIONS_VER; not building."
+ else
+ update scala scala-continuations $CONTINUATIONS_REF && gfxd
+ $sbtCmd $sbtArgs 'project library' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
+ 'set version := "'$CONTINUATIONS_VER'"' $clean test "${buildTasks[@]}"
+ CONT_LIB_BUILT="yes"
+ fi
+}
+
+buildSwing() {
+ if [ "$SWING_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-swing" $SWING_VER )
+ then echo "Found scala-swing $SWING_VER; not building."
+ else
+ update scala scala-swing "$SWING_REF" && gfxd
+ sbtBuild 'set version := "'$SWING_VER'"' $clean test "${buildTasks[@]}"
+ SWING_BUILT="yes"
+ fi
+}
+
+buildActorsMigration(){
+ if [ "$ACTORS_MIGRATION_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang" "scala-actors-migration" $ACTORS_MIGRATION_VER )
+ then echo "Found scala-actors-migration $ACTORS_MIGRATION_VER; not building."
+ else
+ update scala actors-migration "$ACTORS_MIGRATION_REF" && gfxd
+ # not running tests because
+ # [error] Test scala.actors.migration.NestedReact.testNestedReactAkka failed: java.util.concurrent.TimeoutException: Futures timed out after [20 seconds]
+ sbtBuild 'set version := "'$ACTORS_MIGRATION_VER'"' 'set VersionKeys.continuationsVersion := "'$CONTINUATIONS_VER'"' $clean "${buildTasks[@]}"
+ ACTORS_MIGRATION_BUILT="yes"
+ fi
+}
+
+buildScalacheck(){
+ if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER )
+ then echo "Found scalacheck $SCALACHECK_VER; not building."
+ else
+ update rickynils scalacheck $SCALACHECK_REF && gfxd
+ sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean $publishPrivateTask # test times out NOTE: never published to sonatype
+ SCALACHECK_BUILT="yes"
+ fi
+}
+
+# build modules, using ${buildTasks[@]} (except for Scalacheck, which is hard-coded to publish to private-repo)
+buildModules() {
+ buildXML
+ buildParsers
+ buildContinuations
+ buildSwing
+ buildActorsMigration
+ buildScalacheck
+ buildPartest
+ # buildPartestIface
+}
+
+
+## BUILD STEPS:
+
+determineScalaVersion() {
+ cd $WORKSPACE
+ parseScalaProperties "versions.properties"
+
+ if [ -z "$SCALA_VER_BASE" ]; then
+ echo "No SCALA_VER_BASE specified."
+
+ scalaTag=$(git describe --exact-match ||:)
+
+ SCALA_BINARY_VER=${SCALA_BINARY_VER-"$scala_binary_version"}
+
+ if [ -z "$scalaTag" ]
+ then
+ echo "No tag found, building nightly snapshot."
+ parseScalaProperties "build.number"
+ SCALA_VER_BASE="$version_major.$version_minor.$version_patch"
+ SCALA_VER_SUFFIX="-$(git rev-parse --short HEAD)-nightly"
+ SCALADOC_SOURCE_LINKS_VER=$(git rev-parse HEAD)
+
+ # TODO: publish nightly snapshot using this script
+ publishToSonatype="no"
+ echo "repo_ref=2.11.x" >> $baseDir/jenkins.properties # for the -dist downstream jobs that build the actual archives
+ else
+ echo "HEAD is tagged as $scalaTag."
+ # borrowed from https://github.com/cloudflare/semver_bash/blob/master/semver.sh
+ local RE='v*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)' # don't change this to make it more accurate, it's not worth it
+ SCALA_VER_BASE="$(echo $scalaTag | sed -e "s#$RE#\1.\2.\3#")"
+ SCALA_VER_SUFFIX="$(echo $scalaTag | sed -e "s#$RE#\4#")"
+ SCALADOC_SOURCE_LINKS_VER=$scalaTag
+
+ if [ "$SCALA_VER_BASE" == "$scalaTag" ]; then
+ echo "Could not parse version $scalaTag"
+ exit 1
+ fi
+ publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish
+ fi
+ else
+ publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish
+ # if version base/suffix are provided, we assume a corresponding tag exists for the scaladoc source links
+ SCALADOC_SOURCE_LINKS_VER="v$SCALA_VER_BASE$SCALA_VER_SUFFIX"
+ fi
+
+ SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX"
+ echo "version=$SCALA_VER" >> $baseDir/jenkins.properties
+ echo "sbtDistVersionOverride=-Dproject.version=$SCALA_VER" >> $baseDir/jenkins.properties
+
+ # We don't override the scala binary version: when running in -nightly + versions.properties versioning mode,
+ # we intend to be a drop-in replacement -- all you need to do is change the Scala version
+ # In order to override this, add 'set every scalaBinaryVersion := "'$SCALA_BINARY_VER'"',
+ # which, when used with pre-release Scala version numbers, will require tweaking at the sbt usage site as well.
+ scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"')
+
+ echo "Building Scala $SCALA_VER."
+}
+
+deriveVersion() {
+ update $1 $2 $3 &> /dev/null
+ echo "$(git describe --match=v* | cut -dv -f2)-nightly"
+}
+
+deriveVersionAnyTag() {
+ update $1 $2 $3 &> /dev/null
+ echo "$(git describe | cut -dv -f2)-nightly"
+}
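+# Illustrative: if `git describe --match=v*` prints "v1.0.3-5-gabc1234", deriveVersion
+# yields "1.0.3-5-gabc1234-nightly".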
+
+# determineScalaVersion must have been called
+deriveModuleVersions() {
+ if [ "$moduleVersioning" == "versions.properties" ]
+ then
+ # use versions.properties as defaults when no version specified on command line
+ XML_VER=${XML_VER-$scala_xml_version_number}
+ PARSERS_VER=${PARSERS_VER-$scala_parser_combinators_version_number}
+ CONTINUATIONS_VER=${CONTINUATIONS_VER-$scala_continuations_plugin_version_number}
+ SWING_VER=${SWING_VER-$scala_swing_version_number}
+ ACTORS_MIGRATION_VER=${ACTORS_MIGRATION_VER-$actors_migration_version_number}
+ PARTEST_VER=${PARTEST_VER-$partest_version_number}
+ SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number}
+
+ # If a _VER was not specified, the corresponding _REF will be non-empty by now (as specified, or HEAD)
+ XML_REF=${XML_REF-"v$XML_VER"}
+ PARSERS_REF=${PARSERS_REF-"v$PARSERS_VER"}
+ CONTINUATIONS_REF=${CONTINUATIONS_REF-"v$CONTINUATIONS_VER"}
+ SWING_REF=${SWING_REF-"v$SWING_VER"}
+ ACTORS_MIGRATION_REF=${ACTORS_MIGRATION_REF-"v$ACTORS_MIGRATION_VER"}
+ PARTEST_REF=${PARTEST_REF-"v$PARTEST_VER"}
+ # PARTEST_IFACE_REF=${PARTEST_IFACE_REF-"v$PARTEST_IFACE_VER"}
+ SCALACHECK_REF=${SCALACHECK_REF-"$SCALACHECK_VER"}
+ else
+ XML_VER=${XML_VER-$(deriveVersion scala scala-xml "$XML_REF")}
+ PARSERS_VER=${PARSERS_VER-$(deriveVersion scala scala-parser-combinators "$PARSERS_REF")}
+ CONTINUATIONS_VER=${CONTINUATIONS_VER-$(deriveVersion scala scala-continuations "$CONTINUATIONS_REF")}
+ SWING_VER=${SWING_VER-$(deriveVersion scala scala-swing "$SWING_REF")}
+ ACTORS_MIGRATION_VER=${ACTORS_MIGRATION_VER-$(deriveVersion scala actors-migration "$ACTORS_MIGRATION_REF")}
+ PARTEST_VER=${PARTEST_VER-$(deriveVersion scala scala-partest "$PARTEST_REF")}
+ SCALACHECK_VER=${SCALACHECK_VER-$(deriveVersionAnyTag rickynils scalacheck "$SCALACHECK_REF")}
+
+ XML_REF=${XML_REF-"HEAD"}
+ PARSERS_REF=${PARSERS_REF-"HEAD"}
+ CONTINUATIONS_REF=${CONTINUATIONS_REF-"HEAD"}
+ SWING_REF=${SWING_REF-"HEAD"}
+ ACTORS_MIGRATION_REF=${ACTORS_MIGRATION_REF-"HEAD"}
+ PARTEST_REF=${PARTEST_REF-"HEAD"}
+ # PARTEST_IFACE_REF=${PARTEST_IFACE_REF-"HEAD"}
+ SCALACHECK_REF=${SCALACHECK_REF-"HEAD"}
+ fi
+
+ echo "Module versions (versioning strategy: $moduleVersioning):"
+ echo "ACTORS_MIGRATION = $ACTORS_MIGRATION_VER at $ACTORS_MIGRATION_REF"
+ echo "CONTINUATIONS = $CONTINUATIONS_VER at $CONTINUATIONS_REF"
+ echo "PARSERS = $PARSERS_VER at $PARSERS_REF"
+ echo "PARTEST = $PARTEST_VER at $PARTEST_REF"
+ echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF"
+ echo "SWING = $SWING_VER at $SWING_REF"
+ echo "XML = $XML_VER at $XML_REF"
+
+ # PARTEST_IFACE_VER=${PARTEST_IFACE_VER-$(deriveVersion scala scala-partest-interface "$PARTEST_IFACE_REF")}
+}
+
+constructUpdatedModuleVersions() {
+ updatedModuleVersions=()
+
+ # force the new module versions for building the core. these may be different from the values in versions.properties,
+ # either because the variables (XML_VER) were provided, or because we're building the modules from HEAD.
+ # in the common case, the values are the same as in versions.properties.
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dactors-migration.version.number=$ACTORS_MIGRATION_VER")
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-continuations-library.version.number=$CONTINUATIONS_VER")
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-continuations-plugin.version.number=$CONTINUATIONS_VER")
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-parser-combinators.version.number=$PARSERS_VER")
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-swing.version.number=$SWING_VER")
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER")
+
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER")
+ updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER")
+
+ # allow overriding the akka-actors and jline version using a jenkins build parameter
+ if [ ! -z "$AKKA_ACTOR_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dakka-actor.version.number=$AKKA_ACTOR_VER"); fi
+ if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi
+
+ if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi
+}
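+
+# Illustrative: with XML_VER=1.0.3 the array ends up containing "-Dscala-xml.version.number=1.0.3";
+# these -D flags are passed to the ant bootstrap build below.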
+
+# build locker (scala + modules) and quick, publishing everything to private-repo
+bootstrap() {
+ echo "### Bootstrapping"
+
+ cd $WORKSPACE
+
+ #### LOCKER
+
+ echo "### Building locker"
+
+ # for bootstrapping, publish core (or at least smallest subset we can get away with)
+ # so that we can build modules with this version of Scala and publish them locally
+ # must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala
+ # publish more than just core: partest needs scalap
+ # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler
+ ant -Dmaven.version.number=$SCALA_VER\
+ -Dremote.snapshot.repository=NOPE\
+ -Dremote.release.repository=$releaseTempRepoUrl\
+ -Drepository.credentials.id=$releaseTempRepoCred\
+ -Dscalac.args.optimise=-optimise\
+ -Ddocs.skip=1\
+ -Dlocker.skip=1\
+ $publishLockerPrivateTask >> $baseDir/logs/builds 2>&1
+
+
+ echo "### Building modules using locker"
+
+ # build, test and publish modules with this core
+ # publish to our internal repo (so we can resolve the modules in the scala build below)
+ # we only need to build the modules necessary to build Scala itself
+  # since the versions of locker and quick are the same
+ publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"private-repo\" at \"$releaseTempRepoUrl\")")
+ buildTasks=($publishPrivateTask)
+ buildModules
+
+ constructUpdatedModuleVersions
+
+ #### QUICK
+
+ echo "### Bootstrapping Scala using locker"
+
+  # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours
+ # # the ant call will create a new one
+ #
+ # Rebuild Scala with these modules so that all binary versions are consistent.
+ # Update versions.properties to new modules.
+ # Sanity check: make sure the Scala test suite passes / docs can be generated with these modules.
+ # don't skip locker (-Dlocker.skip=1), or stability will fail
+ # overwrite "locker" version of scala at private-repo with bootstrapped version
+ cd $baseDir
+ rm -rf build/ # must leave everything else in $baseDir for downstream jobs
+
+ # scala.full.version determines the dependency of scala-dist on the continuations plugin,
+ # which is fully cross-versioned (for $SCALA_VER, the version we're releasing)
+ ant -Dstarr.version=$SCALA_VER\
+ -Dscala.full.version=$SCALA_VER\
+ -Dextra.repo.url=$releaseTempRepoUrl\
+ -Dmaven.version.suffix=$SCALA_VER_SUFFIX\
+ ${updatedModuleVersions[@]} \
+ -Dupdate.versions=1\
+ -Dscaladoc.git.commit=$SCALADOC_SOURCE_LINKS_VER\
+ -Dremote.snapshot.repository=NOPE\
+ -Dremote.release.repository=$releaseTempRepoUrl\
+ -Drepository.credentials.id=$releaseTempRepoCred\
+ -Dscalac.args.optimise=-optimise\
+ $antBuildTask $publishPrivateTask
+
+ # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala
+ rm -rf $baseDir/ivy2
+
+ # TODO: create PR with following commit (note that release will have been tagged already)
+ # git commit versions.properties -m"Bump versions.properties for $SCALA_VER."
+}
+
+# assumes we just bootstrapped, and current directory is $baseDir
+# publishes locker to sonatype, then builds modules again (those for which version numbers were provided),
+# and publishes those to sonatype as well
+# finally, the staging repos are closed
+publishSonatype() {
+  # stage to sonatype, along with all modules; -Dmaven.version.suffix/-Dbuild.release are not necessary,
+  # since we're just publishing an existing build
+ echo "### Publishing core to sonatype"
+ ant -Dmaven.version.number=$SCALA_VER $publishSonatypeTaskCore
+
+ echo "### Publishing modules to sonatype"
+ # build/test/publish scala core modules to sonatype (this will start a new staging repo)
+ # (was hoping we could make everything go to the same staging repo, but it's not timing that causes two staging repos to be opened)
+ # NOTE: only publish those for which versions are set
+ # test and publish to sonatype, assuming you have ~/.sbt/0.13/sonatype.sbt and ~/.sbt/0.13/plugin/gpg.sbt
+ publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-sonatype")' "set pgpPassphrase := Some(Array.empty)")
+ buildTasks=($publishSonatypeTaskModules)
+ buildModules
+
+ open=$(st_stagingReposOpen)
+ allOpenUrls=$(echo $open | jq '.repositoryURI' | tr -d \")
+ allOpen=$(echo $open | jq '.repositoryId' | tr -d \")
+
+ echo "Closing open repos: $allOpen"
+
+ for repo in $allOpen; do st_stagingRepoClose $repo; done
+
+ echo "Closed sonatype staging repos: $allOpenUrls."
+}
+
+
+#### MAIN
+
+determineScalaVersion
+
+deriveModuleVersions
+
+bootstrap
+
+if [ "$publishToSonatype" == "yes" ]
+ then publishSonatype
+  else # build modules one more time, just to mimic the regular build as closely as possible when running as a nightly
+ echo "### Rebuilding modules with quick, publishing to $baseDir/ivy/local"
+ buildTasks=(publish-local)
+ # buildScalacheck always uses publishPrivateTask (not buildTasks). we override it to avoid publishing to private-repo.
+ publishPrivateTask="publish-local"
+ forceRebuild="yes"
+ buildModules
+fi
diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide
new file mode 100755
index 0000000000..1651ad2892
--- /dev/null
+++ b/scripts/jobs/integrate/ide
@@ -0,0 +1,32 @@
+#!/bin/bash -e
+# requires checkout: root is a scala checkout with which to integrate (actually, the only required file is versions.properties, as documented below)
+# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout),
+# requires files: $baseDir/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...)
+
+# TODO: remove when integration is up and running
+if [ "woele$_scabot_last" != "woele1" ]; then echo "Scabot didn't mark this as last commit -- skipping."; exit 0; fi
+
+baseDir=${WORKSPACE-`pwd`}
+uberBuildUrl=${uberBuildUrl-"https://github.com/scala-ide/uber-build.git"}
+uberBuildConfig=${uberBuildConfig-"validator.conf"} # TODO: backport to 2.10.x: uberBuildConfig="validator-2.10.conf"
+
+uberBuildDir="$baseDir/uber-build/"
+
+cd $baseDir
+if [[ -d $uberBuildDir ]]; then
+ ( cd $uberBuildDir && git fetch $uberBuildUrl HEAD && git checkout -f FETCH_HEAD && git clean -fxd )
+else
+ git clone $uberBuildUrl
+fi
+
+echo "maven.version.number=$scalaVersion" >> versions.properties
+
+# pass prRepoUrl in, which uber-build passes along to dbuild (in sbt-builds-for-ide)
+# the "-P pr-scala" maven arg accomplishes the same thing for maven (directly used in uber-build)
+BASEDIR="$baseDir" prRepoUrl="$prRepoUrl" IDE_M2_REPO="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\
+ $uberBuildDir/uber-build.sh $uberBuildDir/config/$uberBuildConfig $repo_ref $scalaVersion
+
+# uber-build puts its local repo under target/m2repo
+# wipe the org/scala-lang part, which otherwise just keeps
+# growing and growing due to the -$sha-SNAPSHOT approach
+[[ -d $baseDir/target/m2repo/org/scala-lang ]] && rm -rf $baseDir/target/m2repo/org/scala-lang
diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core
new file mode 100755
index 0000000000..9dff5a34b0
--- /dev/null
+++ b/scripts/jobs/validate/publish-core
@@ -0,0 +1,44 @@
+#!/bin/bash -e
+# This script publishes the core of Scala to maven for use as locker downstream,
+# and saves the relevant properties used in its build artifacts, versions.properties.
+# (This means we'll use locker instead of quick downstream in dbuild.
+# The only downside is that backend improvements don't improve compiler performance itself until they are in STARR).
+# The version is suffixed with "-${sha:0:7}-SNAPSHOT"
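+# e.g. (illustrative) a sha starting with 1234567 yields the suffix "-1234567-SNAPSHOT"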
+
+baseDir=${WORKSPACE-`pwd`}
+scriptsDir="$baseDir/scripts"
+. $scriptsDir/common
+
+case $prDryRun in
+ yep)
+ echo "DRY RUN"
+ mkdir -p build/pack ; mkdir -p dists/maven/latest
+ ;;
+ *)
+ sha=$(git rev-parse HEAD) # TODO: warn if $repo_ref != $sha (we shouldn't do PR validation using symbolic gitrefs)
+ echo "sha/repo_ref == $sha/$repo_ref ?"
+
+ parseScalaProperties build.number
+
+ ./pull-binary-libs.sh
+ # "noyoudont" is there juuuust in case
+ antDeployArgs="-Dmaven.version.suffix=\"-${sha:0:7}-SNAPSHOT\" -Dremote.snapshot.repository=$prRepoUrl -Drepository.credentials.id=pr-scala -Dremote.release.repository=noyoudont"
+
+ echo ">>> Getting Scala version number."
+ ant -q $antDeployArgs init
+ parseScalaProperties buildcharacter.properties # produce maven_version_number
+
+ echo ">>> Checking availability of Scala ${maven_version_number} in $prRepoUrl."
+ checkAvailability "org.scala-lang" "scala-library" "${maven_version_number}" $prRepoUrl; libraryAvailable=$RES
+ checkAvailability "org.scala-lang" "scala-reflect" "${maven_version_number}" $prRepoUrl; reflectAvailable=$RES
+ checkAvailability "org.scala-lang" "scala-compiler" "${maven_version_number}" $prRepoUrl; compilerAvailable=$RES
+
+ if $libraryAvailable && $reflectAvailable && $compilerAvailable; then
+ echo "Scala core already built!"
+ else
+ ant $antDeployArgs $antBuildArgs publish-opt-nodocs
+ fi
+
+ mv buildcharacter.properties jenkins.properties # parsed by the jenkins job
+ ;;
+esac
diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test
new file mode 100755
index 0000000000..c1c02c80cb
--- /dev/null
+++ b/scripts/jobs/validate/test
@@ -0,0 +1,17 @@
+#!/bin/bash -e
+
+case $prDryRun in
+ yep)
+ echo "DRY RUN"
+ ;;
+ *)
+ ./pull-binary-libs.sh
+
+ # build quick using STARR built upstream, as specified by scalaVersion
+ # (in that sense it's locker, since it was built with starr by that upstream job)
+ ant -Dstarr.version=$scalaVersion \
+ -Dscalac.args.optimise=-optimise \
+ -Dlocker.skip=1 -Dstarr.use.released=1 -Dextra.repo.url=$prRepoUrl \
+ $testExtraArgs ${testTarget-test.core docs.done}
+ ;;
+esac
\ No newline at end of file
diff --git a/scripts/readproperties.awk b/scripts/readproperties.awk
new file mode 100644
index 0000000000..96da94775b
--- /dev/null
+++ b/scripts/readproperties.awk
@@ -0,0 +1,39 @@
+# Adapted from http://stackoverflow.com/questions/1682442/reading-java-properties-file-from-bash/2318840#2318840
+BEGIN {
+ FS="=";
+ n="";
+ v="";
+ c=0; # Not a line continuation.
+}
+/^\#/ { # The line is a comment. Breaks line continuation.
+ c=0;
+ next;
+}
+/\\$/ && (c==0) && (NF>=2) { # Name value pair with a line continuation...
+ e=index($0,"=");
+ n=substr($0,1,e-1);
+ v=substr($0,e+1,length($0) - e - 1); # Trim off the backslash.
+ c=1; # Line continuation mode.
+ next;
+}
+/^[^\\]+\\$/ && (c==1) { # Line continuation. Accumulate the value.
+ v= "" v substr($0,1,length($0)-1);
+ next;
+}
+((c==1) || (NF>=2)) && !/^[^\\]+\\$/ { # End of line continuation, or a single line name/value pair
+ if (c==0) { # Single line name/value pair
+ e=index($0,"=");
+ n=substr($0,1,e-1);
+ v=substr($0,e+1,length($0) - e);
+ } else { # Line continuation mode - last line of the value.
+ c=0; # Turn off line continuation mode.
+ v= "" v $0;
+ }
+ # Make sure the name is a legal shell variable name
+ gsub(/[^A-Za-z0-9_]/,"_",n);
+ # Silently drop everything that might confuse bash.
+ gsub(/[\n\r\\\t'"\$!]/,"",v);
+ print "export " n "=\"" v "\" || echo \"Failed to set " n "\""; # don't make bash crap out when a property could not be parsed
+ n = "";
+ v = "";
+}
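+# Illustrative example (not part of the script): the input lines
+#   starr.version=2.11.5
+#   repo.url=http://example.org/repo
+# are printed as
+#   export starr_version="2.11.5" || echo "Failed to set starr_version"
+#   export repo_url="http://example.org/repo" || echo "Failed to set repo_url"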
diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md
index 6c8712cda2..e26cb796c8 100644
--- a/spec/01-lexical-syntax.md
+++ b/spec/01-lexical-syntax.md
@@ -11,7 +11,7 @@ Scala programs are written using the Unicode Basic Multilingual Plane
presently supported. This chapter defines the two modes of Scala's
lexical syntax, the Scala mode and the _XML mode_. If not
otherwise mentioned, the following descriptions of Scala tokens refer
-to _Scala mode_, and literal characters ‘c’ refer to the ASCII fragment
+to _Scala mode_, and literal characters ‘c’ refer to the ASCII fragment
`\u0000` – `\u007F`.
In Scala mode, _Unicode escapes_ are replaced by the corresponding
@@ -29,7 +29,7 @@ but I can't make it work nor can I imagine how this would make sense,
so I removed it for now.
-->
-To construct tokens, characters are distinguished according to the following
+To construct tokens, characters are distinguished according to the following
classes (Unicode general category given in parentheses):
1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`.
@@ -41,13 +41,13 @@ classes (Unicode general category given in parentheses):
1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `.
1. Delimiter characters ``‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ ``.
1. Operator characters. These consist of all printable ASCII characters
- `\u0020` - `\u007F` which are in none of the sets above, mathematical
+ `\u0020` - `\u007F` which are in none of the sets above, mathematical
symbols (`Sm`) and other symbols (`So`).
## Identifiers
```ebnf
-op ::= opchar {opchar}
+op ::= opchar {opchar}
varid ::= lower idrest
plainid ::= upper idrest
| varid
@@ -59,16 +59,16 @@ idrest ::= {letter | digit} [‘_’ op]
There are three ways to form an identifier. First, an identifier can
start with a letter which can be followed by an arbitrary sequence of
-letters and digits. This may be followed by underscore ‘_’
+letters and digits. This may be followed by underscore `‘_’`
characters and another string composed of either letters and digits or
-of operator characters. Second, an identifier can start with an operator
+of operator characters. Second, an identifier can start with an operator
character followed by an arbitrary sequence of operator characters.
The preceding two forms are called _plain_ identifiers. Finally,
an identifier may also be formed by an arbitrary string between
back-quotes (host systems may impose some restrictions on which
strings are legal for identifiers). The identifier then is composed
of all characters excluding the backquotes themselves.
-
+
As usual, a longest match rule applies. For instance, the string
```scala
@@ -92,8 +92,8 @@ do else extends false final
finally for forSome if implicit
import lazy match new null
object override package private protected
-return sealed super this throw
-trait try true type val
+return sealed super this throw
+trait try true type val
var while with yield
_ : = => <- <: <% >: # @
```
@@ -101,20 +101,18 @@ _ : = => <- <: <% >: # @
The Unicode operators `\u21D2` ‘$\Rightarrow$’ and `\u2190` ‘$\leftarrow$’, which have the ASCII
equivalents `=>` and `<-`, are also reserved.
-### Example
-Here are examples of identifiers:
-```scala
- x Object maxIndex p2p empty_?
- + `yield` αρετη _y dot_product_*
- __system _MAX_LEN_
-```
+> Here are examples of identifiers:
+> ```scala
+> x Object maxIndex p2p empty_?
+> + `yield` αρετη _y dot_product_*
+> __system _MAX_LEN_
+> ```
-### Example
-When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings.
-For instance, the statement `Thread.yield()` is illegal, since
-`yield` is a reserved word in Scala. However, here's a
-work-around: `` Thread.`yield`() ``
+<!-- -->
+> When one needs to access Java identifiers that are reserved words in Scala, use backquote-enclosed strings.
+> For instance, the statement `Thread.yield()` is illegal, since `yield` is a reserved word in Scala.
+> However, here's a work-around: `` Thread.`yield`() ``
## Newline Characters
@@ -134,7 +132,7 @@ The tokens that can terminate a statement are: literals, identifiers
and the following delimiters and reserved words:
```scala
-this null true false return type <xml-start>
+this null true false return type <xml-start>
_ ) ] }
```
@@ -142,8 +140,8 @@ The tokens that can begin a statement are all Scala tokens _except_
the following delimiters and reserved words:
```scala
-catch else extends finally forSome match
-with yield , . ; : = => <- <: <%
+catch else extends finally forSome match
+with yield , . ; : = => <- <: <%
>: # [ ) ] }
```
@@ -169,7 +167,7 @@ Newlines are disabled in:
1. Any regions analyzed in [XML mode](#xml-mode).
Note that the brace characters of `{...}` escapes in XML and
-string literals are not tokens,
+string literals are not tokens,
and therefore do not enclose a region where newlines
are enabled.
@@ -179,7 +177,7 @@ between the two tokens. However, if two tokens are separated by at
least one completely blank line (i.e a line which contains no
printable characters), then two `nl` tokens are inserted.
-The Scala grammar (given in full [here](#scala-syntax-summary))
+The Scala grammar (given in full [here](13-syntax-summary.html))
contains productions where optional `nl` tokens, but not
semicolons, are accepted. This has the effect that a newline in one of these
positions does not terminate an expression or statement. These positions can
@@ -189,117 +187,113 @@ Multiple newline tokens are accepted in the following places (note
that a semicolon in place of the newline would be illegal in every one
of these cases):
-- between the condition of a
+- between the condition of a
[conditional expression](06-expressions.html#conditional-expressions)
or [while loop](06-expressions.html#while-loop-expressions) and the next
following expression,
-- between the enumerators of a
+- between the enumerators of a
[for-comprehension](06-expressions.html#for-comprehensions-and-for-loops)
and the next following expression, and
-- after the initial `type` keyword in a
+- after the initial `type` keyword in a
[type definition or declaration](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
A single new line token is accepted
- in front of an opening brace ‘{’, if that brace is a legal
continuation of the current statement or expression,
-- after an [infix operator](06-expressions.html#prefix-infix-and-postfix-operations),
+- after an [infix operator](06-expressions.html#prefix,-infix,-and-postfix-operations),
if the first token on the next line can start an expression,
- in front of a [parameter clause](04-basic-declarations-and-definitions.html#function-declarations-and-definitions), and
-- after an [annotation](11-user-defined-annotations.html#user-defined-annotations).
-
-### Example
-
-The newline tokens between the two lines are not
-treated as statement separators.
-
-```scala
-if (x > 0)
- x = x - 1
-
-while (x > 0)
- x = x / 2
-
-for (x <- 1 to 10)
- println(x)
-
-type
- IntList = List[Int]
-```
-
-### Example
-
-```scala
-new Iterator[Int]
-{
- private var x = 0
- def hasNext = true
- def next = { x += 1; x }
-}
-```
-
-With an additional newline character, the same code is interpreted as
-an object creation followed by a local block:
-
-```scala
-new Iterator[Int]
-
-{
- private var x = 0
- def hasNext = true
- def next = { x += 1; x }
-}
-```
-
-### Example
-
-```scala
- x < 0 ||
- x > 10
-```
-
-With an additional newline character, the same code is interpreted as
-two expressions:
-
-```scala
- x < 0 ||
-
- x > 10
-```
-
-### Example
-
-```scala
-def func(x: Int)
- (y: Int) = x + y
-```
-
-With an additional newline character, the same code is interpreted as
-an abstract function definition and a syntactically illegal statement:
-
-```scala
-def func(x: Int)
-
- (y: Int) = x + y
-```
-
-### Example
-
-```scala
-@serializable
-protected class Data { ... }
-```
-
-With an additional newline character, the same code is interpreted as
-an attribute and a separate statement (which is syntactically
-illegal).
-
-```scala
-@serializable
-
-protected class Data { ... }
-```
-
+- after an [annotation](11-annotations.html#user-defined-annotations).
+
+> The newline tokens between the two lines are not
+> treated as statement separators.
+>
+> ```scala
+> if (x > 0)
+> x = x - 1
+>
+> while (x > 0)
+> x = x / 2
+>
+> for (x <- 1 to 10)
+> println(x)
+>
+> type
+> IntList = List[Int]
+> ```
+
+<!-- -->
+
+> ```scala
+> new Iterator[Int]
+> {
+> private var x = 0
+> def hasNext = true
+> def next = { x += 1; x }
+> }
+> ```
+>
+> With an additional newline character, the same code is interpreted as
+> an object creation followed by a local block:
+>
+> ```scala
+> new Iterator[Int]
+>
+> {
+> private var x = 0
+> def hasNext = true
+> def next = { x += 1; x }
+> }
+> ```
+
+<!-- -->
+
+> ```scala
+> x < 0 ||
+> x > 10
+> ```
+>
+> With an additional newline character, the same code is interpreted as
+> two expressions:
+>
+> ```scala
+> x < 0 ||
+>
+> x > 10
+> ```
+
+<!-- -->
+
+> ```scala
+> def func(x: Int)
+> (y: Int) = x + y
+> ```
+>
+> With an additional newline character, the same code is interpreted as
+> an abstract function definition and a syntactically illegal statement:
+>
+> ```scala
+> def func(x: Int)
+>
+> (y: Int) = x + y
+> ```
+
+<!-- -->
+
+> ```scala
+> @serializable
+> protected class Data { ... }
+> ```
+>
+> With an additional newline character, the same code is interpreted as
+> an attribute and a separate statement (which is syntactically illegal).
+>
+> ```scala
+> @serializable
+>
+> protected class Data { ... }
+> ```
## Literals
@@ -307,9 +301,9 @@ There are literals for integer numbers, floating point numbers,
characters, booleans, symbols, strings. The syntax of these literals is in
each case as in Java.
-<!-- TODO
+<!-- TODO
say that we take values from Java, give examples of some lits in
- particular float and double.
+ particular float and double.
-->
```ebnf
@@ -322,18 +316,15 @@ Literal ::= [‘-’] integerLiteral
| ‘null’
```
-
### Integer Literals
```ebnf
-integerLiteral ::= (decimalNumeral | hexNumeral | octalNumeral)
+integerLiteral ::= (decimalNumeral | hexNumeral)
[‘L’ | ‘l’]
decimalNumeral ::= ‘0’ | nonZeroDigit {digit}
-hexNumeral ::= ‘0’ ‘x’ hexDigit {hexDigit}
-octalNumeral ::= ‘0’ octalDigit {octalDigit}
+hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit}
digit ::= ‘0’ | nonZeroDigit
nonZeroDigit ::= ‘1’ | … | ‘9’
-octalDigit ::= ‘0’ | … | ‘7’
```
Integer literals are usually of type `Int`, or of type
@@ -356,13 +347,9 @@ is _pt_. The numeric ranges given by these types are:
|`Short` | $-2\^{15}$ to $2\^{15}-1$|
|`Char` | $0$ to $2\^{16}-1$ |
-
-### Example
-
-```scala
-0 21 0xFFFFFFFF -42L
-```
-
+> ```scala
+> 0 21 0xFFFFFFFF -42L
+> ```
### Floating Point Literals
@@ -386,20 +373,18 @@ If a floating point literal in a program is followed by a token
starting with a letter, there must be at least one intervening
whitespace character between the two tokens.
-### Example
+> ```scala
+> 0.0 1e30f 3.14159f 1.0e-100 .1
+> ```
-```scala
-0.0 1e30f 3.14159f 1.0e-100 .1
-```
+<!-- -->
-### Example
+> The phrase `1.toString` parses as three different tokens:
+> the integer literal `1`, a `.`, and the identifier `toString`.
-The phrase `1.toString` parses as three different tokens:
-the integer literal `1`, a `.`, and the identifier `toString`.
+<!-- -->
-### Example
-
-`1.` is not a valid floating point literal because the mandatory digit after the `.` is missing.
+> `1.` is not a valid floating point literal because the mandatory digit after the `.` is missing.
### Boolean Literals
@@ -410,7 +395,6 @@ booleanLiteral ::= ‘true’ | ‘false’
The boolean literals `true` and `false` are
members of type `Boolean`.
-
### Character Literals
```ebnf
@@ -421,19 +405,16 @@ A character literal is a single character enclosed in quotes.
The character is either a printable unicode character or is described
by an [escape sequence](#escape-sequences).
-### Example
-
-```scala
-'a' '\u0041' '\n' '\t'
-```
+> ```scala
+> 'a' '\u0041' '\n' '\t'
+> ```
Note that `'\u000A'` is _not_ a valid character literal because
Unicode conversion is done before literal parsing and the Unicode
-character \\u000A (line feed) is not a printable
+character `\u000A` (line feed) is not a printable
character. One can use instead the escape sequence `'\n'` or
the octal escape `'\12'` ([see here](#escape-sequences)).
-
### String Literals
```ebnf
@@ -446,14 +427,12 @@ characters are either printable unicode character or are described by
[escape sequences](#escape-sequences). If the string literal
contains a double quote character, it must be escaped,
i.e. `"\""`. The value of a string literal is an instance of
-class `String`.
+class `String`.
-### Example
-
-```scala
-"Hello,\nWorld!"
-"This string contains a \" character."
-```
+> ```scala
+> "Hello,\nWorld!"
+> "This string contains a \" character."
+> ```
#### Multi-Line String Literals
@@ -470,46 +449,43 @@ must not necessarily be printable; newlines or other
control characters are also permitted. Unicode escapes work as everywhere else, but none
of the escape sequences [here](#escape-sequences) are interpreted.
-### Example
-
-```scala
- """the present string
- spans three
- lines."""
-```
-
-This would produce the string:
-
-```scala
-the present string
- spans three
- lines.
-```
-
-The Scala library contains a utility method `stripMargin`
-which can be used to strip leading whitespace from multi-line strings.
-The expression
-
-```scala
- """the present string
- |spans three
- |lines.""".stripMargin
-```
-
-evaluates to
-
-```scala
-the present string
-spans three
-lines.
-```
-
-Method `stripMargin` is defined in class
-[scala.collection.immutable.StringLike](http://www.scala-lang.org/api/current/index.html#scala.collection.immutable.StringLike).
-Because there is a predefined
-[implicit conversion](06-expressions.html#implicit-conversions) from `String` to
-`StringLike`, the method is applicable to all strings.
-
+> ```scala
+> """the present string
+> spans three
+> lines."""
+> ```
+>
+> This would produce the string:
+>
+> ```scala
+> the present string
+> spans three
+> lines.
+> ```
+>
+> The Scala library contains a utility method `stripMargin`
+> which can be used to strip leading whitespace from multi-line strings.
+> The expression
+>
+> ```scala
+> """the present string
+> |spans three
+> |lines.""".stripMargin
+> ```
+>
+> evaluates to
+>
+> ```scala
+> the present string
+> spans three
+> lines.
+> ```
+>
+> Method `stripMargin` is defined in class
+> [scala.collection.immutable.StringLike](http://www.scala-lang.org/api/current/#scala.collection.immutable.StringLike).
+> Because there is a predefined
+> [implicit conversion](06-expressions.html#implicit-conversions) from `String` to
+> `StringLike`, the method is applicable to all strings.
### Escape Sequences
@@ -526,15 +502,13 @@ The following escape sequences are recognized in character and string literals.
| `‘\‘ ‘'‘` | `\u0027` | single quote | `'` |
| `‘\‘ ‘\‘` | `\u005c` | backslash | `\` |
-
A character with Unicode between 0 and 255 may also be represented by
-an octal escape, i.e. a backslash ‘\’ followed by a
+an octal escape, i.e. a backslash `'\'` followed by a
sequence of up to three octal characters.
It is a compile time error if a backslash character in a character or
string literal does not start a valid escape sequence.
-
### Symbol literals
```ebnf
@@ -557,7 +531,6 @@ caches weak references to `Symbol`s, thus ensuring that
identical symbol literals are equivalent with respect to reference
equality.
-
## Whitespace and Comments
Tokens may be separated by whitespace characters
@@ -572,7 +545,6 @@ but are required to be properly nested. Therefore, a comment like
`/* /* */` will be rejected as having an unterminated
comment.
-
## XML mode
In order to allow literal inclusion of XML fragments, lexical analysis
@@ -589,10 +561,10 @@ brace and immediately followed by a character starting an XML name.
The scanner switches from XML mode to Scala mode if either
-- the XML expression or the XML pattern started by the initial ‘<’ has been
+- the XML expression or the XML pattern started by the initial ‘<’ has been
successfully parsed, or if
-- the parser encounters an embedded Scala expression or pattern and
- forces the Scanner
+- the parser encounters an embedded Scala expression or pattern and
+ forces the Scanner
back to normal mode, until the Scala expression or pattern is
successfully parsed. In this case, since code and XML fragments can be
nested, the parser has to maintain a stack that reflects the nesting
@@ -601,15 +573,13 @@ The scanner switches from XML mode to Scala mode if either
Note that no Scala tokens are constructed in XML mode, and that comments are interpreted
as text.
-### Example
-
-The following value definition uses an XML literal with two embedded
-Scala expressions:
-
-```scala
-val b = <book>
- <title>The Scala Language Specification</title>
- <version>{scalaBook.version}</version>
- <authors>{scalaBook.authors.mkList("", ", ", "")}</authors>
- </book>
-```
+> The following value definition uses an XML literal with two embedded
+> Scala expressions:
+>
+> ```scala
+> val b = <book>
+> <title>The Scala Language Specification</title>
+> <version>{scalaBook.version}</version>
+> <authors>{scalaBook.authors.mkList("", ", ", "")}</authors>
+> </book>
+> ```
diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md
index bfb743dbe4..0a9c5dfe77 100644
--- a/spec/02-identifiers-names-and-scopes.md
+++ b/spec/02-identifiers-names-and-scopes.md
@@ -1,5 +1,5 @@
---
-title: Identifiers, Names and Scopes
+title: Identifiers, Names & Scopes
layout: default
chapter: 2
---
@@ -17,14 +17,13 @@ which are collectively called _bindings_.
Bindings of different kinds have a precedence defined on them:
1. Definitions and declarations that are local, inherited, or made
- available by a package clause in the same compilation unit where the
- definition occurs have highest precedence.
+ available by a package clause in the same compilation unit where the
+ definition occurs have highest precedence.
1. Explicit imports have next highest precedence.
1. Wildcard imports have next highest precedence.
1. Definitions made available by a package clause not in the
compilation unit where the definition occurs have lowest precedence.
-
There are two different name spaces, one for [types](03-types.html#types)
and one for [terms](06-expressions.html#expressions). The same name may designate a
type and a term, depending on the context where the name is used.
@@ -33,7 +32,7 @@ A binding has a _scope_ in which the entity defined by a single
name can be accessed using a simple name. Scopes are nested. A binding
in some inner scope _shadows_ bindings of lower precedence in the
same scope as well as bindings of the same or lower precedence in outer
-scopes.
+scopes.
<!-- TODO: either the example, the spec, or the compiler is wrong
@@ -55,7 +54,7 @@ A reference to an unqualified (type- or term-) identifier $x$ is bound
by the unique binding, which
- defines an entity with name $x$ in the same namespace as the identifier, and
-- shadows all other bindings that define entities with name $x$ in that
+- shadows all other bindings that define entities with name $x$ in that
namespace.
It is an error if no such binding exists. If $x$ is bound by an
@@ -70,10 +69,9 @@ the member of the type $T$ of $e$ which has the name $x$ in the same
namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types).
The type of $e.x$ is the member type of the referenced entity in $T$.
+###### Example
-### Example
-
-Assume the following two definitions of a objects named `X` in packages `P` and `Q`.
+Assume the following two definitions of objects named `X` in packages `P` and `Q`.
```scala
package P {
@@ -111,4 +109,3 @@ object A {
println("L20: "+x) // `x' refers to string "abc" here
}}}}}}
```
-
diff --git a/spec/03-types.md b/spec/03-types.md
index 66ddee8b7e..94b7916634 100644
--- a/spec/03-types.md
+++ b/spec/03-types.md
@@ -11,9 +11,9 @@ chapter: 3
| InfixType [ExistentialClause]
FunctionArgTypes ::= InfixType
| ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
- ExistentialClause ::= ‘forSome’ ‘{’ ExistentialDcl
+ ExistentialClause ::= ‘forSome’ ‘{’ ExistentialDcl
{semi ExistentialDcl} ‘}’
- ExistentialDcl ::= ‘type’ TypeDcl
+ ExistentialDcl ::= ‘type’ TypeDcl
| ‘val’ ValDcl
InfixType ::= CompoundType {id [nl] CompoundType}
CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement]
@@ -31,7 +31,7 @@ chapter: 3
We distinguish between first-order types and type constructors, which
take type parameters and yield types. A subset of first-order types
called _value types_ represents sets of (first-class) values.
-Value types are either _concrete_ or _abstract_.
+Value types are either _concrete_ or _abstract_.
Every concrete value type can be represented as a _class type_, i.e. a
[type designator](#type-designators) that refers to a
@@ -39,8 +39,8 @@ Every concrete value type can be represented as a _class type_, i.e. a
[compound type](#compound-types) representing an
intersection of types, possibly with a [refinement](#compound-types)
that further constrains the types of its members.
-<!--
-A shorthand exists for denoting [function types](#function-types)
+<!--
+A shorthand exists for denoting [function types](#function-types)
-->
Abstract value types are introduced by [type parameters](04-basic-declarations-and-definitions.html#type-parameters)
and [abstract type bindings](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
@@ -50,20 +50,19 @@ Parentheses in types can be used for grouping.
define a class (of the same name as the object or package, but
inaccessible to user programs).
-Non-value types capture properties of identifiers that
-[are not values](#non-value-types). For example, a
-[type constructor](#type-constructors) does not directly specify a type of
-values. However, when a type constructor is applied to the correct type
-arguments, it yields a first-order type, which may be a value type.
-
-Non-value types are expressed indirectly in Scala. E.g., a method type is
-described by writing down a method signature, which in itself is not a real
-type, although it gives rise to a corresponding [method type](#method-types).
-Type constructors are another example, as one can write
-`type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write
+Non-value types capture properties of identifiers that
+[are not values](#non-value-types). For example, a
+[type constructor](#type-constructors) does not directly specify a type of
+values. However, when a type constructor is applied to the correct type
+arguments, it yields a first-order type, which may be a value type.
+
+Non-value types are expressed indirectly in Scala. E.g., a method type is
+described by writing down a method signature, which in itself is not a real
+type, although it gives rise to a corresponding [method type](#method-types).
+Type constructors are another example, as one can write
+`type Swap[m[_, _], a,b] = m[b, a]`, but there is no syntax to write
the corresponding anonymous type function directly.
-
## Paths
```ebnf
@@ -83,19 +82,18 @@ A path is one of the following.
- The empty path ε (which cannot be written explicitly in user programs).
- $C.$`this`, where $C$ references a class.
The path `this` is taken as a shorthand for $C.$`this` where
- $C$ is the name of the class directly enclosing the reference.
+ $C$ is the name of the class directly enclosing the reference.
- $p.x$ where $p$ is a path and $x$ is a stable member of $p$.
- _Stable members_ are packages or members introduced by object definitions or
+ _Stable members_ are packages or members introduced by object definitions or
by value definitions of [non-volatile types](#volatile-types).
- $C.$`super`$.x$ or $C.$`super`$[M].x$
- where $C$ references a class and $x$ references a
- stable member of the super class or designated parent class $M$ of $C$.
+ where $C$ references a class and $x$ references a
+ stable member of the super class or designated parent class $M$ of $C$.
The prefix `super` is taken as a shorthand for $C.$`super` where
- $C$ is the name of the class directly enclosing the reference.
+ $C$ is the name of the class directly enclosing the reference.
A _stable identifier_ is a path which ends in an identifier.
-
## Value Types
Every value in Scala has a type which is of one of the following
@@ -110,7 +108,7 @@ SimpleType ::= Path ‘.’ type
A singleton type is of the form $p.$`type`, where $p$ is a
path pointing to a value expected to [conform](06-expressions.html#expression-typing)
to `scala.AnyRef`. The type denotes the set of values
-consisting of `null` and the value denoted by $p$.
+consisting of `null` and the value denoted by $p$.
A _stable type_ is either a singleton type or a type which is
declared to be a subtype of trait `scala.Singleton`.
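
A minimal sketch (names invented) of singleton types formed from stable paths:

```scala
object SingletonTypes {
  object Registry
  // Registry.type denotes the set consisting of the value Registry and null
  val r: Registry.type = Registry

  val s = "spec"
  // s is a stable identifier of a non-volatile type, so s.type is also a singleton type
  val t: s.type = s
}
```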
@@ -122,11 +120,11 @@ SimpleType ::= SimpleType ‘#’ id
```
A type projection $T$#$x$ references the type member named
-$x$ of type $T$.
+$x$ of type $T$.
<!--
The following is no longer necessary:
-If $x$ references an abstract type member, then $T$ must be a
+If $x$ references an abstract type member, then $T$ must be a
[stable type](#singleton-types)
-->
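
A small sketch (names invented) of a type projection that refers to a type member independently of an enclosing instance:

```scala
object TypeProjections {
  class Outer { class Inner }

  // Outer#Inner accepts an Inner created from any Outer instance
  def describe(i: Outer#Inner): String = i.toString

  val o1 = new Outer
  val o2 = new Outer
  describe(new o1.Inner)
  describe(new o2.Inner)
}
```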
@@ -149,7 +147,7 @@ A qualified type designator has the form `p.t` where `p` is
a [path](#paths) and _t_ is a type name. Such a type designator is
equivalent to the type projection `p.type#t`.
-### Example
+###### Example
Some type designators and their expansions are listed below. We assume
a local type parameter $t$, a value `maintable`
@@ -162,8 +160,6 @@ with a type member `Node` and the standard class `scala.Int`,
|scala.Int | scala.type#Int |
|data.maintable.Node | data.maintable.type#Node |
-
-
### Parameterized Types
```ebnf
@@ -171,8 +167,8 @@ SimpleType ::= SimpleType TypeArgs
TypeArgs ::= ‘[’ Types ‘]’
```
-A parameterized type $T[ U_1 , \ldots , U_n ]$ consists of a type
-designator $T$ and type parameters $U_1 , \ldots , U_n$ where
+A parameterized type $T[ T_1 , \ldots , T_n ]$ consists of a type
+designator $T$ and type parameters $T_1 , \ldots , T_n$ where
$n \geq 1$. $T$ must refer to a type constructor which takes $n$ type
parameters $a_1 , \ldots , a_n$.
@@ -182,7 +178,8 @@ well-formed if each actual type parameter
_conforms to its bounds_, i.e. $\sigma L_i <: T_i <: \sigma U_i$ where $\sigma$ is the
substitution $[ a_1 := T_1 , \ldots , a_n := T_n ]$.
-### Example
+###### Example Parameterized Types
+
Given the partial type definitions:
```scala
@@ -206,9 +203,9 @@ F[List, Int]
G[S, String]
```
-### Example
+###### Example
-Given the [above type definitions](example-parameterized-types),
+Given the [above type definitions](#example-parameterized-types),
the following types are ill-formed:
```scala
@@ -231,7 +228,7 @@ SimpleType ::= ‘(’ Types ‘)’
```
A tuple type $(T_1 , \ldots , T_n)$ is an alias for the
-class `scala.Tuple$_n$[$T_1$, … , $T_n$]`, where $n \geq 2$.
+class `scala.Tuple$n$[$T_1$, … , $T_n$]`, where $n \geq 2$.
Tuple classes are case classes whose fields can be accessed using
selectors `_1` , … , `_n`. Their functionality is
@@ -241,12 +238,12 @@ standard Scala library (they might also add other methods and
implement other traits).
```scala
-case class Tuple$n$[+T1, … , +$T_n$](_1: T1, … , _n: $T_n$)
-extends Product_n[T1, … , $T_n$]
+case class Tuple$n$[+$T_1$, … , +$T_n$](_1: $T_1$, … , _n: $T_n$)
+extends Product_n[$T_1$, … , $T_n$]
-trait Product_n[+T1, … , +$T_n$] {
+trait Product_n[+$T_1$, … , +$T_n$] {
override def productArity = $n$
- def _1: T1
+ def _1: $T_1$
def _n: $T_n$
}
@@ -259,10 +256,10 @@ AnnotType ::= SimpleType {Annotation}
```
An annotated type $T$ $a_1, \ldots, a_n$
-attaches [annotations](11-user-defined-annotations.html#user-defined-annotations)
+attaches [annotations](11-annotations.html#user-defined-annotations)
$a_1 , \ldots , a_n$ to the type $T$.
-### Example
+###### Example
The following type adds the `@suspendable` annotation to the type `String`:
@@ -270,7 +267,6 @@ The following type adds the `@suspendable` annotation to the type `String`:
String @suspendable
```
-
### Compound Types
```ebnf
@@ -283,7 +279,7 @@ RefineStat ::= Dcl
```
A compound type $T_1$ `with` … `with` $T_n \\{ R \\}$
-represents objects with members as given in the component types
+represents objects with members as given in the component types
$T_1 , \ldots , T_n$ and the refinement $\\{ R \\}$. A refinement
$\\{ R \\}$ contains declarations and type definitions.
If a declaration or definition overrides a declaration or definition in
@@ -291,7 +287,7 @@ one of the component types $T_1 , \ldots , T_n$, the usual rules for
[overriding](05-classes-and-objects.html#overriding) apply; otherwise the declaration
or definition is said to be “structural” [^2].
-[^2]: A reference to a structurally defined member (method call or access
+[^2]: A reference to a structurally defined member (method call or access
to a value or variable) may generate binary code that is significantly
slower than an equivalent code to a non-structural member.
@@ -309,7 +305,7 @@ A compound type may also consist of just a refinement
$\\{ R \\}$ with no preceding component types. Such a type is
equivalent to `AnyRef` $\\{ R \\}$.
-### Example
+###### Example
The following example shows how to declare and use a method whose
parameter type contains a refinement with structural declarations.
@@ -341,7 +337,6 @@ Although `Bird` and `Plane` do not share any parent class other than
refinement with structural declarations to accept any object that declares
a value `callsign` and a `fly` method.
-
### Infix Types
```ebnf
@@ -350,24 +345,23 @@ InfixType ::= CompoundType {id [nl] CompoundType}
An infix type $T_1$ `op` $T_2$ consists of an infix
operator `op` which gets applied to two type operands $T_1$ and
-$T_2$. The type is equivalent to the type application
+$T_2$. The type is equivalent to the type application
`op`$[T_1, T_2]$. The infix operator `op` may be an
-arbitrary identifier, except for `*`, which is reserved as a postfix modifier
-denoting a [repeated parameter type](04-basic-declarations-and-definitions.html#repeated-parameters).
+arbitrary identifier.
All type infix operators have the same precedence; parentheses have to
-be used for grouping. The [associativity](06-expressions.html#prefix-infix-and-postfix-operations)
+be used for grouping. The [associativity](06-expressions.html#prefix,-infix,-and-postfix-operations)
of a type operator is determined as for term operators: type operators
ending in a colon ‘:’ are right-associative; all other
operators are left-associative.
-In a sequence of consecutive type infix operations
+In a sequence of consecutive type infix operations
$t_0 \, \mathit{op} \, t_1 \, \mathit{op_2} \, \ldots \, \mathit{op_n} \, t_n$,
-all operators $\mathit{op}_1 , \ldots , \mathit{op}_n$ must have the same
+all operators $\mathit{op}\_1 , \ldots , \mathit{op}\_n$ must have the same
associativity. If they are all left-associative, the sequence is
-interpreted as
+interpreted as
$(\ldots (t_0 \mathit{op_1} t_1) \mathit{op_2} \ldots) \mathit{op_n} t_n$,
-otherwise it is interpreted as
+otherwise it is interpreted as
$t_0 \mathit{op_1} (t_1 \mathit{op_2} ( \ldots \mathit{op_n} t_n) \ldots)$.
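
A brief sketch (type and value names invented) of infix types and their associativity:

```scala
object InfixTypes {
  // a type operator ending in ':' is right-associative
  type |:[A, B] = Either[A, B]
  // Int |: String |: Boolean is parsed as Int |: (String |: Boolean),
  // i.e. Either[Int, Either[String, Boolean]]
  val e: Int |: String |: Boolean = Left(1)

  // any binary type constructor may be written infix; this one is left-associative
  type Pair[A, B] = (A, B)
  val p: Int Pair String = (1, "one")   // same as Pair[Int, String]
}
```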
### Function Types
@@ -381,25 +375,25 @@ FunctionArgs ::= InfixType
The type $(T_1 , \ldots , T_n) \Rightarrow U$ represents the set of function
values that take arguments of types $T_1 , \ldots , T_n$ and yield
results of type $U$. In the case of exactly one argument type
-$T \Rightarrow U$ is a shorthand for $(T) \Rightarrow U$.
+$T \Rightarrow U$ is a shorthand for $(T) \Rightarrow U$.
An argument type of the form $\Rightarrow T$
represents a [call-by-name parameter](04-basic-declarations-and-definitions.html#by-name-parameters) of type $T$.
Function types associate to the right, e.g.
-$S \Rightarrow T \Rightarrow U$ is the same as
+$S \Rightarrow T \Rightarrow U$ is the same as
$S \Rightarrow (T \Rightarrow U)$.
Function types are shorthands for class types that define `apply`
-functions. Specifically, the $n$-ary function type
+functions. Specifically, the $n$-ary function type
$(T_1 , \ldots , T_n) \Rightarrow U$ is a shorthand for the class type
`Function$_n$[$T_1$ , … , $T_n$, U]`. Such class
types are defined in the Scala library for $n$ between 0 and 9 as follows.
```scala
-package scala
+package scala
trait Function_n[-T1 , … , -T$_n$, +R] {
- def apply(x1: T1 , … , x$_n$: T$_n$): R
- override def toString = "<function>"
+ def apply(x1: T1 , … , x$_n$: T$_n$): R
+ override def toString = "<function>"
}
```
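
As an informal sketch (names invented), the right-associativity of `=>` is what makes curried function values read naturally, and a function type is interchangeable with the corresponding `Function$_n$` class type:

```scala
object FunctionTypes {
  // Int => Int => Int is read as Int => (Int => Int)
  val add: Int => Int => Int = x => y => x + y
  val addOne: Int => Int = add(1)       // applying the outer function yields another function
  val three: Int = addOne(2)

  // (Int, Int) => Int is shorthand for Function2[Int, Int, Int]
  val plus: Function2[Int, Int, Int] = (x, y) => x + y
}
```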
@@ -410,28 +404,28 @@ result type and contravariant in their argument types.
```ebnf
Type ::= InfixType ExistentialClauses
-ExistentialClauses ::= ‘forSome’ ‘{’ ExistentialDcl
+ExistentialClauses ::= ‘forSome’ ‘{’ ExistentialDcl
{semi ExistentialDcl} ‘}’
-ExistentialDcl ::= ‘type’ TypeDcl
+ExistentialDcl ::= ‘type’ TypeDcl
| ‘val’ ValDcl
```
An existential type has the form `$T$ forSome { $Q$ }`
-where $Q$ is a sequence of
+where $Q$ is a sequence of
[type declarations](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases).
-Let
-$t_1[\mathit{tps}_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}_n] >: L_n <: U_n$
-be the types declared in $Q$ (any of the
+Let
+$t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n$
+be the types declared in $Q$ (any of the
type parameter sections `[ $\mathit{tps}_i$ ]` might be missing).
-The scope of each type $t_i$ includes the type $T$ and the existential clause
-$Q$.
-The type variables $t_i$ are said to be _bound_ in the type
+The scope of each type $t_i$ includes the type $T$ and the existential clause
+$Q$.
+The type variables $t_i$ are said to be _bound_ in the type
`$T$ forSome { $Q$ }`.
Type variables which occur in a type $T$ but which are not bound in $T$ are said
to be _free_ in $T$.
-A _type instance_ of `$T$ forSome { $Q$ }`
+A _type instance_ of `$T$ forSome { $Q$ }`
is a type $\sigma T$ where $\sigma$ is a substitution over $t_1 , \ldots , t_n$
such that, for each $i$, $\sigma L_i <: \sigma t_i <: \sigma U_i$.
The set of values denoted by the existential type `$T$ forSome {$\,Q\,$}`
@@ -439,7 +433,7 @@ is the union of the set of values of all its type instances.
A _skolemization_ of `$T$ forSome { $Q$ }` is
a type instance $\sigma T$, where $\sigma$ is the substitution
-$[t'_1/t_1 , \ldots , t'_n/t_n]$ and each $t'_i$ is a fresh abstract type
+$[t_1'/t_1 , \ldots , t_n'/t_n]$ and each $t_i'$ is a fresh abstract type
with lower bound $\sigma L_i$ and upper bound $\sigma U_i$.
#### Simplification Rules
@@ -458,21 +452,20 @@ is equivalent to
1. An empty quantification can be dropped. E.g.,
`$T$ forSome { }` is equivalent to $T$.
1. An existential type `$T$ forSome { $Q$ }` where $Q$ contains
-a clause `type $t[\mathit{tps}] >: L <: U$` is equivalent
-to the type `$T'$ forSome { $Q$ }` where $T'$ results from $T$ by replacing
+a clause `type $t[\mathit{tps}] >: L <: U$` is equivalent
+to the type `$T'$ forSome { $Q$ }` where $T'$ results from $T$ by replacing
every [covariant occurrence](04-basic-declarations-and-definitions.html#variance-annotations) of $t$ in $T$ by $U$ and by
replacing every contravariant occurrence of $t$ in $T$ by $L$.
-
#### Existential Quantification over Values
As a syntactic convenience, the bindings clause
in an existential type may also contain
-value declarations `val $x$: $T$`.
+value declarations `val $x$: $T$`.
An existential type `$T$ forSome { $Q$; val $x$: $S\,$;$\,Q'$ }`
is treated as a shorthand for the type
-`$T'$ forSome { $Q$; type $t$ <: $S$ with Singleton; $Q'$ }`, where $t$ is a
-fresh type name and $T'$ results from $T$ by replacing every occurrence of
+`$T'$ forSome { $Q$; type $t$ <: $S$ with Singleton; $Q'$ }`, where $t$ is a
+fresh type name and $T'$ results from $T$ by replacing every occurrence of
`$x$.type` with $t$.
#### Placeholder Syntax for Existential Types
@@ -483,17 +476,17 @@ WildcardType ::= ‘_’ TypeBounds
Scala supports a placeholder syntax for existential types.
A _wildcard type_ is of the form `_$\;$>:$\,L\,$<:$\,U$`. Both bound
-clauses may be omitted. If a lower bound clause `>:$\,L$` is missing,
+clauses may be omitted. If a lower bound clause `>:$\,L$` is missing,
`>:$\,$scala.Nothing`
-is assumed. If an upper bound clause `<:$\,U$` is missing,
-`<:$\,$scala.Any` is assumed. A wildcard type is a shorthand for an
-existentially quantified type variable, where the existential quantification is
+is assumed. If an upper bound clause `<:$\,U$` is missing,
+`<:$\,$scala.Any` is assumed. A wildcard type is a shorthand for an
+existentially quantified type variable, where the existential quantification is
implicit.
A wildcard type must appear as type argument of a parameterized type.
-Let $T = p.c[\mathit{targs},T,\mathit{targs}']$ be a parameterized type where
+Let $T = p.c[\mathit{targs},T,\mathit{targs}']$ be a parameterized type where
$\mathit{targs}, \mathit{targs}'$ may be empty and
-$T$ is a wildcard type `_$\;$>:$\,L\,$<:$\,U$`. Then $T$ is equivalent to the
+$T$ is a wildcard type `_$\;$>:$\,L\,$<:$\,U$`. Then $T$ is equivalent to the
existential
type
@@ -501,14 +494,14 @@ type
$p.c[\mathit{targs},t,\mathit{targs}']$ forSome { type $t$ >: $L$ <: $U$ }
```
-where $t$ is some fresh type variable.
+where $t$ is some fresh type variable.
Wildcard types may also appear as parts of [infix types](#infix-types)
, [function types](#function-types),
or [tuple types](#tuple-types).
Their expansion is then the expansion in the equivalent parameterized
type.
-### Example
+###### Example
Assume the class definitions
@@ -532,7 +525,7 @@ An alternative formulation of the first type above using wildcard syntax is:
Ref[_ <: java.lang.Number]
```
-### Example
+###### Example
The type `List[List[_]]` is equivalent to the existential type
@@ -540,7 +533,7 @@ The type `List[List[_]]` is equivalent to the existential type
List[List[t] forSome { type t }] .
```
-### Example
+###### Example
Assume a covariant type
@@ -563,25 +556,23 @@ List[java.lang.Number] forSome { type T <: java.lang.Number }
which is in turn equivalent (by simplification rules 2 and 3 above) to
`List[java.lang.Number]`.
-
## Non-Value Types
The types explained in the following do not denote sets of values, nor
do they appear explicitly in programs. They are introduced in this
report as the internal types of defined identifiers.
-
### Method Types
-A method type is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$
+A method type is denoted internally as $(\mathit{Ps})U$, where $(\mathit{Ps})$
is a sequence of parameter names and types $(p_1:T_1 , \ldots , p_n:T_n)$
for some $n \geq 0$ and $U$ is a (value or method) type. This type
-represents named methods that take arguments named $p_1 , \ldots , p_n$
+represents named methods that take arguments named $p_1 , \ldots , p_n$
of types $T_1 , \ldots , T_n$
and that return a result of type $U$.
-Method types associate to the right: $(\mathit{Ps}_1)(\mathit{Ps}_2)U$ is
-treated as $(\mathit{Ps}_1)((\mathit{Ps}_2)U)$.
+Method types associate to the right: $(\mathit{Ps}\_1)(\mathit{Ps}\_2)U$ is
+treated as $(\mathit{Ps}\_1)((\mathit{Ps}\_2)U)$.
A special case are types of methods without any parameters. They are
written here `=> T`. Parameterless methods name expressions
@@ -595,7 +586,7 @@ corresponding function type.
###### Example
The declarations
-
+
```
def a: Int
def b (x: Int): Boolean
@@ -613,8 +604,8 @@ c: (Int) (String, String) String
### Polymorphic Method Types
A polymorphic method type is denoted internally as `[$\mathit{tps}\,$]$T$` where
-`[$\mathit{tps}\,$]` is a type parameter section
-`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]`
+`[$\mathit{tps}\,$]` is a type parameter section
+`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]`
for some $n \geq 0$ and $T$ is a
(value or method) type. This type represents named methods that
take type arguments `$S_1 , \ldots , S_n$` which
@@ -635,14 +626,14 @@ produce the typings
```scala
empty : [A >: Nothing <: Any] List[A]
-union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A] .
+union : [A >: Nothing <: Comparable[A]] (x: Set[A], xs: Set[A]) Set[A]
```
### Type Constructors
A type constructor is represented internally much like a polymorphic method type.
-`[$\pm$ $a_1$ >: $L_1$ <: $U_1 , \ldots , \pm a_n$ >: $L_n$ <: $U_n$] $T$`
-represents a type that is expected by a
+`[$\pm$ $a_1$ >: $L_1$ <: $U_1 , \ldots , \pm a_n$ >: $L_n$ <: $U_n$] $T$`
+represents a type that is expected by a
[type constructor parameter](04-basic-declarations-and-definitions.html#type-parameters) or an
[abstract type constructor binding](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) with
the corresponding type parameter clause.
@@ -650,7 +641,7 @@ the corresponding type parameter clause.
###### Example
Consider this fragment of the `Iterable[+X]` class:
-
+
```
trait Iterable[+X] {
def flatMap[newType[+X] <: Iterable[X], S](f: X => newType[S]): newType[S]
@@ -661,7 +652,6 @@ Conceptually, the type constructor `Iterable` is a name for the
anonymous type `[+X] Iterable[X]`, which may be passed to the
`newType` type constructor parameter in `flatMap`.
-
<!-- ### Overloaded Types
More than one values or methods are defined in the same scope with the
@@ -669,7 +659,7 @@ same name, we model
An overloaded type consisting of type alternatives $T_1 \commadots T_n (n \geq 2)$ is denoted internally $T_1 \overload \ldots \overload T_n$.
-### Example
+###### Example
```
def println: Unit
def println(s: String): Unit = $\ldots$
@@ -687,7 +677,7 @@ println: => Unit $\overload$
[A] (A) (A => String) Unit
```
-### Example
+###### Example
```
def f(x: T): T = $\ldots$
val f = 0
@@ -695,7 +685,6 @@ val f = 0
define a function `f` which has type `(x: T)T $\overload$ Int`.
-->
-
## Base Types and Member Definitions
Types of class members depend on the way the members are referenced.
@@ -705,7 +694,6 @@ Central here are three notions, namely:
prefix type $S$,
1. the notion of the set of member bindings of some type $T$.
-
These notions are defined mutually recursively as follows.
1. The set of _base types_ of a type is a set of class types,
@@ -715,25 +703,25 @@ These notions are defined mutually recursively as follows.
`$T_1$ with … with $T_n$ { $R$ }`.
- The base types of an aliased type are the base types of its alias.
- The base types of an abstract type are the base types of its upper bound.
- - The base types of a parameterized type
+ - The base types of a parameterized type
`$C$[$T_1 , \ldots , T_n$]` are the base types
- of type $C$, where every occurrence of a type parameter $a_i$
+ of type $C$, where every occurrence of a type parameter $a_i$
of $C$ has been replaced by the corresponding parameter type $T_i$.
- The base types of a singleton type `$p$.type` are the base types of
the type of $p$.
- - The base types of a compound type
+ - The base types of a compound type
`$T_1$ with $\ldots$ with $T_n$ { $R$ }`
are the _reduced union_ of the base
- classes of all $T_i$'s. This means:
+ classes of all $T_i$'s. This means:
Let the multi-set $\mathscr{S}$ be the multi-set-union of the
base types of all $T_i$'s.
If $\mathscr{S}$ contains several type instances of the same class, say
`$S^i$#$C$[$T^i_1 , \ldots , T^i_n$]` $(i \in I)$, then
- all those instances
+ all those instances
are replaced by one of them which conforms to all
- others. It is an error if no such instance exists. It follows that the
+ others. It is an error if no such instance exists. It follows that the
reduced union, if it exists,
- produces a set of class types, where different types are instances of
+ produces a set of class types, where different types are instances of
different classes.
- The base types of a type selection `$S$#$T$` are
determined as follows. If $T$ is an alias or abstract type, the
@@ -748,26 +736,26 @@ These notions are defined mutually recursively as follows.
makes sense only if the prefix type $S$
has a type instance of class $C$ as a base type, say
`$S'$#$C$[$T_1 , \ldots , T_n$]`. Then we define as follows.
- - If `$S$ = $\epsilon$.type`, then $T$ in $C$ seen from $S$ is
+ - If `$S$ = $\epsilon$.type`, then $T$ in $C$ seen from $S$ is
$T$ itself.
- Otherwise, if $S$ is an existential type `$S'$ forSome { $Q$ }`, and
- $T$ in $C$ seen from $S'$ is $T'$,
+ $T$ in $C$ seen from $S'$ is $T'$,
then $T$ in $C$ seen from $S$ is `$T'$ forSome {$\,Q\,$}`.
- Otherwise, if $T$ is the $i$'th type parameter of some class $D$, then
- - If $S$ has a base type `$D$[$U_1 , \ldots , U_n$]`, for some type
- parameters `[$U_1 , \ldots , U_n$]`, then $T$ in $C$ seen from $S$
+ - If $S$ has a base type `$D$[$U_1 , \ldots , U_n$]`, for some type
+ parameters `[$U_1 , \ldots , U_n$]`, then $T$ in $C$ seen from $S$
is $U_i$.
- Otherwise, if $C$ is defined in a class $C'$, then
$T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$.
- - Otherwise, if $C$ is not defined in another class, then
+ - Otherwise, if $C$ is not defined in another class, then
$T$ in $C$ seen from $S$ is $T$ itself.
- Otherwise, if $T$ is the singleton type `$D$.this.type` for some class $D$
then
- - If $D$ is a subclass of $C$ and $S$ has a type instance of class $D$
+ - If $D$ is a subclass of $C$ and $S$ has a type instance of class $D$
among its base types, then $T$ in $C$ seen from $S$ is $S$.
- Otherwise, if $C$ is defined in a class $C'$, then
$T$ in $C$ seen from $S$ is the same as $T$ in $C'$ seen from $S'$.
- - Otherwise, if $C$ is not defined in another class, then
+ - Otherwise, if $C$ is not defined in another class, then
$T$ in $C$ seen from $S$ is $T$ itself.
- If $T$ is some other type, then the described mapping is performed
to all its type components.
@@ -788,8 +776,6 @@ These notions are defined mutually recursively as follows.
binding $d_T$ of the type `T` in `S`. In that case, we also say
that `S#T` _is defined by_ $d_T$.
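
An informal sketch (names invented) of the first notion above: every base type of a class accepts that class's values.

```scala
object BaseTypes {
  class A
  trait B
  class C extends A with B

  // the base types of C include C, A, B, AnyRef and Any
  val a: A      = new C
  val b: B      = new C
  val r: AnyRef = new C
  val any: Any  = new C
}
```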
-
-
## Relations between types
We define two relations between types.
@@ -799,7 +785,6 @@ We define two relations between types.
|Equivalence |$T \equiv U$ |$T$ and $U$ are interchangeable in all contexts. |
|Conformance |$T <: U$ |Type $T$ conforms to type $U$. |
-
### Equivalence
Equivalence $(\equiv)$ between types is the smallest congruence [^congruence] such that
@@ -813,7 +798,7 @@ the following holds:
consisting only of package or object selectors and ending in $O$, then
`$O$.this.type $\equiv p$.type`.
- Two [compound types](#compound-types) are equivalent if the sequences
- of their component are pairwise equivalent, and occur in the same order, and
+ of their component are pairwise equivalent, and occur in the same order, and
their refinements are equivalent. Two refinements are equivalent if they
bind the same names and the modifiers, types and bounds of every
declared entity are equivalent in both refinements.
@@ -823,43 +808,41 @@ the following holds:
- they have the same number of parameters; and
- corresponding parameters have equivalent types.
Note that the names of parameters do not matter for method type equivalence.
-- Two [polymorphic method types](#polymorphic-method-types) are equivalent if
- they have the same number of type parameters, and, after renaming one set of
+- Two [polymorphic method types](#polymorphic-method-types) are equivalent if
+ they have the same number of type parameters, and, after renaming one set of
type parameters by another, the result types as well as lower and upper bounds
of corresponding type parameters are equivalent.
-- Two [existential types](#existential-types)
+- Two [existential types](#existential-types)
are equivalent if they have the same number of
quantifiers, and, after renaming one list of type quantifiers by
another, the quantified types as well as lower and upper bounds of
corresponding quantifiers are equivalent.
-- Two [type constructors](#type-constructors) are equivalent if they have the
- same number of type parameters, and, after renaming one list of type
- parameters by another, the result types as well as variances, lower and upper
+- Two [type constructors](#type-constructors) are equivalent if they have the
+ same number of type parameters, and, after renaming one list of type
+ parameters by another, the result types as well as variances, lower and upper
bounds of corresponding type parameters are equivalent.
-
-[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts
+[^congruence]: A congruence is an equivalence relation which is closed under formation of contexts.
[^implicit]: A method type is implicit if the parameter section that defines it starts with the `implicit` keyword.
### Conformance
-The conformance relation $(<:)$ is the smallest
+The conformance relation $(<:)$ is the smallest
transitive relation that satisfies the following conditions.
- Conformance includes equivalence. If $T \equiv U$ then $T <: U$.
-- For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`.
-- For every type constructor $T$ (with any number of type parameters),
+- For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`.
+- For every type constructor $T$ (with any number of type parameters),
`scala.Nothing <: $T$ <: scala.Any`.
-
-- For every class type $T$ such that `$T$ <: scala.AnyRef` and not
- `$T$ <: scala.NotNull` one has `scala.Null <: $T$`.
+
+- For every class type $T$ such that `$T$ <: scala.AnyRef` one has `scala.Null <: $T$`.
- A type variable or abstract type $t$ conforms to its upper bound and
- its lower bound conforms to $t$.
+ its lower bound conforms to $t$.
- A class type or parameterized type conforms to any of its base-types.
- A singleton type `$p$.type` conforms to the type of the path $p$.
- A singleton type `$p$.type` conforms to the type `scala.Singleton`.
- A type projection `$T$#$t$` conforms to `$U$#$t$` if $T$ conforms to $U$.
-- A parameterized type `$T$[$T_1$ , … , $T_n$]` conforms to
+- A parameterized type `$T$[$T_1$ , … , $T_n$]` conforms to
`$T$[$U_1$ , … , $U_n$]` if
the following three conditions hold for $i \in \{ 1 , \ldots , n \}$:
1. If the $i$'th type parameter of $T$ is declared covariant, then
@@ -874,63 +857,61 @@ transitive relation that satisfies the following conditions.
binding $d$ of a type or value $x$ in $R$ there exists a member
binding of $x$ in $T$ which subsumes $d$, then $T$ conforms to the
compound type `$U_1$ with $\ldots$ with $U_n$ {$R\,$}`.
-- The existential type `$T$ forSome {$\,Q\,$}` conforms to
+- The existential type `$T$ forSome {$\,Q\,$}` conforms to
$U$ if its [skolemization](#existential-types)
conforms to $U$.
-- The type $T$ conforms to the existential type `$U$ forSome {$\,Q\,$}`
- if $T$ conforms to one of the [type instances](#existential-types)
+- The type $T$ conforms to the existential type `$U$ forSome {$\,Q\,$}`
+ if $T$ conforms to one of the [type instances](#existential-types)
of `$U$ forSome {$\,Q\,$}`.
- If
- $T_i \equiv T'_i$ for $i \in \{ 1 , \ldots , n\}$ and $U$ conforms to $U'$
+ $T_i \equiv T_i'$ for $i \in \{ 1 , \ldots , n\}$ and $U$ conforms to $U'$
then the method type $(p_1:T_1 , \ldots , p_n:T_n) U$ conforms to
- $(p'_1:T'_1 , \ldots , p'_n:T'_n) U'$.
+ $(p_1':T_1' , \ldots , p_n':T_n') U'$.
- The polymorphic type
- $[a_1 >: L_1 <: U_1 , \ldots , a_n >: L_n <: U_n] T$ conforms to the
+ $[a_1 >: L_1 <: U_1 , \ldots , a_n >: L_n <: U_n] T$ conforms to the
polymorphic type
- $[a_1 >: L'_1 <: U'_1 , \ldots , a_n >: L'_n <: U'_n] T'$ if, assuming
- $L'_1 <: a_1 <: U'_1 , \ldots , L'_n <: a_n <: U'_n$
- one has $T <: T'$ and $L_i <: L'_i$ and $U'_i <: U_i$
+ $[a_1 >: L_1' <: U_1' , \ldots , a_n >: L_n' <: U_n'] T'$ if, assuming
+ $L_1' <: a_1 <: U_1' , \ldots , L_n' <: a_n <: U_n'$
+ one has $T <: T'$ and $L_i <: L_i'$ and $U_i' <: U_i$
for $i \in \{ 1 , \ldots , n \}$.
-- Type constructors $T$ and $T'$ follow a similar discipline. We characterize
+- Type constructors $T$ and $T'$ follow a similar discipline. We characterize
$T$ and $T'$ by their type parameter clauses
$[a_1 , \ldots , a_n]$ and
- $[a'_1 , \ldots , a'_n ]$, where an $a_i$ or $a'_i$ may include a variance
- annotation, a higher-order type parameter clause, and bounds. Then, $T$
- conforms to $T'$ if any list $[t_1 , \ldots , t_n]$ -- with declared
- variances, bounds and higher-order type parameter clauses -- of valid type
- arguments for $T'$ is also a valid list of type arguments for $T$ and
- $T[t_1 , \ldots , t_n] <: T'[t_1 , \ldots , t_n]$. Note that this entails
+ $[a_1' , \ldots , a_n']$, where an $a_i$ or $a_i'$ may include a variance
+ annotation, a higher-order type parameter clause, and bounds. Then, $T$
+ conforms to $T'$ if any list $[t_1 , \ldots , t_n]$ -- with declared
+ variances, bounds and higher-order type parameter clauses -- of valid type
+ arguments for $T'$ is also a valid list of type arguments for $T$ and
+ $T[t_1 , \ldots , t_n] <: T'[t_1 , \ldots , t_n]$. Note that this entails
that:
- - The bounds on $a_i$ must be weaker than the corresponding bounds declared
- for $a'_i$.
- - The variance of $a_i$ must match the variance of $a'_i$, where covariance
+ - The bounds on $a_i$ must be weaker than the corresponding bounds declared
+ for $a'_i$.
+ - The variance of $a_i$ must match the variance of $a'_i$, where covariance
matches covariance, contravariance matches contravariance and any variance
matches invariance.
- Recursively, these restrictions apply to the corresponding higher-order
type parameter clauses of $a_i$ and $a'_i$.
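
A few of the conformance rules above, shown as an informal sketch (names invented):

```scala
object Conformance {
  class Animal
  class Dog extends Animal

  val a: Animal        = new Dog          // a class type conforms to its base types
  val ds: List[Dog]    = List(new Dog)
  val as: List[Animal] = ds               // List is covariant, so List[Dog] <: List[Animal]
  val n: Animal        = null             // scala.Null conforms to class types below AnyRef
  def d: Dog           = throw new Exception   // scala.Nothing conforms to every value type
}
```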
-
A declaration or definition in some compound type of class type $C$
_subsumes_ another declaration of the same name in some compound type or class
type $C'$, if one of the following holds.
-- A value declaration or definition that defines a name $x$ with type $T$
- subsumes a value or method declaration that defines $x$ with type $T'$, provided
+- A value declaration or definition that defines a name $x$ with type $T$
+ subsumes a value or method declaration that defines $x$ with type $T'$, provided
$T <: T'$.
-- A method declaration or definition that defines a name $x$ with type $T$
- subsumes a method declaration that defines $x$ with type $T'$, provided
+- A method declaration or definition that defines a name $x$ with type $T$
+ subsumes a method declaration that defines $x$ with type $T'$, provided
$T <: T'$.
- A type alias
- `type $t$[$T_1$ , … , $T_n$] = $T$` subsumes a type alias
- `type $t$[$T_1$ , … , $T_n$] = $T'$` if $T \equiv T'$.
+ `type $t$[$T_1$ , … , $T_n$] = $T$` subsumes a type alias
+ `type $t$[$T_1$ , … , $T_n$] = $T'$` if $T \equiv T'$.
- A type declaration `type $t$[$T_1$ , … , $T_n$] >: $L$ <: $U$` subsumes
- a type declaration `type $t$[$T_1$ , … , $T_n$] >: $L'$ <: $U'$` if
+ a type declaration `type $t$[$T_1$ , … , $T_n$] >: $L'$ <: $U'$` if
$L' <: L$ and $U <: U'$.
- A type or class definition that binds a type name $t$ subsumes an abstract
type declaration `type t[$T_1$ , … , $T_n$] >: L <: U` if
$L <: t <: U$.
-
The $(<:)$ relation forms pre-order between types,
i.e. it is transitive and reflexive. _least upper bounds_ and
_greatest lower bounds_ of a set of types
@@ -958,27 +939,24 @@ limit [^4].
The least upper bound or greatest lower bound might also not be
unique. For instance `A with B` and `B with A` are both
-greatest lower of `A` and `B`. If there are several
+greatest lower bounds of `A` and `B`. If there are several
least upper bounds or greatest lower bounds, the Scala compiler is
free to pick any one of them.
-
[^4]: The current Scala compiler limits the nesting level
of parameterization in such bounds to be at most two deeper than the
maximum nesting level of the operand types.
-
-
### Weak Conformance
-In some situations Scala uses a more general conformance relation. A
-type $S$ _weakly conforms_
+In some situations Scala uses a more general conformance relation. A
+type $S$ _weakly conforms_
to a type $T$, written $S <:_w
T$, if $S <: T$ or both $S$ and $T$ are primitive number types
and $S$ precedes $T$ in the following ordering.
```scala
-Byte $<:_w$ Short
+Byte $<:_w$ Short
Short $<:_w$ Int
Char $<:_w$ Int
Int $<:_w$ Long
@@ -989,14 +967,13 @@ Float $<:_w$ Double
A _weak least upper bound_ is a least upper bound with respect to
weak conformance.
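
An informal sketch (names invented) of weak conformance driving type inference; the inferred types in the comments assume the standard numeric widening behaviour:

```scala
object WeakConformance {
  // Int weakly conforms to Double, so the weak least upper bound of the branches is Double
  val x = if (scala.util.Random.nextBoolean()) 1 else 1.0   // x: Double

  // the same rule gives a mixed numeric list the element type Double
  val ys = List(1, 2.5, 3L)                                 // ys: List[Double]
}
```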
-
## Volatile Types
-Type volatility approximates the possibility that a type parameter or abstract
+Type volatility approximates the possibility that a type parameter or abstract
type instance
-of a type does not have any non-null values. A value member of a volatile type
+of a type does not have any non-null values. A value member of a volatile type
cannot appear in a [path](#paths).
-
+
A type is _volatile_ if it falls into one of four categories:
A compound type `$T_1$ with … with $T_n$ {$R\,$}`
@@ -1008,14 +985,13 @@ is volatile if one of the following two conditions hold.
to the compound type, or
1. one of $T_1 , \ldots , T_n$ is a singleton type.
-
Here, a type $S$ _contributes an abstract member_ to a type $T$ if
$S$ contains an abstract member that is also a member of $T$.
A refinement $R$ contributes an abstract member to a type $T$ if $R$
contains an abstract declaration which is also a member of $T$.
A type designator is volatile if it is an alias of a volatile type, or
-if it designates a type parameter or abstract type that has a volatile type as
+if it designates a type parameter or abstract type that has a volatile type as
its upper bound.
A singleton type `$p$.type` is volatile, if the underlying
@@ -1024,7 +1000,6 @@ type of path $p$ is volatile.
An existential type `$T$ forSome {$\,Q\,$}` is volatile if
$T$ is volatile.
-
## Type Erasure
A type is called _generic_ if it contains type arguments or type variables.
@@ -1037,20 +1012,19 @@ The erasure mapping is defined as follows.
- The erasure of the parameterized type `scala.Array$[T_1]$` is
`scala.Array$[|T_1|]$`.
- The erasure of every other parameterized type $T[T_1 , \ldots , T_n]$ is $|T|$.
-- The erasure of a singleton type `$p$.type` is the
+- The erasure of a singleton type `$p$.type` is the
erasure of the type of $p$.
- The erasure of a type projection `$T$#$x$` is `|$T$|#$x$`.
-- The erasure of a compound type
- `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` is the erasure of the intersection
+- The erasure of a compound type
+ `$T_1$ with $\ldots$ with $T_n$ {$R\,$}` is the erasure of the intersection
dominator of $T_1 , \ldots , T_n$.
- The erasure of an existential type `$T$ forSome {$\,Q\,$}` is $|T|$.
The _intersection dominator_ of a list of types $T_1 , \ldots , T_n$ is computed
as follows.
-Let $T_{i_1} , \ldots , T_{i_m}$ be the subsequence of types $T_i$
-which are not supertypes of some other type $T_j$.
+Let $T_{i_1} , \ldots , T_{i_m}$ be the subsequence of types $T_i$
+which are not supertypes of some other type $T_j$.
If this subsequence contains a type designator $T_c$ that refers to a class
-which is not a trait,
+which is not a trait,
the intersection dominator is $T_c$. Otherwise, the intersection
dominator is the first element of the subsequence, $T_{i_1}$.
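
An informal sketch (names invented) of the practical effect of erasure: type arguments are not available at run time, while `Array` keeps its element type:

```scala
object Erasure {
  // List[Int] and List[String] share one erased class, so the element type
  // cannot be tested here (the compiler emits an "unchecked" warning)
  def isIntList(xs: Any): Boolean = xs match {
    case _: List[Int] => true
    case _            => false
  }
  println(isIntList(List("a", "b")))   // true: only the erasure List is tested

  // Array is not erased to a single class: Array[Int] keeps its element type
  println(Array(1, 2, 3).getClass)     // class [I
}
```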
-
diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md
index ab1f98ea07..7fb5427d36 100644
--- a/spec/04-basic-declarations-and-definitions.md
+++ b/spec/04-basic-declarations-and-definitions.md
@@ -1,12 +1,11 @@
---
-title: Basic Declarations and Definitions
+title: Basic Declarations & Definitions
layout: default
chapter: 4
---
# Basic Declarations and Definitions
-
```ebnf
Dcl ::= ‘val’ ValDcl
| ‘var’ VarDcl
@@ -40,7 +39,6 @@ between and including $s_i$ and $s_j$,
- $s_k$ cannot be a variable definition.
- If $s_k$ is a value definition, it must be lazy.
-
<!--
Every basic definition may introduce several defined names, separated
by commas. These are expanded according to the following scheme:
@@ -79,7 +77,6 @@ additional parts in the definition, then those parts are implicitly
copied from the next subsequent sequence element which consists of
more than just a defined name and parameters. Examples:
-
- []
The variable declaration `var x, y: Int`
expands to `var x: Int; var y: Int`.
@@ -98,26 +95,24 @@ case object Blue extends Color .
```
-->
-
-
## Value Declarations and Definitions
```ebnf
Dcl ::= ‘val’ ValDcl
ValDcl ::= ids ‘:’ Type
-PatVarDef ::= ‘val’ PatDef
+PatVarDef ::= ‘val’ PatDef
PatDef ::= Pattern2 {‘,’ Pattern2} [‘:’ Type] ‘=’ Expr
ids ::= id {‘,’ id}
```
A value declaration `val $x$: $T$` introduces $x$ as a name of a value of
-type $T$.
+type $T$.
A value definition `val $x$: $T$ = $e$` defines $x$ as a
-name of the value that results from the evaluation of $e$.
+name of the value that results from the evaluation of $e$.
If the value definition is not recursive, the type
$T$ may be omitted, in which case the [packed type](06-expressions.html#expression-typing) of
-expression $e$ is assumed. If a type $T$ is given, then $e$ is expected to
+expression $e$ is assumed. If a type $T$ is given, then $e$ is expected to
conform to it.
Evaluation of the value definition implies evaluation of its
@@ -152,7 +147,7 @@ $\ldots$
val $x_n$ = $\$ x$._n .
```
-Here, $\$ x$ is a fresh name.
+Here, $\$ x$ is a fresh name.
2. If $p$ has a unique bound variable $x$:
@@ -187,17 +182,15 @@ val x = x$\$$._1
val xs = x$\$$._2
```
-
The name of any declared or defined value may not end in `_=`.
-A value declaration `val $x_1 , \ldots , x_n$: $T$` is a shorthand for the
+A value declaration `val $x_1 , \ldots , x_n$: $T$` is a shorthand for the
sequence of value declarations `val $x_1$: $T$; ...; val $x_n$: $T$`.
-A value definition `val $p_1 , \ldots , p_n$ = $e$` is a shorthand for the
+A value definition `val $p_1 , \ldots , p_n$ = $e$` is a shorthand for the
sequence of value definitions `val $p_1$ = $e$; ...; val $p_n$ = $e$`.
-A value definition `val $p_1 , \ldots , p_n: T$ = $e$` is a shorthand for the
+A value definition `val $p_1 , \ldots , p_n: T$ = $e$` is a shorthand for the
sequence of value definitions `val $p_1: T$ = $e$; ...; val $p_n: T$ = $e$`.
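
An informal sketch (names invented) of the shorthands and pattern expansions described above:

```scala
object ValueDefinitions {
  // multiple names expand to one definition per name
  val a, b: Int = 0                  // same as: val a: Int = 0; val b: Int = 0

  // a pattern on the left-hand side is expanded via pattern matching
  val (x, y) = (1, "one")            // binds x = 1 and y = "one"
  val first :: rest = List(3, 4, 5)  // binds first = 3 and rest = List(4, 5)
}
```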
-
## Variable Declarations and Definitions
```ebnf
@@ -212,7 +205,7 @@ A variable declaration `var $x$: $T$` is equivalent to the declarations
of both a _getter function_ $x$ *and* a _setter function_ `$x$_=`:
```scala
-def $x$: $T$
+def $x$: $T$
def $x$_= ($y$: $T$): Unit
```
@@ -222,13 +215,13 @@ using a variable definition, or by defining the corresponding setter and getter
A variable definition `var $x$: $T$ = $e$` introduces a
mutable variable with type $T$ and initial value as given by the
expression $e$. The type $T$ can be omitted, in which case the type of
-$e$ is assumed. If $T$ is given, then $e$ is expected to
+$e$ is assumed. If $T$ is given, then $e$ is expected to
[conform to it](06-expressions.html#expression-typing).
Variable definitions can alternatively have a [pattern](08-pattern-matching.html#patterns)
as left-hand side. A variable definition
`var $p$ = $e$` where $p$ is a pattern other
-than a simple name or a name followed by a colon and a type is expanded in the same way
+than a simple name or a name followed by a colon and a type is expanded in the same way
as a [value definition](#value-declarations-and-definitions)
`val $p$ = $e$`, except that
the free names in $p$ are introduced as mutable variables, not values.
@@ -249,7 +242,6 @@ The default value depends on the type $T$ as follows:
|`()` | `Unit` |
|`null` | all other types |
-
When they occur as members of a template, both forms of variable
definition also introduce a getter function $x$ which returns the
value currently assigned to the variable, as well as a setter function
@@ -291,13 +283,12 @@ d.hours = 8; d.minutes = 30; d.seconds = 0
d.hours = 25 // throws a DateError exception
```
-
-A variable declaration `var $x_1 , \ldots , x_n$: $T$` is a shorthand for the
+A variable declaration `var $x_1 , \ldots , x_n$: $T$` is a shorthand for the
sequence of variable declarations `var $x_1$: $T$; ...; var $x_n$: $T$`.
-A variable definition `var $x_1 , \ldots , x_n$ = $e$` is a shorthand for the
+A variable definition `var $x_1 , \ldots , x_n$ = $e$` is a shorthand for the
sequence of variable definitions `var $x_1$ = $e$; ...; var $x_n$ = $e$`.
-A variable definition `var $x_1 , \ldots , x_n: T$ = $e$` is a shorthand for
-the sequence of variable definitions
+A variable definition `var $x_1 , \ldots , x_n: T$ = $e$` is a shorthand for
+the sequence of variable definitions
`var $x_1: T$ = $e$; ...; var $x_n: T$ = $e$`.
## Type Declarations and Type Aliases
@@ -339,14 +330,14 @@ A _type alias_ `type $t$ = $T$` defines $t$ to be an alias
name for the type $T$. The left hand side of a type alias may
have a type parameter clause, e.g. `type $t$[$\mathit{tps}\,$] = $T$`. The scope
of a type parameter extends over the right hand side $T$ and the
-type parameter clause $\mathit{tps}$ itself.
+type parameter clause $\mathit{tps}$ itself.
-The scope rules for [definitions](#basic-declarations-and-definitions)
+The scope rules for [definitions](#basic-declarations-and-definitions)
and [type parameters](#function-declarations-and-definitions)
make it possible that a type name appears in its
own bound or in its right-hand side. However, it is a static error if
-a type alias refers recursively to the defined type constructor itself.
-That is, the type $T$ in a type alias `type $t$[$\mathit{tps}\,$] = $T$` may not
+a type alias refers recursively to the defined type constructor itself.
+That is, the type $T$ in a type alias `type $t$[$\mathit{tps}\,$] = $T$` may not
refer directly or indirectly to the name $t$. It is also an error if
an abstract type is directly or indirectly its own upper or lower bound.
@@ -400,7 +391,6 @@ As a consequence, for any two types $S$ and $T$, the type
val x: Pair[Int, String] = new Pair(1, "abc")
```
-
## Type Parameters
```ebnf
@@ -413,7 +403,7 @@ Type parameters appear in type definitions, class definitions, and
function definitions. In this section we consider only type parameter
definitions with lower bounds `>: $L$` and upper bounds
`<: $U$` whereas a discussion of context bounds
-`: $U$` and view bounds `<% $U$`
+`: $U$` and view bounds `<% $U$`
is deferred to [here](07-implicit-parameters-and-views.html#context-bounds-and-view-bounds).
The most general form of a first-order type parameter is
@@ -436,9 +426,9 @@ TODO: this is a pretty awkward description of scoping and distinctness of binder
The names of all type parameters must be pairwise different in their enclosing type parameter clause. The scope of a type parameter includes in each case the whole type parameter clause. Therefore it is possible that a type parameter appears as part of its own bounds or the bounds of other type parameters in the same clause. However, a type parameter may not be bounded directly or indirectly by itself.
-A type constructor parameter adds a nested type parameter clause to the type parameter. The most general form of a type constructor parameter is `$@a_1\ldots@a_n$ $\pm$ $t[\mathit{tps}\,]$ >: $L$ <: $U$`.
+A type constructor parameter adds a nested type parameter clause to the type parameter. The most general form of a type constructor parameter is `$@a_1\ldots@a_n$ $\pm$ $t[\mathit{tps}\,]$ >: $L$ <: $U$`.
-The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters. Higher-order type parameters (the type parameters of a type parameter $t$) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of $t$. Therefore, their names must only be pairwise different from the names of other visible parameters. Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a ‘_’, which is nowhere visible.
+The above scoping restrictions are generalized to the case of nested type parameter clauses, which declare higher-order type parameters. Higher-order type parameters (the type parameters of a type parameter $t$) are only visible in their immediately surrounding parameter clause (possibly including clauses at a deeper nesting level) and in the bounds of $t$. Therefore, their names must only be pairwise different from the names of other visible parameters. Since the names of higher-order type parameters are thus often irrelevant, they may be denoted with a `‘_’`, which is nowhere visible.
###### Example
Here are some well-formed type parameter clauses:
@@ -464,7 +454,6 @@ The following type parameter clauses are illegal:
// not conform to upper bound `B'.
```
-
## Variance Annotations
Variance annotations indicate how instances of parameterized types
@@ -475,17 +464,17 @@ missing variance indication indicates an invariant dependency.
A variance annotation constrains the way the annotated type variable
may appear in the type or class which binds the type parameter. In a
-type definition `type $T$[$\mathit{tps}\,$] = $S$`, or a type
+type definition `type $T$[$\mathit{tps}\,$] = $S$`, or a type
declaration `type $T$[$\mathit{tps}\,$] >: $L$ <: $U$` type parameters labeled
‘+’ must only appear in covariant position whereas
type parameters labeled ‘-’ must only appear in contravariant
position. Analogously, for a class definition
-`class $C$[$\mathit{tps}\,$]($\mathit{ps}\,$) extends $T$ { $x$: $S$ => ...}`,
+`class $C$[$\mathit{tps}\,$]($\mathit{ps}\,$) extends $T$ { $x$: $S$ => ...}`,
type parameters labeled
‘+’ must only appear in covariant position in the
self type $S$ and the template $T$, whereas type
parameters labeled ‘-’ must only appear in contravariant
-position.
+position.
The variance position of a type parameter in a type or template is
defined as follows. Let the opposite of covariance be contravariance,
@@ -493,12 +482,12 @@ and the opposite of invariance be itself. The top-level of the type
or template is always in covariant position. The variance position
changes at the following constructs.
-- The variance position of a method parameter is the opposite of the
+- The variance position of a method parameter is the opposite of the
variance position of the enclosing parameter clause.
- The variance position of a type parameter is the opposite of the
variance position of the enclosing type parameter clause.
-- The variance position of the lower bound of a type declaration or type parameter
- is the opposite of the variance position of the type declaration or parameter.
+- The variance position of the lower bound of a type declaration or type parameter
+ is the opposite of the variance position of the type declaration or parameter.
- The type of a mutable variable is always in invariant position.
- The right-hand side of a type alias is always in invariant position.
- The prefix $S$ of a type selection `$S$#$T$` is always in invariant position.
@@ -508,11 +497,11 @@ changes at the following constructs.
contravariant, the variance position of $T$ is the opposite of
the variance position of the enclosing type `$S$[$\ldots T \ldots$ ]`.
-<!-- TODO: handle type aliases -->
+<!-- TODO: handle type aliases -->
-References to the type parameters in
+References to the type parameters in
[object-private or object-protected values, types, variables, or methods](05-classes-and-objects.html#modifiers) of the class are not
-checked for their variance position. In these members the type parameter may
+checked for their variance position. In these members the type parameter may
appear anywhere without restricting its legal variance annotations.
###### Example
@@ -574,7 +563,7 @@ abstract class Sequence[+A] {
}
```
-### Example
+###### Example
```scala
abstract class OutputChannel[-A] {
@@ -588,22 +577,21 @@ That is, a
channel on which one can write any object can substitute for a channel
on which one can write only strings.
-
## Function Declarations and Definitions
```ebnf
Dcl ::= ‘def’ FunDcl
FunDcl ::= FunSig ‘:’ Type
Def ::= ‘def’ FunDef
-FunDef ::= FunSig [‘:’ Type] ‘=’ Expr
+FunDef ::= FunSig [‘:’ Type] ‘=’ Expr
FunSig ::= id [FunTypeParamClause] ParamClauses
-FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
+FunTypeParamClause ::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’]
-ParamClause ::= [nl] ‘(’ [Params] ‘)’}
+ParamClause ::= [nl] ‘(’ [Params] ‘)’}
Params ::= Param {‘,’ Param}
Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr]
-ParamType ::= Type
- | ‘=>’ Type
+ParamType ::= Type
+ | ‘=>’ Type
| Type ‘*’
```
@@ -624,11 +612,11 @@ result type, if one is given. If the function definition is not
recursive, the result type may be omitted, in which case it is
determined from the packed type of the function body.
-A type parameter clause $\mathit{tps}$ consists of one or more
-[type declarations](#type-declarations-and-type-aliases), which introduce type
+A type parameter clause $\mathit{tps}$ consists of one or more
+[type declarations](#type-declarations-and-type-aliases), which introduce type
parameters, possibly with bounds. The scope of a type parameter includes
the whole signature, including any of the type parameter bounds as
-well as the function body, if it is present.
+well as the function body, if it is present.
A value parameter clause $\mathit{ps}$ consists of zero or more formal
parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value
@@ -643,11 +631,11 @@ For every parameter $p_{i,j}$ with a default argument a method named
expression. Here, $n$ denotes the parameter's position in the method
declaration. These methods are parametrized by the type parameter clause
`[$\mathit{tps}\,$]` and all value parameter clauses
-`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceeding $p_{i,j}$.
+`($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$.
The `$f\$$default$\$$n` methods are inaccessible for
user programs.
-The scope of a formal value parameter name $x$ comprises all subsequent
+The scope of a formal value parameter name $x$ comprises all subsequent
parameter clauses, as well as the method return type and the function body, if
they are given. Both type parameter names and value parameter names must
be pairwise distinct.
@@ -669,10 +657,8 @@ def compare$\$$default$\$$1[T]: Int = 0
def compare$\$$default$\$$2[T](a: T): T = a
```
-
### By-Name Parameters
-
```ebnf
ParamType ::= ‘=>’ Type
```
@@ -687,7 +673,7 @@ function. That is, the argument is evaluated using _call-by-name_.
The by-name modifier is disallowed for parameters of classes that
carry a `val` or `var` prefix, including parameters of case
classes for which a `val` prefix is implicitly generated. The
-by-name modifier is also disallowed for
+by-name modifier is also disallowed for
[implicit parameters](07-implicit-parameters-and-views.html#implicit-parameters).
###### Example
@@ -700,7 +686,6 @@ def whileLoop (cond: => Boolean) (stat: => Unit): Unit
indicates that both parameters of `whileLoop` are evaluated using
call-by-name.
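For illustration, a minimal runnable sketch of such a signature and a call site (the recursive body mirrors the definition of `whileLoop` given for while loop expressions):

```scala
def whileLoop(cond: => Boolean)(stat: => Unit): Unit =
  if (cond) { stat; whileLoop(cond)(stat) }

var i = 0
whileLoop(i < 3) {
  println(i)   // cond and stat are re-evaluated on every recursive call
  i += 1
}
```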
-
### Repeated Parameters
```ebnf
@@ -708,21 +693,21 @@ ParamType ::= Type ‘*’
```
The last value parameter of a parameter section may be suffixed by
-“*”, e.g. `(..., $x$:$T$*)`. The type of such a
+`'*'`, e.g. `(..., $x$:$T$*)`. The type of such a
_repeated_ parameter inside the method is then the sequence type
`scala.Seq[$T$]`. Methods with repeated parameters
`$T$*` take a variable number of arguments of type $T$.
-That is, if a method $m$ with type
-`($p_1:T_1 , \ldots , p_n:T_n, p_s:S$*)$U$` is applied to arguments
-$(e_1 , \ldots , e_k)$ where $k \geq n$, then $m$ is taken in that application
-to have type $(p_1:T_1 , \ldots , p_n:T_n, p_s:S , \ldots , p_{s'}S)U$, with
+That is, if a method $m$ with type
+`($p_1:T_1 , \ldots , p_n:T_n, p_s:S$*)$U$` is applied to arguments
+$(e_1 , \ldots , e_k)$ where $k \geq n$, then $m$ is taken in that application
+to have type $(p_1:T_1 , \ldots , p_n:T_n, p_s:S , \ldots , p_{s'}S)U$, with
$k - n$ occurrences of type
-$S$ where any parameter names beyond $p_s$ are fresh. The only exception to
+$S$ where any parameter names beyond $p_s$ are fresh. The only exception to
this rule is if the last argument is
marked to be a _sequence argument_ via a `_*` type
annotation. If $m$ above is applied to arguments
`($e_1 , \ldots , e_n, e'$: _*)`, then the type of $m$ in
-that application is taken to be
+that application is taken to be
`($p_1:T_1, \ldots , p_n:T_n,p_{s}:$scala.Seq[$S$])`.
It is not allowed to define any default arguments in a parameter section
@@ -735,7 +720,7 @@ variable number of integer arguments.
```scala
def sum(args: Int*) = {
var result = 0
- for (arg <- args) result += arg * arg
+ for (arg <- args) result += arg
result
}
```
@@ -768,7 +753,6 @@ the result `6`:
sum(xs: _*)
```
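Combining the fragments above into one runnable sketch:

```scala
def sum(args: Int*): Int = {
  var result = 0
  for (arg <- args) result += arg
  result
}

val xs = List(1, 2, 3)
sum(1, 2, 3)   // variable-length argument list: 6
sum(xs: _*)    // the sequence xs passed as a single sequence argument: 6
```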
-
### Procedures
```ebnf
@@ -777,7 +761,7 @@ FunDef ::= FunSig [nl] ‘{’ Block ‘}’
```
Special syntax exists for procedures, i.e. functions that return the
-`Unit` value `()`.
+`Unit` value `()`.
A procedure declaration is a function declaration where the result type
is omitted. The result type is then implicitly completed to the
`Unit` type. E.g., `def $f$($\mathit{ps}$)` is equivalent to
@@ -811,7 +795,6 @@ object Terminal extends Writer {
}
```
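A minimal sketch of the equivalence (the `Log` object and its methods are illustrative names):

```scala
object Log {
  def info(msg: String) { println("[info] " + msg) }             // procedure syntax
  def infoExplicit(msg: String): Unit = println("[info] " + msg) // equivalent explicit form
}
```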
-
### Method Return Type Inference
A class member definition $m$ that overrides some other function $m'$
@@ -838,8 +821,6 @@ class C extends I {
Here, it is OK to leave out the result type of `factorial`
in `C`, even though the method is recursive.
-
-
<!-- ## Overloaded Definitions
\label{sec:overloaded-defs}
\todo{change}
@@ -862,12 +843,12 @@ $T_j$ have the same erasure (\sref{sec:erasure}).
```ebnf
Import ::= ‘import’ ImportExpr {‘,’ ImportExpr}
ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors)
-ImportSelectors ::= ‘{’ {ImportSelector ‘,’}
+ImportSelectors ::= ‘{’ {ImportSelector ‘,’}
(ImportSelector | ‘_’) ‘}’
ImportSelector ::= id [‘=>’ id | ‘=>’ ‘_’]
```
-An import clause has the form `import $p$.$I$` where $p$ is a
+An import clause has the form `import $p$.$I$` where $p$ is a
[stable identifier](03-types.html#paths) and $I$ is an import expression.
The import expression determines a set of names of importable members of $p$
which are made available without qualification. A member $m$ of $p$ is
@@ -875,10 +856,10 @@ _importable_ if it is not [object-private](05-classes-and-objects.html#modifiers
The most general form of an import expression is a list of _import selectors_
```scala
-{ $x_1$ => $y_1 , \ldots , x_n$ => $y_n$, _ }
+{ $x_1$ => $y_1 , \ldots , x_n$ => $y_n$, _ }
```
-for $n \geq 0$, where the final wildcard ‘_’ may be absent. It
+for $n \geq 0$, where the final wildcard `‘_’` may be absent. It
makes available each importable member `$p$.$x_i$` under the unqualified name
$y_i$. I.e. every import selector `$x_i$ => $y_i$` renames
`$p$.$x_i$` to
@@ -913,13 +894,13 @@ identifier or wildcard. The import clause `import $p$.$x$` is
equivalent to `import $p$.{$x\,$}`, i.e. it makes available without
qualification the member $x$ of $p$. The import clause
`import $p$._` is equivalent to
-`import $p$.{_}`,
+`import $p$.{_}`,
i.e. it makes available without qualification all members of $p$
(this is analogous to `import $p$.*` in Java).
An import clause with multiple import expressions
`import $p_1$.$I_1 , \ldots , p_n$.$I_n$` is interpreted as a
-sequence of import clauses
+sequence of import clauses
`import $p_1$.$I_1$; $\ldots$; import $p_n$.$I_n$`.
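A minimal sketch of selector renaming combined with a wildcard (the `Collections` object is illustrative):

```scala
object Collections {
  val empty: List[Int] = Nil
  def make(xs: Int*): List[Int] = xs.toList
}

import Collections.{make => build, _}   // rename make to build, import the rest unqualified

val ys = build(1, 2, 3)   // refers to Collections.make
val zs = empty            // refers to Collections.empty
```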
###### Example
diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md
index 70fa3e0272..a6908ba39f 100644
--- a/spec/05-classes-and-objects.md
+++ b/spec/05-classes-and-objects.md
@@ -1,5 +1,5 @@
---
-title: Classes and Objects
+title: Classes & Objects
layout: default
chapter: 5
---
@@ -15,7 +15,6 @@ TmplDef ::= [`case'] `class' ClassDef
[Classes](#class-definitions) and [objects](#object-definitions)
are both defined in terms of _templates_.
-
## Templates
```ebnf
@@ -31,8 +30,8 @@ SelfType ::= id [`:' Type] `=>'
A template defines the type signature, behavior and initial state of a
trait or class of objects or of a single object. Templates form part of
instance creation expressions, class definitions, and object
-definitions. A template
-`$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }`
+definitions. A template
+`$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }`
consists of a constructor invocation $sc$
which defines the template's _superclass_, trait references
`$mt_1 , \ldots , mt_n$` $(n \geq 0)$, which define the
@@ -61,7 +60,7 @@ superclass.
The _least proper supertype_ of a template is the class type or
[compound type](03-types.html#compound-types) consisting of all its parent
-class types.
+class types.
The statement sequence $\mathit{stats}$ contains member definitions that
define new members or overwrite members in the parent classes. If the
@@ -77,7 +76,7 @@ The sequence of template statements may be prefixed with a formal
parameter definition and an arrow, e.g. `$x$ =>`, or
`$x$:$T$ =>`. If a formal parameter is given, it can be
used as an alias for the reference `this` throughout the
-body of the template.
+body of the template.
If the formal parameter comes with a type $T$, this definition affects
the _self type_ $S$ of the underlying class or object as follows: Let $C$ be the type
of the class or trait or object defining the template.
@@ -87,11 +86,11 @@ If no type $T$ is given, $S$ is just $C$.
Inside the template, the type of `this` is assumed to be $S$.
The self type of a class or object must conform to the self types of
-all classes which are inherited by the template $t$.
+all classes which are inherited by the template $t$.
-A second form of self type annotation reads just
+A second form of self type annotation reads just
`this: $S$ =>`. It prescribes the type $S$ for `this`
-without introducing an alias name for it.
+without introducing an alias name for it.
###### Example
Consider the following class definitions:
@@ -108,21 +107,20 @@ In this case, the definition of `O` is expanded to:
object O extends Base with Mixin {}
```
-
<!-- TODO: Make all references to Java generic -->
**Inheriting from Java Types** A template may have a Java class as its superclass and Java interfaces as its
-mixins.
+mixins.
**Template Evaluation** Consider a template `$sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }`.
-If this is the template of a [trait](#traits) then its _mixin-evaluation_
+If this is the template of a [trait](#traits) then its _mixin-evaluation_
consists of an evaluation of the statement sequence $\mathit{stats}$.
If this is not a template of a trait, then its _evaluation_
consists of the following steps.
-- First, the superclass constructor $sc$ is
+- First, the superclass constructor $sc$ is
[evaluated](#constructor-invocations).
- Then, all base classes in the template's [linearization](#class-linearization)
up to the template's superclass denoted by $sc$ are
@@ -130,9 +128,8 @@ consists of the following steps.
occurrence in the linearization.
- Finally the statement sequence $\mathit{stats}\,$ is evaluated.
-
-###### Delayed Initializaton
-The initialization code of an object or class (but not a trait) that follows
+###### Delayed Initialization
+The initialization code of an object or class (but not a trait) that follows
the superclass
constructor invocation and the mixin-evaluation of the template's base
classes is passed to a special hook, which is inaccessible from user
@@ -145,7 +142,6 @@ method, which is defined as follows:
def delayedInit(body: => Unit)
```
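The standard `scala.App` trait is the best-known client of this hook; a minimal sketch:

```scala
object HelloWorld extends App {
  // Not run as a constructor statement: the compiler passes this statement
  // to delayedInit, and it is executed when the runtime invokes HelloWorld.main.
  println("Hello, world")
}
```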
-
### Constructor Invocations
```ebnf
@@ -156,7 +152,7 @@ Constructor invocations define the type, members, and initial state of
objects created by an instance creation expression, or of parts of an
object's definition which are inherited by a class or object
definition. A constructor invocation is a function application
-`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`, where $x$ is a
+`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`, where $x$ is a
[stable identifier](03-types.html#paths), $c$ is a type name which either designates a
class or defines an alias type for one, $\mathit{targs}$ is a type argument
list, $\mathit{args}_1 , \ldots , \mathit{args}_n$ are argument lists, and there is a
@@ -171,12 +167,12 @@ it can be omitted, in which case a type argument list is synthesized
using [local type inference](06-expressions.html#local-type-inference). If no explicit
arguments are given, an empty list `()` is implicitly supplied.
-An evaluation of a constructor invocation
+An evaluation of a constructor invocation
`$x$.$c$[$\mathit{targs}$]($\mathit{args}_1$)$\ldots$($\mathit{args}_n$)`
consists of the following steps:
- First, the prefix $x$ is evaluated.
-- Then, the arguments $\mathit{args}_1 , \ldots , \mathit{args}_n$ are evaluated from
+- Then, the arguments $\mathit{args}_1 , \ldots , \mathit{args}_n$ are evaluated from
left to right.
- Finally, the class being constructed is initialized by evaluating the
template of the class referred to by $c$.
@@ -188,7 +184,6 @@ inheritance relation from a class $C$ are called the _base classes_ of $C$. Bec
on base classes forms in general a directed acyclic graph. A
linearization of this graph is defined as follows.
-
###### Definition: linearization
Let $C$ be a class with template
`$C_1$ with ... with $C_n$ { $\mathit{stats}$ }`.
@@ -206,7 +201,6 @@ $$
\end{array}
$$
-
###### Example
Consider the following class definitions.
@@ -226,7 +220,7 @@ Then the linearization of class `Iter` is
Note that the linearization of a class refines the inheritance
relation: if $C$ is a subclass of $D$, then $C$ precedes $D$ in any
linearization where both $C$ and $D$ occur.
-[Linearization](#definition-linearization) also satisfies the property that
+[Linearization](#definition:-linearization) also satisfies the property that
a linearization of a class always contains the linearization of its direct superclass as a suffix.
For instance, the linearization of `StringIterator` is
@@ -245,7 +239,6 @@ For instance, the linearization of `RichIterator` is
which is not a suffix of the linearization of `Iter`.
-
### Class Members
A class $C$ defined by a template `$C_1$ with $\ldots$ with $C_n$ { $\mathit{stats}$ }`
@@ -294,8 +287,8 @@ $C_j \in \mathcal{L}(C)$ where $j < i$ which directly defines an abstract
member $M'$ matching $M$.
This definition also determines the [overriding](#overriding) relationships
-between matching members of a class $C$ and its parents.
-First, a concrete definition always overrides an abstract definition.
+between matching members of a class $C$ and its parents.
+First, a concrete definition always overrides an abstract definition.
Second, for definitions $M$ and $M$' which are both concrete or both abstract,
$M$ overrides $M'$ if $M$ appears in a class that precedes (in the
linearization of $C$) the class in which $M'$ is defined.
@@ -306,7 +299,6 @@ or inherited) with the same name and the same [erased type](03-types.html#type-e
Finally, a template is not allowed to contain two methods (directly
defined or inherited) with the same name which both define default arguments.
-
###### Example
Consider the trait definitions:
@@ -321,12 +313,11 @@ Then trait `D` has a directly defined abstract member `h`. It
inherits member `f` from trait `C` and member `g` from
trait `B`.
-
### Overriding
<!-- TODO: Explain that classes cannot override each other -->
-A member $M$ of class $C$ that [matches](#class-members)
+A member $M$ of class $C$ that [matches](#class-members)
a non-private member $M'$ of a
base class of $C$ is said to _override_ that member. In this case
the binding of the overriding member $M$ must [subsume](03-types.html#conformance)
@@ -339,14 +330,15 @@ $M'$:
- If $M$ is labeled `private[$C$]` for some enclosing class or package $C$,
then $M'$ must be labeled `private[$C'$]` for some class or package $C'$ where
$C'$ equals $C$ or $C'$ is contained in $C$.
- <!-- TODO: check whether this is accurate -->
+
+<!-- TODO: check whether this is accurate -->
- If $M$ is labeled `protected`, then $M'$ must also be
labeled `protected`.
- If $M'$ is not an abstract member, then $M$ must be labeled `override`.
Furthermore, one of two possibilities must hold:
- - either $M$ is defined in a subclass of the class where is $M'$ is defined,
+ - either $M$ is defined in a subclass of the class where $M'$ is defined,
- or both $M$ and $M'$ override a third member $M''$ which is defined
- in a base class of both the classes containing $M$ and $M'$
+ in a base class of both the classes containing $M$ and $M'$
- If $M'$ is [incomplete](#modifiers) in $C$ then $M$ must be
labeled `abstract override`.
- If $M$ and $M'$ are both concrete value definitions, then either none
@@ -376,7 +368,7 @@ it is possible to add new defaults (if the corresponding parameter in the
superclass does not have a default) or to override the defaults of the
superclass (otherwise).
-### Example
+###### Example
Consider the definitions:
@@ -398,7 +390,6 @@ definition of type `T` in class `C`:
class C extends A with B { type T <: C }
```
-
### Inheritance Closure
Let $C$ be a class type. The _inheritance closure_ of $C$ is the
@@ -434,9 +425,9 @@ constructor is called. In a template
```
The initial pattern definitions of $p_1 , \ldots , p_n$ are called
-_early definitions_. They define fields
+_early definitions_. They define fields
which form part of the template. Every early definition must define
-at least one variable.
+at least one variable.
An early definition is type-checked and evaluated in the scope which
is in effect just before the template being defined, augmented by any
@@ -452,7 +443,6 @@ always refer to the value that's defined there, and do not take into account
overriding definitions. In other words, a block of early definitions
is evaluated exactly as if it was a local block containing a number of value
definitions.
-
Early definitions are evaluated in the order they are being defined
before the superclass constructor of the template is called.
@@ -482,11 +472,10 @@ body, it would be initialized after the constructor of
`Greeting`. In that case, `msg` would be initialized to
`"How are you, <null>"`.
-
## Modifiers
```ebnf
-Modifier ::= LocalModifier
+Modifier ::= LocalModifier
| AccessModifier
| `override'
LocalModifier ::= `abstract'
@@ -537,7 +526,7 @@ case the member is called _qualified private_.
Class-private or object-private members may not be abstract, and may
not have `protected` or `override` modifiers.
-#### `protected`
+### `protected`
The `protected` modifier applies to class member definitions.
Protected members of a class can be accessed from within
- the template of the defining class,
@@ -568,14 +557,14 @@ legal if the prefix is `this` or `$O$.this`, for some
class $O$ enclosing the reference. In addition, the restrictions for
unqualified `protected` apply.
-#### `override`
+### `override`
The `override` modifier applies to class member definitions or declarations.
It is mandatory for member definitions or declarations that override some
other concrete member definition in a parent class. If an `override`
modifier is given, there must be at least one overridden member
definition or declaration (either concrete or abstract).
-#### `abstract override`
+### `abstract override`
The `override` modifier has an additional significance when
combined with the `abstract` modifier. That modifier combination
is only allowed for value members of traits.
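A typical use is a stackable modification of a trait method (a minimal sketch with illustrative names):

```scala
trait Printer {
  def print(s: String): Unit
}

trait Upper extends Printer {
  abstract override def print(s: String): Unit = super.print(s.toUpperCase)
}

class ConsolePrinter extends Printer {
  def print(s: String): Unit = println(s)
}

val p = new ConsolePrinter with Upper
p.print("hi")   // prints "HI": Upper.print forwards to ConsolePrinter.print via super
```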
@@ -590,7 +579,7 @@ influence the concept whether a member is concrete or abstract. A
member is _abstract_ if only a declaration is given for it;
it is _concrete_ if a full definition is given.
-#### `abstract`
+### `abstract`
The `abstract` modifier is used in class definitions. It is
redundant for traits, and mandatory for all other classes which have
incomplete members. Abstract classes cannot be
@@ -603,7 +592,7 @@ The `abstract` modifier can also be used in conjunction with
`override` for class member definitions. In that case the
previous discussion applies.
-#### `final`
+### `final`
The `final` modifier applies to class member definitions and to
class definitions. A `final` class member definition may not be
overridden in subclasses. A `final` class may not be inherited by
@@ -615,13 +604,13 @@ an explicit `final` modifier, even if they are defined in a final class or
object. `final` may not be applied to incomplete members, and it may not be
combined in one modifier list with `sealed`.
-#### `sealed`
+### `sealed`
The `sealed` modifier applies to class definitions. A
`sealed` class may not be directly inherited, except if the inheriting
template is defined in the same source file as the inherited class.
However, subclasses of a sealed class can be inherited anywhere.
-#### `lazy`
+### `lazy`
The `lazy` modifier applies to value definitions. A `lazy`
value is initialized the first time it is accessed (which might never
happen at all). Attempting to access a lazy value during its
@@ -629,7 +618,6 @@ initialization might lead to looping behavior. If an exception is
thrown during initialization, the value is considered uninitialized,
and a later access will retry to evaluate its right hand side.
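A small sketch of this initialization behavior:

```scala
lazy val expensive: Int = {
  println("initializing")   // evaluated only on the first access
  42
}

println(expensive)   // prints "initializing", then 42
println(expensive)   // prints 42 only; the value is cached after initialization
```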
-
###### Example
The following code illustrates the use of qualified private:
@@ -652,7 +640,6 @@ package-private methods in Java. Finally, accesses to method
`h` can appear anywhere within package `outerpkg`,
including packages contained in it.
-
###### Example
A useful idiom to prevent clients of a class from
constructing new instances of that class is to declare the class
@@ -680,14 +667,13 @@ new m.C(0) {} // **** error: illegal inheritance from sealed class.
A similar access restriction can be achieved by marking the primary
constructor `private` ([example](#example-private-constructor)).
-
## Class Definitions
```ebnf
-TmplDef ::= `class' ClassDef
-ClassDef ::= id [TypeParamClause] {Annotation}
- [AccessModifier] ClassParamClauses ClassTemplateOpt
-ClassParamClauses ::= {ClassParamClause}
+TmplDef ::= `class' ClassDef
+ClassDef ::= id [TypeParamClause] {Annotation}
+ [AccessModifier] ClassParamClauses ClassTemplateOpt
+ClassParamClauses ::= {ClassParamClause}
[[nl] `(' implicit ClassParams `)']
ClassParamClause ::= [nl] `(' [ClassParams] ')'
ClassParams ::= ClassParam {`,' ClassParam}
@@ -696,10 +682,10 @@ ClassParam ::= {Annotation} {Modifier} [(`val' | `var')]
ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
```
-The most general form of class definition is
+The most general form of class definition is
```scala
-class $c$[$\mathit{tps}\,$] $as$ $m$($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) extends $t$ $\gap(n \geq 0)$.
+class $c$[$\mathit{tps}\,$] $as$ $m$($\mathit{ps}_1$)$\ldots$($\mathit{ps}_n$) extends $t$ $\quad(n \geq 0)$.
```
Here,
@@ -712,16 +698,16 @@ Here,
parameter section `[$\mathit{tps}\,$]` may be omitted. A class with a type
parameter section is called _polymorphic_, otherwise it is called
_monomorphic_.
- - $as$ is a possibly empty sequence of
- [annotations](11-user-defined-annotations.html#user-defined-annotations).
- If any annotations are given, they apply to the primary constructor of the
+ - $as$ is a possibly empty sequence of
+ [annotations](11-annotations.html#user-defined-annotations).
+ If any annotations are given, they apply to the primary constructor of the
class.
- $m$ is an [access modifier](#modifiers) such as
`private` or `protected`, possibly with a qualification.
If such an access modifier is given it applies to the primary constructor of the class.
- - $(\mathit{ps}_1)\ldots(\mathit{ps}_n)$ are formal value parameter clauses for
+ - $(\mathit{ps}\_1)\ldots(\mathit{ps}\_n)$ are formal value parameter clauses for
the _primary constructor_ of the class. The scope of a formal value parameter includes
- all subsequent parameter sections and the template $t$. However, a formal
+ all subsequent parameter sections and the template $t$. However, a formal
value parameter may not form part of the types of any of the parent classes or members of the class template $t$.
It is illegal to define two formal value parameters with the same name.
@@ -741,25 +727,24 @@ Here,
- $t$ is a [template](#templates) of the form
- ```
+ ```
$sc$ with $mt_1$ with $\ldots$ with $mt_m$ { $\mathit{stats}$ } // $m \geq 0$
```
which defines the base classes, behavior and initial state of objects of
- the class. The extends clause
- `extends $sc$ with $mt_1$ with $\ldots$ with $mt_m$`
+ the class. The extends clause
+ `extends $sc$ with $mt_1$ with $\ldots$ with $mt_m$`
can be omitted, in which case
`extends scala.AnyRef` is assumed. The class body
`{ $\mathit{stats}$ }` may also be omitted, in which case the empty body
`{}` is assumed.
-
This class definition defines a type `$c$[$\mathit{tps}\,$]` and a constructor
which when applied to parameters conforming to types $\mathit{ps}$
initializes instances of type `$c$[$\mathit{tps}\,$]` by evaluating the template
$t$.
-### Example
+###### Example
The following example illustrates `val` and `var` parameters of a class `C`:
```scala
@@ -768,6 +753,7 @@ val c = new C(1, "abc", List())
c.z = c.y :: c.z
```
+### Example Private Constructor
The following class can be created only from its companion module.
```scala
@@ -781,11 +767,10 @@ class Sensitive private () {
}
```
-
### Constructor Definitions
```ebnf
-FunDef ::= `this' ParamClause ParamClauses
+FunDef ::= `this' ParamClause ParamClauses
(`=' ConstrExpr | [nl] ConstrBlock)
ConstrExpr ::= SelfInvocation
| ConstrBlock
@@ -813,16 +798,16 @@ to instantiate formal type parameters.
The signature and the self constructor invocation of a constructor
definition are type-checked and evaluated in the scope which is in
effect at the point of the enclosing class definition, augmented by
-any type parameters of the enclosing class and by any
+any type parameters of the enclosing class and by any
[early definitions](#early-definitions) of the enclosing template.
The rest of the
constructor expression is type-checked and evaluated as a function
body in the current class.
-
+
If there are auxiliary constructors of a class $C$, they form together
with $C$'s primary [constructor](#class-definitions)
an overloaded constructor
-definition. The usual rules for
+definition. The usual rules for
[overloading resolution](06-expressions.html#overloading-resolution)
apply for constructor invocations of $C$,
including for the self constructor invocations in the constructor
@@ -831,8 +816,7 @@ are never inherited. To prevent infinite cycles of constructor
invocations, there is the restriction that every self constructor
invocation must refer to a constructor definition which precedes it
(i.e. it must refer to either a preceding auxiliary constructor or the
-primary constructor of the class).
-
+primary constructor of the class).
###### Example
Consider the class definition
@@ -851,7 +835,6 @@ This defines a class `LinkedList` with three constructors. The
second constructor constructs a singleton list, while the
third one constructs a list with a given head and tail.
-
## Case Classes
```ebnf
@@ -859,7 +842,7 @@ TmplDef ::= `case' `class' ClassDef
```
If a class definition is prefixed with `case`, the class is said
-to be a _case class_.
+to be a _case class_.
The formal parameters in the first parameter section of a case class
are called _elements_; they are treated
@@ -885,10 +868,10 @@ object $c$ {
Here, $\mathit{Ts}$ stands for the vector of types defined in the type
parameter section $\mathit{tps}$,
-each $\mathit{xs}_i$ denotes the parameter names of the parameter
-section $\mathit{ps}_i$, and
-$\mathit{xs}_{11}, \ldots , \mathit{xs}_{1k}$ denote the names of all parameters
-in the first parameter section $\mathit{xs}_1$.
+each $\mathit{xs}\_i$ denotes the parameter names of the parameter
+section $\mathit{ps}\_i$, and
+$\mathit{xs}\_{11}, \ldots , \mathit{xs}\_{1k}$ denote the names of all parameters
+in the first parameter section $\mathit{xs}\_1$.
If a type parameter section is missing in the
class, it is also missing in the `apply` and
`unapply` methods.
@@ -904,7 +887,7 @@ def unapply[$\mathit{tps}\,$]($x$: $c$[$\mathit{tps}\,$]) = x ne null
```
The name of the `unapply` method is changed to `unapplySeq` if the first
-parameter section $\mathit{ps}_1$ of $c$ ends in a
+parameter section $\mathit{ps}_1$ of $c$ ends in a
[repeated parameter](04-basic-declarations-and-definitions.html#repeated-parameters).
If a companion object $c$ exists already, no new object is created,
but the `apply` and `unapply` methods are added to the existing
@@ -919,9 +902,9 @@ def copy[$\mathit{tps}\,$]($\mathit{ps}'_1\,$)$\ldots$($\mathit{ps}'_n$): $c$[$\
```
Again, `$\mathit{Ts}$` stands for the vector of types defined in the type parameter section `$\mathit{tps}$`
-and each `$\xs_i$` denotes the parameter names of the parameter section `$\ps'_i$`. The value
-parameters `$\ps'_{1,j}$` of first parameter list have the form `$x_{1,j}$:$T_{1,j}$=this.$x_{1,j}$`,
-the other parameters `$\ps'_{i,j}$` of the `copy` method are defined as `$x_{i,j}$:$T_{i,j}$`.
+and each `$xs_i$` denotes the parameter names of the parameter section `$ps'_i$`. The value
+parameters `$ps'_{1,j}$` of first parameter list have the form `$x_{1,j}$:$T_{1,j}$=this.$x_{1,j}$`,
+the other parameters `$ps'_{i,j}$` of the `copy` method are defined as `$x_{i,j}$:$T_{i,j}$`.
In all cases `$x_{i,j}$` and `$T_{i,j}$` refer to the name and type of the corresponding class parameter
`$\mathit{ps}_{i,j}$`.
@@ -941,7 +924,6 @@ class different from `AnyRef`. In particular:
- Method `toString: String` returns a string representation which
contains the name of the class and its elements.
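Putting the synthesized members described in this section together (a minimal sketch):

```scala
case class Point(x: Int, y: Int)

val p  = Point(1, 2)          // companion apply
val q  = p.copy(y = 3)        // synthesized copy: Point(1, 3)
val eq = p == Point(1, 2)     // structural equality: true

p match {
  case Point(a, b) => println(a + b)   // companion unapply used by the pattern
}
```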
-
###### Example
Here is the definition of abstract syntax for lambda calculus:
@@ -984,7 +966,6 @@ This form of extensibility can be excluded by declaring the base class
directly extend `Expr` must be in the same source file as
`Expr`.
-
### Traits
```ebnf
@@ -1009,7 +990,7 @@ it is not statically known at the time the trait is defined.
If $D$ is not a trait, then its actual supertype is simply its
least proper supertype (which is statically known).
-### Example
+###### Example
The following trait defines the property
of being comparable to objects of some type. It contains an abstract
method `<` and default implementations of the other
@@ -1089,17 +1070,16 @@ methods are re-bound to refer to the corresponding implementations in
`ListTable`, which is the actual supertype of `SynchronizedTable`
in `MyTable`.
-
## Object Definitions
```ebnf
ObjectDef ::= id ClassTemplate
```
-An object definition defines a single object of a new class. Its
+An object definition defines a single object of a new class. Its
most general form is
`object $m$ extends $t$`. Here,
-$m$ is the name of the object to be defined, and
+$m$ is the name of the object to be defined, and
$t$ is a [template](#templates) of the form
```scala
@@ -1107,7 +1087,7 @@ $sc$ with $mt_1$ with $\ldots$ with $mt_n$ { $\mathit{stats}$ }
```
which defines the base classes, behavior and initial state of $m$.
-The extends clause `extends $sc$ with $mt_1$ with $\ldots$ with $mt_n$`
+The extends clause `extends $sc$ with $mt_1$ with $\ldots$ with $mt_n$`
can be omitted, in which case
`extends scala.AnyRef` is assumed. The class body
`{ $\mathit{stats}$ }` may also be omitted, in which case the empty body
@@ -1127,7 +1107,7 @@ not at the point of the object definition, but is instead evaluated
the first time $m$ is dereferenced during execution of the program
(which might be never at all). An attempt to dereference $m$ again in
the course of evaluation of the constructor leads to an infinite loop
-or run-time error.
+or run-time error.
Other threads trying to dereference $m$ while the
constructor is being evaluated block until evaluation is complete.
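A sketch of this lazy evaluation (the `Config` object is illustrative):

```scala
object Config {
  println("initializing Config")   // part of the object constructor
  val retries = 3
}

// Nothing has been printed yet: the constructor runs on the first dereference.
println(Config.retries)   // prints "initializing Config", then 3
println(Config.retries)   // prints 3 only
```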
diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index b2144aac6d..da9e21f267 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -48,7 +48,7 @@ Ascription ::= `:' InfixType
```
Expressions are composed of operators and operands. Expression forms are
-discussed subsequently in decreasing order of precedence.
+discussed subsequently in decreasing order of precedence.
## Expression Typing
@@ -62,15 +62,14 @@ $T$, then the type of the expression is assumed instead to be a
[skolemization](03-types.html#existential-types) of $T$.
Skolemization is reversed by type packing. Assume an expression $e$ of
-type $T$ and let $t_1[\mathit{tps}_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}_n] >: L_n <: U_n$ be
+type $T$ and let $t_1[\mathit{tps}\_1] >: L_1 <: U_1 , \ldots , t_n[\mathit{tps}\_n] >: L_n <: U_n$ be
all the type variables created by skolemization of some part of $e$ which are free in $T$.
Then the _packed type_ of $e$ is
```scala
-$T$ forSome { type $t_1[\mathit{tps}_1] >: L_1 <: U_1$; $\ldots$; type $t_n[\mathit{tps}_n] >: L_n <: U_n$ }.
+$T$ forSome { type $t_1[\mathit{tps}\_1] >: L_1 <: U_1$; $\ldots$; type $t_n[\mathit{tps}\_n] >: L_n <: U_n$ }.
```
-
## Literals
```ebnf
@@ -80,7 +79,6 @@ SimpleExpr ::= Literal
Typing of literals is as described [here](01-lexical-syntax.html#literals); their
evaluation is immediate.
-
## The _Null_ Value
The `null` value is of type `scala.Null`, and is thus
@@ -90,15 +88,14 @@ implements methods in class `scala.AnyRef` as follows:
- `eq($x\,$)` and `==($x\,$)` return `true` iff the
argument $x$ is also the "null" object.
-- `ne($x\,$)` and `!=($x\,$)` return true iff the
+- `ne($x\,$)` and `!=($x\,$)` return true iff the
argument x is not also the "null" object.
- `isInstanceOf[$T\,$]` always returns `false`.
- `asInstanceOf[$T\,$]` returns the [default value](04-basic-declarations-and-definitions.html#value-declarations-and-definitions) of type $T$.
- `##` returns ``0``.
A reference to any other member of the "null" object causes a
-`NullPointerException` to be thrown.
-
+`NullPointerException` to be thrown.
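For instance (sketch):

```scala
val s: String = null
println(s == null)               // true
println(s.isInstanceOf[String])  // false: isInstanceOf on the null object is always false
// s.length                      // selecting any other member throws a NullPointerException
```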
## Designators
@@ -108,19 +105,19 @@ SimpleExpr ::= Path
```
A designator refers to a named term. It can be a _simple name_ or
-a _selection_.
+a _selection_.
-A simple name $x$ refers to a value as specified
-[here](02-identifiers-names-and-scopes.html#identifiers-names-and-scopes).
+A simple name $x$ refers to a value as specified
+[here](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes).
If $x$ is bound by a definition or declaration in an enclosing class
or object $C$, it is taken to be equivalent to the selection
`$C$.this.$x$` where $C$ is taken to refer to the class containing $x$
-even if the type name $C$ is [shadowed](02-identifiers-names-and-scopes.html#identifiers-names-and-scopes) at the
+even if the type name $C$ is [shadowed](02-identifiers-names-and-scopes.html#identifiers,-names-and-scopes) at the
occurrence of $x$.
If $r$ is a [stable identifier](03-types.html#paths) of type $T$, the selection $r.x$ refers
statically to a term member $m$ of $r$ that is identified in $T$ by
-the name $x$.
+the name $x$.
<!-- There might be several such members, in which
case overloading resolution (\sref{overloading-resolution}) is applied
@@ -128,7 +125,7 @@ to pick a unique one.} -->
For other expressions $e$, $e.x$ is typed as
if it was `{ val $y$ = $e$; $y$.$x$ }`, for some fresh name
-$y$.
+$y$.
The expected type of a designator's prefix is always undefined. The
type of a designator is the type $T$ of the entity it refers to, with
@@ -147,16 +144,10 @@ designate a constant, or
conform to $\mathit{pt}$, or
1. The path $p$ designates a module.
-
The selection $e.x$ is evaluated by first evaluating the qualifier
expression $e$, which yields an object $r$, say. The selection's
result is then the member of $r$ that is either defined by $m$ or defined
-by a definition overriding $m$.
-If that member has a type which
-conforms to `scala.NotNull`, the member's value must be initialized
-to a value different from `null`, otherwise a `scala.UnitializedError`
-is thrown.
-
+by a definition overriding $m$.
## This and Super
@@ -190,7 +181,7 @@ method. <!-- explanation: so that we need not create several fields for overrid
If it is
a method, it must be concrete, or the template
containing the reference must have a member $m'$ which overrides $m$
-and which is labeled `abstract override`.
+and which is labeled `abstract override`.
A reference `$C$.super.$m$` refers statically to a method
or type $m$ in the least proper supertype of the innermost enclosing class or
@@ -210,7 +201,7 @@ to the type or method of $x$ in the parent trait of $C$ whose simple
name is $T$. That member must be uniquely defined. If it is a method,
it must be concrete.
-### Example
+###### Example
Consider the following class definitions
```scala
@@ -238,7 +229,6 @@ Then we have:
Note that the `superB` function returns different results
depending on whether `B` is mixed in with class `Root` or `A`.
-
## Function Applications
```ebnf
@@ -269,20 +259,19 @@ it has the form $x_i=e'_i$ and $x_i$ is one of the parameter names
$p_1 , \ldots , p_n$. The function $f$ is applicable if all of the following conditions
hold:
-- For every named argument $x_i=e'_i$ the type $S_i$
+- For every named argument $x_i=e_i'$ the type $S_i$
is compatible with the parameter type $T_j$ whose name $p_j$ matches $x_i$.
- For every positional argument $e_i$ the type $S_i$
is compatible with $T_i$.
- If the expected type is defined, the result type $U$ is
compatible to it.
-If $f$ is a polymorphic method it is applicable if
+If $f$ is a polymorphic method it is applicable if
[local type inference](#local-type-inference) can
determine type arguments so that the instantiated method is applicable. If
$f$ has some value type it is applicable if it has a method member named
`apply` which is applicable.
-
Evaluation of `$f$($e_1 , \ldots , e_n$)` usually entails evaluation of
$f$ and $e_1 , \ldots , e_n$ in that order. Each argument expression
is converted to the type of its corresponding formal parameter. After
@@ -295,7 +284,7 @@ The case of a formal parameter with a parameterless
method type `=>$T$` is treated specially. In this case, the
corresponding actual argument expression $e$ is not evaluated before the
application. Instead, every use of the formal parameter on the
-right-hand side of the rewrite rule entails a re-evaluation of $e$.
+right-hand side of the rewrite rule entails a re-evaluation of $e$.
In other words, the evaluation order for
`=>`-parameters is _call-by-name_ whereas the evaluation
order for normal parameters is _call-by-value_.
@@ -345,7 +334,6 @@ sum(List(1, 2, 3, 4))
would not typecheck.
-
### Named and Default Arguments
If an application might use named arguments $p = e$ or default
@@ -360,10 +348,9 @@ arguments, the following conditions must hold.
- Every formal parameter $p_j:T_j$ which is not specified by either a positional
or a named argument has a default argument.
-
If the application uses named or default
arguments the following transformation is applied to convert it into
-an application without named or default arguments.
+an application without named or default arguments.
If the function $f$
has the form `$p.m$[$\mathit{targs}$]` it is transformed into the
@@ -388,14 +375,14 @@ the form
}
```
-where every argument in $(\mathit{args}_1) , \ldots , (\mathit{args}_l)$ is a reference to
+where every argument in $(\mathit{args}\_1) , \ldots , (\mathit{args}\_l)$ is a reference to
one of the values $x_1 , \ldots , x_k$. To integrate the current application
into the block, first a value definition using a fresh name $y_i$ is created
for every argument in $e_1 , \ldots , e_m$, which is initialised to $e_i$ for
positional arguments and to $e'_i$ for named arguments of the form
`$x_i=e'_i$`. Then, for every parameter which is not specified
by the argument list, a value definition using a fresh name $z_i$ is created,
-which is initialized using the method computing the
+which is initialized using the method computing the
[default argument](04-basic-declarations-and-definitions.html#function-declarations-and-definitions) of
this parameter.
@@ -419,6 +406,18 @@ The final result of the transformation is a block of the form
}
```
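A small concrete instance of this rewriting (a sketch; `resize` is an illustrative method and the expansion shown in the comment is approximate):

```scala
def resize(width: Int = 100, height: Int = 50, scale: Double = 1.0): String =
  s"$width x $height @ $scale"

// Two named arguments; width falls back to its default:
resize(height = 80, scale = 2.0)

// is evaluated roughly like the block
//   { val y1 = 80; val y2 = 2.0; val z1 = <default getter for width>; resize(z1, y1, y2) }
// where the default getter is the compiler-generated, user-inaccessible resize$default$1.
```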
+### Signature Polymorphic Methods
+
+For invocations of signature polymorphic methods of the target platform `$f$($e_1 , \ldots , e_m$)`,
+the invoked function has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call
+site. The parameter types `$T_1 , \ldots , T_n$` are the types of the argument expressions
+`$e_1 , \ldots , e_m$` and `$U$` is the expected type at the call site. If the expected type is
+undefined then `$U$` is `scala.AnyRef`. The parameter names `$p_1 , \ldots , p_n$` are fresh.
+
+###### Note
+
+On the Java platform version 7 and later, the methods `invoke` and `invokeExact` in class
+`java.lang.invoke.MethodHandle` are signature polymorphic.
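A hedged sketch of such a call site, assuming a compiler that implements the rule above and Java 7 or later:

```scala
import java.lang.invoke.{MethodHandles, MethodType}

val lookup = MethodHandles.lookup()
val concat = lookup.findVirtual(
  classOf[String], "concat",
  MethodType.methodType(classOf[String], classOf[String]))

// invoke is signature polymorphic: the method type checked at this call site is
// derived from the argument types (String, String) and the expected type
// (scala.AnyRef here, since no expected type is given).
val greeting = concat.invoke("foo", "bar")
println(greeting)   // foobar
```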
## Method Values
@@ -430,7 +429,7 @@ The expression `$e$ _` is well-formed if $e$ is of method
type or if $e$ is a call-by-name parameter. If $e$ is a method with
parameters, `$e$ _` represents $e$ converted to a function
type by [eta expansion](#eta-expansion). If $e$ is a
-parameterless method or call-by-name parameter of type
+parameterless method or call-by-name parameter of type
`=>$T$`, `$e$ _` represents the function of type
`() => $T$`, which evaluates $e$ when it is applied to the empty
parameter list `()`.
@@ -446,12 +445,9 @@ The method values in the left column are each equivalent to the [eta-expanded ex
|`(1 to 9).fold(z)_` | `{ val eta1 = z; val eta2 = 1 to 9; op => eta2.fold(eta1)(op) }` |
|`Some(1).fold(??? : Int)_` | `{ val eta1 = () => ???; val eta2 = Some(1); op => eta2.fold(eta1())(op) }` |
-
Note that a space is necessary between a method name and the trailing underscore
because otherwise the underscore would be considered part of the name.
-
-
## Type Applications
```ebnf
@@ -459,8 +455,8 @@ SimpleExpr ::= SimpleExpr TypeArgs
```
A type application `$e$[$T_1 , \ldots , T_n$]` instantiates
-a polymorphic value $e$ of type
-`[$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$S$`
+a polymorphic value $e$ of type
+`[$a_1$ >: $L_1$ <: $U_1, \ldots , a_n$ >: $L_n$ <: $U_n$]$S$`
with argument types
`$T_1 , \ldots , T_n$`. Every argument type $T_i$ must obey
the corresponding bounds $L_i$ and $U_i$. That is, for each $i = 1
@@ -469,16 +465,15 @@ U_i$, where $\sigma$ is the substitution $[a_1 := T_1 , \ldots , a_n
:= T_n]$. The type of the application is $\sigma S$.
If the function part $e$ is of some value type, the type application
-is taken to be equivalent to
+is taken to be equivalent to
`$e$.apply[$T_1 , \ldots ,$ T$_n$]`, i.e. the application of an `apply` method defined by
$e$.
-Type applications can be omitted if
-[local type inference](#local-type-inference) can infer best type parameters
+Type applications can be omitted if
+[local type inference](#local-type-inference) can infer best type parameters
for a polymorphic function from the types of the actual function arguments
and the expected result type.
-
## Tuples
```ebnf
@@ -486,20 +481,19 @@ SimpleExpr ::= `(' [Exprs] `)'
```
A tuple expression `($e_1 , \ldots , e_n$)` is an alias
-for the class instance creation
-`scala.Tuple$n$($e_1 , \ldots , e_n$)`, where $n \geq 2$.
+for the class instance creation
+`scala.Tuple$n$($e_1 , \ldots , e_n$)`, where $n \geq 2$.
The empty tuple
`()` is the unique value of type `scala.Unit`.
-
## Instance Creation Expressions
```ebnf
SimpleExpr ::= `new' (ClassTemplate | TemplateBody)
```
-A simple instance creation expression is of the form
-`new $c$`
+A simple instance creation expression is of the form
+`new $c$`
where $c$ is a [constructor invocation](05-classes-and-objects.html#constructor-invocations). Let $T$ be
the type of $c$. Then $T$ must
denote (a type instance of) a non-abstract subclass of
@@ -521,7 +515,7 @@ The expression is evaluated by creating a fresh
object of type $T$ which is initialized by evaluating $c$. The
type of the expression is $T$.
-A general instance creation expression is of the form
+A general instance creation expression is of the form
`new $t$` for some [class template](05-classes-and-objects.html#templates) $t$.
Such an expression is equivalent to the block
@@ -533,7 +527,7 @@ where $a$ is a fresh name of an _anonymous class_ which is
inaccessible to user programs.
There is also a shorthand form for creating values of structural
-types: If `{$D$}` is a class body, then
+types: If `{$D$}` is a class body, then
`new {$D$}` is equivalent to the general instance creation expression
`new AnyRef{$D$}`.
@@ -558,7 +552,6 @@ The latter is in turn a shorthand for the block
where `anon\$X` is some freshly created name.
-
## Blocks
```ebnf
@@ -574,31 +567,30 @@ two definitions or declarations that bind the same name in the same
namespace. The final expression can be omitted, in which
case the unit value `()` is assumed.
-
The expected type of the final expression $e$ is the expected
type of the block. The expected type of all preceding statements is
undefined.
The type of a block `$s_1$; $\ldots$; $s_n$; $e$` is
-`$T$ forSome {$\,Q\,$}`, where $T$ is the type of $e$ and $Q$
+`$T$ forSome {$\,Q\,$}`, where $T$ is the type of $e$ and $Q$
contains [existential clauses](03-types.html#existential-types)
-for every value or type name which is free in $T$
+for every value or type name which is free in $T$
and which is defined locally in one of the statements $s_1 , \ldots , s_n$.
We say the existential clause _binds_ the occurrence of the value or type name.
-Specifically,
+Specifically,
- A locally defined type definition `type$\;t = T$`
is bound by the existential clause `type$\;t >: T <: T$`.
- It is an error if $t$ carries type parameters.
+ It is an error if $t$ carries type parameters.
- A locally defined value definition `val$\;x: T = e$` is
bound by the existential clause `val$\;x: T$`.
- A locally defined class definition `class$\;c$ extends$\;t$`
is bound by the existential clause `type$\;c <: T$` where
$T$ is the least class type or refinement type which is a proper
- supertype of the type $c$. It is an error if $c$ carries type parameters.
+ supertype of the type $c$. It is an error if $c$ carries type parameters.
- A locally defined object definition `object$\;x\;$extends$\;t$`
is bound by the existential clause `val$\;x: T$` where
- $T$ is the least class type or refinement type which is a proper supertype of the type
+ $T$ is the least class type or refinement type which is a proper supertype of the type
`$x$.type`.
Evaluation of the block entails evaluation of its
@@ -623,18 +615,16 @@ simply has type `B`, because with the rules [here](03-types.html#simplification-
the existentially quantified type
`_1 forSome { type _1 <: B }` can be simplified to `B`.
-
## Prefix, Infix, and Postfix Operations
```ebnf
PostfixExpr ::= InfixExpr [id [nl]]
InfixExpr ::= PrefixExpr
| InfixExpr id [nl] InfixExpr
-PrefixExpr ::= [`-' | `+' | `!' | `~'] SimpleExpr
+PrefixExpr ::= [`-' | `+' | `!' | `~'] SimpleExpr
```
-Expressions can be constructed from operands and operators.
-
+Expressions can be constructed from operands and operators.
### Prefix Operations
@@ -656,7 +646,7 @@ application of the infix operator `sin` to the operands
### Postfix Operations
A postfix operator can be an arbitrary identifier. The postfix
-operation $e;\mathit{op}$ is interpreted as $e.\mathit{op}$.
+operation $e;\mathit{op}$ is interpreted as $e.\mathit{op}$.
### Infix Operations
@@ -685,9 +675,9 @@ followed by operators starting with ``|`', etc.
There's one exception to this rule, which concerns
[_assignment operators_](#assignment-operators).
-The precedence of an assigment operator is the same as the one
+The precedence of an assignment operator is the same as the one
of simple assignment `(=)`. That is, it is lower than the
-precedence of any other operator.
+precedence of any other operator.
The _associativity_ of an operator is determined by the operator's
last character. Operators ending in a colon ``:`' are
@@ -700,18 +690,18 @@ parts of an expression as follows.
expression, then operators with higher precedence bind more closely
than operators with lower precedence.
- If there are consecutive infix
- operations $e_0; \mathit{op}_1; e_1; \mathit{op}_2 \ldots \mathit{op}_n; e_n$
- with operators $\mathit{op}_1 , \ldots , \mathit{op}_n$ of the same precedence,
+ operations $e_0; \mathit{op}\_1; e_1; \mathit{op}\_2 \ldots \mathit{op}\_n; e_n$
+ with operators $\mathit{op}\_1 , \ldots , \mathit{op}\_n$ of the same precedence,
then all these operators must
have the same associativity. If all operators are left-associative,
the sequence is interpreted as
- $(\ldots(e_0;\mathit{op}_1;e_1);\mathit{op}_2\ldots);\mathit{op}_n;e_n$.
+ $(\ldots(e_0;\mathit{op}\_1;e_1);\mathit{op}\_2\ldots);\mathit{op}\_n;e_n$.
Otherwise, if all operators are right-associative, the
sequence is interpreted as
- $e_0;\mathit{op}_1;(e_1;\mathit{op}_2;(\ldots \mathit{op}_n;e_n)\ldots)$.
+ $e_0;\mathit{op}\_1;(e_1;\mathit{op}\_2;(\ldots \mathit{op}\_n;e_n)\ldots)$.
- Postfix operators always have lower precedence than infix
- operators. E.g. $e_1;\mathit{op}_1;e_2;\mathit{op}_2$ is always equivalent to
- $(e_1;\mathit{op}_1;e_2);\mathit{op}_2$.
+ operators. E.g. $e_1;\mathit{op}\_1;e_2;\mathit{op}\_2$ is always equivalent to
+ $(e_1;\mathit{op}\_1;e_2);\mathit{op}\_2$.
The right-hand operand of a left-associative operator may consist of
several arguments enclosed in parentheses, e.g. $e;\mathit{op};(e_1,\ldots,e_n)$.
@@ -721,13 +711,13 @@ A left-associative binary
operation $e_1;\mathit{op};e_2$ is interpreted as $e_1.\mathit{op}(e_2)$. If $\mathit{op}$ is
right-associative, the same operation is interpreted as
`{ val $x$=$e_1$; $e_2$.$\mathit{op}$($x\,$) }`, where $x$ is a fresh
-name.
+name.
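For instance (sketch):

```scala
val xs   = 1 :: 2 :: Nil       // `::` ends in ':' and is right-associative: 1 :: (2 :: Nil)
val same = Nil.::(2).::(1)     // the right-associative interpretation: List(1, 2)
val n    = 2 + 3 * 4           // `*` has higher precedence than `+`: 14

assert(xs == same && n == 14)
```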
### Assignment Operators
An assignment operator is an operator symbol (syntax category
`op` in [Identifiers](01-lexical-syntax.html#identifiers)) that ends in an equals character
-“`=`”, with the exception of operators for which one of
+“`=`”, with the exception of operators for which one of
the following conditions holds:
1. the operator also starts with an equals character, or
@@ -737,8 +727,8 @@ Assignment operators are treated specially in that they
can be expanded to assignments if no other interpretation is valid.
Let's consider an assignment operator such as `+=` in an infix
-operation `$l$ += $r$`, where $l$, $r$ are expressions.
-This operation can be re-interpreted as an operation which corresponds
+operation `$l$ += $r$`, where $l$, $r$ are expressions.
+This operation can be re-interpreted as an operation which corresponds
to the assignment
```scala
@@ -750,15 +740,14 @@ except that the operation's left-hand-side $l$ is evaluated only once.
The re-interpretation occurs if the following two conditions are fulfilled.
1. The left-hand-side $l$ does not have a member named
- `+=`, and also cannot be converted by an
+ `+=`, and also cannot be converted by an
[implicit conversion](#implicit-conversions)
to a value with a member named `+=`.
1. The assignment `$l$ = $l$ + $r$` is type-correct.
- In particular this implies that $l$ refers to a variable or object
- that can be assigned to, and that is convertible to a value with a member
+ In particular this implies that $l$ refers to a variable or object
+ that can be assigned to, and that is convertible to a value with a member
named `+`.
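For instance (sketch): `Int` has no `+=` member, so the operator is re-interpreted as an assignment, while a mutable collection defining `+=` is called directly:

```scala
var n = 1
n += 2     // no `+=` member on Int: re-interpreted as n = n + 2

import scala.collection.mutable.ListBuffer
val buf = ListBuffer(1, 2)
buf += 3   // ListBuffer defines `+=`, so this is an ordinary method call
```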
-
## Typed Expressions
```ebnf
@@ -778,18 +767,16 @@ Here are examples of well-typed and ill-typed expressions.
// 1: string // ***** illegal
```
-
## Annotated Expressions
```ebnf
-Expr1 ::= PostfixExpr `:' Annotation {Annotation}
+Expr1 ::= PostfixExpr `:' Annotation {Annotation}
```
An annotated expression `$e$: @$a_1$ $\ldots$ @$a_n$`
-attaches [annotations](11-user-defined-annotations.html#user-defined-annotations) $a_1 , \ldots , a_n$ to the
+attaches [annotations](11-annotations.html#user-defined-annotations) $a_1 , \ldots , a_n$ to the
expression $e$.
-
## Assignments
```ebnf
@@ -810,21 +797,21 @@ assignment `$f.x$ = $e$` to a parameterless function $x$
is interpreted as the invocation `$f.x$_=($e\,$)`.
An assignment `$f$($\mathit{args}\,$) = $e$` with a function application to the
-left of the ‘`=`’ operator is interpreted as
+left of the ‘`=`’ operator is interpreted as
`$f.$update($\mathit{args}$, $e\,$)`, i.e.
the invocation of an `update` function defined by $f$.
###### Example
Here are some assignment expressions and their equivalent expansions.
--------------------------- ---------------------
-`x.f = e` x.f_=(e)
-`x.f() = e` x.f.update(e)
-`x.f(i) = e` x.f.update(i, e)
-`x.f(i, j) = e` x.f.update(i, j, e)
--------------------------- ---------------------
+| assignment | expansion |
+|--------------------------|----------------------|
+|`x.f = e` | `x.f_=(e)` |
+|`x.f() = e` | `x.f.update(e)` |
+|`x.f(i) = e` | `x.f.update(i, e)` |
+|`x.f(i, j) = e` | `x.f.update(i, j, e)`|
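A concrete instance of the `update` expansion (sketch):

```scala
val a = Array(1, 2, 3)
a(0) = 10                                    // expands to a.update(0, 10)

val m = scala.collection.mutable.Map.empty[String, Int]
m("one") = 1                                 // expands to m.update("one", 1)
```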
-### Example
+###### Example Imperative Matrix Multiplication
Here is the usual imperative code for matrix multiplication.
@@ -875,7 +862,6 @@ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
}
```
-
## Conditional Expressions
```ebnf
@@ -900,7 +886,7 @@ evaluating $e_3$ is returned.
A short form of the conditional expression eliminates the
else-part. The conditional expression `if ($e_1$) $e_2$` is
-evaluated as if it was `if ($e_1$) $e_2$ else ()`.
+evaluated as if it was `if ($e_1$) $e_2$ else ()`.
## While Loop Expressions
@@ -917,7 +903,6 @@ def whileLoop(cond: => Boolean)(body: => Unit): Unit =
if (cond) { body ; whileLoop(cond)(body) } else {}
```
-
## Do Loop Expressions
```ebnf
@@ -928,11 +913,10 @@ The do loop expression `do $e_1$ while ($e_2$)` is typed and
evaluated as if it was the expression `($e_1$ ; while ($e_2$) $e_1$)`.
A semicolon preceding the `while` symbol of a do loop expression is ignored.
-
## For Comprehensions and For Loops
```ebnf
-Expr1 ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
+Expr1 ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
{nl} [`yield'] Expr
Enumerators ::= Generator {semi Generator}
Generator ::= Pattern1 `<-' Expr {[semi] Guard | semi Pattern1 `=' Expr}
@@ -947,7 +931,7 @@ and collects the results. An enumerator sequence always starts with a
generator; this can be followed by further generators, value
definitions, or guards. A _generator_ `$p$ <- $e$`
produces bindings from an expression $e$ which is matched in some way
-against pattern $p$. A _value definition_ `$p$ = $e$`
+against pattern $p$. A _value definition_ `$p$ = $e$`
binds the value name $p$ (or several names in a pattern $p$) to
the result of evaluating the expression $e$. A _guard_
`if $e$` contains a boolean expression which restricts
@@ -967,17 +951,17 @@ $p$ <- $e$.withFilter { case $p$ => true; case _ => false }
Then, the following rules are applied repeatedly until all
comprehensions have been eliminated.
- - A for comprehension
- `for ($p$ <- $e\,$) yield $e'$`
+ - A for comprehension
+ `for ($p$ <- $e\,$) yield $e'$`
is translated to
`$e$.map { case $p$ => $e'$ }`.
- A for loop
- `for ($p$ <- $e\,$) $e'$`
+ `for ($p$ <- $e\,$) $e'$`
is translated to
`$e$.foreach { case $p$ => $e'$ }`.
- A for comprehension
- ```
+ ```
for ($p$ <- $e$; $p'$ <- $e'; \ldots$) yield $e''$
```
@@ -985,13 +969,13 @@ comprehensions have been eliminated.
sequence of generators, definitions, or guards,
is translated to
- ```
+ ```
$e$.flatMap { case $p$ => for ($p'$ <- $e'; \ldots$) yield $e''$ }
```
- A for loop
- ```
+ ```
for ($p$ <- $e$; $p'$ <- $e'; \ldots$) $e''$
```
@@ -999,24 +983,23 @@ comprehensions have been eliminated.
sequence of generators, definitions, or guards,
is translated to
- ```
+ ```
$e$.foreach { case $p$ => for ($p'$ <- $e'; \ldots$) $e''$ }
```
- A generator `$p$ <- $e$` followed by a guard
- `if $g$` is translated to a single generator
+ `if $g$` is translated to a single generator
`$p$ <- $e$.withFilter(($x_1 , \ldots , x_n$) => $g\,$)` where
$x_1 , \ldots , x_n$ are the free variables of $p$.
- - A generator `$p$ <- $e$` followed by a value definition
+ - A generator `$p$ <- $e$` followed by a value definition
`$p'$ = $e'$` is translated to the following generator of pairs of values, where
$x$ and $x'$ are fresh names:
- ```
+ ```
($p$, $p'$) <- for ($x @ p$ <- $e$) yield { val $x' @ p'$ = $e'$; ($x$, $x'$) }
```
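To make the translation concrete, a small comprehension and its approximate expansion under the rules above:

```scala
val xs = List(1, 2, 3)
val ys = List(10, 20)

val direct = for (x <- xs if x % 2 == 1; y <- ys) yield x * y

// guard -> withFilter, outer generator -> flatMap, inner generator -> map
val translated = xs.withFilter(x => x % 2 == 1).flatMap(x => ys.map(y => x * y))

assert(direct == translated)   // List(10, 20, 30, 60)
```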
-
###### Example
The following code produces all pairs of numbers between $1$ and $n-1$
whose sums are prime.
@@ -1078,7 +1061,6 @@ The code above makes use of the fact that `map`, `flatMap`,
`withFilter`, and `foreach` are defined for instances of class
`scala.Array`.
-
## Return Expressions
```ebnf
@@ -1088,11 +1070,11 @@ Expr1 ::= `return' [Expr]
A return expression `return $e$` must occur inside the body of some
enclosing named method or function. The innermost enclosing named
method or function in a source program, $f$, must have an explicitly declared result type,
-and the type of $e$ must conform to it.
+and the type of $e$ must conform to it.
The return expression
evaluates the expression $e$ and returns its value as the result of
$f$. The evaluation of any statements or
-expressions following the return expression is omitted. The type of
+expressions following the return expression is omitted. The type of
a return expression is `scala.Nothing`.
The expression $e$ may be omitted. The return expression
@@ -1116,7 +1098,6 @@ before the return expression is executed. In that case, the thrown
`scala.runtime.NonLocalReturnException` will not be caught,
and will propagate up the call stack.
-
## Throw Expressions
```ebnf
@@ -1134,17 +1115,16 @@ exception, evaluation resumes with the handler; otherwise the thread
executing the `throw` is aborted. The type of a throw expression
is `scala.Nothing`.
-
## Try Expressions
```ebnf
-Expr1 ::= `try' `{' Block `}' [`catch' `{' CaseClauses `}']
+Expr1 ::= `try' (`{' Block `}' | Expr) [`catch' `{' CaseClauses `}']
[`finally' Expr]
```
A try expression is of the form `try { $b$ } catch $h$`
-where the handler $h$ is a
-[pattern matching anonymous function](#pattern-matching-anonymous-functions)
+where the handler $h$ is a
+[pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions)
```scala
{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
@@ -1152,12 +1132,12 @@ where the handler $h$ is a
This expression is evaluated by evaluating the block
$b$. If evaluation of $b$ does not cause an exception to be
-thrown, the result of $b$ is returned. Otherwise the
-handler $h$ is applied to the thrown exception.
+thrown, the result of $b$ is returned. Otherwise the
+handler $h$ is applied to the thrown exception.
If the handler contains a case matching the thrown exception,
the first such case is invoked. If the handler contains
-no case matching the thrown exception, the exception is
-re-thrown.
+no case matching the thrown exception, the exception is
+re-thrown.
Let $\mathit{pt}$ be the expected type of the try expression. The block
$b$ is expected to conform to $\mathit{pt}$. The handler $h$
@@ -1173,7 +1153,7 @@ thrown, the expression $e$ is evaluated. If an exception is thrown
during evaluation of $e$, the evaluation of the try expression is
aborted with the thrown exception. If no exception is thrown during
evaluation of $e$, the result of $b$ is returned as the
-result of the try expression.
+result of the try expression.
If an exception is thrown during evaluation of $b$, the finally block
$e$ is also evaluated. If another exception $e$ is thrown
@@ -1185,11 +1165,10 @@ $b$ is expected to conform to the expected type of the try
expression. The finally expression $e$ is expected to conform to
type `Unit`.
-A try expression `try { $b$ } catch $e_1$ finally $e_2$`
+A try expression `try { $b$ } catch $e_1$ finally $e_2$`
is a shorthand
for `try { try { $b$ } catch $e_1$ } finally $e_2$`.
-
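
A non-normative sketch of a try expression with both a handler and a finally clause; the method name and the choice of exception are illustrative only.

```scala
import java.io.IOException

def readLength(read: () => String): Int =
  try {
    read().length
  } catch {
    case _: IOException => -1   // first matching case handles the thrown exception
  } finally {
    println("done")             // evaluated whether or not an exception was thrown
  }

readLength(() => "hello")                         // prints "done", returns 5
readLength(() => throw new IOException("boom"))   // prints "done", returns -1
```
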
## Anonymous Functions
```ebnf
@@ -1199,7 +1178,7 @@ Bindings ::= `(' Binding {`,' Binding} `)'
Binding ::= (id | `_') [`:' Type]
```
-The anonymous function `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => e`
+The anonymous function `($x_1$: $T_1 , \ldots , x_n$: $T_n$) => e`
maps parameters $x_i$ of types $T_i$ to a result given
by expression $e$. The scope of each formal parameter
$x_i$ is $e$. Formal parameters must have pairwise distinct names.
@@ -1226,21 +1205,21 @@ new scala.Function$n$[$T_1 , \ldots , T_n$, $T$] {
}
```
-In the case of a single untyped formal parameter,
-`($x\,$) => $e$`
+In the case of a single untyped formal parameter,
+`($x\,$) => $e$`
can be abbreviated to `$x$ => $e$`. If an
anonymous function `($x$: $T\,$) => $e$` with a single
typed parameter appears as the result expression of a block, it can be
abbreviated to `$x$: $T$ => e`.
-A formal parameter may also be a wildcard represented by an underscore `_`.
+A formal parameter may also be a wildcard represented by an underscore `_`.
In that case, a fresh name for the parameter is chosen arbitrarily.
A named parameter of an anonymous function may be optionally preceded
by an `implicit` modifier. In that case the parameter is
-labeled [`implicit`](07-implicit-parameters-and-views.html#implicit-parameters-and-views); however the
+labeled [`implicit`](07-implicits.html#implicit-parameters-and-views); however the
parameter section itself does not count as an implicit parameter
-section in the sense defined [here](07-implicit-parameters-and-views.html#implicit-parameters). Hence, arguments to
+section in the sense defined [here](07-implicits.html#implicit-parameters). Hence, arguments to
anonymous functions always have to be given explicitly.
###### Example
@@ -1262,7 +1241,6 @@ _ => 5 // The function that ignores its argument
// and always returns 5.
```
-
### Placeholder Syntax for Anonymous Functions
```ebnf
@@ -1281,10 +1259,10 @@ type ascription `_:$T$`.
An expression $e$ of syntactic category `Expr` _binds_ an underscore section
$u$, if the following two conditions hold: (1) $e$ properly contains $u$, and
-(2) there is no other expression of syntactic category `Expr`
+(2) there is no other expression of syntactic category `Expr`
which is properly contained in $e$ and which itself properly contains $u$.
-If an expression $e$ binds underscore sections $u_1 , \ldots , u_n$, in this order, it is equivalent to
+If an expression $e$ binds underscore sections $u_1 , \ldots , u_n$, in this order, it is equivalent to
the anonymous function `($u'_1$, ... $u'_n$) => $e'$`
where each $u_i'$ results from $u_i$ by replacing the underscore with a fresh identifier and
$e'$ results from $e$ by replacing each underscore section $u_i$ by $u_i'$.
@@ -1302,7 +1280,6 @@ syntax. Each of these is equivalent to the anonymous function on its right.
|`_.map(f)` | `x => x.map(f)` |
|`_.map(_ + 1)` | `x => x.map(y => y + 1)` |
-
## Constant Expressions
Constant expressions are expressions that the Scala compiler can evaluate to a constant.
@@ -1316,10 +1293,9 @@ include at least the expressions of the following forms:
- A literal array, of the form
`Array$(c_1 , \ldots , c_n)$`,
where all of the $c_i$'s are themselves constant expressions
-- An identifier defined by a
+- An identifier defined by a
[constant value definition](04-basic-declarations-and-definitions.html#value-declarations-and-definitions).
-
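
A non-normative sketch showing one expression of each of the forms listed above (the object and member names are illustrative):

```scala
object Constants {
  final val Width = 640                // a literal, hence a constant value definition
  final val Max   = Width              // an identifier defined by a constant value definition
  final val Name  = "demo"             // a string literal
  final val Cls   = classOf[String]    // a class constructed with Predef.classOf
  final val Sizes = Array(8, 16, 32)   // a literal array of constant expressions
}
```
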
## Statements
```ebnf
@@ -1332,7 +1308,7 @@ TemplateStat ::= Import
| {Annotation} {Modifier} Def
| {Annotation} {Modifier} Dcl
| Expr
- |
+ |
```
Statements occur as parts of blocks and templates. A statement can be
@@ -1340,7 +1316,7 @@ an import, a definition or an expression, or it can be empty.
Statements used in the template of a class definition can also be
declarations. An expression that is used as a statement can have an
arbitrary value type. An expression statement $e$ is evaluated by
-evaluating $e$ and discarding the result of the evaluation.
+evaluating $e$ and discarding the result of the evaluation.
<!-- Generalize to implicit coercion? -->
@@ -1353,7 +1329,6 @@ permitted.
Evaluation of a statement sequence entails evaluation of the
statements in the order they are written.
-
## Implicit Conversions
Implicit conversions can be applied to expressions whose type does not
@@ -1361,8 +1336,8 @@ match their expected type, to qualifiers in selections, and to unapplied methods
available implicit conversions are given in the next two sub-sections.
We say, a type $T$ is _compatible_ to a type $U$ if $T$ weakly conforms
-to $U$ after applying [eta-expansion](#eta-expansion) and
-[view applications](07-implicit-parameters-and-views.html#views).
+to $U$ after applying [eta-expansion](#eta-expansion) and
+[view applications](07-implicits.html#views).
### Value Conversions
@@ -1370,12 +1345,11 @@ The following five implicit conversions can be applied to an
expression $e$ which has some value type $T$ and which is type-checked with
some expected type $\mathit{pt}$.
-#### Overloading Resolution
-If an expression denotes several possible members of a class,
+###### Static Overloading Resolution
+If an expression denotes several possible members of a class,
[overloading resolution](#overloading-resolution)
is applied to pick a unique member.
-
###### Type Instantiation
An expression $e$ of polymorphic type
@@ -1386,7 +1360,7 @@ An expression $e$ of polymorphic type
which does not appear as the function part of
a type application is converted to a type instance of $T$
by determining with [local type inference](#local-type-inference)
-instance types `$T_1 , \ldots , T_n$`
+instance types `$T_1 , \ldots , T_n$`
for the type variables `$a_1 , \ldots , a_n$` and
implicitly embedding $e$ in the [type application](#type-applications)
`$e$[$T_1 , \ldots , T_n$]`.
@@ -1405,18 +1379,18 @@ type, it is converted to the same literal in that type.
###### Value Discarding
If $e$ has some value type and the expected type is `Unit`,
-$e$ is converted to the expected type by embedding it in the
+$e$ is converted to the expected type by embedding it in the
term `{ $e$; () }`.
###### View Application
If none of the previous conversions applies, and $e$'s type
does not conform to the expected type $\mathit{pt}$, it is attempted to convert
-$e$ to the expected type with a [view](07-implicit-parameters-and-views.html#views).
+$e$ to the expected type with a [view](07-implicits.html#views).
###### Dynamic Member Selection
If none of the previous conversions applies, and $e$ is a prefix
of a selection $e.x$, and $e$'s type conforms to class `scala.Dynamic`,
-then the selection is rewritten according to the rules for
+then the selection is rewritten according to the rules for
[dynamic member selection](#dynamic-member-selection).
### Method Conversions
@@ -1430,7 +1404,7 @@ type $T$ by evaluating the expression to which $m$ is bound.
###### Implicit Application
If the method takes only implicit parameters, implicit
-arguments are passed following the rules [here](07-implicit-parameters-and-views.html#implicit-parameters).
+arguments are passed following the rules [here](07-implicits.html#implicit-parameters).
###### Eta Expansion
Otherwise, if the method is not a constructor,
@@ -1450,7 +1424,7 @@ member. The way this is done depends on whether or not $e$ is used as
a function. Let $\mathscr{A}$ be the set of members referenced by $e$.
Assume first that $e$ appears as a function in an application, as in
-`$e$($e_1 , \ldots , e_m$)`.
+`$e$($e_1 , \ldots , e_m$)`.
One first determines the set of functions that is potentially
applicable based on the _shape_ of the arguments.
@@ -1464,7 +1438,7 @@ a type that is defined as follows:
- For a named argument `$n$ = $e$`: $\mathit{shape}(e)$.
- For all other expressions: `Nothing`.
-Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are
+Let $\mathscr{B}$ be the set of alternatives in $\mathscr{A}$ that are
[_applicable_](#function-applications)
to expressions $(e_1 , \ldots , e_n)$ of types
$(\mathit{shape}(e_1) , \ldots , \mathit{shape}(e_n))$.
@@ -1473,7 +1447,7 @@ alternative in $\mathscr{B}$, that alternative is chosen.
Otherwise, let $S_1 , \ldots , S_m$ be the vector of types obtained by
typing each argument with an undefined expected type. For every
-member $m$ in $\mathscr{B}$ one determines whether it is
+member $m$ in $\mathscr{B}$ one determines whether it is
applicable to expressions ($e_1 , \ldots , e_m$) of types $S_1
, \ldots , S_m$.
It is an error if none of the members in $\mathscr{B}$ is applicable. If there is one
@@ -1501,16 +1475,16 @@ question: given
types $T_1 , \ldots , T_n$.
- A polymorphic method of type
`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
- as specific as some other member of type $S$ if $T$ is as
+ as specific as some other member of type $S$ if $T$ is as
specific as $S$ under the assumption that for
$i = 1 , \ldots , n$ each $a_i$ is an abstract type name
bounded from below by $L_i$ and from above by $U_i$.
- A member of any other type is always as specific as a parameterized method
or a polymorphic method.
-- Given two members of types $T$ and $U$ which are
+- Given two members of types $T$ and $U$ which are
neither parameterized nor polymorphic method types, the member of type $T$ is as specific as
- the member of type $U$ if the existential dual of $T$ conforms to the existential dual of $U$.
- Here, the existential dual of a polymorphic type
+ the member of type $U$ if the existential dual of $T$ conforms to the existential dual of $U$.
+ Here, the existential dual of a polymorphic type
`[$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$T$` is
`$T$ forSome { type $a_1$ >: $L_1$ <: $U_1$ $, \ldots ,$ type $a_n$ >: $L_n$ <: $U_n$}`.
The existential dual of every other type is the type itself.
@@ -1541,15 +1515,15 @@ in `$e$[$\mathit{targs}\,$]`. Then all alternatives in
$\mathscr{A}$ which take the same number of type parameters as there are type
arguments in $\mathit{targs}$ are chosen. It is an error if no such alternative exists.
If there are several such alternatives, overloading resolution is
-applied again to the whole expression `$e$[$\mathit{targs}\,$]`.
+applied again to the whole expression `$e$[$\mathit{targs}\,$]`.
Assume finally that $e$ does not appear as a function in either
an application or a type application. If an expected type is given,
let $\mathscr{B}$ be the set of those alternatives in $\mathscr{A}$ which are
-[compatible](#implicit-conversions) to it. Otherwise, let $\mathscr{B}$ be the same
+[compatible](#implicit-conversions) to it. Otherwise, let $\mathscr{B}$ be the same
as $\mathscr{A}$.
We choose in this case the most specific alternative among all
-alternatives in $\mathscr{B}$. It is an error if there is no
+alternatives in $\mathscr{B}$. It is an error if there is no
alternative in $\mathscr{B}$ which is more specific than all other
alternatives in $\mathscr{B}$.
@@ -1575,18 +1549,17 @@ def f(x: B, y: A) = $\ldots$
Then the application `f(a, a)` is rejected for being ambiguous, since
no most specific applicable signature exists.
-
### Local Type Inference
Local type inference infers type arguments to be passed to expressions
of polymorphic type. Say $e$ is of type [$a_1$ >: $L_1$ <: $U_1
, \ldots , a_n$ >: $L_n$ <: $U_n$]$T$ and no explicit type parameters
-are given.
+are given.
Local type inference converts this expression to a type
application `$e$[$T_1 , \ldots , T_n$]`. The choice of the
type arguments $T_1 , \ldots , T_n$ depends on the context in which
-the expression appears and on the expected type $\mathit{pt}$.
+the expression appears and on the expected type $\mathit{pt}$.
There are three cases.
###### Case 1: Selections
@@ -1594,7 +1567,7 @@ If the expression appears as the prefix of a selection with a name
$x$, then type inference is _deferred_ to the whole expression
$e.x$. That is, if $e.x$ has type $S$, it is now treated as having
type [$a_1$ >: $L_1$ <: $U_1 , \ldots , a_n$ >: $L_n$ <: $U_n$]$S$,
-and local type inference is applied in turn to infer type arguments
+and local type inference is applied in turn to infer type arguments
for $a_1 , \ldots , a_n$, using the context in which $e.x$ appears.
###### Case 2: Values
@@ -1602,18 +1575,18 @@ If the expression $e$ appears as a value without being applied to
value arguments, the type arguments are inferred by solving a
constraint system which relates the expression's type $T$ with the
expected type $\mathit{pt}$. Without loss of generality we can assume that
-$T$ is a value type; if it is a method type we apply
+$T$ is a value type; if it is a method type we apply
[eta-expansion](#eta-expansion) to convert it to a function type. Solving
means finding a substitution $\sigma$ of types $T_i$ for the type
parameters $a_i$ such that
-- None of inferred types $T_i$ is a [singleton type](03-types.html#singleton-types)
+- None of the inferred types $T_i$ is a [singleton type](03-types.html#singleton-types)
- All type parameter bounds are respected, i.e.
$\sigma L_i <: \sigma a_i$ and $\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$.
- The expression's type conforms to the expected type, i.e.
$\sigma T <: \sigma \mathit{pt}$.
-It is a compile time error if no such substitution exists.
+It is a compile time error if no such substitution exists.
If several substitutions exist, local-type inference will choose for
each type variable $a_i$ a minimal or maximal type $T_i$ of the
solution space. A _maximal_ type $T_i$ will be chosen if the type
@@ -1644,7 +1617,7 @@ constraint system means
finding a substitution $\sigma$ of types $T_i$ for the type parameters
$a_i$ such that
-- None of inferred types $T_i$ is a [singleton type](03-types.html#singleton-types)
+- None of the inferred types $T_i$ is a [singleton type](03-types.html#singleton-types)
- All type parameter bounds are respected, i.e. $\sigma L_i <: \sigma a_i$ and
$\sigma a_i <: \sigma U_i$ for $i = 1 , \ldots , n$.
- The method's result type $T'$ conforms to the expected type, i.e. $\sigma T' <: \sigma \mathit{pt}$.
@@ -1722,7 +1695,6 @@ a = Int
so `Int` is the type inferred for `a`.
-
###### Example
Consider now the definition
@@ -1759,7 +1731,6 @@ a = scala.Any
so `scala.Any` is the type inferred for `a`.
-
### Eta Expansion
_Eta-expansion_ converts an expression of method type to an
@@ -1774,10 +1745,10 @@ for every argument type $T_i$ of the method ($i = 1 , \ldots ,
n$). The result of eta-conversion is then:
```scala
-{ val $x_1$ = $e_1$;
- $\ldots$
- val $x_m$ = $e_m$;
- ($y_1: T_1 , \ldots , y_n: T_n$) => $e'$($y_1 , \ldots , y_n$)
+{ val $x_1$ = $e_1$;
+ $\ldots$
+ val $x_m$ = $e_m$;
+ ($y_1: T_1 , \ldots , y_n: T_n$) => $e'$($y_1 , \ldots , y_n$)
}
```
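
A non-normative sketch of eta-expansion: marking the method `add` (an illustrative name) with a trailing underscore yields the function value described by the expansion above.

```scala
def add(x: Int, y: Int): Int = x + y

// Eta-expansion turns the method `add` into a function value of type (Int, Int) => Int,
// roughly `(y1: Int, y2: Int) => add(y1, y2)` as sketched above.
val f: (Int, Int) => Int = add _

List(1, 2, 3).map(f(10, _))   // List(11, 12, 13)
```
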
@@ -1788,7 +1759,7 @@ a sub-expression of parameterless method type, is not evaluated in the expanded
### Dynamic Member Selection
The standard Scala library defines a trait `scala.Dynamic` which defines a member
-\@invokeDynamic@ as follows:
+`applyDynamic` as follows:
```scala
package scala
@@ -1799,7 +1770,7 @@ trait Dynamic {
```
Assume a selection of the form $e.x$ where the type of $e$ conforms to `scala.Dynamic`.
-Further assuming the selection is not followed by any function arguments, such an expression can be rewitten under the conditions given [here](#implicit-conversions) to:
+Further assuming the selection is not followed by any function arguments, such an expression can be rewritten under the conditions given [here](#implicit-conversions) to:
```scala
$e$.applyDynamic("$x$")
@@ -1811,4 +1782,3 @@ is rewritten to
```scala
$e$.applyDynamic("$x$", $\mathit{args}$)
```
-
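
A non-normative sketch of a receiver extending `scala.Dynamic`; the class and member names are invented for the example, and the `applyDynamic` definition uses the curried parameter shape accepted by current compilers so that the sketch compiles as written.

```scala
import scala.language.dynamics

// `Props` is an illustrative class, not part of the standard library.
class Props(entries: (String, String)*) extends Dynamic {
  private val map = entries.toMap
  // Invoked for rewritten selections such as `p.host()`.
  def applyDynamic(name: String)(args: Any*): String =
    map.getOrElse(name, s"<no $name>")
}

val p = new Props("host" -> "localhost", "port" -> "8080")
p.host()      // rewritten to p.applyDynamic("host")()    => "localhost"
p.timeout()   // rewritten to p.applyDynamic("timeout")() => "<no timeout>"
```
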
diff --git a/spec/07-implicit-parameters-and-views.md b/spec/07-implicits.md
index 1a4d70409c..5e10373959 100644
--- a/spec/07-implicit-parameters-and-views.md
+++ b/spec/07-implicits.md
@@ -1,10 +1,10 @@
---
-title: Implicit Parameters and Views
+title: Implicits
layout: default
chapter: 7
---
-# Implicit Parameters and Views
+# Implicits
## The Implicit Modifier
@@ -15,11 +15,12 @@ ParamClauses ::= {ParamClause} [nl] ‘(’ ‘implicit’ Params ‘)’
Template members and parameters labeled with an `implicit`
modifier can be passed to [implicit parameters](#implicit-parameters)
-and can be used as implicit conversions called [views](#views).
+and can be used as implicit conversions called [views](#views).
The `implicit` modifier is illegal for all
type members, as well as for [top-level objects](09-top-level-definitions.html#packagings).
-### Example
+###### Example Monoid
+
The following code defines an abstract class of monoids and
two concrete implementations, `StringMonoid` and
`IntMonoid`. The two implementations are marked implicit.
@@ -41,7 +42,6 @@ object Monoids {
}
```
-
## Implicit Parameters
An implicit parameter list
@@ -57,7 +57,7 @@ parameters, such arguments will be automatically provided.
The actual arguments that are eligible to be passed to an implicit
parameter of type $T$ fall into two categories. First, eligible are
all identifiers $x$ that can be accessed at the point of the method
-call without a prefix and that denote an
+call without a prefix and that denote an
[implicit definition](#the-implicit-modifier)
or an implicit parameter. An eligible
identifier may thus be a local name, or a member of an enclosing
@@ -117,7 +117,6 @@ eligible object which matches the implicit formal parameter type
`Monoid[Int]` is `intMonoid` so this object will
be passed as implicit parameter.
-
This discussion also shows that implicit parameters are inferred after
any type arguments are [inferred](06-expressions.html#local-type-inference).
@@ -128,7 +127,7 @@ type of the list is also convertible to this type.
```scala
implicit def list2ordered[A](x: List[A])
- (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] =
+ (implicit elem2ordered: A => Ordered[A]): Ordered[List[A]] =
...
```
@@ -145,8 +144,8 @@ define a `sort` method over ordered lists:
def sort[A](xs: List[A])(implicit a2ordered: A => Ordered[A]) = ...
```
-We can apply `sort` to a list of lists of integers
-`yss: List[List[Int]]`
+We can apply `sort` to a list of lists of integers
+`yss: List[List[Int]]`
as follows:
```scala
@@ -161,11 +160,11 @@ sort(yss)(xs: List[Int] => list2ordered[Int](xs)(int2ordered)) .
The possibility of passing implicit arguments to implicit arguments
raises the possibility of an infinite recursion. For instance, one
-might try to define the following method, which injects _every_ type into the
+might try to define the following method, which injects _every_ type into the
`Ordered` class:
```scala
-implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] =
+implicit def magic[A](x: A)(implicit a2ordered: A => Ordered[A]): Ordered[A] =
a2ordered(x)
```
@@ -178,11 +177,11 @@ expansion:
sort(arg)(x => magic(x)(x => magic(x)(x => ... )))
```
-To prevent such infinite expansions, the compiler keeps track of
+To prevent such infinite expansions, the compiler keeps track of
a stack of “open implicit types” for which implicit arguments are currently being
searched. Whenever an implicit argument for type $T$ is searched, the
“core type” of $T$ is added to the stack. Here, the _core type_
-of $T$ is $T$ with aliases expanded, top-level type [annotations](11-user-defined-annotations.html#user-defined-annotations) and
+of $T$ is $T$ with aliases expanded, top-level type [annotations](11-annotations.html#user-defined-annotations) and
[refinements](03-types.html#compound-types) removed, and occurrences
of top-level existentially bound variables replaced by their upper
bounds. The core type is removed from the stack once the search for
the implicit argument either definitely fails or succeeds. Every time a
core type is added to the stack, it is checked that this type does not
dominate any of the other types in the set.
-Here, a core type $T$ _dominates_ a type $U$ if $T$ is
-[equivalent](03-types.html#type-equivalence)
+Here, a core type $T$ _dominates_ a type $U$ if $T$ is
+[equivalent](03-types.html#equivalence)
to $U$, or if the top-level type constructors of $T$ and $U$ have a
common element and $T$ is more complex than $U$.
@@ -203,15 +202,14 @@ the type:
- For a singleton type, $\mathit{ttcs}(p.type) ~=~ \mathit{ttcs}(T)$, provided $p$ has type $T$;
- For a compound type, `$\mathit{ttcs}(T_1$ with $\ldots$ with $T_n)$` $~=~ \mathit{ttcs}(T_1) \cup \ldots \cup \mathit{ttcs}(T_n)$.
-The _complexity_ $\mathit{complexity}(T)$ of a core type is an integer which also depends on the form of
+The _complexity_ $\operatorname{complexity}(T)$ of a core type is an integer which also depends on the form of
the type:
-- For a type designator, $\mathit{complexity}(p.c) ~=~ 1 + \mathit{complexity}(p)$
-- For a parameterized type, $\mathit{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \mathit{complexity}(\mathit{targs})$
-- For a singleton type denoting a package $p$, $\mathit{complexity}(p.type) ~=~ 0$
-- For any other singleton type, $\mathit{complexity}(p.type) ~=~ 1 + \mathit{complexity}(T)$, provided $p$ has type $T$;
-- For a compound type, `$\mathit{complexity}(T_1$ with $\ldots$ with $T_n)$` $= \Sigma\mathit{complexity}(T_i)$
-
+- For a type designator, $\operatorname{complexity}(p.c) ~=~ 1 + \operatorname{complexity}(p)$
+- For a parameterized type, $\operatorname{complexity}(p.c[\mathit{targs}]) ~=~ 1 + \Sigma \operatorname{complexity}(\mathit{targs})$
+- For a singleton type denoting a package $p$, $\operatorname{complexity}(p.type) ~=~ 0$
+- For any other singleton type, $\operatorname{complexity}(p.type) ~=~ 1 + \operatorname{complexity}(T)$, provided $p$ has type $T$;
+- For a compound type, `$\operatorname{complexity}(T_1$ with $\ldots$ with $T_n)$` $= \Sigma\operatorname{complexity}(T_i)$
###### Example
When typing `sort(xs)` for some list `xs` of type `List[List[List[Int]]]`,
@@ -228,7 +226,6 @@ All types share the common type constructor `scala.Function1`,
but the complexity of each new type is lower than the complexity of the previous types.
Hence, the code typechecks.
-
###### Example
Let `ys` be a list of some type which cannot be converted
to `Ordered`. For instance:
@@ -249,7 +246,6 @@ Throwable => Ordered[Throwable],
Since the second type in the sequence is equal to the first, the compiler
will issue an error signalling a divergent implicit expansion.
-
## Views
Implicit parameters and methods can also define implicit conversions
@@ -266,7 +262,7 @@ Views are applied in three situations:
$\mathit{pt}$. The search proceeds as in the case of implicit parameters,
where the implicit scope is the one of `$T$ => $\mathit{pt}$`. If
such a view is found, the expression $e$ is converted to
- `$v$($e$)`.
+ `$v$($e$)`.
1. In a selection $e.m$ with $e$ of type $T$, if the selector $m$ does
not denote an accessible member of $T$. In this case, a view $v$ is searched
which is applicable to $e$ and whose result contains a member named
@@ -275,13 +271,12 @@ Views are applied in three situations:
selection $e.m$ is converted to `$v$($e$).$m$`.
1. In a selection $e.m(\mathit{args})$ with $e$ of type $T$, if the selector
$m$ denotes some member(s) of $T$, but none of these members is applicable to the arguments
- $\mathit{args}$. In this case a view $v$ is searched which is applicable to $e$
+ $\mathit{args}$. In this case a view $v$ is searched which is applicable to $e$
and whose result contains a method $m$ which is applicable to $\mathit{args}$.
The search proceeds as in the case of implicit parameters, where
the implicit scope is the one of $T$. If such a view is found, the
selection $e.m$ is converted to `$v$($e$).$m(\mathit{args})$`.
-
The implicit view, if it is found, can accept its argument $e$ as a
call-by-value or as a call-by-name parameter. However, call-by-value
implicits take precedence over call-by-name implicits.
@@ -290,7 +285,8 @@ As for implicit parameters, overloading resolution is applied
if there are several possible candidates (of either the call-by-value
or the call-by-name category).
-### Example
+###### Example Ordered
+
Class `scala.Ordered[A]` contains a method
```scala
@@ -319,17 +315,16 @@ The first application of `list2ordered` converts the list
occurrence is part of an implicit parameter passed to the `<=`
method.
-
## Context Bounds and View Bounds
```ebnf
- TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+ TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
{‘<%’ Type} {‘:’ Type}
```
A type parameter $A$ of a method or non-trait class may have one or more view
bounds `$A$ <% $T$`. In this case the type parameter may be
-instantiated to any type $S$ which is convertible by application of a
+instantiated to any type $S$ which is convertible by application of a
view to the bound $T$.
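
A non-normative sketch of a view bound; `maxOf` is an illustrative method, usable with any `A` for which a view to `Ordered[A]` is available (such as `Int` or `String` via the standard implicit conversions).

```scala
// The view bound `A <% Ordered[A]` desugars into an implicit parameter
// of type A => Ordered[A], so `x < y` below goes through that view.
def maxOf[A <% Ordered[A]](x: A, y: A): A = if (x < y) y else x

maxOf(3, 7)        // 7
maxOf("ab", "aa")  // "ab"
```
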
A type parameter $A$ of a method or non-trait class may also have one
@@ -375,10 +370,9 @@ def <= [B >: A <% Ordered[B]](that: B): Boolean
## Manifests
-
Manifests are type descriptors that can be automatically generated by
the Scala compiler as arguments to implicit parameters. The Scala
-standard library contains a hierarchy of four manifest classes,
+standard library contains a hierarchy of four manifest classes,
with `OptManifest`
at the top. Their signatures follow the outline below.
@@ -399,7 +393,7 @@ argument is selected.
Otherwise, let $\mathit{Mobj}$ be the companion object `scala.reflect.Manifest`
if $M$ is trait `Manifest`, or be
the companion object `scala.reflect.ClassManifest` otherwise. Let $M'$ be the trait
-`Manifest` if $M$ is trait `Manifest`, or be the trait `OptManifest` otherwise.
+`Manifest` if $M$ is trait `Manifest`, or be the trait `OptManifest` otherwise.
Then the following rules apply.
1. If $T$ is a value class or one of the classes `Any`, `AnyVal`, `Object`,
@@ -416,26 +410,25 @@ Then the following rules apply.
where $m_0$ is the manifest determined for $M'[S]$ and $ms$ are the
manifests determined for $M'[U_1], \ldots, M'[U_n]$.
1. If $T$ is some other class type with type arguments $U_1 , \ldots , U_n$,
- a manifest is generated
+ a manifest is generated
with the invocation `$\mathit{Mobj}$.classType[T](classOf[T], $ms$)`
where $ms$ are the
manifests determined for $M'[U_1] , \ldots , M'[U_n]$.
1. If $T$ is a singleton type `$p$.type`, a manifest is generated with
- the invocation `$\mathit{Mobj}$.singleType[T]($p$)`
+ the invocation `$\mathit{Mobj}$.singleType[T]($p$)`
1. If $T$ is a refined type $T' \{ R \}$, a manifest is generated for $T'$.
(That is, refinements are never reflected in manifests).
1. If $T$ is an intersection type
`$T_1$ with $, \ldots ,$ with $T_n$`
where $n > 1$, the result depends on whether a full manifest is
- to be determined or not.
+ to be determined or not.
If $M$ is trait `Manifest`, then
a manifest is generated with the invocation
`Manifest.intersectionType[T]($ms$)` where $ms$ are the manifests
determined for $M[T_1] , \ldots , M[T_n]$.
- Otherwise, if $M$ is trait `ClassManifest`,
+ Otherwise, if $M$ is trait `ClassManifest`,
then a manifest is generated for the [intersection dominator](03-types.html#type-erasure)
of the types $T_1 , \ldots , T_n$.
1. If $T$ is some other type, then if $M$ is trait `OptManifest`,
a manifest is generated from the designator `scala.reflect.NoManifest`.
If $M$ is a type different from `OptManifest`, a static error results.
-
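
A non-normative sketch of requesting a manifest through an implicit parameter; the compiler synthesizes the arguments following the rules above (method names are illustrative).

```scala
// The implicit Manifest argument is generated by the compiler at each call site.
def describe[T](implicit m: Manifest[T]): String = m.toString

describe[Int]                 // manifest for Int
describe[List[Option[Int]]]   // manifest recording the full type, including arguments

// A manifest also carries enough information to create arrays of a type parameter:
def fill2[T: Manifest](x: T): Array[T] = Array(x, x)
```
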
diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md
index 7b4d070181..c494fbcef5 100644
--- a/spec/08-pattern-matching.md
+++ b/spec/08-pattern-matching.md
@@ -15,7 +15,7 @@ chapter: 8
| Pattern2
Pattern2 ::= varid [‘@’ Pattern3]
| Pattern3
- Pattern3 ::= SimplePattern
+ Pattern3 ::= SimplePattern
| SimplePattern {id [nl] SimplePattern}
SimplePattern ::= ‘_’
| varid
@@ -63,23 +63,22 @@ patterns.
A variable pattern $x$ is a simple identifier which starts with a
lower case letter. It matches any value, and binds the variable name
to that value. The type of $x$ is the expected type of the pattern as
-given from outside. A special case is the wild-card pattern $\_$
+given from outside. A special case is the wild-card pattern `_`
which is treated as if it was a fresh variable on each occurrence.
### Typed Patterns
-
```ebnf
Pattern1 ::= varid `:' TypePat
| `_' `:' TypePat
```
A typed pattern $x: T$ consists of a pattern variable $x$ and a
-type pattern $T$. The type of $x$ is the type pattern $T$, where
+type pattern $T$. The type of $x$ is the type pattern $T$, where
each type variable and wildcard is replaced by a fresh, unknown type.
-This pattern matches any value matched by the [type pattern](#type-patterns)
+This pattern matches any value matched by the [type pattern](#type-patterns)
$T$; it binds the variable name to
-that value.
+that value.
### Pattern Binders
@@ -87,10 +86,10 @@ that value.
Pattern2 ::= varid `@' Pattern3
```
-A pattern binder `$x$@$p$` consists of a pattern variable $x$ and a
+A pattern binder `$x$@$p$` consists of a pattern variable $x$ and a
pattern $p$. The type of the variable $x$ is the static type $T$ of the pattern $p$.
-This pattern matches any value $v$ matched by the pattern $p$,
-provided the run-time type of $v$ is also an instance of $T$,
+This pattern matches any value $v$ matched by the pattern $p$,
+provided the run-time type of $v$ is also an instance of $T$,
and it binds the variable name to that value.
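
A non-normative sketch of a pattern binder: `rest` is bound to the value matched by the nested pattern.

```scala
List(1, 2, 3) match {
  // `rest @ (_ :: _)` binds `rest` to the non-empty tail matched by the nested pattern.
  case head :: (rest @ (_ :: _)) => s"head=$head, rest has ${rest.length} elements"
  case _                         => "too short"
}
// "head=1, rest has 2 elements"
```
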
### Literal Patterns
@@ -100,7 +99,7 @@ and it binds the variable name to that value.
```
A literal pattern $L$ matches any value that is equal (in terms of
-$==$) to the literal $L$. The type of $L$ must conform to the
+`==`) to the literal $L$. The type of $L$ must conform to the
expected type of the pattern.
### Stable Identifier Patterns
@@ -116,7 +115,7 @@ type of the pattern. The pattern matches any value $v$ such that
To resolve the syntactic overlap with a variable pattern, a
stable identifier pattern may not be a simple name starting with a lower-case
-letter. However, it is possible to enclose a such a variable name in
+letter. However, it is possible to enclose such a variable name in
backquotes; then it is treated as a stable identifier pattern.
###### Example
@@ -175,7 +174,7 @@ repeated parameter. This is further discussed [here](#pattern-sequences).
```
A tuple pattern `($p_1 , \ldots , p_n$)` is an alias
-for the constructor pattern `scala.Tuple$n$($p_1 , \ldots , p_n$)`,
+for the constructor pattern `scala.Tuple$n$($p_1 , \ldots , p_n$)`,
where $n \geq 2$. The empty tuple
`()` is the unique value of type `scala.Unit`.
@@ -196,14 +195,14 @@ $x(p_1 , \ldots , p_n)$ if it takes exactly one argument and one of
the following applies:
* $n=0$ and `unapply`'s result type is `Boolean`. In this case
- the extractor pattern matches all values $v$ for which
+ the extractor pattern matches all values $v$ for which
`$x$.unapply($v$)` yields `true`.
* $n=1$ and `unapply`'s result type is `Option[$T$]`, for some
type $T$. In this case, the (only) argument pattern $p_1$ is typed in
turn with expected type $T$. The extractor pattern then matches all
values $v$ for which `$x$.unapply($v$)` yields a value of form
`Some($v_1$)`, and $p_1$ matches $v_1$.
-* $n>1$ and `unapply`'s result type is
+* $n>1$ and `unapply`'s result type is
`Option[($T_1 , \ldots , T_n$)]`, for some
types $T_1 , \ldots , T_n$. In this case, the argument patterns $p_1
, \ldots , p_n$ are typed in turn with expected types $T_1 , \ldots ,
@@ -250,7 +249,7 @@ First, in a constructor pattern $c(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$, whe
Second, in an extractor pattern $x(q_1 , \ldots , q_m, p_1 , \ldots , p_n)$ if the extractor object $x$ does not have an `unapply` method,
but it does define an `unapplySeq` method with a result type conforming to `Option[(T_1, ... , T_m, Seq[S])]` (if `m = 0`, the type `Option[Seq[S]]` is also accepted). The expected type for the patterns $p_i$ is $S$.
-The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`.
+The last pattern in a pattern sequence may be a _sequence wildcard_ `_*`.
Each element pattern $p_i$ is type-checked with
$S$ as expected type, unless it is a sequence wildcard. If a final
sequence wildcard is present, the pattern matches all values $v$ that
@@ -268,8 +267,8 @@ p_n$.
An infix operation pattern $p;\mathit{op};q$ is a shorthand for the
constructor or extractor pattern $\mathit{op}(p, q)$. The precedence and
-associativity of operators in patterns is the same as in
-[expressions](06-expressions.html#prefix-infix-and-postfix-operations).
+associativity of operators in patterns is the same as in
+[expressions](06-expressions.html#prefix,-infix,-and-postfix-operations).
An infix operation pattern $p;\mathit{op};(q_1 , \ldots , q_n)$ is a
shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1
@@ -284,7 +283,7 @@ shorthand for the constructor or extractor pattern $\mathit{op}(p, q_1
A pattern alternative `$p_1$ | $\ldots$ | $p_n$`
consists of a number of alternative patterns $p_i$. All alternative
patterns are type checked with the expected type of the pattern. They
-may no bind variables other than wildcards. The alternative pattern
+may not bind variables other than wildcards. The alternative pattern
matches a value $v$ if at least one of its alternatives matches $v$.
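
A non-normative sketch of pattern alternatives; note that, as stated above, the alternatives bind no variables other than wildcards.

```scala
def describe(n: Int): String = n match {
  case 0 | 1         => "binary digit"     // alternatives may not bind variables
  case 2 | 3 | 5 | 7 => "small prime"
  case _             => "something else"
}

describe(1)  // "binary digit"
describe(5)  // "small prime"
```
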
### XML Patterns
@@ -315,7 +314,7 @@ A pattern $p$ is _irrefutable_ for a type $T$, if one of the following applies:
1. $p$ is a typed pattern $x: T'$, and $T <: T'$,
1. $p$ is a constructor pattern $c(p_1 , \ldots , p_n)$, the type $T$
is an instance of class $c$, the [primary constructor](05-classes-and-objects.html#class-definitions)
- of type $T$ has argument types $T_1 , \ldots , T_n$, and each $p_i$ is
+ of type $T$ has argument types $T_1 , \ldots , T_n$, and each $p_i$ is
irrefutable for $T_i$.
## Type Patterns
@@ -324,18 +323,18 @@ A pattern $p$ is _irrefutable_ for a type $T$, if one of the following applies:
TypePat ::= Type
```
-Type patterns consist of types, type variables, and wildcards.
+Type patterns consist of types, type variables, and wildcards.
A type pattern $T$ is of one of the following forms:
* A reference to a class $C$, $p.C$, or `$T$#$C$`. This
- type pattern matches any non-null instance of the given class.
+ type pattern matches any non-null instance of the given class.
Note that the prefix of the class, if it is given, is relevant for determining
class instances. For instance, the pattern $p.C$ matches only
instances of classes $C$ which were created with the path $p$ as
prefix.
The bottom types `scala.Nothing` and `scala.Null` cannot
- be used as type patterns, because they would match nothing in any case.
+ be used as type patterns, because they would match nothing in any case.
* A singleton type `$p$.type`. This type pattern matches only the value
denoted by the path $p$ (that is, a pattern match involved a
@@ -346,7 +345,7 @@ A type pattern $T$ is of one of the following forms:
the type patterns $T_i$.
* A parameterized type pattern $T[a_1 , \ldots , a_n]$, where the $a_i$
- are type variable patterns or wildcards $\_$.
+ are type variable patterns or wildcards `_`.
This type pattern matches all values which match $T$ for
some arbitrary instantiation of the type variables and wildcards. The
bounds or alias type of these type variable are determined as
@@ -356,8 +355,7 @@ A type pattern $T$ is of one of the following forms:
$T_1$ is a type pattern. This type pattern matches any non-null instance
of type `scala.Array$[U_1]$`, where $U_1$ is a type matched by $T_1$.
-
-Types which are not of one of the forms described above are also
+Types which are not of one of the forms described above are also
accepted as type patterns. However, such type patterns will be translated to their
[erasure](03-types.html#type-erasure). The Scala
compiler will issue an "unchecked" warning for these patterns to
@@ -373,73 +371,73 @@ bound type variables in a typed pattern or constructor
pattern. Inference takes into account the expected type of the
pattern.
-
-### Type parameter inference for typed patterns.
+### Type parameter inference for typed patterns
Assume a typed pattern $p: T'$. Let $T$ result from $T'$ where all wildcards in
-$T'$ are renamed to fresh variable names. Let $a_1 , \ldots , a_n$ be
+$T'$ are renamed to fresh variable names. Let $a_1 , \ldots , a_n$ be
the type variables in $T$. These type variables are considered bound
in the pattern. Let the expected type of the pattern be $\mathit{pt}$.
Type parameter inference constructs first a set of subtype constraints over
-the type variables $a_i$. The initial constraints set $\mathcal{C}_0$ reflects
+the type variables $a_i$. The initial constraints set $\mathcal{C}\_0$ reflects
just the bounds of these type variables. That is, assuming $T$ has
bound type variables $a_1 , \ldots , a_n$ which correspond to class
-type parameters $a'_1 , \ldots , a'_n$ with lower bounds $L_1, \ldots , L_n$
-and upper bounds $U_1 , \ldots , U_n$, $\mathcal{C}_0$ contains the constraints
+type parameters $a_1' , \ldots , a_n'$ with lower bounds $L_1, \ldots , L_n$
+and upper bounds $U_1 , \ldots , U_n$, $\mathcal{C}_0$ contains the constraints
-| | | | |
-|-------------|------|---------------|------------------------|
-|$a_i$ | $<:$ | $\sigma U_i$ | $(i = 1, \ldots , n)$ |
-|$\sigma L_i$ | $<:$ | $a_i$ | $(i = 1 , \ldots , n)$ |
+$$
+\begin{cases}
+a_i &<: \sigma U_i & \quad (i = 1, \ldots , n) \\\\
+\sigma L_i &<: a_i & \quad (i = 1, \ldots , n)
+\end{cases}
+$$
-
-where $\sigma$ is the substitution $[a'_1 := a_1 , \ldots , a'_n :=
-a_n]$.
+where $\sigma$ is the substitution $[a_1' := a_1 , \ldots , a_n' := a_n]$.
The set $\mathcal{C}_0$ is then augmented by further subtype constraints. There are two
cases.
###### Case 1
-If there exists a substitution $\sigma$ over the type variables $a_i , \ldots , a_n$ such that $\sigma T$ conforms to $\mathit{pt}$, one determines the weakest subtype constraints $\mathcal{C}_1$ over the type variables $a_1, \ldots , a_n$ such that $\mathcal{C}_0 \wedge \mathcal{C}_1$ implies that $T$ conforms to $\mathit{pt}$.
+If there exists a substitution $\sigma$ over the type variables $a_1 , \ldots , a_n$ such that $\sigma T$ conforms to $\mathit{pt}$, one determines the weakest subtype constraints
+$\mathcal{C}\_1$ over the type variables $a_1, \ldots , a_n$ such that $\mathcal{C}\_0 \wedge \mathcal{C}_1$ implies that $T$ conforms to $\mathit{pt}$.
###### Case 2
Otherwise, if $T$ can not be made to conform to $\mathit{pt}$ by
instantiating its type variables, one determines all type variables in
$\mathit{pt}$ which are defined as type parameters of a method enclosing
the pattern. Let the set of such type parameters be $b_1 , \ldots ,
-b_m$. Let $\mathcal{C}'_0$ be the subtype constraints reflecting the bounds of the
+b_m$. Let $\mathcal{C}\_0'$ be the subtype constraints reflecting the bounds of the
type variables $b_i$. If $T$ denotes an instance type of a final
-class, let $\mathcal{C}_2$ be the weakest set of subtype constraints over the type
+class, let $\mathcal{C}\_2$ be the weakest set of subtype constraints over the type
variables $a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that
-$\mathcal{C}_0 \wedge \mathcal{C}'_0 \wedge \mathcal{C}_2$ implies that $T$ conforms to
+$\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}\_2$ implies that $T$ conforms to
$\mathit{pt}$. If $T$ does not denote an instance type of a final class,
-let $\mathcal{C}_2$ be the weakest set of subtype constraints over the type variables
-$a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that $\mathcal{C}_0 \wedge
-\mathcal{C}'_0 \wedge \mathcal{C}_2$ implies that it is possible to construct a type
+let $\mathcal{C}\_2$ be the weakest set of subtype constraints over the type variables
+$a_1 , \ldots , a_n$ and $b_1 , \ldots , b_m$ such that $\mathcal{C}\_0 \wedge
+\mathcal{C}\_0' \wedge \mathcal{C}\_2$ implies that it is possible to construct a type
$T'$ which conforms to both $T$ and $\mathit{pt}$. It is a static error if
-there is no satisfiable set of constraints $\mathcal{C}_2$ with this property.
+there is no satisfiable set of constraints $\mathcal{C}\_2$ with this property.
The final step consists in choosing type bounds for the type
variables which imply the established constraint system. The process
is different for the two cases above.
###### Case 1
-We take $a_i >: L_i <: U_i$ where each $L_i$ is minimal and each $U_i$ is maximal wrt $<:$ such that $a_i >: L_i <: U_i$ for $i = 1, \ldots, n$ implies $\mathcal{C}_0 \wedge \mathcal{C}_1$.
+We take $a_i >: L_i <: U_i$ where each $L_i$ is minimal and each $U_i$ is maximal wrt $<:$ such that $a_i >: L_i <: U_i$ for $i = 1, \ldots, n$ implies $\mathcal{C}\_0 \wedge \mathcal{C}\_1$.
###### Case 2
-We take $a_i >: L_i <: U_i$ and $b_i >: L'_i <: U'_i$ where each $L_i$
-and $L'_j$ is minimal and each $U_i$ and $U'_j$ is maximal such that
-$a_i >: L_i <: U_i$ for $i = 1 , \ldots , n$ and
-$b_j >: L'_j <: U'_j$ for $j = 1 , \ldots , m$
-implies $\mathcal{C}_0 \wedge \mathcal{C}'_0 \wedge \mathcal{C}_2$.
+We take $a_i >: L_i <: U_i$ and $b\_i >: L_i' <: U_i'$ where each $L_i$
+and $L_j'$ is minimal and each $U_i$ and $U_j'$ is maximal such that
+$a_i >: L_i <: U_i$ for $i = 1 , \ldots , n$ and
+$b_j >: L_j' <: U_j'$ for $j = 1 , \ldots , m$
+implies $\mathcal{C}\_0 \wedge \mathcal{C}\_0' \wedge \mathcal{C}_2$.
In both cases, local type inference is permitted to limit the
complexity of inferred bounds. Minimality and maximality of types have
to be understood relative to the set of types of acceptable
complexity.
-#### Type parameter inference for constructor patterns.
+### Type parameter inference for constructor patterns
Assume a constructor pattern $C(p_1 , \ldots , p_n)$ where class $C$
has type parameters $a_1 , \ldots , a_n$. These type parameters
are inferred in the same way as for the typed pattern
@@ -495,7 +493,6 @@ list `x` contains elements other than strings. The Scala
compiler will flag this potential loss of type-safety with an
"unchecked" warning message.
-
###### Example
Consider the program fragment
@@ -519,7 +516,6 @@ the case clause as an abstract type with lower and upper bound
`y.n`, of type `Int`, is found to conform to the
function's declared result type, `Number`.
-
## Pattern Matching Expressions
```ebnf
@@ -537,12 +533,12 @@ e match { case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
consists of a selector expression $e$ and a number $n > 0$ of
cases. Each case consists of a (possibly guarded) pattern $p_i$ and a
block $b_i$. Each $p_i$ might be complemented by a guard
-`if $e$` where $e$ is a boolean expression.
+`if $e$` where $e$ is a boolean expression.
The scope of the pattern
variables in $p_i$ comprises the pattern's guard and the corresponding block $b_i$.
Let $T$ be the type of the selector expression $e$ and let $a_1
-, \ldots , a_m$ be the type parameters of all methods enclosing
+, \ldots , a_m$ be the type parameters of all methods enclosing
the pattern matching expression. For every $a_i$, let $L_i$ be its
lower bound and $U_i$ be its higher bound. Every pattern $p \in \{p_1, , \ldots , p_n\}$
can be typed in two ways. First, it is attempted
@@ -552,16 +548,16 @@ $T$ by replacing every occurrence of a type parameter $a_i$ by
\mbox{\sl undefined}. If this second step fails also, a compile-time
error results. If the second step succeeds, let $T_p$ be the type of
pattern $p$ seen as an expression. One then determines minimal bounds
-$L'_1 , \ldots , L'_m$ and maximal bounds $U'_1 , \ldots , U'_m$ such
-that for all $i$, $L_i <: L'_i$ and $U'_i <: U_i$ and the following
+$L_1' , \ldots , L_m'$ and maximal bounds $U_1' , \ldots , U_m'$ such
+that for all $i$, $L_i <: L_i'$ and $U_i' <: U_i$ and the following
constraint system is satisfied:
$$L_1 <: a_1 <: U_1\;\wedge\;\ldots\;\wedge\;L_m <: a_m <: U_m \ \Rightarrow\ T_p <: T$$
If no such bounds can be found, a compile time error results. If such
bounds are found, the pattern matching clause starting with $p$ is
-then typed under the assumption that each $a_i$ has lower bound $L'_i$
-instead of $L_i$ and has upper bound $U'_i$ instead of $U_i$.
+then typed under the assumption that each $a_i$ has lower bound $L_i'$
+instead of $L_i$ and has upper bound $U_i'$ instead of $U_i$.
The expected type of every block $b_i$ is the expected type of the
whole pattern matching expression. The type of the pattern matching
@@ -571,7 +567,7 @@ $b_i$.
When applying a pattern matching expression to a selector value,
patterns are tried in sequence until one is found which matches the
-[selector value](#patterns). Say this case is `$case p_i \Rightarrow b_i$`.
+[selector value](#patterns). Say this case is `case $p_i \Rightarrow b_i$`.
The result of the whole expression is the result of evaluating $b_i$,
where all pattern variables of $p_i$ are bound to
the corresponding parts of the selector value. If no matching pattern
@@ -595,9 +591,9 @@ If the selector of a pattern match is an instance of a
[`sealed` class](05-classes-and-objects.html#modifiers),
the compilation of pattern matching can emit warnings which diagnose
that a given set of patterns is not exhaustive, i.e. that there is a
-possibility of a `MatchError` being raised at run-time.
+possibility of a `MatchError` being raised at run-time.
-### Example
+###### Example
Consider the following definitions of arithmetic terms:
@@ -613,7 +609,7 @@ case class If[T](c: Term[Boolean],
There are terms to represent numeric literals, incrementation, a zero
test, and a conditional. Every term carries as a type parameter the
-type of the expression it representes (either `Int` or `Boolean`).
+type of the expression it represents (either `Int` or `Boolean`).
A type-safe evaluator for such terms can be written as follows.
@@ -637,14 +633,13 @@ Under the assumption `Int <: T <: Int` we can also
verify that the type right hand side of the second case, `Int`
conforms to its expected type, `T`.
-
## Pattern Matching Anonymous Functions
```ebnf
BlockExpr ::= `{' CaseClauses `}'
```
-An anonymous function can be defined by a sequence of cases
+An anonymous function can be defined by a sequence of cases
```scala
{ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$ }
@@ -661,8 +656,8 @@ If the expected type is `scala.Function$k$[$S_1 , \ldots , S_k$, $R$]`,
the expression is taken to be equivalent to the anonymous function:
```scala
-($x_1: S_1 , \ldots , x_k: S_k$) => ($x_1 , \ldots , x_k$) match {
- case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$
+($x_1: S_1 , \ldots , x_k: S_k$) => ($x_1 , \ldots , x_k$) match {
+ case $p_1$ => $b_1$ $\ldots$ case $p_n$ => $b_n$
}
```
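
A non-normative sketch of both expected-type cases, the function type handled by the expansion above and the `PartialFunction` case treated later in this section; the names are illustrative.

```scala
// Expected function type: the case sequence becomes an ordinary function value.
val swap: ((Int, String)) => (String, Int) = { case (n, s) => (s, n) }
swap((1, "one"))   // ("one", 1)

// Expected PartialFunction type: the patterns also determine isDefinedAt.
val evenHalf: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n / 2 }
List(1, 2, 3, 4).collect(evenHalf)   // List(1, 2)
```
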
@@ -719,4 +714,3 @@ anonymous function:
case (a, (b, c)) => a + b * c
}
```
-
diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md
index b9c78b23a1..e3185d8b7d 100644
--- a/spec/09-top-level-definitions.md
+++ b/spec/09-top-level-definitions.md
@@ -23,7 +23,7 @@ A compilation unit consists of a sequence of packagings, import
clauses, and class and object definitions, which may be preceded by a
package clause.
-A compilation unit
+A compilation unit
```scala
package $p_1$;
@@ -34,7 +34,7 @@ $\mathit{stats}$
starting with one or more package
clauses is equivalent to a compilation unit consisting of the
-packaging
+packaging
```scala
package $p_1$ { $\ldots$
@@ -94,7 +94,6 @@ into a special empty package. That package cannot be named and
therefore cannot be imported. However, members of the empty package
are visible to each other without qualification.
-
## Package Objects
```ebnf
@@ -113,7 +112,6 @@ there is a name conflict, the behavior of the program is currently
undefined. It is expected that this restriction will be lifted in a
future version of Scala.
-
## Package References
```ebnf
@@ -121,12 +119,12 @@ QualId ::= id {‘.’ id}
```
A reference to a package takes the form of a qualified identifier.
-Like all other references, package references are relative. That is,
+Like all other references, package references are relative. That is,
a package reference starting in a name $p$ will be looked up in the
closest enclosing scope that defines a member named $p$.
The special predefined name `_root_` refers to the
-outermost root package which contains all top-level packages.
+outermost root package which contains all top-level packages.
###### Example
Consider the following program:
@@ -149,7 +147,6 @@ omitted, the name `b` would instead resolve to the package
`a.b`, and, provided that package does not also
contain a class `B`, a compiler-time error would result.
-
## Programs
A _program_ is a top-level object that has a member method
@@ -161,8 +158,8 @@ passed to the `main` method as a parameter of type
The `main` method of a program can be directly defined in the
object, or it can be inherited. The scala library defines a special class
`scala.App` whose body acts as a `main` method.
-An objects $m$ inheriting from this class is thus a program,
-which executes the initializaton code of the object $m$.
+An object $m$ inheriting from this class is thus a program,
+which executes the initialization code of the object $m$.
###### Example
The following example will create a hello world program by defining
@@ -198,4 +195,3 @@ object HelloWorld extends App {
println("Hello World")
}
```
-
diff --git a/spec/10-xml-expressions-and-patterns.md b/spec/10-xml-expressions-and-patterns.md
index d8c45ecf85..b70fb86471 100644
--- a/spec/10-xml-expressions-and-patterns.md
+++ b/spec/10-xml-expressions-and-patterns.md
@@ -1,5 +1,5 @@
---
-title: XML Expressions and Patterns
+title: XML
layout: default
chapter: 10
---
@@ -14,7 +14,7 @@ changes being mandated by the possibility of embedding Scala code fragments.
## XML expressions
-XML expressions are expressions generated by the following production, where the
+XML expressions are expressions generated by the following production, where the
opening bracket `<` of the first element must be in a position to start the lexical
[XML mode](01-lexical-syntax.html#xml-mode).
@@ -29,18 +29,18 @@ related to entity resolution.
The following productions describe Scala's extensible markup language,
designed as close as possible to the W3C extensible markup language
-standard. Only the productions for attribute values and character data are changed.
+standard. Only the productions for attribute values and character data are changed.
Scala does not support declarations, CDATA sections or processing instructions.
Entity references are not resolved at runtime.
```ebnf
Element ::= EmptyElemTag
- | STag Content ETag
+ | STag Content ETag
-EmptyElemTag ::= ‘<’ Name {S Attribute} [S] ‘/>’
+EmptyElemTag ::= ‘<’ Name {S Attribute} [S] ‘/>’
-STag ::= ‘<’ Name {S Attribute} [S] ‘>’
-ETag ::= ‘</’ Name [S] ‘>’
+STag ::= ‘<’ Name {S Attribute} [S] ‘>’
+ETag ::= ‘</’ Name [S] ‘>’
Content ::= [CharData] {Content1 [CharData]}
Content1 ::= XmlContent
| Reference
@@ -52,23 +52,23 @@ XmlContent ::= Element
```
If an XML expression is a single element, its value is a runtime
-representation of an XML node (an instance of a subclass of
+representation of an XML node (an instance of a subclass of
`scala.xml.Node`). If the XML expression consists of more
than one element, then its value is a runtime representation of a
-sequence of XML nodes (an instance of a subclass of
+sequence of XML nodes (an instance of a subclass of
`scala.Seq[scala.xml.Node]`).
-If an XML expression is an entity reference, CDATA section, processing
-instructions or a comments, it is represented by an instance of the
+If an XML expression is an entity reference, CDATA section, processing
+instruction, or a comment, it is represented by an instance of the
corresponding Scala runtime class.
-By default, beginning and trailing whitespace in element content is removed,
+By default, beginning and trailing whitespace in element content is removed,
and consecutive occurrences of whitespace are replaced by a single space
character `\u0020`. This behavior can be changed to preserve all whitespace
with a compiler option.
```ebnf
-Attribute ::= Name Eq AttValue
+Attribute ::= Name Eq AttValue
AttValue ::= ‘"’ {CharQ | CharRef} ‘"’
| ‘'’ {CharA | CharRef} ‘'’
@@ -76,13 +76,13 @@ AttValue ::= ‘"’ {CharQ | CharRef} ‘"’
ScalaExpr ::= Block
-CharData ::= { CharNoRef } $\mbox{\rm\em without}$ {CharNoRef}`{'CharB {CharNoRef}
- $\mbox{\rm\em and without}$ {CharNoRef}`]]>'{CharNoRef}
+CharData ::= { CharNoRef } $\textit{ without}$ {CharNoRef}`{'CharB {CharNoRef}
+ $\textit{ and without}$ {CharNoRef}`]]>'{CharNoRef}
```
<!-- {% raw %} stupid liquid borks on the double brace below; brace yourself, liquid! -->
XML expressions may contain Scala expressions as attribute values or
-within nodes. In the latter case, these are embedded using a single opening
+within nodes. In the latter case, these are embedded using a single opening
brace `{` and ended by a closing brace `}`. To express a single opening brace
within XML text as generated by CharData, it must be doubled.
Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala expression.
@@ -90,17 +90,17 @@ Thus, `{{` represents the XML text `{` and does not introduce an embedded Scala
```ebnf
BaseChar, Char, Comment, CombiningChar, Ideographic, NameChar, S, Reference
- ::= $\mbox{\rm\em “as in W3C XML”}$
+ ::= $\textit{“as in W3C XML”}$
-Char1 ::= Char $\mbox{\rm\em without}$ ‘<’ | ‘&’
-CharQ ::= Char1 $\mbox{\rm\em without}$ ‘"’
-CharA ::= Char1 $\mbox{\rm\em without}$ ‘'’
-CharB ::= Char1 $\mbox{\rm\em without}$ ‘{’
+Char1 ::= Char $\textit{ without}$ ‘<’ | ‘&’
+CharQ ::= Char1 $\textit{ without}$ ‘"’
+CharA ::= Char1 $\textit{ without}$ ‘'’
+CharB ::= Char1 $\textit{ without}$ ‘{’
Name ::= XNameStart {NameChar}
-XNameStart ::= ‘_’ | BaseChar | Ideographic
- $\mbox{\rm\em (as in W3C XML, but without }$ ‘:’
+XNameStart ::= ‘_’ | BaseChar | Ideographic
+ $\textit{ (as in W3C XML, but without }$ ‘:’$)$
```
## XML patterns
@@ -110,7 +110,7 @@ the opening bracket `<` of the element patterns must be in a position
to start the lexical [XML mode](01-lexical-syntax.html#xml-mode).
```ebnf
-XmlPattern ::= ElementPattern
+XmlPattern ::= ElementPattern
```
Well-formedness constraints of the XML specification apply.
@@ -123,18 +123,18 @@ XML patterns may contain [Scala patterns](08-pattern-matching.html#pattern-match
Whitespace is treated the same way as in XML expressions.
-By default, beginning and trailing whitespace in element content is removed,
+By default, beginning and trailing whitespace in element content is removed,
and consecutive occurrences of whitespace are replaced by a single space
character `\u0020`. This behavior can be changed to preserve all whitespace
with a compiler option.
```ebnf
ElemPattern ::= EmptyElemTagP
- | STagP ContentP ETagP
+ | STagP ContentP ETagP
EmptyElemTagP ::= ‘<’ Name [S] ‘/>’
-STagP ::= ‘<’ Name [S] ‘>’
-ETagP ::= ‘</’ Name [S] ‘>’
+STagP ::= ‘<’ Name [S] ‘>’
+ETagP ::= ‘</’ Name [S] ‘>’
ContentP ::= [CharData] {(ElemPattern|ScalaPatterns) [CharData]}
ContentP1 ::= ElemPattern
| Reference
@@ -144,4 +144,3 @@ ContentP1 ::= ElemPattern
| ScalaPatterns
ScalaPatterns ::= ‘{’ Patterns ‘}’
```
-
diff --git a/spec/11-user-defined-annotations.md b/spec/11-annotations.md
index fd7a7f9d3f..d66f24abf8 100644
--- a/spec/11-user-defined-annotations.md
+++ b/spec/11-annotations.md
@@ -1,20 +1,22 @@
---
-title: User-Defined Annotations
+title: Annotations
layout: default
chapter: 11
---
-# User-Defined Annotations
+# Annotations
```ebnf
Annotation ::= ‘@’ SimpleType {ArgumentExprs}
ConstrAnnotation ::= ‘@’ SimpleType ArgumentExprs
```
-User-defined annotations associate meta-information with definitions.
+## Definition
+
+Annotations associate meta-information with definitions.
A simple annotation has the form `@$c$` or `@$c(a_1 , \ldots , a_n)$`.
Here, $c$ is a constructor of a class $C$, which must conform
-to the class `scala.Annotation`.
+to the class `scala.Annotation`.
Annotations may apply to definitions or declarations, types, or
expressions. An annotation of a definition or declaration appears in
@@ -33,6 +35,10 @@ String @local // Type annotation
(e: @unchecked) match { ... } // Expression annotation
```
+## Predefined Annotations
+
+### Java Platform Annotations
+
The meaning of annotation clauses is implementation-dependent. On the
Java platform, the following annotations have a standard meaning.
@@ -61,7 +67,7 @@ Java platform, the following annotations have a standard meaning.
clause for the method or constructor must mention the class of that exception
or one of the superclasses of the class of that exception.
-## Java Beans Annotations
+### Java Beans Annotations
* `@scala.beans.BeanProperty` When prefixed to a definition of some variable `X`, this
annotation causes getter and setter methods `getX`, `setX`
@@ -76,18 +82,21 @@ Java platform, the following annotations have a standard meaning.
* `@scala.beans.BooleanBeanProperty` This annotation is equivalent to `scala.reflect.BeanProperty`, but
the generated getter method is named `isX` instead of `getX`.
-## Deprecation Annotations
+### Deprecation Annotations
- * `@deprecated(<stringlit>)` Marks a definition as deprecated. Accesses to the
+ * `@deprecated(message: <stringlit>, since: <stringlit>)`<br/>
+ Marks a definition as deprecated. Accesses to the
defined entity will then cause a deprecated warning mentioning the
- message `<stringlit>` to be issued from the compiler. Deprecated
- warnings are suppressed in code that belongs itself to a definition
+ _message_ `<stringlit>` to be issued from the compiler.
+ The argument _since_ documents since which version the definition should be considered deprecated.<br/>
+ Deprecated warnings are suppressed in code that itself belongs to a definition
that is labeled deprecated.
- * `@deprecatedName(name: <symbollit>)` Marks a formal parameter name as deprecated. Invocations of this entity
+ * `@deprecatedName(name: <symbollit>)`<br/>
+ Marks a formal parameter name as deprecated. Invocations of this entity
using named parameter syntax referring to the deprecated parameter name cause a deprecation warning.
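+
+A minimal illustrative sketch of how these annotations can be used (the class
+and member names below are examples, not part of the specification):
+
+```scala
+class Buffer {
+  @deprecated("use capacity instead", "2.11.0")
+  def limit: Int = capacity
+
+  // calls such as resize(sz = 10) still compile, but draw a deprecation warning
+  def resize(@deprecatedName('sz) size: Int): Unit = ()
+
+  def capacity: Int = 100
+}
+```
+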
-## Scala Compiler Annotations
+### Scala Compiler Annotations
* `@unchecked` When applied to the selector of a `match` expression,
this attribute suppresses any warnings about non-exhaustive pattern
@@ -122,7 +131,6 @@ Java platform, the following annotations have a standard meaning.
When applied to value declarations or definitions that have non-volatile
types, the annotation has no effect.
-
* `@specialized` When applied to the definition of a type parameter, this annotation causes
the compiler
to generate specialized definitions for primitive types. An optional list of
@@ -142,6 +150,7 @@ Java platform, the following annotations have a standard meaning.
a definition, the compiler will instead use the specialized version.
See the [specialization sid](http://docs.scala-lang.org/sips/completed/scala-specialization.html) for more details of the implementation.
+## User-defined Annotations
Other annotations may be interpreted by platform- or
application-dependent tools. Class `scala.Annotation` has two
@@ -163,4 +172,3 @@ mapped to the host environment. In particular, on both the Java and
the .NET platforms, such classes must be toplevel; i.e. they may not
be contained in another class or object. Additionally, on both
Java and .NET, all constructor arguments must be constant expressions.
-
diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md
index 9d4d69e52a..e76035f458 100644
--- a/spec/12-the-scala-standard-library.md
+++ b/spec/12-the-scala-standard-library.md
@@ -1,5 +1,5 @@
---
-title: The Scala Standard Library
+title: Standard Library
layout: default
chapter: 12
---
@@ -41,15 +41,15 @@ The signatures of these root classes are described by the following
definitions.
```scala
-package scala
+package scala
/** The universal root class */
abstract class Any {
/** Defined equality; abstract here */
- def equals(that: Any): Boolean
+ def equals(that: Any): Boolean
/** Semantic equality between values */
- final def == (that: Any): Boolean =
+ final def == (that: Any): Boolean =
if (null eq this) null eq that else this equals that
/** Semantic inequality between values */
@@ -73,11 +73,11 @@ abstract class Any {
}
/** The root class of all value types */
-final class AnyVal extends Any
+final class AnyVal extends Any
/** The root class of all reference types */
class AnyRef extends Any {
- def equals(that: Any): Boolean = this eq that
+ def equals(that: Any): Boolean = this eq that
final def eq(that: AnyRef): Boolean = $\ldots$ // reference equality
final def ne(that: AnyRef): Boolean = !(this eq that)
@@ -85,9 +85,9 @@ class AnyRef extends Any {
def toString: String = $\ldots$ // toString computed from hashCode and class name
  def synchronized[T](body: => T): T // execute `body` while locking `this`.
-}
+}
+```
-```scala
The type test `$x$.isInstanceOf[$T$]` is equivalent to a typed
pattern match
@@ -103,10 +103,9 @@ of the form $D$ or $D[\mathit{tps}]$ where $D$ is a type member of some outer cl
In this case $T'$ is `$C$#$D$` (or `$C$#$D[tps]$`, respectively), whereas $T$ itself would expand to `$C$.this.$D[tps]$`.
In other words, an `isInstanceOf` test does not check that types have the same enclosing instance.
-
The test `$x$.asInstanceOf[$T$]` is treated specially if $T$ is a
[numeric value type](#value-classes). In this case the cast will
-be translated to an application of a [conversion method](#numeric-value-types)
+be translated to an application of a [conversion method](#numeric-value-types)
`x.to$T$`. For non-numeric values $x$ the operation will raise a
`ClassCastException`.
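+
+For illustration (a non-normative sketch):
+
+```scala
+(65: Int).asInstanceOf[Char]   // 'A': translated to (65: Int).toChar
+"65".asInstanceOf[Char]        // throws ClassCastException: a String is not a numeric value
+```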
@@ -131,19 +130,19 @@ Subrange types, as well as `Int` and `Long` are called _integer types_, whereas
Numeric value types are ranked in the following partial order:
```scala
-Byte - Short
+Byte - Short
\
Int - Long - Float - Double
- /
- Char
+ /
+ Char
```
-`Byte` and `Short` are the lowest-ranked types in this order,
+`Byte` and `Short` are the lowest-ranked types in this order,
whereas `Double` is the highest-ranked. Ranking does _not_
imply a [conformance relationship](03-types.html#conformance); for
instance `Int` is not a subtype of `Long`. However, object
[`Predef`](#the-predef-object) defines [views](07-implicit-parameters-and-views.html#views)
-from every numeric value type to all higher-ranked numeric value types.
+from every numeric value type to all higher-ranked numeric value types.
Therefore, lower-ranked types are implicitly converted to higher-ranked types
when required by the [context](06-expressions.html#implicit-conversions).
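+
+For example (a non-normative sketch):
+
+```scala
+val i: Int = 3
+val l: Long = i     // Int is implicitly converted to the higher-ranked Long
+val d: Double = l   // Long is implicitly converted to the higher-ranked Double
+// val j: Int = l   // does not compile: there is no view from Long down to Int
+```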
@@ -182,7 +181,7 @@ Any numeric value type $T$ supports the following methods.
numeric value (as when going from `Long` to `Int` or from
`Int` to `Byte`) or it might lose precision (as when going
from `Double` to `Float` or when converting between
- `Long` and `Float`).
+ `Long` and `Float`).
Integer numeric value types support in addition the following operations:
@@ -228,13 +227,13 @@ def equals(other: Any): Boolean = other match {
```
The `hashCode` method returns an integer hashcode that maps equal
-numeric values to equal results. It is guaranteed to be the identity for
+numeric values to equal results. It is guaranteed to be the identity
for type `Int` and for all subrange types.
The `toString` method displays its receiver as an integer or
floating point number.
-### Example
+###### Example
This is the signature of the numeric value type `Int`:
@@ -284,7 +283,6 @@ abstract sealed class Int extends AnyVal {
}
```
-
### Class `Boolean`
Class `Boolean` has only two values: `true` and
@@ -292,7 +290,7 @@ Class `Boolean` has only two values: `true` and
class definition.
```scala
-package scala
+package scala
abstract sealed class Boolean extends AnyVal {
def && (p: => Boolean): Boolean = // boolean and
if (this) p else false
@@ -316,7 +314,7 @@ and `toString` from class `Any`.
The `equals` method returns `true` if the argument is the
same boolean value as the receiver, `false` otherwise. The
-`hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`,
+`hashCode` method returns a fixed, implementation-specific hash-code when invoked on `true`,
and a different, fixed, implementation-specific hash-code when invoked on `false`. The `toString` method
returns the receiver converted to a string, i.e. either `"true"` or `"false"`.
@@ -328,13 +326,13 @@ from class `Any`.
The `equals` method returns `true` if the argument is the
unit value `()`, `false` otherwise. The
-`hashCode` method returns a fixed, implementation-specific hash-code,
+`hashCode` method returns a fixed, implementation-specific hash-code.
The `toString` method returns `"()"`.
## Standard Reference Classes
This section presents some standard Scala reference classes which are
-treated in a special way in Scala compiler -- either Scala provides
+treated in a special way by the Scala compiler – either Scala provides
syntactic sugar for them, or the Scala compiler generates special code
for their operations. Other classes in the standard Scala library are
documented in the Scala library documentation by HTML pages.
@@ -347,7 +345,7 @@ it). For Scala clients the class is taken to support in each case a
method
```scala
-def + (that: Any): String
+def + (that: Any): String
```
which concatenates its left operand with the textual representation of its
@@ -359,7 +357,7 @@ Scala defines tuple classes `Tuple$n$` for $n = 2 , \ldots , 22$.
These are defined as follows.
```scala
-package scala
+package scala
case class Tuple$n$[+T_1, ..., +T_n](_1: T_1, ..., _$n$: T_$n$) {
def toString = "(" ++ _1 ++ "," ++ $\ldots$ ++ "," ++ _$n$ ++ ")"
}
@@ -375,10 +373,10 @@ Scala defines function classes `Function$n$` for $n = 1 , \ldots , 22$.
These are defined as follows.
```scala
-package scala
+package scala
trait Function$n$[-T_1, ..., -T_$n$, +R] {
def apply(x_1: T_1, ..., x_$n$: T_$n$): R
- def toString = "<function>"
+ def toString = "<function>"
}
```
@@ -391,7 +389,7 @@ class PartialFunction[-A, +B] extends Function1[A, B] {
}
```
-The implicitly imported [`Predef`](#the-predef-object) object defines the name
+The implicitly imported [`Predef`](#the-predef-object) object defines the name
`Function` as an alias of `Function1`.
### Class `Array`
@@ -449,8 +447,8 @@ explained in the following.
#### Variance
Unlike arrays in Java, arrays in Scala are _not_
-co-variant; That is, $S <: T$ does not imply
-`Array[$S$] $<:$ Array[$T$]` in Scala.
+covariant; that is, $S <: T$ does not imply
+`Array[$S$] $<:$ Array[$T$]` in Scala.
However, it is possible to cast an array
of $S$ to an array of $T$ if such a cast is permitted in the host
environment.
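+
+For example (a non-normative sketch; the cast succeeds only because the JVM
+permits it):
+
+```scala
+val strings: Array[String] = Array("a", "b")
+// val objects: Array[AnyRef] = strings                           // does not compile: Array is invariant
+val objects: Array[AnyRef] = strings.asInstanceOf[Array[AnyRef]]  // allowed on the JVM
+```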
@@ -505,7 +503,7 @@ over arrays and additional utility methods:
```scala
package scala
-object Array {
+object Array {
/** copies array elements from `src` to `dest`. */
def copy(src: AnyRef, srcPos: Int,
dest: AnyRef, destPos: Int, length: Int): Unit = $\ldots$
@@ -557,36 +555,36 @@ object Array {
## Class Node
```scala
-package scala.xml
+package scala.xml
trait Node {
/** the label of this node */
- def label: String
+ def label: String
/** attribute axis */
- def attribute: Map[String, String]
+ def attribute: Map[String, String]
/** child axis (all children of this node) */
- def child: Seq[Node]
+ def child: Seq[Node]
/** descendant axis (all descendants of this node) */
- def descendant: Seq[Node] = child.toList.flatMap {
- x => x::x.descendant.asInstanceOf[List[Node]]
- }
+ def descendant: Seq[Node] = child.toList.flatMap {
+ x => x::x.descendant.asInstanceOf[List[Node]]
+ }
/** descendant axis (all descendants of this node) */
- def descendant_or_self: Seq[Node] = this::child.toList.flatMap {
- x => x::x.descendant.asInstanceOf[List[Node]]
- }
+ def descendant_or_self: Seq[Node] = this::child.toList.flatMap {
+ x => x::x.descendant.asInstanceOf[List[Node]]
+ }
override def equals(x: Any): Boolean = x match {
- case that:Node =>
- that.label == this.label &&
- that.attribute.sameElements(this.attribute) &&
+ case that:Node =>
+ that.label == this.label &&
+ that.attribute.sameElements(this.attribute) &&
that.child.sameElements(this.child)
case _ => false
- }
+ }
/** XPath style projection function. Returns all children of this node
* that are labeled with 'that'. The document order is preserved.
@@ -594,40 +592,39 @@ trait Node {
def \(that: Symbol): NodeSeq = {
new NodeSeq({
that.name match {
- case "_" => child.toList
+ case "_" => child.toList
case _ =>
- var res:List[Node] = Nil
+ var res:List[Node] = Nil
for (x <- child.elements if x.label == that.name) {
- res = x::res
+ res = x::res
}
res.reverse
}
- })
+ })
}
- /** XPath style projection function. Returns all nodes labeled with the
+ /** XPath style projection function. Returns all nodes labeled with the
* name 'that' from the 'descendant_or_self' axis. Document order is preserved.
*/
def \\(that: Symbol): NodeSeq = {
new NodeSeq(
that.name match {
- case "_" => this.descendant_or_self
+ case "_" => this.descendant_or_self
case _ => this.descendant_or_self.asInstanceOf[List[Node]].
- filter(x => x.label == that.name)
+ filter(x => x.label == that.name)
})
}
/** hashcode for this XML node */
- override def hashCode =
- Utility.hashCode(label, attribute.toList.hashCode, child)
+ override def hashCode =
+ Utility.hashCode(label, attribute.toList.hashCode, child)
/** string representation of this node */
- override def toString = Utility.toXML(this)
+ override def toString = Utility.toXML(this)
}
```
-
## The `Predef` Object
The `Predef` object defines standard functions and type aliases
@@ -642,7 +639,7 @@ object Predef {
// classOf ---------------------------------------------------------
/** Returns the runtime representation of a class type. */
- def classOf[T]: Class[T] = null
+ def classOf[T]: Class[T] = null
// this is a dummy, classOf is handled by compiler.
// Standard type aliases ---------------------------------------------
@@ -651,7 +648,7 @@ object Predef {
type Class[T] = java.lang.Class[T]
// Miscellaneous -----------------------------------------------------
-
+
type Function[-A, +B] = Function1[A, B]
type Map[A, +B] = collection.immutable.Map[A, B]
@@ -668,7 +665,7 @@ object Predef {
val ClassManifest = scala.reflect.ClassManifest
val Manifest = scala.reflect.Manifest
val NoManifest = scala.reflect.NoManifest
-
+
def manifest[T](implicit m: Manifest[T]) = m
def classManifest[T](implicit m: ClassManifest[T]) = m
def optManifest[T](implicit m: OptManifest[T]) = m
@@ -711,7 +708,6 @@ object Predef {
}
```
-
```scala
// tupling ---------------------------------------------------------
@@ -755,13 +751,12 @@ object Predef {
}
```
-
### Predefined Implicit Definitions
The `Predef` object also contains a number of implicit definitions, which are available by default (because `Predef` is implicitly imported).
-Implicit definitions come in two priorities. High-priority implicits are defined in the `Predef` class itself whereas low priority implicits are defined in a class inherited by `Predef`. The rules of
+Implicit definitions come in two priorities. High-priority implicits are defined in the `Predef` class itself whereas low priority implicits are defined in a class inherited by `Predef`. The rules of
static [overloading resolution](06-expressions.html#overloading-resolution)
-stipulate that, all other things being equal, implicit resolution
+stipulate that, all other things being equal, implicit resolution
prefers high-priority implicits over low-priority ones.
The available low-priority implicits include definitions falling into the following categories.
@@ -777,13 +772,12 @@ The available low-priority implicits include definitions falling into the follow
1. An implicit conversion from `String` to `WrappedString`.
-
The available high-priority implicits include definitions falling into the following categories.
- * An implicit wrapper that adds `ensuring` methods
+ * An implicit wrapper that adds `ensuring` methods
with the following overloaded variants to type `Any`.
- ```
+ ```
def ensuring(cond: Boolean): A = { assert(cond); x }
def ensuring(cond: Boolean, msg: Any): A = { assert(cond, msg); x }
def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
@@ -793,7 +787,7 @@ The available high-priority implicits include definitions falling into the follo
* An implicit wrapper that adds a `->` method with the following implementation
to type `Any`.
- ```
+ ```
def -> [B](y: B): (A, B) = (x, y)
```
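+
+    A typical (illustrative) use:
+
+    ```
+    val entry: (String, Int) = "answer" -> 42   // the same value as ("answer", 42)
+    ```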
@@ -807,12 +801,12 @@ The available high-priority implicits include definitions falling into the follo
* An implicit wrapper that adds `+` and `formatted` method with the following
implementations to type `Any`.
- ```
+ ```
def +(other: String) = String.valueOf(self) + other
def formatted(fmtstr: String): String = fmtstr format self
```
- * Numeric primitive conversions that implement the transitive closure of the
+ * Numeric primitive conversions that implement the transitive closure of the
following mappings:
```
@@ -824,7 +818,7 @@ The available high-priority implicits include definitions falling into the follo
Float -> Double
```
- * Boxing and unboxing conversions between primitive types and their boxed
+ * Boxing and unboxing conversions between primitive types and their boxed
versions:
```
@@ -841,9 +835,8 @@ The available high-priority implicits include definitions falling into the follo
* An implicit definition that generates instances of type `T <:< T`, for
any type `T`. Here, `<:<` is a class defined as follows.
- ```
+ ```
sealed abstract class <:<[-From, +To] extends (From => To)
```
Implicit parameters of `<:<` types are typically used to implement type constraints.
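+
+    For instance (a non-normative sketch; `Box` is a made-up name):
+
+    ```
+    class Box[A](value: A) {
+      // callable only when A is known to be (a subtype of) Int
+      def increment(implicit ev: A <:< Int): Int = ev(value) + 1
+    }
+    new Box(41).increment      // 42
+    // new Box("a").increment  // does not compile: no implicit String <:< Int
+    ```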
-
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md
index 3eecc26eb4..7f73e107de 100644
--- a/spec/13-syntax-summary.md
+++ b/spec/13-syntax-summary.md
@@ -15,6 +15,8 @@ UnicodeEscape ::= ‘\‘ ‘u‘ {‘u‘} hexDigit hexDigit hexDigit hexDigit
hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’
```
+## Lexical Syntax
+
The lexical syntax of Scala is given by the following grammar in EBNF form:
```ebnf
@@ -30,7 +32,7 @@ opchar ::= // printableChar not matched by (whiteSpace | upper | lower
printableChar ::= // all characters in [\u0020, \u007F] inclusive
charEscapeSeq ::= ‘\‘ (‘b‘ | ‘t‘ | ‘n‘ | ‘f‘ | ‘r‘ | ‘"‘ | ‘'‘ | ‘\‘)
-op ::= opchar {opchar}
+op ::= opchar {opchar}
varid ::= lower idrest
plainid ::= upper idrest
| varid
@@ -41,11 +43,11 @@ idrest ::= {letter | digit} [‘_’ op]
integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’]
decimalNumeral ::= ‘0’ | nonZeroDigit {digit}
-hexNumeral ::= ‘0’ ‘x’ hexDigit {hexDigit}
+hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit {hexDigit}
digit ::= ‘0’ | nonZeroDigit
nonZeroDigit ::= ‘1’ | … | ‘9’
-floatingPointLiteral
+floatingPointLiteral
::= digit {digit} ‘.’ digit {digit} [exponentPart] [floatType]
| ‘.’ digit {digit} [exponentPart] [floatType]
| digit {digit} exponentPart [floatType]
@@ -72,8 +74,10 @@ nl ::= $\mathit{“new line character”}$
semi ::= ‘;’ | nl {nl}
```
+## Context-free Syntax
+
The context-free syntax of Scala is given by the following EBNF
-grammar.
+grammar:
```ebnf
Literal ::= [‘-’] integerLiteral
@@ -99,7 +103,7 @@ grammar.
FunctionArgTypes ::= InfixType
| ‘(’ [ ParamType {‘,’ ParamType } ] ‘)’
ExistentialClause ::= ‘forSome’ ‘{’ ExistentialDcl {semi ExistentialDcl} ‘}’
- ExistentialDcl ::= ‘type’ TypeDcl
+ ExistentialDcl ::= ‘type’ TypeDcl
| ‘val’ ValDcl
InfixType ::= CompoundType {id [nl] CompoundType}
CompoundType ::= AnnotType {‘with’ AnnotType} [Refinement]
@@ -119,7 +123,7 @@ grammar.
TypePat ::= Type
Ascription ::= ‘:’ InfixType
- | ‘:’ Annotation {Annotation}
+ | ‘:’ Annotation {Annotation}
| ‘:’ ‘_’ ‘*’
Expr ::= (Bindings | [‘implicit’] id | ‘_’) ‘=>’ Expr
@@ -139,7 +143,7 @@ grammar.
PostfixExpr ::= InfixExpr [id [nl]]
InfixExpr ::= PrefixExpr
| InfixExpr id [nl] InfixExpr
- PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr
+ PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr
SimpleExpr ::= ‘new’ (ClassTemplate | TemplateBody)
| BlockExpr
| SimpleExpr1 [‘_’]
@@ -147,7 +151,7 @@ grammar.
| Path
| ‘_’
| ‘(’ [Exprs] ‘)’
- | SimpleExpr ‘.’ id
+ | SimpleExpr ‘.’ id
| SimpleExpr TypeArgs
| SimpleExpr1 ArgumentExprs
| XmlExpr
@@ -170,7 +174,7 @@ grammar.
Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr}
CaseClauses ::= CaseClause { CaseClause }
- CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block
+ CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block
Guard ::= ‘if’ PostfixExpr
Pattern ::= Pattern1 { ‘|’ Pattern1 }
@@ -195,25 +199,25 @@ grammar.
TypeParamClause ::= ‘[’ VariantTypeParam {‘,’ VariantTypeParam} ‘]’
FunTypeParamClause::= ‘[’ TypeParam {‘,’ TypeParam} ‘]’
VariantTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeParam
- TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
+ TypeParam ::= (id | ‘_’) [TypeParamClause] [‘>:’ Type] [‘<:’ Type]
{‘<%’ Type} {‘:’ Type}
ParamClauses ::= {ParamClause} [[nl] ‘(’ ‘implicit’ Params ‘)’]
ParamClause ::= [nl] ‘(’ [Params] ‘)’
Params ::= Param {‘,’ Param}
Param ::= {Annotation} id [‘:’ ParamType] [‘=’ Expr]
- ParamType ::= Type
- | ‘=>’ Type
+ ParamType ::= Type
+ | ‘=>’ Type
| Type ‘*’
- ClassParamClauses ::= {ClassParamClause}
+ ClassParamClauses ::= {ClassParamClause}
[[nl] ‘(’ ‘implicit’ ClassParams ‘)’]
ClassParamClause ::= [nl] ‘(’ [ClassParams] ‘)’
ClassParams ::= ClassParam {‘,’ ClassParam}
ClassParam ::= {Annotation} {Modifier} [(`val' | `var')]
id ‘:’ ParamType [‘=’ Expr]
- Bindings ::= ‘(’ Binding {‘,’ Binding ‘)’
+ Bindings ::= ‘(’ Binding {‘,’ Binding} ‘)’
Binding ::= (id | ‘_’) [‘:’ Type]
- Modifier ::= LocalModifier
+ Modifier ::= LocalModifier
| AccessModifier
| ‘override’
LocalModifier ::= ‘abstract’
@@ -234,7 +238,7 @@ grammar.
| Expr
|
SelfType ::= id [‘:’ Type] ‘=>’
- | ‘this’ ‘:’ Type ‘=>’
+ | ‘this’ ‘:’ Type ‘=>’
Import ::= ‘import’ ImportExpr {‘,’ ImportExpr}
ImportExpr ::= StableId ‘.’ (id | ‘_’ | ImportSelectors)
@@ -263,15 +267,15 @@ grammar.
| ids ‘:’ Type ‘=’ ‘_’
FunDef ::= FunSig [‘:’ Type] ‘=’ Expr
| FunSig [nl] ‘{’ Block ‘}’
- | ‘this’ ParamClause ParamClauses
+ | ‘this’ ParamClause ParamClauses
(‘=’ ConstrExpr | [nl] ConstrBlock)
TypeDef ::= id [TypeParamClause] ‘=’ Type
TmplDef ::= [‘case’] ‘class’ ClassDef
| [‘case’] ‘object’ ObjectDef
| ‘trait’ TraitDef
- ClassDef ::= id [TypeParamClause] {ConstrAnnotation} [AccessModifier]
- ClassParamClauses ClassTemplateOpt
+ ClassDef ::= id [TypeParamClause] {ConstrAnnotation} [AccessModifier]
+ ClassParamClauses ClassTemplateOpt
TraitDef ::= id [TypeParamClause] TraitTemplateOpt
ObjectDef ::= id ClassTemplateOpt
ClassTemplateOpt ::= ‘extends’ ClassTemplate | [[‘extends’] TemplateBody]
@@ -284,7 +288,7 @@ grammar.
EarlyDefs ::= ‘{’ [EarlyDef {semi EarlyDef}] ‘}’ ‘with’
EarlyDef ::= {Annotation [nl]} {Modifier} PatVarDef
- ConstrExpr ::= SelfInvocation
+ ConstrExpr ::= SelfInvocation
| ConstrBlock
ConstrBlock ::= ‘{’ SelfInvocation {semi BlockStat} ‘}’
SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs}
@@ -294,7 +298,7 @@ grammar.
| Import
| Packaging
| PackageObject
- |
+ |
Packaging ::= ‘package’ QualId [nl] ‘{’ TopStatSeq ‘}’
PackageObject ::= ‘package’ ‘object’ ObjectDef
diff --git a/spec/14-references.md b/spec/14-references.md
index 8c169b9ea4..caae5796b2 100644
--- a/spec/14-references.md
+++ b/spec/14-references.md
@@ -4,7 +4,6 @@ layout: default
chapter: 14
---
-
# References
TODO (see comments in markdown source)
@@ -49,7 +48,6 @@ for syntactic definitions?",
month = nov
}
-
%% Book
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -116,7 +114,6 @@ for syntactic definitions?",
OPTannote = {}
}
-
%% InProceedings
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -131,7 +128,6 @@ for syntactic definitions?",
\verb@http://www.cis.upenn.edu/~bcpierce/FOOL/FOOL10.html@}
}
-
%% Misc
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -208,6 +204,4 @@ for syntactic definitions?",
short = {http://www.cis.upenn.edu/~bcpierce/papers/variance.pdf}
}
-
-
---> \ No newline at end of file
+-->
diff --git a/spec/15-changelog.md b/spec/15-changelog.md
new file mode 100644
index 0000000000..54310c921c
--- /dev/null
+++ b/spec/15-changelog.md
@@ -0,0 +1,823 @@
+---
+title: Changelog
+layout: default
+chapter: 15
+---
+
+# Changelog
+
+Changes in Version 2.8.0
+------------------------
+
+#### Trailing commas
+
+Trailing commas in expression, argument, type or pattern sequences are
+no longer supported.
+
+Changes in Version 2.8
+----------------------
+
+Changed visibility rules for nested packages (where done?)
+
+Changed [visibility rules](02-identifiers-names-and-scopes.html)
+so that packages are no longer treated specially.
+
+Added section on [weak conformance](03-types.html#weak-conformance).
+Relaxed type rules for conditionals,
+match expressions, try expressions to compute their result type using
+least upper bound wrt weak conformance. Relaxed type rule for local type
+inference so that argument types need only weakly conform to inferred
+formal parameter types. Added section on
+[numeric widening](06-expressions.html#numeric-widening) to support
+weak conformance.
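+
+For instance (illustrative; `pick` is a made-up name):
+
+    def pick(cond: Boolean) = if (cond) 1 else 2.0
+    // result type is Double rather than AnyVal: Int weakly conforms to Double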
+
+Tightened rules to avoid accidental [overrides](05-classes-and-objects.html#overriding).
+
+Removed class literals.
+
+Added section on [context bounds](07-implicits.html#context-bounds-and-view-bounds).
+
+Clarified differences between [`isInstanceOf` and pattern matches](12-the-scala-standard-library.html#root-classes).
+
+Allowed [`implicit` modifier on function literals](06-expressions.html#anonymous-functions) with a single parameter.
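+
+For instance (illustrative):
+
+    val f = { implicit x: Int => implicitly[Int] + 1 }
+    f(41)   // 42: `x` is available as an implicit value inside the body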
+
+Changes in Version 2.7.2
+------------------------
+
+_(10-Nov-2008)_
+
+#### Precedence of Assignment Operators
+
+The [precedence of assignment operators](06-expressions.html#prefix,-infix,-and-postfix-operations)
+has been brought into line with that of simple assignment: from now on `+=` has the same precedence as `=`.
+
+#### Wildcards as function parameters
+
+A formal parameter to an anonymous function may now be a
+[wildcard represented by an underscore](06-expressions.html#placeholder-syntax-for-anonymous-functions).
+
+
+    _ => 7   // The function that ignores its argument
+             // and always returns 7.
+
+#### Unicode alternative for left arrow
+
+The Unicode glyph ‘\\(\leftarrow\\)’ \\(`\u2190`\\) is now treated as a reserved
+identifier, equivalent to the ASCII symbol ‘`<-`’.
+
+Changes in Version 2.7.1
+------------------------
+
+_(09-April-2008)_
+
+#### Change in Scoping Rules for Wildcard Placeholders in Types
+
+A wildcard in a type now binds to the closest enclosing type
+application. For example `List[List[_]]` is now equivalent to this
+existential type:
+
+ List[List[t] forSome { type t }]
+
+In version 2.7.0, the type expanded instead to:
+
+ List[List[t]] forSome { type t }
+
+The new convention corresponds exactly to the way wildcards in Java are
+interpreted.
+
+#### No Contractiveness Requirement for Implicits
+
+The contractiveness requirement for
+[implicit method definitions](07-implicits.html#implicit-parameters)
+has been dropped. Instead it is checked for each implicit expansion individually
+that the expansion does not result in a cycle or a tree of infinitely
+growing types.
+
+Changes in Version 2.7.0
+------------------------
+
+_(07-Feb-2008)_
+
+#### Java Generics
+
+Scala now supports Java generic types by default:
+
+- A generic type in Java such as `ArrayList<String>` is translated to
+ a generic type in Scala: `ArrayList[String]`.
+
+- A wildcard type such as `ArrayList<? extends Number>` is translated
+ to `ArrayList[_ <: Number]`. This is itself a shorthand for the
+ existential type `ArrayList[T] forSome { type T <: Number }`.
+
+- A raw type in Java such as `ArrayList` is translated to
+ `ArrayList[_]`, which is a shorthand for
+ `ArrayList[T] forSome { type T }`.
+
+This translation works if `-target:jvm-1.5` is specified, which is the
+new default. For any other target, Java generics are not recognized. To
+ensure upgradability of Scala codebases, extraneous type parameters for
+Java classes under `-target:jvm-1.4` are simply ignored. For instance,
+when compiling with `-target:jvm-1.4`, a Scala type such as
+`ArrayList[String]` is simply treated as the unparameterized type
+`ArrayList`.
+
+#### Changes to Case Classes
+
+The Scala compiler now generates for every case class a companion
+extractor object (). For instance, given the case class:
+
+ case class X(elem: String)
+
+the following companion object is generated:
+
+ object X {
+ def unapply(x: X): Some[String] = Some(x.elem)
+ def apply(s: String): X = new X(s)
+ }
+
+If the object exists already, only the `apply` and `unapply` methods are
+added to it.
+
+Three restrictions on case classes have been removed.
+
+1. Case classes can now inherit from other case classes.
+
+2. Case classes may now be `abstract`.
+
+3. Case classes may now come with companion objects.
+
+Changes in Version 2.6.1
+------------------------
+
+_(30-Nov-2007)_
+
+#### Mutable variables introduced by pattern binding
+
+Mutable variables can now be introduced by a pattern matching definition
+(), just like values can. Examples:
+
+ var (x, y) = if (positive) (1, 2) else (-1, -3)
+ var hd :: tl = mylist
+
+#### Self-types
+
+Self types can now be introduced without defining an alias name for
+`this` (). Example:
+
+ class C {
+ type T <: Trait
+ trait Trait { this: T => ... }
+ }
+
+Changes in Version 2.6
+----------------------
+
+_(27-July-2007)_
+
+#### Existential types
+
+It is now possible to define existential types (). An existential type
+has the form `T forSome {Q}` where `Q` is a sequence of value and/or
+type declarations. Given the class definitions
+
+ class Ref[T]
+ abstract class Outer { type T }
+
+one may for example write the following existential types
+
+ Ref[T] forSome { type T <: java.lang.Number }
+ Ref[x.T] forSome { val x: Outer }
+
+#### Lazy values
+
+It is now possible to define lazy value declarations using the new
+modifier `lazy` (). A `lazy` value definition evaluates its right hand
+side \\(e\\) the first time the value is accessed. Example:
+
+ import compat.Platform._
+ val t0 = currentTime
+ lazy val t1 = currentTime
+ val t2 = currentTime
+
+ println("t0 <= t2: " + (t0 <= t2)) //true
+ println("t1 <= t2: " + (t1 <= t2)) //false (lazy evaluation of t1)
+
+#### Structural types
+
+It is now possible to declare structural types using type refinements
+(). For example:
+
+ class File(name: String) {
+ def getName(): String = name
+ def open() { /*..*/ }
+ def close() { println("close file") }
+ }
+ def test(f: { def getName(): String }) { println(f.getName) }
+
+ test(new File("test.txt"))
+ test(new java.io.File("test.txt"))
+
+There’s also a shorthand form for creating values of structural types.
+For instance,
+
+ new { def getName() = "aaron" }
+
+is a shorthand for
+
+ new AnyRef{ def getName() = "aaron" }
+
+Changes in Version 2.5
+----------------------
+
+_(02-May-2007)_
+
+#### Type constructor polymorphism[^1]
+
+Type parameters () and abstract type members () can now also abstract
+over type constructors ().
+
+This allows a more precise `Iterable` interface:
+
+ trait Iterable[+T] {
+ type MyType[+T] <: Iterable[T] // MyType is a type constructor
+
+ def filter(p: T => Boolean): MyType[T] = ...
+ def map[S](f: T => S): MyType[S] = ...
+ }
+
+ abstract class List[+T] extends Iterable[T] {
+ type MyType[+T] = List[T]
+ }
+
+This definition of `Iterable` makes explicit that mapping a function
+over a certain structure (e.g., a `List`) will yield the same structure
+(containing different elements).
+
+#### Early object initialization
+
+It is now possible to initialize some fields of an object before any
+parent constructors are called (). This is particularly useful for
+traits, which do not have normal constructor parameters. Example:
+
+ trait Greeting {
+ val name: String
+ val msg = "How are you, "+name
+ }
+ class C extends {
+ val name = "Bob"
+ } with Greeting {
+ println(msg)
+ }
+
+In the code above, the field `name` is initialized before the constructor of
+`Greeting` is called. Therefore, field `msg` in class `C` is properly
+initialized to `"How are you, Bob"`.
+
+#### For-comprehensions, revised
+
+The syntax of for-comprehensions has changed (). In the new syntax,
+generators do not start with a `val` anymore, but filters start with an `if` (and
+are called guards). A semicolon in front of a guard is optional. For
+example:
+
+ for (val x <- List(1, 2, 3); x % 2 == 0) println(x)
+
+is now written
+
+ for (x <- List(1, 2, 3) if x % 2 == 0) println(x)
+
+The old syntax is still available but will be deprecated in the future.
+
+#### Implicit anonymous functions
+
+It is now possible to define anonymous functions using underscores in
+parameter position (). For instance, the expressions in the left column
+are each function values which expand to the anonymous functions on
+their right.
+
+ _ + 1 x => x + 1
+ _ * _ (x1, x2) => x1 * x2
+ (_: int) * 2 (x: int) => (x: int) * 2
+ if (_) x else y z => if (z) x else y
+ _.map(f) x => x.map(f)
+ _.map(_ + 1) x => x.map(y => y + 1)
+
+As a special case (), a partially unapplied method is now designated
+ `m _`   instead of the previous notation  `&m`.
+
+The new notation will displace the special syntax forms `.m()` for
+abstracting over method receivers and `&m` for treating an unapplied
+method as a function value. For the time being, the old syntax forms are
+still available, but they will be deprecated in the future.
+
+#### Pattern matching anonymous functions, refined
+
+It is now possible to use case clauses to define a function value
+directly for functions of arities greater than one (). Previously, only
+unary functions could be defined that way. Example:
+
+ def scalarProduct(xs: Array[Double], ys: Array[Double]) =
+ (0.0 /: (xs zip ys)) {
+ case (a, (b, c)) => a + b * c
+ }
+
+Changes in Version 2.4
+----------------------
+
+_(09-Mar-2007)_
+
+#### Object-local private and protected
+
+The `private` and `protected` modifiers now accept a `[this]` qualifier
+(). A definition \\(M\\) which is labelled `private[this]` is private,
+and in addition can be accessed only from within the current object.
+That is, the only legal prefixes for \\(M\\) are `this` or `$C$.this`.
+Analogously, a definition \\(M\\) which is labelled `protected[this]` is
+protected, and in addition can be accessed only from within the current
+object.
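+
+For instance (illustrative; `Counter` is a made-up name):
+
+    class Counter {
+      private[this] var count = 0
+      def increment() { count += 1 }   // ok: accessed through `this`
+      def copyFrom(other: Counter) {
+        // count = other.count         // error: `other.count` is object-private
+      }
+    }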
+
+#### Tuples, revised
+
+The syntax for [tuples](06-expressions.html#tuples) has been changed from \\(\\{…\\}\\) to
+\\((…)\\). For any sequence of types \\(T_1 , … , T_n\\),
+
+\\((T_1 , … , T_n)\\) is a shorthand for `Tuple$n$[$T_1 , … , T_n$]`.
+
+Analogously, for any sequence of expressions or patterns \\(x_1
+, … , x_n\\),
+
+\\((x_1 , … , x_n)\\) is a shorthand for `Tuple$n$($x_1 , … , x_n$)`.
+
+#### Access modifiers for primary constructors
+
+The primary constructor of a class can now be marked `private` or `protected` (). If such an
+access modifier is given, it comes between the name of the class and its
+value parameters. Example:
+
+ class C[T] private (x: T) { ... }
+
+#### Annotations
+
+The support for attributes has been extended and its syntax changed ().
+Attributes are now called <span>*annotations*</span>. The syntax has
+been changed to follow Java’s conventions, e.g. `@attribute` instead of
+`[attribute]`. The old syntax is still available but will be deprecated
+in the future.
+
+Annotations are now serialized so that they can be read by compile-time
+or run-time tools. Class `Annotation` has two sub-traits which are used to
+indicate how annotations are retained. Instances of an annotation class
+inheriting from trait `ClassfileAnnotation` will be stored in the generated
+class files. Instances of an annotation class inheriting from trait `StaticAnnotation` will be visible
+to the Scala type-checker in every compilation unit where the annotated
+symbol is accessed.
+
+#### Decidable subtyping
+
+The implementation of subtyping has been changed to prevent infinite
+recursions. Termination of subtyping is now ensured by a new restriction
+of class graphs to be finitary ().
+
+#### Case classes cannot be abstract
+
+It is now explicitly ruled out that case classes can be abstract (). The
+specification was silent on this point before, but did not explain how
+abstract case classes were treated. The Scala compiler allowed the
+idiom.
+
+#### New syntax for self aliases and self types
+
+It is now possible to give an explicit alias name and/or type for the
+self reference (). For instance, in
+
+ class C { self: D =>
+ ...
+ }
+
+the name `self` is introduced as an alias for `this` within `C`, and the self
+type () of `C` is assumed to be `D`. This construct is introduced now in order
+to replace eventually both the qualified `this` construct and the `requires`
+clause in Scala.
+
+#### Assignment Operators
+
+It is now possible to combine operators with assignments (). Example:
+
+ var x: int = 0
+ x += 1
+
+Changes in Version 2.3.2
+------------------------
+
+_(23-Jan-2007)_
+
+#### Extractors
+
+It is now possible to define patterns independently of case classes,
+using methods in extractor objects (). Here is an example:
+
+ object Twice {
+ def apply(x:Int): int = x*2
+ def unapply(z:Int): Option[int] = if (z%2==0) Some(z/2) else None
+ }
+ val x = Twice(21)
+ x match { case Twice(n) => Console.println(n) } // prints 21
+
+In the example, `Twice` is an extractor object with two methods:
+
+- The `apply` method is used to build even numbers.
+
+- The `unapply` method is used to decompose an even number; it is in a sense
+  the reverse of `apply`. `unapply` methods return option types: `Some` for a
+  match that succeeds, `None` for a match that fails. Pattern variables are
+  returned as the elements of the `Some`. If there are several variables, they
+  are grouped in a tuple.
+
+In the second-to-last line, `Twice`’s `apply` method is used to construct the
+number `x`. In the last line, `x` is tested against the pattern `Twice(n)`.
+This pattern succeeds for even numbers and assigns to the variable `n` one
+half of the number that was tested. The pattern match makes use of the
+`unapply` method of object `Twice`. More
+details on extractors can be found in the paper “Matching Objects with
+Patterns” by Emir, Odersky and Williams.
+
+#### Tuples
+
+A new lightweight syntax for tuples has been introduced (). For any
+sequence of types \\(T_1 , … , T_n\\),
+
+\\(\{T_1 , … , T_n \}\\) is a shorthand for `Tuple$n$[$T_1 , … , T_n$]`.
+
+Analogously, for any sequence of expressions or patterns \\(x_1, … , x_n\\),
+
+\\(\{x_1 , … , x_n \}\\) is a shorthand for `Tuple$n$($x_1 , … , x_n$)`.
+
+#### Infix operators of greater arities
+
+It is now possible to use methods which have more than one parameter as
+infix operators (). In this case, all method arguments are written as a
+normal parameter list in parentheses. Example:
+
+ class C {
+ def +(x: int, y: String) = ...
+ }
+ val c = new C
+ c + (1, "abc")
+
+#### Deprecated attribute
+
+A new standard attribute `deprecated` is available (11-annotations.html#deprecation-annotations). If a member
+definition is marked with this attribute, any reference to the member
+will cause a “deprecated” warning message to be emitted.
+
+Changes in Version 2.3
+----------------------
+
+_(23-Nov-2006)_
+
+#### Procedures
+
+A simplified syntax for functions returning `unit` has been introduced
+(). Scala now allows the following shorthands:
+
+`def f(params)` \\(\mbox{for}\\) `def f(params): unit`
+`def f(params) { ... }` \\(\mbox{for}\\) `def f(params): unit = { ... }`
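+
+For instance (illustrative, written with today's `Unit` spelling):
+
+    def greet(name: String) { println("hello, " + name) }
+    // is equivalent to
+    def greet2(name: String): Unit = { println("hello, " + name) }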
+
+#### Type Patterns
+
+The syntax of types in patterns has been refined (). Scala now
+distinguishes between type variables (starting with a lower case letter)
+and types as type arguments in patterns. Type variables are bound in the
+pattern. Other type arguments are, as in previous versions, erased. The
+Scala compiler will now issue an “unchecked” warning at places where
+type erasure might compromise type-safety.
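+
+For instance (illustrative; `describe` is a made-up name):
+
+    def describe(x: Any): String = x match {
+      case xs: List[t]  => "a list"    // `t` is lower case: a type variable bound in the pattern
+      case m: Map[_, _] => "a map"     // wildcard type arguments
+      // case ys: List[Int] => "ints"  // would draw an "unchecked" warning: Int is erased
+      case _            => "something else"
+    }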
+
+#### Standard Types
+
+The recommended names for the two bottom classes in Scala’s type
+hierarchy have changed as follows:
+
+ All ==> Nothing
+ AllRef ==> Null
+
+The old names are still available as type aliases.
+
+Changes in Version 2.1.8
+------------------------
+
+_(23-Aug-2006)_
+
+#### Visibility Qualifier for protected
+
+Protected members can now have a visibility qualifier (), e.g.
+`protected[<qualifier>]`. In particular, one can now simulate package
+protected access as in Java by writing
+
+ protected[P] def X ...
+
+where `P` would name the package containing `X`.
+
+#### Relaxation of Private Access
+
+Private members of a class can now be referenced from the companion
+module of the class and vice versa ().
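+
+For instance (illustrative; `Account` is a made-up name):
+
+    class Account(private val balance: Int)
+    object Account {
+      def balanceOf(a: Account) = a.balance   // ok: companion module of class Account
+    }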
+
+#### Implicit Lookup
+
+The lookup method for implicit definitions has been generalized (). When
+searching for an implicit definition matching a type \\(T\\), now are
+considered
+
+1. all identifiers accessible without prefix, and
+
+2. all members of companion modules of classes associated with \\(T\\).
+
+(The second clause is more general than before). Here, a class is
+<span>*associated*</span> with a type \\(T\\) if it is referenced by
+some part of \\(T\\), or if it is a base class of some part of \\(T\\).
+For instance, to find implicit members corresponding to the type
+
+ HashSet[List[Int], String]
+
+one would now look in the companion modules (aka static parts) of `HashSet`,
+`List`, `Int`, and `String`. Before, it was just the static part of `HashSet`.
+
+#### Tightened Pattern Match
+
+A typed pattern match with a singleton type now tests whether the
+selector value is reference-equal to p (). Example:
+
+ val p = List(1, 2, 3)
+ val q = List(1, 2)
+ val r = q
+ r match {
+ case _: p.type => Console.println("p")
+ case _: q.type => Console.println("q")
+ }
+
+This will match the second case and hence will print “q”. Before, the
+singleton types were erased to the underlying type `List`, and therefore the
+first case would have matched, which is nonsensical.
+
+Changes in Version 2.1.7
+------------------------
+
+_(19-Jul-2006)_
+
+#### Multi-Line string literals
+
+It is now possible to write multi-line string-literals enclosed in
+triple quotes (). Example:
+
+ """this is a
+ multi-line
+ string literal"""
+
+No escape substitutions except for unicode escapes are performed in such
+string literals.
+
+#### Closure Syntax
+
+The syntax of closures has been slightly restricted (). The form
+
+ x: T => E
+
+is valid only when enclosed in braces, i.e.  `{ x: T => E }`. The
+following is illegal, because it might be read as the value x typed with
+the type T =\> E:
+
+ val f = x: T => E
+
+Legal alternatives are:
+
+ val f = { x: T => E }
+ val f = (x: T) => E
+
+Changes in Version 2.1.5
+------------------------
+
+_(24-May-2006)_
+
+#### Class Literals
+
+There is a new syntax for class literals (): For any class type \\(C\\),
+`classOf[$C$]` designates the run-time representation of \\(C\\).
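+
+For instance (illustrative):
+
+    val c: Class[String] = classOf[String]   // the run-time representation of class String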
+
+Changes in Version 2.0
+----------------------
+
+_(12-Mar-2006)_
+
+Scala in its second version is different in some details from the first
+version of the language. There have been several additions and some old
+idioms are no longer supported. This appendix summarizes the main
+changes.
+
+#### New Keywords
+
+The following three words are now reserved; they cannot be used as
+identifiers ()
+
+ implicit match requires
+
+#### Newlines as Statement Separators
+
+Newlines can now be used as statement separators in place of semicolons
+()
+
+#### Syntax Restrictions
+
+There are some other situations where old constructs no longer work:
+
+##### *Pattern matching expressions*
+
+The `match` keyword now appears only as infix operator between a
+selector expression and a number of cases, as in:
+
+ expr match {
+ case Some(x) => ...
+ case None => ...
+ }
+
+Variants such as ` expr.match {...} ` or just ` match {...} ` are no
+longer supported.
+
+##### *“With” in extends clauses*
+
+The idiom
+
+ class C with M { ... }
+
+is no longer supported. A `with` connective is only allowed following an
+`extends` clause. For instance, the line above would have to be written
+
+ class C extends AnyRef with M { ... } .
+
+However, assuming `M` is a trait (see [sec:traits]), it is also legal to
+write
+
+ class C extends M { ... }
+
+The latter expression is treated as equivalent to
+
+ class C extends S with M { ... }
+
+where `S` is the superclass of `M`.
+
+##### *Regular Expression Patterns*
+
+The only form of regular expression pattern that is currently supported
+is a sequence pattern, which might end in a sequence wildcard `_*`. Example:
+
+    case List(1, 2, _*) => ... // will match all lists starting with `1, 2`.
+
+It is at current not clear whether this is a permanent restriction. We
+are evaluating the possibility of re-introducing full regular expression
+patterns in Scala.
+
+#### Selftype Annotations
+
+The recommended syntax of selftype annotations has changed.
+
+ class C: T extends B { ... }
+
+becomes
+
+ class C requires T extends B { ... }
+
+That is, selftypes are now indicated by the new `requires` keyword. The
+old syntax is still available but is considered deprecated.
+
+#### For-comprehensions
+
+For-comprehensions () now admit value and pattern definitions. Example:
+
+ for {
+ val x <- List.range(1, 100)
+ val y <- List.range(1, x)
+ val z = x + y
+ isPrime(z)
+ } yield Pair(x, y)
+
+Note the definition  `val z = x + y` as the third item in the
+for-comprehension.
+
+#### Conversions
+
+The rules for implicit conversions of methods to functions () have been
+tightened. Previously, a parameterized method used as a value was always
+implicitly converted to a function. This could lead to unexpected
+results when method arguments were forgotten. Consider for instance the
+statement below:
+
+ show(x.toString)
+
+where `show` is defined as follows:
+
+ def show(x: String) = Console.println(x) .
+
+Most likely, the programmer forgot to supply an empty argument list `()`
+to `toString`. The previous Scala version would treat this code as a
+partially applied method, and expand it to:
+
+ show(() => x.toString())
+
+As a result, the address of a closure would be printed instead of the
+value of `x.toString()`.
+
+Scala version 2.0 will apply a conversion from partially applied method
+to function value only if the expected type of the expression is indeed
+a function type. For instance, the conversion would not be applied in
+the code above because the expected type of `show`’s parameter is
+`String`, not a function type.
+
+The new convention disallows some previously legal code. Example:
+
+ def sum(f: int => double)(a: int, b: int): double =
+ if (a > b) 0 else f(a) + sum(f)(a + 1, b)
+
+ val sumInts = sum(x => x) // error: missing arguments
+
+The partial application of `sum` in the last line of the code above will
+not be converted to a function type. Instead, the compiler will produce
+an error message which states that arguments for method `sum` are
+missing. The problem can be fixed by providing an expected type for the
+partial application, for instance by annotating the definition of
+`sumInts` with its type:
+
+ val sumInts: (int, int) => double = sum(x => x) // OK
+
+On the other hand, Scala version 2.0 now automatically applies methods
+with empty parameter lists to `()` argument lists when necessary. For
+instance, the `show` expression above will now be expanded to
+
+ show(x.toString()) .
+
+Scala version 2.0 also relaxes the rules of overriding with respect to
+empty parameter lists. The revised definition of <span>*matching
+members*</span> () makes it now possible to override a method with an
+explicit, but empty parameter list `()` with a parameterless method, and
+<span>*vice versa*</span>. For instance, the following class definition
+is now legal:
+
+ class C {
+ override def toString: String = ...
+ }
+
+Previously this definition would have been rejected, because the
+`toString` method as inherited from `java.lang.Object` takes an empty
+parameter list.
+
+#### Class Parameters
+
+A class parameter may now be prefixed by `val` or `var` ().
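+
+For instance (illustrative):
+
+    class Point(val x: Int, var y: Int)   // `x` becomes an immutable member, `y` a mutable one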
+
+#### Private Qualifiers
+
+Previously, Scala had three levels of visibility:
+<span>*private*</span>, <span>*protected*</span> and
+<span>*public*</span>. There was no way to restrict accesses to members
+of the current package, as in Java. Scala 2 now defines access
+qualifiers that let one express this level of visibility, among others.
+In the definition
+
+ private[C] def f(...)
+
+access to `f` is restricted to all code within the class or package `C`
+(which must contain the definition of `f`) ()
+
+#### Changes in the Mixin Model
+
+The model which details mixin composition of classes has changed
+significantly. The main differences are:
+
+1. We now distinguish between <span>*traits*</span> that are used as
+ mixin classes and normal classes. The syntax of traits has been
+ generalized from version 1.0, in that traits are now allowed to have
+ mutable fields. However, as in version 1.0, traits still may not
+ have constructor parameters.
+
+2. Member resolution and super accesses are now both defined in terms
+ of a <span>*class linearization*</span>.
+
+3. Scala’s notion of method overloading has been generalized; in
+ particular, it is now possible to have overloaded variants of the
+ same method in a subclass and in a superclass, or in several
+ different mixins. This makes method overloading in Scala
+ conceptually the same as in Java.
+
+The new mixin model is explained in more detail in .
+
+#### Implicit Parameters
+
+Views in Scala 1.0 have been replaced by the more general concept of
+implicit parameters ().
+
+#### Flexible Typing of Pattern Matching
+
+The new version of Scala implements more flexible typing rules when it
+comes to pattern matching over heterogeneous class hierarchies (). A
+<span>*heterogeneous class hierarchy*</span> is one where subclasses
+inherit a common superclass with different parameter types. With the new
+rules in Scala version 2.0 one can perform pattern matches over such
+hierarchies with more precise typings that keep track of the information
+gained by comparing the types of a selector and a matching pattern ().
+This gives Scala capabilities analogous to guarded algebraic data types.
+
+[^1]: Implemented by Adriaan Moors
diff --git a/spec/README.md b/spec/README.md
index 4bba86feba..2f582dec5c 100644
--- a/spec/README.md
+++ b/spec/README.md
@@ -8,23 +8,21 @@ Third, we'd like to support different output formats. An html page per chapter w
## Editing
-We use redcarpet 3.1 and jekyll 2 (currently in alpha) to generate the html. Essentially, this is what github pages use.
+We use redcarpet 3.1 and jekyll 2 to generate the html. Essentially, this is what github pages use.
## Building
-Travis CI builds the spec automatically on every commit to master and publishes to http://www.scala-lang.org/files/archive/spec/2.11/.
+Travis CI builds the spec automatically on every commit to master and publishes to http://www.scala-lang.org/files/archive/spec/2.11/.
To preview locally, run `bundle exec jekyll serve -d build/spec/ -s spec/ -w --baseurl=""` (in the root of your checkout of scala/scala),
and open http://0.0.0.0:4000/. Jekyll will rebuild as you edit the markdown, but make sure to restart it when you change `_config.yml`.
-
## General Advice for editors
- All files must be saved as UTF-8: ensure your editors are configured appropriately.
- Use of the appropriate unicode characters instead of the latex modifiers for accents, etc. is necessary. For example, é instead of `\'e`.
- MathJAX errors will appear within the rendered DOM as span elements with class `mtext` and style attribute `color: red` applied. It is possible to search for this combination in the development tools of the browser of your choice. In chrome, CTRL+F / CMD+F within the inspect element panel allows you to do this.
-
### Macro replacements:
- While MathJAX does support LaTeX-style command definitions, it is recommended not to use them, as they will likely cause issues when preparing the document for PDF or ebook distribution.
@@ -36,7 +34,6 @@ and open http://0.0.0.0:4000/. Jekyll will rebuild as you edit the markdown, but
- The macro \commadots can be replaced with ` , … , `.
- There is no adequate replacement for `\textsc{...}` (small caps) in pandoc markdown. While unicode contains a number of small capital letters, it is notably missing Q and X as these glyphs are intended for phonetic spelling, therefore these cannot be reliably used. For now, the best option is to use underscore emphasis and capitalise the text manually, `_LIKE THIS_`.
-
### Unicode Character replacements
- The unicode left and right single quotation marks (‘ and ’) have been used in place of ` and ', where the quotation marks are intended to be paired. These can be typed on a mac using Option+] for a left quote and Option+Shift+] for the right quote.
diff --git a/spec/_includes/numbering.css b/spec/_includes/numbering.css
index 86b946354d..2a22ce28b5 100644
--- a/spec/_includes/numbering.css
+++ b/spec/_includes/numbering.css
@@ -1,4 +1,3 @@
-// based on http://philarcher.org/css/numberheadings.css,
h1 {
/* must reset here */
counter-reset: chapter {{ page.chapter }};
@@ -40,7 +39,6 @@ h3:before {
display: inline;
margin-right: 1em;
}
-
h3[id*='example'] {
/* must increment here */
counter-increment: example;
@@ -54,3 +52,9 @@ h3[id*='example']:before {
display: inline;
margin-right: 1em;
}
+
+.no-numbering, .no-numbering:before, .no-numbering:after {
+ content: normal;
+ counter-reset: none;
+ counter-increment: none;
+}
diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml
index 7f17ba30b0..69791d26ad 100644
--- a/spec/_layouts/default.yml
+++ b/spec/_layouts/default.yml
@@ -2,6 +2,10 @@
<html>
<head>
<meta http-equiv='Content-Type' content='text/html; charset=utf-8' />
+
+ <link rel="icon" type="image/png" href="public/favicon.ico">
+ <link rel="shortcut icon" type="image/png" href="public/favicon.ico">
+
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {
@@ -11,26 +15,32 @@
}
});
</script>
- <script type="text/javascript" src="https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
- <script src="//ajax.googleapis.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
-
+ <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/2.3-latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+ <script src="//code.jquery.com/jquery-2.1.3.min.js"></script>
+ <link rel="stylesheet" href="http://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.2/styles/default.min.css">
<!-- need to use include to see value of page.chapter variable -->
<style type="text/css">
{% include numbering.css %}
</style>
- <script type="text/javascript">
- // clear content of H3 nodes that start with "Example:"
- // the content is only there to determine ID of the H3 element (redcarpet doesn't let us set css id)
- $( document ).ready(function(){ $("h3[id*='example']").text("") })
- </script>
-
<link rel="stylesheet" type="text/css" href="public/stylesheets/screen.css">
-
+ <link rel="stylesheet" type="text/css" media="(max-width: 1400px), (orientation: portrait)" href="public/stylesheets/screen-small.css">
+ <link rel="stylesheet" type="text/css" media="print" href="public/stylesheets/print.css">
+ <link rel="stylesheet" type="text/css" href="public/stylesheets/fonts.css">
+ <title>{{ page.title }}</title>
</head>
<body>
+ <header>
+ <nav id="chapters"><a id="github" href="https://github.com/scala/scala/tree/2.11.x/spec"><img src="public/images/github-logo@2x.png" alt="Edit at Github"></a>{% assign sorted_pages = site.pages | sort:"name" %}{% for post in sorted_pages %}{% if post.chapter >= 0 %}<a href="{{site.baseurl}}{{ post.url }}">{{post.chapter}} {{ post.title }}</a>{% endif %}{% endfor %}</nav>
+ </header>
+ <aside class="left"><nav id="toc"></nav></aside>
+ <main id="content">
{{ content }}
+ </main>
+ <script src="public/scripts/toc.js"></script>
+ <script src="public/scripts/highlight.pack.js"></script>
+ <script src="public/scripts/main.js"></script>
</body>
-</html> \ No newline at end of file
+</html>
diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml
index d77ea62a19..4da7d41bea 100644
--- a/spec/_layouts/toc.yml
+++ b/spec/_layouts/toc.yml
@@ -3,13 +3,28 @@
<head>
<meta http-equiv='Content-Type' content='text/html; charset=utf-8' />
- <link rel="stylesheet" type="text/css" href="public/stylesheets/screen.css">
+ <link rel="icon" type="image/png" href="public/favicon.ico">
+ <link rel="shortcut icon" type="image/png" href="public/favicon.ico">
+
+ <script src="//code.jquery.com/jquery-2.1.3.min.js"></script>
+ <title>{{ page.title }}</title>
+ <link rel="stylesheet" type="text/css" href="public/stylesheets/screen.css">
+ <link rel="stylesheet" type="text/css" href="public/stylesheets/screen-toc.css">
+ <link rel="stylesheet" type="text/css" href="public/stylesheets/fonts.css">
</head>
<body>
-
+<header>
+ <div id="header-main">
+ <img id="scala-logo" src="public/images/scala-spiral-white.png" />
+ <span id="title">Scala Language Specification</span>
+ <a id="github" href="https://github.com/scala/scala/tree/2.11.x/spec"><img src="public/images/github-logo@2x.png" alt="Edit at Github"></a>
+ </div>
+ <div id="header-sub">Version 2.11</div>
+</header>
+<main>
{{ content }}
-
+</main>
</body>
-</html>
\ No newline at end of file
+</html>
diff --git a/spec/index.md b/spec/index.md
index b47cb033cb..d7e79dafb7 100644
--- a/spec/index.md
+++ b/spec/index.md
@@ -1,16 +1,9 @@
---
-title: Scala Language Reference
+title: Scala Language Specification
layout: toc
---
-# The Scala Language Specification
-# Version 2.11
-
-### Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger
-
-### Markdown Conversion by Iain McGinniss.
-
-## Table of Contents
+# Table of Contents
<ol>
{% assign sorted_pages = site.pages | sort:"name" %}
@@ -25,8 +18,13 @@ layout: toc
{% endfor %}
</ol>
+#### Authors and Contributors
-## Preface
+Martin Odersky, Philippe Altherr, Vincent Cremet, Gilles Dubochet, Burak Emir, Philipp Haller, Stéphane Micheloud, Nikolay Mihaylov, Adriaan Moors, Lukas Rytz, Michel Schinz, Erik Stenman, Matthias Zenger
+
+Markdown Conversion by Iain McGinniss.
+
+#### Preface
Scala is a Java-like programming language which unifies
object-oriented and functional programming. It is a pure
@@ -68,4 +66,3 @@ the language through lively and inspiring discussions and comments on
previous versions of this document. The contributors to the Scala
mailing list have also given very useful feedback that helped us
improve the language and its tools.
-
diff --git a/spec/public/favicon.ico b/spec/public/favicon.ico
new file mode 100644
index 0000000000..9eb6ef5164
--- /dev/null
+++ b/spec/public/favicon.ico
Binary files differ
diff --git a/spec/public/fonts/Heuristica-Bold.woff b/spec/public/fonts/Heuristica-Bold.woff
new file mode 100644
index 0000000000..904579683d
--- /dev/null
+++ b/spec/public/fonts/Heuristica-Bold.woff
Binary files differ
diff --git a/spec/public/fonts/Heuristica-BoldItalic.woff b/spec/public/fonts/Heuristica-BoldItalic.woff
new file mode 100644
index 0000000000..a3c5234453
--- /dev/null
+++ b/spec/public/fonts/Heuristica-BoldItalic.woff
Binary files differ
diff --git a/spec/public/fonts/Heuristica-Regular.woff b/spec/public/fonts/Heuristica-Regular.woff
new file mode 100644
index 0000000000..f5c1f8b2db
--- /dev/null
+++ b/spec/public/fonts/Heuristica-Regular.woff
Binary files differ
diff --git a/spec/public/fonts/Heuristica-RegularItalic.woff b/spec/public/fonts/Heuristica-RegularItalic.woff
new file mode 100644
index 0000000000..d2c8664593
--- /dev/null
+++ b/spec/public/fonts/Heuristica-RegularItalic.woff
Binary files differ
diff --git a/spec/public/fonts/LuxiMono-Bold.woff b/spec/public/fonts/LuxiMono-Bold.woff
new file mode 100644
index 0000000000..8581bb5aa4
--- /dev/null
+++ b/spec/public/fonts/LuxiMono-Bold.woff
Binary files differ
diff --git a/spec/public/fonts/LuxiMono-BoldOblique.woff b/spec/public/fonts/LuxiMono-BoldOblique.woff
new file mode 100644
index 0000000000..607ccf5cd0
--- /dev/null
+++ b/spec/public/fonts/LuxiMono-BoldOblique.woff
Binary files differ
diff --git a/spec/public/fonts/LuxiMono-Regular.woff b/spec/public/fonts/LuxiMono-Regular.woff
new file mode 100644
index 0000000000..a478ad9ef2
--- /dev/null
+++ b/spec/public/fonts/LuxiMono-Regular.woff
Binary files differ
diff --git a/spec/public/fonts/LuxiMono-RegularOblique.woff b/spec/public/fonts/LuxiMono-RegularOblique.woff
new file mode 100644
index 0000000000..26999f990f
--- /dev/null
+++ b/spec/public/fonts/LuxiMono-RegularOblique.woff
Binary files differ
diff --git a/spec/public/fonts/LuxiSans-Bold.woff b/spec/public/fonts/LuxiSans-Bold.woff
new file mode 100644
index 0000000000..162621568b
--- /dev/null
+++ b/spec/public/fonts/LuxiSans-Bold.woff
Binary files differ
diff --git a/spec/public/fonts/LuxiSans-Regular.woff b/spec/public/fonts/LuxiSans-Regular.woff
new file mode 100644
index 0000000000..89d980218f
--- /dev/null
+++ b/spec/public/fonts/LuxiSans-Regular.woff
Binary files differ
diff --git a/spec/public/images/github-logo@2x.png b/spec/public/images/github-logo@2x.png
new file mode 100644
index 0000000000..285b0fee2f
--- /dev/null
+++ b/spec/public/images/github-logo@2x.png
Binary files differ
diff --git a/spec/public/images/scala-spiral-white.png b/spec/public/images/scala-spiral-white.png
new file mode 100644
index 0000000000..46aaf80824
--- /dev/null
+++ b/spec/public/images/scala-spiral-white.png
Binary files differ
diff --git a/spec/public/octicons/LICENSE.txt b/spec/public/octicons/LICENSE.txt
new file mode 100644
index 0000000000..259b43d14d
--- /dev/null
+++ b/spec/public/octicons/LICENSE.txt
@@ -0,0 +1,9 @@
+(c) 2012-2014 GitHub
+
+When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos)
+
+Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL)
+Applies to all font files
+
+Code License: MIT (http://choosealicense.com/licenses/mit/)
+Applies to all other files
diff --git a/spec/public/octicons/octicons.css b/spec/public/octicons/octicons.css
new file mode 100644
index 0000000000..a5dcd153a8
--- /dev/null
+++ b/spec/public/octicons/octicons.css
@@ -0,0 +1,235 @@
+@font-face {
+ font-family: 'octicons';
+ src: url('octicons.eot?#iefix') format('embedded-opentype'),
+ url('octicons.woff') format('woff'),
+ url('octicons.ttf') format('truetype'),
+ url('octicons.svg#octicons') format('svg');
+ font-weight: normal;
+ font-style: normal;
+}
+
+/*
+
+.octicon is optimized for 16px.
+.mega-octicon is optimized for 32px but can be used larger.
+
+*/
+.octicon, .mega-octicon {
+ font: normal normal normal 16px/1 octicons;
+ display: inline-block;
+ text-decoration: none;
+ text-rendering: auto;
+ -webkit-font-smoothing: antialiased;
+ -moz-osx-font-smoothing: grayscale;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+.mega-octicon { font-size: 32px; }
+
+
+.octicon-alert:before { content: '\f02d'} /*  */
+.octicon-alignment-align:before { content: '\f08a'} /*  */
+.octicon-alignment-aligned-to:before { content: '\f08e'} /*  */
+.octicon-alignment-unalign:before { content: '\f08b'} /*  */
+.octicon-arrow-down:before { content: '\f03f'} /*  */
+.octicon-arrow-left:before { content: '\f040'} /*  */
+.octicon-arrow-right:before { content: '\f03e'} /*  */
+.octicon-arrow-small-down:before { content: '\f0a0'} /*  */
+.octicon-arrow-small-left:before { content: '\f0a1'} /*  */
+.octicon-arrow-small-right:before { content: '\f071'} /*  */
+.octicon-arrow-small-up:before { content: '\f09f'} /*  */
+.octicon-arrow-up:before { content: '\f03d'} /*  */
+.octicon-beer:before { content: '\f069'} /*  */
+.octicon-book:before { content: '\f007'} /*  */
+.octicon-bookmark:before { content: '\f07b'} /*  */
+.octicon-briefcase:before { content: '\f0d3'} /*  */
+.octicon-broadcast:before { content: '\f048'} /*  */
+.octicon-browser:before { content: '\f0c5'} /*  */
+.octicon-bug:before { content: '\f091'} /*  */
+.octicon-calendar:before { content: '\f068'} /*  */
+.octicon-check:before { content: '\f03a'} /*  */
+.octicon-checklist:before { content: '\f076'} /*  */
+.octicon-chevron-down:before { content: '\f0a3'} /*  */
+.octicon-chevron-left:before { content: '\f0a4'} /*  */
+.octicon-chevron-right:before { content: '\f078'} /*  */
+.octicon-chevron-up:before { content: '\f0a2'} /*  */
+.octicon-circle-slash:before { content: '\f084'} /*  */
+.octicon-circuit-board:before { content: '\f0d6'} /*  */
+.octicon-clippy:before { content: '\f035'} /*  */
+.octicon-clock:before { content: '\f046'} /*  */
+.octicon-cloud-download:before { content: '\f00b'} /*  */
+.octicon-cloud-upload:before { content: '\f00c'} /*  */
+.octicon-code:before { content: '\f05f'} /*  */
+.octicon-color-mode:before { content: '\f065'} /*  */
+.octicon-comment-add:before,
+.octicon-comment:before { content: '\f02b'} /*  */
+.octicon-comment-discussion:before { content: '\f04f'} /*  */
+.octicon-credit-card:before { content: '\f045'} /*  */
+.octicon-dash:before { content: '\f0ca'} /*  */
+.octicon-dashboard:before { content: '\f07d'} /*  */
+.octicon-database:before { content: '\f096'} /*  */
+.octicon-device-camera:before { content: '\f056'} /*  */
+.octicon-device-camera-video:before { content: '\f057'} /*  */
+.octicon-device-desktop:before { content: '\f27c'} /*  */
+.octicon-device-mobile:before { content: '\f038'} /*  */
+.octicon-diff:before { content: '\f04d'} /*  */
+.octicon-diff-added:before { content: '\f06b'} /*  */
+.octicon-diff-ignored:before { content: '\f099'} /*  */
+.octicon-diff-modified:before { content: '\f06d'} /*  */
+.octicon-diff-removed:before { content: '\f06c'} /*  */
+.octicon-diff-renamed:before { content: '\f06e'} /*  */
+.octicon-ellipsis:before { content: '\f09a'} /*  */
+.octicon-eye-unwatch:before,
+.octicon-eye-watch:before,
+.octicon-eye:before { content: '\f04e'} /*  */
+.octicon-file-binary:before { content: '\f094'} /*  */
+.octicon-file-code:before { content: '\f010'} /*  */
+.octicon-file-directory:before { content: '\f016'} /*  */
+.octicon-file-media:before { content: '\f012'} /*  */
+.octicon-file-pdf:before { content: '\f014'} /*  */
+.octicon-file-submodule:before { content: '\f017'} /*  */
+.octicon-file-symlink-directory:before { content: '\f0b1'} /*  */
+.octicon-file-symlink-file:before { content: '\f0b0'} /*  */
+.octicon-file-text:before { content: '\f011'} /*  */
+.octicon-file-zip:before { content: '\f013'} /*  */
+.octicon-flame:before { content: '\f0d2'} /*  */
+.octicon-fold:before { content: '\f0cc'} /*  */
+.octicon-gear:before { content: '\f02f'} /*  */
+.octicon-gift:before { content: '\f042'} /*  */
+.octicon-gist:before { content: '\f00e'} /*  */
+.octicon-gist-secret:before { content: '\f08c'} /*  */
+.octicon-git-branch-create:before,
+.octicon-git-branch-delete:before,
+.octicon-git-branch:before { content: '\f020'} /*  */
+.octicon-git-commit:before { content: '\f01f'} /*  */
+.octicon-git-compare:before { content: '\f0ac'} /*  */
+.octicon-git-merge:before { content: '\f023'} /*  */
+.octicon-git-pull-request-abandoned:before,
+.octicon-git-pull-request:before { content: '\f009'} /*  */
+.octicon-globe:before { content: '\f0b6'} /*  */
+.octicon-graph:before { content: '\f043'} /*  */
+.octicon-heart:before { content: '\2665'} /* ♥ */
+.octicon-history:before { content: '\f07e'} /*  */
+.octicon-home:before { content: '\f08d'} /*  */
+.octicon-horizontal-rule:before { content: '\f070'} /*  */
+.octicon-hourglass:before { content: '\f09e'} /*  */
+.octicon-hubot:before { content: '\f09d'} /*  */
+.octicon-inbox:before { content: '\f0cf'} /*  */
+.octicon-info:before { content: '\f059'} /*  */
+.octicon-issue-closed:before { content: '\f028'} /*  */
+.octicon-issue-opened:before { content: '\f026'} /*  */
+.octicon-issue-reopened:before { content: '\f027'} /*  */
+.octicon-jersey:before { content: '\f019'} /*  */
+.octicon-jump-down:before { content: '\f072'} /*  */
+.octicon-jump-left:before { content: '\f0a5'} /*  */
+.octicon-jump-right:before { content: '\f0a6'} /*  */
+.octicon-jump-up:before { content: '\f073'} /*  */
+.octicon-key:before { content: '\f049'} /*  */
+.octicon-keyboard:before { content: '\f00d'} /*  */
+.octicon-law:before { content: '\f0d8'} /* */
+.octicon-light-bulb:before { content: '\f000'} /*  */
+.octicon-link:before { content: '\f05c'} /*  */
+.octicon-link-external:before { content: '\f07f'} /*  */
+.octicon-list-ordered:before { content: '\f062'} /*  */
+.octicon-list-unordered:before { content: '\f061'} /*  */
+.octicon-location:before { content: '\f060'} /*  */
+.octicon-gist-private:before,
+.octicon-mirror-private:before,
+.octicon-git-fork-private:before,
+.octicon-lock:before { content: '\f06a'} /*  */
+.octicon-logo-github:before { content: '\f092'} /*  */
+.octicon-mail:before { content: '\f03b'} /*  */
+.octicon-mail-read:before { content: '\f03c'} /*  */
+.octicon-mail-reply:before { content: '\f051'} /*  */
+.octicon-mark-github:before { content: '\f00a'} /*  */
+.octicon-markdown:before { content: '\f0c9'} /*  */
+.octicon-megaphone:before { content: '\f077'} /*  */
+.octicon-mention:before { content: '\f0be'} /*  */
+.octicon-microscope:before { content: '\f089'} /*  */
+.octicon-milestone:before { content: '\f075'} /*  */
+.octicon-mirror-public:before,
+.octicon-mirror:before { content: '\f024'} /*  */
+.octicon-mortar-board:before { content: '\f0d7'} /* */
+.octicon-move-down:before { content: '\f0a8'} /*  */
+.octicon-move-left:before { content: '\f074'} /*  */
+.octicon-move-right:before { content: '\f0a9'} /*  */
+.octicon-move-up:before { content: '\f0a7'} /*  */
+.octicon-mute:before { content: '\f080'} /*  */
+.octicon-no-newline:before { content: '\f09c'} /*  */
+.octicon-octoface:before { content: '\f008'} /*  */
+.octicon-organization:before { content: '\f037'} /*  */
+.octicon-package:before { content: '\f0c4'} /*  */
+.octicon-paintcan:before { content: '\f0d1'} /*  */
+.octicon-pencil:before { content: '\f058'} /*  */
+.octicon-person-add:before,
+.octicon-person-follow:before,
+.octicon-person:before { content: '\f018'} /*  */
+.octicon-pin:before { content: '\f041'} /*  */
+.octicon-playback-fast-forward:before { content: '\f0bd'} /*  */
+.octicon-playback-pause:before { content: '\f0bb'} /*  */
+.octicon-playback-play:before { content: '\f0bf'} /*  */
+.octicon-playback-rewind:before { content: '\f0bc'} /*  */
+.octicon-plug:before { content: '\f0d4'} /*  */
+.octicon-repo-create:before,
+.octicon-gist-new:before,
+.octicon-file-directory-create:before,
+.octicon-file-add:before,
+.octicon-plus:before { content: '\f05d'} /*  */
+.octicon-podium:before { content: '\f0af'} /*  */
+.octicon-primitive-dot:before { content: '\f052'} /*  */
+.octicon-primitive-square:before { content: '\f053'} /*  */
+.octicon-pulse:before { content: '\f085'} /*  */
+.octicon-puzzle:before { content: '\f0c0'} /*  */
+.octicon-question:before { content: '\f02c'} /*  */
+.octicon-quote:before { content: '\f063'} /*  */
+.octicon-radio-tower:before { content: '\f030'} /*  */
+.octicon-repo-delete:before,
+.octicon-repo:before { content: '\f001'} /*  */
+.octicon-repo-clone:before { content: '\f04c'} /*  */
+.octicon-repo-force-push:before { content: '\f04a'} /*  */
+.octicon-gist-fork:before,
+.octicon-repo-forked:before { content: '\f002'} /*  */
+.octicon-repo-pull:before { content: '\f006'} /*  */
+.octicon-repo-push:before { content: '\f005'} /*  */
+.octicon-rocket:before { content: '\f033'} /*  */
+.octicon-rss:before { content: '\f034'} /*  */
+.octicon-ruby:before { content: '\f047'} /*  */
+.octicon-screen-full:before { content: '\f066'} /*  */
+.octicon-screen-normal:before { content: '\f067'} /*  */
+.octicon-search-save:before,
+.octicon-search:before { content: '\f02e'} /*  */
+.octicon-server:before { content: '\f097'} /*  */
+.octicon-settings:before { content: '\f07c'} /*  */
+.octicon-log-in:before,
+.octicon-sign-in:before { content: '\f036'} /*  */
+.octicon-log-out:before,
+.octicon-sign-out:before { content: '\f032'} /*  */
+.octicon-split:before { content: '\f0c6'} /*  */
+.octicon-squirrel:before { content: '\f0b2'} /*  */
+.octicon-star-add:before,
+.octicon-star-delete:before,
+.octicon-star:before { content: '\f02a'} /*  */
+.octicon-steps:before { content: '\f0c7'} /*  */
+.octicon-stop:before { content: '\f08f'} /*  */
+.octicon-repo-sync:before,
+.octicon-sync:before { content: '\f087'} /*  */
+.octicon-tag-remove:before,
+.octicon-tag-add:before,
+.octicon-tag:before { content: '\f015'} /*  */
+.octicon-telescope:before { content: '\f088'} /*  */
+.octicon-terminal:before { content: '\f0c8'} /*  */
+.octicon-three-bars:before { content: '\f05e'} /*  */
+.octicon-tools:before { content: '\f031'} /*  */
+.octicon-trashcan:before { content: '\f0d0'} /*  */
+.octicon-triangle-down:before { content: '\f05b'} /*  */
+.octicon-triangle-left:before { content: '\f044'} /*  */
+.octicon-triangle-right:before { content: '\f05a'} /*  */
+.octicon-triangle-up:before { content: '\f0aa'} /*  */
+.octicon-unfold:before { content: '\f039'} /*  */
+.octicon-unmute:before { content: '\f0ba'} /*  */
+.octicon-versions:before { content: '\f064'} /*  */
+.octicon-remove-close:before,
+.octicon-x:before { content: '\f081'} /*  */
+.octicon-zap:before { content: '\26A1'} /* ⚡ */
diff --git a/spec/public/octicons/octicons.eot b/spec/public/octicons/octicons.eot
new file mode 100644
index 0000000000..22881a8b6c
--- /dev/null
+++ b/spec/public/octicons/octicons.eot
Binary files differ
diff --git a/spec/public/octicons/octicons.svg b/spec/public/octicons/octicons.svg
new file mode 100644
index 0000000000..ea3e0f1615
--- /dev/null
+++ b/spec/public/octicons/octicons.svg
@@ -0,0 +1,198 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<svg xmlns="http://www.w3.org/2000/svg">
+<metadata>
+(c) 2012-2014 GitHub
+
+When using the GitHub logos, be sure to follow the GitHub logo guidelines (https://github.com/logos)
+
+Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL)
+Applies to all font files
+
+Code License: MIT (http://choosealicense.com/licenses/mit/)
+Applies to all other files
+</metadata>
+<defs>
+<font id="octicons" horiz-adv-x="1024" >
+<font-face font-family="octicons" font-weight="400" font-stretch="normal" units-per-em="1024" ascent="832" descent="-192" />
+<missing-glyph d="M512 832C229.25 832 0 602.75 0 320c0-226.25 146.688-418.125 350.156-485.812 25.594-4.688 34.938 11.125 34.938 24.625 0 12.188-0.469 52.562-0.719 95.312C242-76.81200000000001 211.906 14.5 211.906 14.5c-23.312 59.125-56.844 74.875-56.844 74.875-46.531 31.75 3.53 31.125 3.53 31.125 51.406-3.562 78.47-52.75 78.47-52.75 45.688-78.25 119.875-55.625 149-42.5 4.654 33 17.904 55.625 32.5 68.375C304.906 106.56200000000001 185.344 150.5 185.344 346.688c0 55.938 19.969 101.562 52.656 137.406-5.219 13-22.844 65.094 5.062 135.562 0 0 42.938 13.75 140.812-52.5 40.812 11.406 84.594 17.031 128.125 17.219 43.5-0.188 87.312-5.875 128.188-17.281 97.688 66.312 140.688 52.5 140.688 52.5 28-70.531 10.375-122.562 5.125-135.5 32.812-35.844 52.625-81.469 52.625-137.406 0-196.688-119.75-240-233.812-252.688 18.438-15.875 34.75-47 34.75-94.75 0-68.438-0.688-123.625-0.688-140.5 0-13.625 9.312-29.562 35.25-24.562C877.438-98 1024 93.875 1024 320 1024 602.75 794.75 832 512 832z" horiz-adv-x="1024" />
+<glyph glyph-name="alert" unicode="&#xf02d;" d="M1005.854 31.753000000000043l-438.286 767C556.173 818.694 534.967 831 512 831s-44.173-12.306-55.567-32.247l-438.286-767c-11.319-19.809-11.238-44.144 0.213-63.876C29.811-51.85500000000002 50.899-64 73.714-64h876.572c22.814 0 43.903 12.145 55.354 31.877S1017.173 11.94399999999996 1005.854 31.753000000000043zM576 64H448V192h128V64zM576 256H448V512h128V256z" horiz-adv-x="1024" />
+<glyph glyph-name="alignment-align" unicode="&#xf08a;" d="M192 768C85.938 768 0 682.062 0 576s85.938-192 192-192c106.062 0 192 85.938 192 192S298.062 768 192 768zM672 224l160 160H384v-448l160 160 288-288 128 128L672 224z" horiz-adv-x="960" />
+<glyph glyph-name="alignment-aligned-to" unicode="&#xf08e;" d="M384 256l128 128 288-288 160 160v-448H512l160 160L384 256zM192 384C85.938 384 0 469.938 0 576S85.938 768 192 768c106.062 0 192-85.938 192-192S298.062 384 192 384z" horiz-adv-x="960" />
+<glyph glyph-name="alignment-unalign" unicode="&#xf08b;" d="M512 640L384 512 128 768 0 640l256-256L128 256l64-64 384 384L512 640zM640 256l128 128-64 64L320 64l64-64 128 128 256-256 128 128L640 256z" horiz-adv-x="896" />
+<glyph glyph-name="arrow-down" unicode="&#xf03f;" d="M448 384V640H192v-256H0l320-384 320 384H448z" horiz-adv-x="640" />
+<glyph glyph-name="arrow-left" unicode="&#xf040;" d="M384 448V640L0 320l384-320V192h256V448H384z" horiz-adv-x="640" />
+<glyph glyph-name="arrow-right" unicode="&#xf03e;" d="M640 320L256 640v-192H0v-256h256v-192L640 320z" horiz-adv-x="640" />
+<glyph glyph-name="arrow-small-down" unicode="&#xf0a0;" d="M256 384V512H128v-128H0l192-256 192 256H256z" horiz-adv-x="384" />
+<glyph glyph-name="arrow-small-left" unicode="&#xf0a1;" d="M256 384V512L0 320l256-192V256h128V384H256z" horiz-adv-x="384" />
+<glyph glyph-name="arrow-small-right" unicode="&#xf071;" d="M384 320L128 512v-128H0v-128h128v-128L384 320z" horiz-adv-x="384" />
+<glyph glyph-name="arrow-small-up" unicode="&#xf09f;" d="M192 512L0 256h128v-128h128V256h128L192 512z" horiz-adv-x="384" />
+<glyph glyph-name="arrow-up" unicode="&#xf03d;" d="M320 640L0 256h192v-256h256V256h192L320 640z" horiz-adv-x="640" />
+<glyph glyph-name="beer" unicode="&#xf069;" d="M896 576c-31 0-192 0-192 0v128c0 71-158 128-352 128s-352-57-352-128v-768c0-71 158-128 352-128s352 57 352 128v128s160 0 192 0 64 30 64 64 0 350 0 384-29 64-64 64z m-704-576h-64v512h64v-512z m192-64h-64v512h64v-512z m192 64h-64v512h64v-512z m-224 640c-124 0-224 29-224 64s100 64 224 64 224-29 224-64-100-64-224-64z m480-448h-128v256h128v-256z" horiz-adv-x="1024" />
+<glyph glyph-name="book" unicode="&#xf007;" d="M768 256h-128c-34 0-64-32-64-64h256c0 34-32 64-64 64z m-55 416c-167 0-209-32-233-56-24 24-66 56-233 56s-247-46-247-78v-586c29 16 119 48 214 56 115 9 234-9 234-32 0-16 8-31 31-32 0 0 0 0 1 0 0 0 0 0 1 0 23 1 31 16 31 32 0 23 119 41 234 32 94-7 185-40 214-56v586c0 32-80 78-247 78z m-265-572c-30 16-103 28-192 28s-170-12-192-27c0 0 0 411 0 443s64 59 192 59 192-27 192-59 0-444 0-444z m448 1c-22 15-103 27-192 27s-162-12-192-28c0 0 0 412 0 444s64 59 192 59 192-27 192-59 0-443 0-443z m-128 283h-128c-34 0-64-32-64-64h256c0 34-32 64-64 64z m0 128h-128c-34 0-64-32-64-64h256c0 34-32 64-64 64z m-448-128h-128c-32 0-64-30-64-64h256c0 32-30 64-64 64z m0-128h-128c-32 0-64-30-64-64h256c0 32-30 64-64 64z m0 256h-128c-32 0-64-30-64-64h256c0 32-30 64-64 64z" horiz-adv-x="1024" />
+<glyph glyph-name="bookmark" unicode="&#xf07b;" d="M0 704v-768l192 128 192-128V704H0zM316.25 507.25l-71.875-51.938 27.188-83.406c2.75-8.375-0.688-11.062-7.562-6.594l-72 52.094-72-52.031c-6.844-4.469-10.312-1.781-7.562 6.594l27.219 83.406L67.783 507.25c-6.469 5.125-5 9.219 3.906 9.219l88 0.125 27.125 83.094c2.812 8.812 7.562 8.812 10.375 0l27.188-83.094 87.938-0.125C321.25 516.469 322.688 512.375 316.25 507.25z" horiz-adv-x="384" />
+<glyph glyph-name="briefcase" unicode="&#xf0d3;" d="M896 640H640v66c0 34.2-27.8 62-62 62H446c-34.2 0-62-27.8-62-62v-66H128c-35.3 0-64-28.7-64-64v-512c0-35.3 28.7-64 64-64h768c35.3 0 64 28.7 64 64V576C960 611.3 931.3 640 896 640zM448 688c0 8.8 7.2 16 16 16h96c8.8 0 16-7.2 16-16v-48H448V688zM896 320H576v-64H448v64H128V576h64v-192h640V576h64V320z" horiz-adv-x="1024" />
+<glyph glyph-name="broadcast" unicode="&#xf048;" d="M448 640c142 0 256-115 256-256 0-69-28-132-72-178l-16-93c91 56 152 156 152 271 0 177-143 320-320 320s-320-143-320-320c0-115 61-215 152-271l-16 93c-45 46-72 109-72 178 0 142 114 256 256 256z m-64-320c-36 0-64-29-64-64v-128c0-36 30-64 64-64v-256h128v256c34 0 64 28 64 64v128c0 35-28 64-64 64s-64 0-64 0-28 0-64 0z m192 128c0 71-57 128-128 128s-128-57-128-128 57-128 128-128 128 57 128 128z m-128 384c-247 0-448-201-448-448 0-197 128-363 305-423l-12 72c-135 60-229 194-229 351 0 212 172 384 384 384s384-172 384-384c0-157-94-291-229-351l-12-72c177 60 305 225 305 423 0 247-201 448-448 448z" horiz-adv-x="896" />
+<glyph glyph-name="browser" unicode="&#xf0c5;" d="M320 640h64v-64h-64V640zM192 640h64v-64h-64V640zM64 640h64v-64H64V640zM832 0H64V512h768V0zM832 576H448v64h384V576zM896 640c0 35.35-28.65 64-64 64H64c-35.35 0-64-28.65-64-64v-640c0-35.35 28.65-64 64-64h768c35.35 0 64 28.65 64 64V640z" horiz-adv-x="896" />
+<glyph glyph-name="bug" unicode="&#xf091;" d="M243.621 675.469C190.747 618.688 205.34 528 205.34 528s53.968-64 160-64c106.031 0 160.031 64 160.031 64s14.375 89.469-37.375 146.312c32.375 18.031 51.438 44.094 43.562 61.812-8.938 19.969-48.375 21.75-88.25 3.969-14.812-6.594-27.438-14.969-37.25-23.875-12.438 2.25-25.625 3.781-40.72 3.781-14.061 0-26.561-1.344-38.344-3.25-9.656 8.75-22.062 16.875-36.531 23.344-39.875 17.719-79.375 15.938-88.25-3.969C194.465 718.781 212.497 693.438 243.621 675.469zM644.746 262.25c-8.25 1.75-16.125 2.75-23.75 3.5 0 2.125 0.375 4.125 0.375 6.312 0 33.594-4.75 65.654-12.438 96.125 16.438-1.406 37.375 2.375 58.562 11.779 39.875 17.781 65 48.375 56.125 68.219-8.875 19.969-48.375 21.75-88.25 3.969-18.625-8.312-33.812-19.469-44-30.906-7.75 18.25-16.5 35.781-26.812 51.719-30.188-25.156-87.312-62.719-167.062-71.062v-321.781c0 0-0.25-32-32.031-32-31.75 0-32 32-32 32V401.781c-79.811 8.344-136.968 45.969-167.093 71.062-9.875-15.312-18.375-32-25.938-49.344-10.281 10.625-24.625 20.844-41.969 28.594-39.875 17.719-79.375 15.938-88.25-3.969-8.906-19.906 16.25-50.438 56.125-68.219 19.844-8.846 39.531-12.812 55.469-12.096-7.656-30.404-12.469-62.344-12.469-95.812 0-2.188 0.375-4.25 0.438-6.5-6.719-0.75-13.688-1.75-20.781-3.25-51.969-10.75-91.781-37.625-88.844-59.812 2.938-22.312 47.5-31.5 99.594-20.688 6.781 1.375 13.438 3.125 19.781 5.062C128.684 146 143.34 108.125 163.622 75.5c-12.031-6.062-24.531-15-36.031-26.625C95.715 17 82.779-21.75 98.715-37.68799999999999c15.938-15.937 54.656-3 86.531 28.812 9.344 9.375 16.844 19.25 22.656 29C251.434-22.5 305.965-48 365.465-48c60.343 0 115.781 26.25 159.531 69.938 5.875-10.312 13.75-20.812 23.625-30.688 31.812-31.875 70.625-44.812 86.562-28.875s3 54.625-28.875 86.5c-12.312 12.375-25.688 21.75-38.438 27.938 20.125 32.5 34.625 70.375 43.688 111.062 7.188-2.25 14.688-4.375 22.562-6.062 52.061-10.812 96.625-1.562 99.625 20.688C736.558 224.625 696.746 251.5 644.746 262.25z" horiz-adv-x="733.886" />
+<glyph glyph-name="calendar" unicode="&#xf068;" d="M704 320h-64v-128h64V320zM576 320h-64v-128h64V320zM704 512h-64v-128h64V512zM832 320h-64v-128h64V320zM576 128h-64v-128h64V128zM768 832h-64v-128h64V832zM256 832h-64v-128h64V832zM832 512h-64v-128h64V512zM576 512h-64v-128h64V512zM320 128h-64v-128h64V128zM192 320h-64v-128h64V320zM320 320h-64v-128h64V320zM832 768v-128H640V768H320v-128H128V768H0v-896h960V768H832zM896-64H64V576h832V-64zM192 128h-64v-128h64V128zM448 512h-64v-128h64V512zM448 128h-64v-128h64V128zM320 512h-64v-128h64V512zM448 320h-64v-128h64V320zM704 128h-64v-128h64V128z" horiz-adv-x="1024" />
+<glyph glyph-name="check" unicode="&#xf03a;" d="M640 640L256 256 128 384 0 256l256-256 512 512L640 640z" horiz-adv-x="768" />
+<glyph glyph-name="checklist" unicode="&#xf076;" d="M760.688 315.78099999999995l-49.812 49.656c-6.438 6.529-16.938 6.594-23.375 0L582.5 260.5 462.375 140.125l-93.031 93.125c-6.531 6.562-17.031 6.562-23.5 0l-49.719-49.688c-6.531-6.562-6.531-17.062 0-23.562l104.781-104.875 17.969-17.875 31.688-31.812c6.562-6.562 17.188-6.562 23.562 0l49.625 49.688L760.625 292.22C767.25 298.688 767.25 309.188 760.688 315.78099999999995zM228.469 251.188L278.156 301c42.469 42.375 116.344 42.438 158.781-0.062l25.312-25.312L576 384V704H0v-704h320l-91.531 92.125C184.688 136.062 184.688 207.375 228.469 251.188zM192 640h320v-64H192V640zM192 512h320v-64H192V512zM128 320H64v64h64V320zM128 448H64v64h64V448zM128 576H64v64h64V576zM192 384h64v-64h-64V384z" horiz-adv-x="765.602" />
+<glyph glyph-name="chevron-down" unicode="&#xf0a3;" d="M512 512L320 320 128 512 0 384l320-320 320 320L512 512z" horiz-adv-x="640" />
+<glyph glyph-name="chevron-left" unicode="&#xf0a4;" d="M448 512L320 640 0 320l320-320 128 128L256 320 448 512z" horiz-adv-x="448" />
+<glyph glyph-name="chevron-right" unicode="&#xf078;" d="M128 640L0 512l192-192L0 128l128-128 320 320L128 640z" horiz-adv-x="448" />
+<glyph glyph-name="chevron-up" unicode="&#xf0a2;" d="M320 576L0 256l128-128 192 192 192-192 128 128L320 576z" horiz-adv-x="640" />
+<glyph glyph-name="circle-slash" unicode="&#xf084;" d="M320 640C143.219 640 0 496.781 0 320c0-176.75 143.219-320 320-320 176.75 0 320 143.25 320 320C640 496.781 496.75 640 320 640zM320 512c27.656 0 53.688-6.094 77.438-16.562L144.562 242.562C134.094 266.312 128 292.34400000000005 128 320 128 426 213.938 512 320 512zM320 128c-28.031 0-54.531 6.375-78.594 17.125l253.906 252.5C505.875 373.812 512 347.719 512 320 512 213.938 426.062 128 320 128z" horiz-adv-x="640" />
+<glyph glyph-name="circuit-board" unicode="&#xf0d6;" d="M320 576c35.346 0 64-28.654 64-64 0-35.346-28.654-64-64-64s-64 28.654-64 64C256 547.346 284.654 576 320 576zM960 64c0-106.039-85.961-192-192-192H320l192 192h81.128c22.132-38.258 63.494-64 110.872-64 70.692 0 128 57.308 128 128s-57.308 128-128 128c-47.377 0-88.74-25.742-110.872-64H448L156.044-99.95600000000002C100.845-66.23199999999997 64-5.419999999999959 64 64V576c0 106.039 85.961 192 192 192v-145.128C217.742 600.74 192 559.377 192 512c0-70.692 57.308-128 128-128 47.276 0 88.56 25.633 110.727 63.756l162.416 0.219C615.279 409.731 656.633 384 704 384c70.692 0 128 57.308 128 128s-57.308 128-128 128c-47.388 0-88.758-25.753-110.887-64.025l-162.097-0.219c-11.246 19.54-27.503 35.828-47.016 47.116V768h384c106.039 0 192-85.961 192-192V64zM640 128c0 35.346 28.654 64 64 64s64-28.654 64-64c0-35.346-28.654-64-64-64S640 92.654 640 128zM640 512c0 35.346 28.654 64 64 64s64-28.654 64-64c0-35.346-28.654-64-64-64S640 476.654 640 512z" horiz-adv-x="1024" />
+<glyph glyph-name="clippy" unicode="&#xf035;" d="M704-64h-640v576h640v-192h64v320c0 35-29 64-64 64h-192c0 71-57 128-128 128s-128-57-128-128h-192c-35 0-64-29-64-64v-704c0-35 29-64 64-64h640c35 0 64 29 64 64v128h-64v-128z m-512 704c29 0 29 0 64 0s64 29 64 64 29 64 64 64 64-29 64-64 32-64 64-64 33 0 64 0 64-29 64-64h-512c0 39 28 64 64 64z m-64-512h128v64h-128v-64z m448 128v128l-256-192 256-192v128h320v128h-320z m-448-256h192v64h-192v-64z m320 448h-320v-64h320v64z m-192-128h-128v-64h128v64z" horiz-adv-x="896" />
+<glyph glyph-name="clock" unicode="&#xf046;" d="M384 256h256l64 64-64 64H512V576l-64 64-64-64V256zM448 768C200.562 768 0 567.438 0 320c0-247.438 200.562-448 448-448 247.438 0 448 200.562 448 448C896 567.438 695.438 768 448 768zM448 0c-176.25 0-320 143.75-320 320 0 175.938 144.188 319.5 320 320 175.812-0.5 320-144.062 320-320C768 143.75 624.25 0 448 0z" horiz-adv-x="896" />
+<glyph glyph-name="cloud-download" unicode="&#xf00b;" d="M832 512c-8.75 0-17.125-1.406-25.625-2.562C757.625 623.75 644.125 704 512 704c-132.156 0-245.562-80.25-294.406-194.562C209.156 510.594 200.781 512 192 512 85.938 512 0 426.062 0 320s85.938-192 192-192c20.531 0 39.875 4.25 58.375 10.375C284.469 100.625 331.312 75.25 384 67.5v65.25c-49.844 10.375-91.594 42.812-112.625 87.875C249.531 203 222.219 192 192 192c-70.656 0-128 57.375-128 128 0 70.656 57.344 128 128 128 25.281 0 48.625-7.562 68.406-20.094C281.344 548.219 385.594 640 512 640c126.5 0 229.75-92.219 250.5-212.75 20 13 43.875 20.75 69.5 20.75 70.625 0 128-57.344 128-128 0-70.625-57.375-128-128-128-10.25 0-20 1.5-29.625 3.75C773.438 154.875 725.938 128 672 128c-11.062 0-21.625 1.625-32 4v-64.938c10.438-1.688 21.062-3.062 32-3.062 61.188 0 116.5 24.625 156.938 64.438C830 128.375 830.875 128 832 128c106.062 0 192 85.938 192 192S938.062 512 832 512zM576 320H448v-320H320l192-192 192 192H576V320z" horiz-adv-x="1024" />
+<glyph glyph-name="cloud-upload" unicode="&#xf00c;" d="M512 448L320 256h128v-320h128V256h128L512 448zM832 512c-8.75 0-17.125-1.406-25.625-2.562C757.625 623.812 644.125 704 512 704c-132.156 0-245.562-80.188-294.406-194.562C209.156 510.594 200.781 512 192 512 85.938 512 0 426 0 320c0-106.062 85.938-192 192-192 20.531 0 39.875 4.25 58.375 10.438C284.469 100.625 331.312 75.25 384 67.5v65.25c-49.844 10.375-91.594 42.812-112.625 87.75C249.531 203 222.219 192 192 192c-70.656 0-128 57.375-128 128 0 70.656 57.344 128 128 128 25.281 0 48.625-7.562 68.406-20.156C281.344 548.219 385.594 640 512 640c126.5 0 229.75-92.219 250.5-212.75 20 13 43.875 20.75 69.5 20.75 70.625 0 128-57.344 128-128 0-70.625-57.375-128-128-128-10.25 0-20 1.5-29.625 3.75C773.438 154.875 725.938 128 672 128c-11.062 0-21.625 1.625-32 4v-64.938c10.438-1.688 21.062-3.062 32-3.062 61.188 0 116.5 24.688 157 64.438 1 0 1.875-0.438 3-0.438 106.062 0 192 85.938 192 192C1024 426 938.062 512 832 512z" horiz-adv-x="1024" />
+<glyph glyph-name="code" unicode="&#xf05f;" d="M608 640l-96-96 224-224L512 96l96-96 288 320L608 640zM288 640L0 320l288-320 96 96L160 320l224 224L288 640z" horiz-adv-x="896" />
+<glyph glyph-name="color-mode" unicode="&#xf065;" d="M0 704v-768h768V704H0zM64 0V640h640L64 0z" horiz-adv-x="768" />
+<glyph glyph-name="comment" unicode="&#xf02b;" d="M768 704H128C66 704 0 640 0 576v-384c0-128 128-128 128-128h64v-256l256 256c0 0 258 0 320 0s128 68 128 128V576C896 638 832 704 768 704z" horiz-adv-x="896" />
+<glyph glyph-name="comment-discussion" unicode="&#xf04f;" d="M256 320c0 64 0 192 0 192s-160 0-192 0-64-32-64-64 0-288 0-320 32-64 64-64 64 0 64 0v-192l194 192s162 0 192 0 62 32 62 64 0 64 0 64-128 0-192 0-128 64-128 128z m576 384c-32 0-416 0-448 0s-64-32-64-64 0-288 0-320 32-64 64-64 190 0 190 0l194-192v192s32 0 64 0 64 32 64 64 0 288 0 320-32 64-64 64z" horiz-adv-x="896" />
+<glyph glyph-name="credit-card" unicode="&#xf045;" d="M128 128h128v64h-128v-64z m192 0h128v64h-128v-64z m64 192h-256v-64h256v64z m-128 64h64l128 128h-64l-128-128z m192-128h192v64h-192v-64z m512 384c-32 0-864 0-896 0s-64-32-64-64 0-480 0-512 32-64 64-64 864 0 896 0 64 32 64 64 0 480 0 512-32 64-64 64z m0-256v-288s0-32-32-32h-832c-32 0-32 32-32 32v288h64l128 128h-192v32s0 32 32 32h832c32 0 32-32 32-32v-32h-384l-128-128h512z" horiz-adv-x="1024" />
+<glyph glyph-name="dash" unicode="&#xf0ca;" d="M0 384v-128h512V384H0z" horiz-adv-x="512" />
+<glyph glyph-name="dashboard" unicode="&#xf07d;" d="M416 367.5c-61.562 0-111.5-49.938-111.5-111.5S354.438 144.5 416 144.5 527.5 194.438 527.5 256c0 8.5-1.125 16.75-3 24.688C606.125 375.625 732.5 523.656 800 608c23.125 28.875-2.312 56.188-32 32-85.188-69.375-232.312-194.688-326.906-275.594C433.031 366.281 424.625 367.5 416 367.5zM447.875 576.125c0 17.656-14.344 32-32 32s-32-14.344-32-32 14.344-32 32-32S447.875 558.469 447.875 576.125zM639.875 320.125c0-17.656 14.375-32 32-32s32 14.344 32 32-14.375 32-32 32S639.875 337.781 639.875 320.125zM287.875 576.125c-17.656 0-32-14.344-32-32s14.344-32 32-32 32 14.344 32 32S305.531 576.125 287.875 576.125zM223.875 448.125c0 17.656-14.344 32-32 32s-32-14.344-32-32 14.344-32 32-32S223.875 430.469 223.875 448.125zM127.875 320.125c0-17.656 14.344-32 32-32s32 14.344 32 32-14.344 32-32 32S127.875 337.781 127.875 320.125zM575.875 544.125c0 17.656-14.375 32-32 32s-32-14.344-32-32 14.375-32 32-32S575.875 526.469 575.875 544.125zM792.875 495.312l-68.75-89.938C731.625 378.188 736 349.625 736 320c0-176.75-143.312-320-320-320S96 143.25 96 320c0 176.688 143.312 320 320 320 65.875 0 127-19.969 177.875-54.094l79.25 60.625C602.375 702.406 513.25 736 416 736 186.25 736 0 549.75 0 320s186.25-416 416-416 416 186.25 416 416C832 382.719 817.75 442 792.875 495.312z" horiz-adv-x="832" />
+<glyph glyph-name="database" unicode="&#xf096;" d="M384-128C171.969-128 0-70.625 0 0c0 38.625 0 80.875 0 128 0 11.125 5.562 21.688 13.562 32C56.375 104.875 205.25 64 384 64s327.625 40.875 370.438 96c8-10.312 13.562-20.875 13.562-32 0-37.062 0-76.375 0-128C768-70.625 596-128 384-128zM384 128C171.969 128 0 185.375 0 256c0 38.656 0 80.844 0 128 0 6.781 2.562 13.375 6 19.906l0 0C7.938 408 10.5 412.031 13.562 416 56.375 360.906 205.25 320 384 320s327.625 40.906 370.438 96c3.062-3.969 5.625-8 7.562-12.094l0 0c3.438-6.531 6-13.125 6-19.906 0-37.062 0-76.344 0-128C768 185.375 596 128 384 128zM384 384C171.969 384 0 441.344 0 512c0 20.219 0 41.594 0 64 0 20.344 0 41.469 0 64C0 710.656 171.969 768 384 768c212 0 384-57.344 384-128 0-19.969 0-41.156 0-64 0-19.594 0-40.25 0-64C768 441.344 596 384 384 384zM384 704c-141.375 0-256-28.594-256-64s114.625-64 256-64 256 28.594 256 64S525.375 704 384 704z" horiz-adv-x="768" />
+<glyph glyph-name="device-camera" unicode="&#xf056;" d="M512 447.999c-70.691 0-127.999-57.308-127.999-127.999S441.309 192.00099999999998 512 192.00099999999998c5.713 0 11.337 0.38 16.852 1.105-46.344 7.058-81.851 47.079-81.851 95.394 0 53.295 43.204 96.499 96.499 96.499 48.314 0 88.336-35.507 95.394-81.851 0.726 5.515 1.105 11.139 1.105 16.852C639.999 390.691 582.691 447.999 512 447.999zM896 576H767.999L640 704H384L255.999 576H128c-35.348 0-64-28.652-64-64v-448c0-35.347 28.652-64 64-64h768c35.347 0 64 28.653 64 64V512C960 547.348 931.347 576 896 576zM416 640h192l64-64H352L416 640zM160.143 64C142.391 64 128 78.39099999999996 128 96.14300000000003V384h64v64h-64v31.857C128 497.609 142.391 512 160.143 512h182.526c-3.98-3.518-7.881-7.174-11.688-10.98-99.974-99.975-99.974-262.064 0-362.039l74.98-74.98H160.143zM512 128.00099999999998c-106.038 0-191.999 85.961-191.999 191.999S405.962 511.999 512 511.999 703.999 426.038 703.999 320 618.038 128.00099999999998 512 128.00099999999998zM832 352L681.327 512H832V352z" horiz-adv-x="1024" />
+<glyph glyph-name="device-camera-video" unicode="&#xf057;" d="M576 640c-35.347 0-64-28.653-64-64s28.653-64 64-64 64 28.653 64 64S611.347 640 576 640zM896 448L768 320v64c0 30.625-21.515 56.21-50.25 62.503C748.958 480.646 768 526.097 768 575.998 768 682.038 682.039 768 576 768c-101.123 0-183.986-78.178-191.45-177.393C350.516 621.306 305.442 640 256 640c-106.038 0-192-85.962-192-192.002C64 341.961 149.962 256 256 256h-64v-128h64v-128c0-35.347 28.653-64 64-64h384c35.347 0 64 28.653 64 64v64l128-128h64V448H896zM256 512c-35.347 0-64-28.653-64-64s28.653-64 64-64v-64c-70.692 0-128 57.308-128 127.999C128 518.692 185.308 576 256 576s128-57.307 128-128h-64C320 483.347 291.347 512 256 512zM576 128H448V256h128V128zM704 237.21299999999997c-33.526 33.547-70.276 70.317-73.373 73.414C624.837 316.418 616.837 320 608 320H416c-17.674 0-32-14.326-32-32v-192c0-8.329 3.183-15.915 8.396-21.607 0.53-0.58 39.123-39.164 74.409-74.393H352c-17.674 0-32 14.326-32 32V352c0 17.674 14.326 32 32 32h320c17.674 0 32-14.326 32-32V237.21299999999997zM576 448c-70.692 0-128 57.308-128 127.999C448 646.692 505.308 704 576 704s128-57.308 128-128.001C704 505.308 646.692 448 576 448zM896 128l-64 64 0.082 128.084L896 384.002V128z" horiz-adv-x="1024" />
+<glyph glyph-name="device-desktop" unicode="&#xf27c;" d="M960 768c-32 0-864 0-896 0s-64-32-64-64 0-544 0-576 32-64 64-64 320 0 320 0-192-64-192-128c0-32 32-64 64-64s480 0 512 0 64 32 64 64c0 64-192 128-192 128s288 0 320 0 64 32 64 64 0 544 0 576-32 64-64 64z m0-640h-896v576h896v-576z m-64 512h-192c-384-64-542-300-576-384v-64h768v448z" horiz-adv-x="1024" />
+<glyph glyph-name="device-mobile" unicode="&#xf038;" d="M576 832H64C28.688 832 0 803.312 0 768v-896c0-35.375 28.688-64 64-64h512c35.375 0 64 28.625 64 64V768C640 803.312 611.375 832 576 832zM288 768h64c17.625 0 32-14.344 32-32s-14.375-32-32-32h-64c-17.656 0-32 14.344-32 32S270.344 768 288 768zM352-128h-64c-17.656 0-32 14.375-32 32s14.344 32 32 32h64c17.625 0 32-14.375 32-32S369.625-128 352-128zM576 0H64V640h512V0z" horiz-adv-x="640" />
+<glyph glyph-name="diff" unicode="&#xf04d;" d="M448 576H320v-128H192v-128h128v-128h128V320h128V448H448V576zM192-64h384V64H192V-64zM640 832H128v-64h480l224-224v-608h64V576L640 832zM0 704v-896h768V512L576 704H0zM704-128H64V640h480l160-160V-128z" horiz-adv-x="896" />
+<glyph glyph-name="diff-added" unicode="&#xf06b;" d="M512 512h-128v-128h-128v-128h128v-128h128v128h128v128h-128v128z m320 256c-32 0-736 0-768 0s-64-32-64-64 0-736 0-768 32-64 64-64 736 0 768 0 64 32 64 64 0 736 0 768-32 64-64 64z m-64-736c0-16-17-32-32-32s-558 0-576 0-32 12-32 32c0 16 0 560 0 576s16 32 32 32 561 0 576 0 32-16 32-32 0-560 0-576z" horiz-adv-x="896" />
+<glyph glyph-name="diff-ignored" unicode="&#xf099;" d="M832 768h-768c-32 0-64-32-64-64v-768c0-32 32-64 64-64h768c32 0 64 32 64 64v768c0 32-32 64-64 64z m-64-736c0-16-17-32-32-32h-576c-18 0-32 12-32 32v576c0 16 16 32 32 32h576c15 0 32-16 32-32v-576z m-512 194v-98h98l286 286v98h-98l-286-286z" horiz-adv-x="896" />
+<glyph glyph-name="diff-modified" unicode="&#xf06d;" d="M832 768h-768c-32 0-64-32-64-64v-768c0-32 32-64 64-64h768c32 0 64 32 64 64v768c0 32-32 64-64 64z m-64-736c0-16-17-32-32-32h-576c-18 0-32 12-32 32v576c0 16 16 32 32 32h576c15 0 32-16 32-32v-576z m-320 416c-71 0-128-57-128-128s57-128 128-128 128 57 128 128-57 128-128 128z" horiz-adv-x="896" />
+<glyph glyph-name="diff-removed" unicode="&#xf06c;" d="M832 768h-768c-32 0-64-32-64-64v-768c0-32 32-64 64-64h768c32 0 64 32 64 64v768c0 32-32 64-64 64z m-64-736c0-16-17-32-32-32h-576c-18 0-32 12-32 32v576c0 16 16 32 32 32h576c15 0 32-16 32-32v-576z m-512 224h384v128h-384v-128z" horiz-adv-x="896" />
+<glyph glyph-name="diff-renamed" unicode="&#xf06e;" d="M832 768h-768c-32 0-64-32-64-64v-768c0-32 32-64 64-64h768c32 0 64 32 64 64v768c0 32-32 64-64 64z m-64-736c0-16-17-32-32-32h-576c-18 0-32 12-32 32v576c0 16 16 32 32 32h576c15 0 32-16 32-32v-576z m-320 352h-192v-128h192v-128l256 192-256 192v-128z" horiz-adv-x="896" />
+<glyph glyph-name="ellipsis" unicode="&#xf09a;" d="M640 512c-64 0-448 0-512 0s-128-64-128-128 0-64 0-128 64-128 128-128 448 0 512 0 128 64 128 128 0 64 0 128-64 128-128 128z m-384-256h-128v128h128v-128z m192 0h-128v128h128v-128z m192 0h-128v128h128v-128z" horiz-adv-x="768" />
+<glyph glyph-name="eye" unicode="&#xf04e;" d="M512 704c-192 0-416-128-512-384 96-192 288-320 512-320s416 128 512 320c-96 256-320 384-512 384z m0-640c-192 0-352 128-384 256 32 128 192 256 384 256s352-128 384-256c-32-128-192-256-384-256z m0 448c-20 0-38-4-56-9 33-15 56-48 56-87 0-53-43-96-96-96-39 0-72 23-87 56-5-18-9-36-9-56 0-106 86-192 192-192s192 86 192 192-86 192-192 192z" horiz-adv-x="1024" />
+<glyph glyph-name="file-binary" unicode="&#xf094;" d="M0-128V768h576l192-192v-704H0zM704 512L512 704H64v-768h640V512zM320 320H128V576h192V320zM256 512h-64v-128h64V512zM256 64h64v-64H128v64h64V192h-64v64h128V64zM512 384h64v-64H384v64h64V512h-64v64h128V384zM576 0H384V256h192V0zM512 192h-64v-128h64V192z" horiz-adv-x="768" />
+<glyph glyph-name="file-code" unicode="&#xf010;" d="M288 448L128 288l160-160 64 64-96 96 96 96L288 448zM416 384l96-96-96-96 64-64 160 160L480 448 416 384zM576 768H0v-896h768V576L576 768zM704-64H64V704h448l192-192V-64z" horiz-adv-x="768" />
+<glyph glyph-name="file-directory" unicode="&#xf016;" d="M832 640c-32 0-336 0-352 0s-32 16-32 32 0 0 0 32-32 64-64 64-288 0-320 0-64-32-64-64 0-704 0-704h896s0 544 0 576-32 64-64 64z m-448 0h-320s0 15 0 32 16 32 32 32 241 0 256 0 32-15 32-32 0-32 0-32z" horiz-adv-x="896" />
+<glyph glyph-name="file-media" unicode="&#xf012;" d="M576 768H0v-896h768V576L576 768zM704-64H64V704h448l192-192V-64zM128 576v-512h128c0 70.625 57.344 128 128 128-70.656 0-128 57.375-128 128 0 70.656 57.344 128 128 128 70.625 0 128-57.344 128-128 0-70.625-57.375-128-128-128 70.625 0 128-57.375 128-128h128V448L512 576H128z" horiz-adv-x="768" />
+<glyph glyph-name="file-pdf" unicode="&#xf014;" d="M576 768H0v-896h768V576L576 768zM64 704h255.812c-13.188-4.094-27.281-15.031-34.625-42.875-13.25-49.406-7.031-130.75 15.625-209.344C276.688 370.562 178.188 175.125 171.531 163.5c-15.625-4.875-65.344-23.625-107.531-59.812V704zM347.125 396.531c57.625-149.781 95-149.531 135.188-167.594C398.344 216 334.219 206.75 249.781 169.5 246.094 163.062 326.281 315.40599999999995 347.125 396.531zM704-64H65.844 64v0.375c0.781-0.062 1.094-0.375 1.844-0.375 33.812 0 84.75 21 180.562 182.375 38.188 15.438 72.062 26.875 78.469 28.938 58.812 14.875 125 26.625 187.562 33.375C566.875 153.5 639.125 135 680.25 132.375c9.625-0.5 16.062 1.188 23.75 2V-64zM704 246.625c-23.688 14.688-54 25-89.125 25-24.25 0-50.625-1.375-78.688-4.375-26.938 13-92.562 32.719-147.188 190.219 17.094 103.625 12.719 173.562 12.719 173.562 6.781 52.938-23.344 72.844-51.625 72.844 0 0-0.279 0.125-0.344 0.125H512l192-192V246.625z" horiz-adv-x="768" />
+<glyph glyph-name="file-submodule" unicode="&#xf017;" d="M832 320c-32 0-192 0-192 0 0 32-32 64-64 64s-96 0-128 0-64-32-64-64 0-320 0-320h512s0 224 0 256-32 64-64 64z m-256-64h-128s0 17 0 32 15 32 32 32 48 0 64 0 32-15 32-32 0-32 0-32z m256 320c-32 0-336 0-352 0s-32 17-32 32 0 0 0 32-32 64-64 64-288 0-320 0-64-32-64-64 0-640 0-640h320s0 352 0 384 32 64 64 64 224 0 256 0 64-32 64-64h192s0 96 0 128-32 64-64 64z m-448 0h-320s0 16 0 32 16 32 32 32 240 0 256 0 32-17 32-32 0-32 0-32z" horiz-adv-x="896" />
+<glyph glyph-name="file-symlink-directory" unicode="&#xf0b1;" d="M832 640h-352c-16 0-32 16-32 32s0 0 0 32-32 64-64 64h-320c-32 0-64-32-64-64s0-704 0-704h896s0 544 0 576-32 64-64 64z m-768 32c0 17 16 32 32 32h256c15 0 32-15 32-32s0-32 0-32h-320s0 15 0 32z m384-544v128c-125 0-224-56-256-192 0 209 107 320 256 320 0 49 0 128 0 128l256-192-256-192z" horiz-adv-x="896" />
+<glyph glyph-name="file-symlink-file" unicode="&#xf0b0;" d="M576 768h-576v-896h768v704l-192 192z m128-832h-640v768h448l192-192v-576z m-320 448c-149 0-256-111-256-320 32 136 131 192 256 192v-128l256 192-256 192s0-79 0-128z" horiz-adv-x="768" />
+<glyph glyph-name="file-text" unicode="&#xf011;" d="M448 576H128v-64h320V576zM576 768H0v-896h768V576L576 768zM704-64H64V704h448l192-192V-64zM128 64h512v64H128V64zM128 192h512v64H128V192zM128 320h512v64H128V320z" horiz-adv-x="768" />
+<glyph glyph-name="file-zip" unicode="&#xf013;" d="M320 256v64h-64v-64H320zM320 384v64h-64v-64H320zM320 512v64h-64v-64H320zM192 448h64v64h-64V448zM576 768H0v-896h768V576L576 768zM704-64H64V704h192v-64h64v64h192l192-192V-64zM192 576h64v64h-64V576zM192 320h64v64h-64V320zM192 192l-64-64v-128h256V128l-64 64h-64v64h-64V192zM320 128v-64H192v64H320z" horiz-adv-x="768" />
+<glyph glyph-name="flame" unicode="&#xf0d2;" d="M433 787c50-134 24-207-32-265-61-64-156-112-223-206-89-125-104-400 217-472-135 71-164 277-18 406-38-125 32-205 119-176 85 29 141-32 139-102-1-48-20-89-69-112 209 37 293 210 293 342 0 174-155 198-77 344-93-8-125-69-116-169 6-66-63-111-114-81-41 25-40 73-4 109 77 76 107 251-115 382z" horiz-adv-x="1024" />
+<glyph glyph-name="fold" unicode="&#xf0cc;" d="M896 576H672l-64-64h192L672 384H224L96 512h192l-64 64H0v-63.999L160 352 0 192v-64h224l64 64H96l128 128h448l128-128H608l64-64h224v64L736 352l160 160.001V576zM640 640H512V832H384v-192H256l192-192L640 640zM256 64h128v-192h128V64h128L448 256 256 64z" horiz-adv-x="896" />
+<glyph glyph-name="gear" unicode="&#xf02f;" d="M447.938 482C358.531 482 286 409.469 286 320c0-89.375 72.531-162.062 161.938-162.062 89.438 0 161.438 72.688 161.438 162.062C609.375 409.469 537.375 482 447.938 482zM772.625 226.938l-29.188-70.312 52.062-102.25 6.875-13.5-72.188-72.188L611.75 24.625l-70.312-28.875L505.75-113.5l-4.562-14.5H399.156L355-4.687999999999988l-70.312 29-102.404-51.938-13.5-6.75-72.156 72.125 55.875 118.5-28.969 70.25L14.469 262.125 0 266.812V368.781L123.406 413l28.969 70.188-51.906 102.469-6.844 13.438 72.062 72.062 118.594-55.844 70.219 29.031 35.656 109.188L394.75 768h102l44.188-123.469 70.125-29.031L713.5 667.469l13.625 6.844 72.125-72.062-55.875-118.406L772.25 413.5l109.375-35.656L896 373.25v-101.938L772.625 226.938z" horiz-adv-x="896" />
+<glyph glyph-name="gift" unicode="&#xf042;" d="M448-128h320V192H448V-128zM64-128h320V192H64V-128zM447.75 455.812c31.469 3.5 66.875 7.406 87.375 9.719C619 474.875 694.5 550.406 703.812 634.25c9.312 83.75-51 144.125-134.688 134.719C503.688 761.656 443.844 714 416 653.625 388.156 714 328.312 761.656 262.906 769.031 179.188 778.375 118.781 718 128.188 634.25c9.344-83.844 84.875-159.312 168.656-168.719 20.531-2.312 55.938-6.281 87.406-9.719C383.75 451.594 384 448 384 448h64C448 448 448.25 451.594 447.75 455.812zM555.375 691.312c45.25 5.062 78-27.562 72.875-72.875-5-45.312-45.875-86.156-91.125-91.219-45.375-5.031-78 27.594-72.938 72.906C469.249 645.436 510.125 686.281 555.375 691.312zM294.906 527.219c-45.25 5.062-86.062 45.906-91.125 91.219-5.063 45.313 27.594 77.938 72.812 72.875 45.312-5.031 86.156-45.875 91.222-91.188C372.875 554.812 340.219 522.188 294.906 527.219zM448 448v-192h384V448H448zM0 256h384V448H0V256z" horiz-adv-x="896" />
+<glyph glyph-name="gist" unicode="&#xf00e;" d="M416 448l96-96-96-96 64-64 160 160-160 160-64-64z m-416 320v-832h768v832h-768z m704-768h-640v704h640v-704z m-352 256l-96 96 96 96-64 64-160-160 160-160 64 64z" horiz-adv-x="768" />
+<glyph glyph-name="gist-secret" unicode="&#xf08c;" d="M193 128l128-192h-256l-65 256 257 64-64-128z m448 128l64-128-128-192h256l64 256-256 64z m-84 0h-216l44-102-64-218h256l-64 218 44 102z m84 192h-384l-128-64h640l-128 64z m-64 256l-128-64-128 64-64-192h384l-64 192z" horiz-adv-x="896" />
+<glyph glyph-name="git-branch" unicode="&#xf020;" d="M512 640c-71 0-128-57-128-128 0-47 26-88 64-110v-18c0-64-64-128-128-128-53 0-95-11-128-29v303c38 22 64 63 64 110 0 71-57 128-128 128s-128-57-128-128c0-47 26-88 64-110v-419c-38-22-64-63-64-110 0-71 57-128 128-128s128 57 128 128c0 34-13 64-34 87 19 23 49 41 98 41 128 0 256 128 256 256v18c38 22 64 63 64 110 0 71-57 128-128 128z m-384 64c35 0 64-29 64-64s-29-64-64-64-64 29-64 64 29 64 64 64z m0-768c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z m384 512c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z" horiz-adv-x="640" />
+<glyph glyph-name="git-commit" unicode="&#xf01f;" d="M694.875 384C666.375 494.219 567.125 576 448 576c-119.094 0-218.375-81.781-246.906-192H0v-128h201.094C229.625 145.75 328.906 64 448 64c119.125 0 218.375 81.75 246.875 192H896V384H694.875zM448 192c-70.656 0-128 57.375-128 128 0 70.656 57.344 128 128 128 70.625 0 128-57.344 128-128C576 249.375 518.625 192 448 192z" horiz-adv-x="896" />
+<glyph glyph-name="git-compare" unicode="&#xf0ac;" d="M832 110s0 306 0 402-96 192-192 192c-64 0-64 0-64 0v128l-192-192 192-192v128s32 0 64 0 64-32 64-64 0-402 0-402c-38-22-64-63-64-110 0-71 57-128 128-128s128 57 128 128c0 47-26 88-64 110z m-64-174c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z m-448 128s-32 0-64 0-64 32-64 64 0 402 0 402c38 22 64 63 64 110 0 71-57 128-128 128s-128-57-128-128c0-47 26-88 64-110 0 0 0-306 0-402s96-192 192-192c64 0 64 0 64 0v-128l192 192-192 192v-128z m-192 512c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z" horiz-adv-x="896" />
+<glyph glyph-name="git-merge" unicode="&#xf023;" d="M640 384c-47.625 0-88.625-26.312-110.625-64.906C523.625 319.5 518 320 512 320c-131.062 0-255.438 99.844-300.812 223.438C238.469 566.906 256 601.281 256 640c0 70.656-57.344 128-128 128S0 710.656 0 640c0-47.219 25.844-88.062 64-110.281V110.25C25.844 88.06200000000001 0 47.25 0 0c0-70.625 57.344-128 128-128s128 57.375 128 128c0 47.25-25.844 88.062-64 110.25V340.531C276.156 251.5 392.375 192 512 192c6.375 0 11.625 0.438 17.375 0.625C551.5 154.188 592.5 128 640 128c70.625 0 128 57.375 128 128C768 326.656 710.625 384 640 384zM128-64c-35.312 0-64 28.625-64 64 0 35.312 28.688 64 64 64 35.406 0 64-28.688 64-64C192-35.375 163.406-64 128-64zM128 576c-35.312 0-64 28.594-64 64s28.688 64 64 64c35.406 0 64-28.594 64-64S163.406 576 128 576zM640 192c-35.312 0-64 28.625-64 64 0 35.406 28.688 64 64 64 35.375 0 64-28.594 64-64C704 220.625 675.375 192 640 192z" horiz-adv-x="768" />
+<glyph glyph-name="git-pull-request" unicode="&#xf009;" d="M704 110s0 306 0 402-96 192-192 192c-64 0-64 0-64 0v128l-192-192 192-192v128s32 0 64 0 64-32 64-64 0-402 0-402c-38-22-64-63-64-110 0-71 57-128 128-128s128 57 128 128c0 47-26 88-64 110z m-64-174c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z m-512 832c-71 0-128-57-128-128 0-47 26-88 64-110v-419c-38-22-64-63-64-110 0-71 57-128 128-128s128 57 128 128c0 47-26 88-64 110v419c38 22 64 63 64 110 0 71-57 128-128 128z m0-832c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z m0 640c-35 0-64 29-64 64s29 64 64 64 64-29 64-64-29-64-64-64z" horiz-adv-x="768" />
+<glyph glyph-name="globe" unicode="&#xf0b6;" d="M512 704c-212.077 0-384-171.923-384-384s171.923-384 384-384c25.953 0 51.303 2.582 75.812 7.49-9.879 4.725-10.957 40.174-1.188 60.385 10.875 22.5 45 79.5 11.25 98.625s-24.375 27.75-45 49.875-12.19 25.451-13.5 31.125c-4.5 19.5 19.875 48.75 21 51.75s1.125 14.25 0.75 17.625S545.75 265.25 542 265.625s-5.625-6-10.875-6.375-28.125 13.875-33 17.625-7.125 12.75-13.875 19.5-7.5 1.5-18 5.625-44.25 16.5-70.125 27-28.125 25.219-28.5 35.625-15.75 25.5-22.961 36.375c-7.209 10.875-8.539 25.875-11.164 22.5s13.5-42.75 10.875-43.875-8.25 10.875-15.75 20.625 7.875 4.5-16.125 51.75 7.5 71.344 9 96 20.25-9 10.5 6.75 0.75 48.75-6.75 60.75S275 602 275 602c1.125 11.625 37.5 31.5 63.75 49.875s42.281 4.125 63.375-2.625 22.5-4.5 15.375 2.25 3 10.125 19.5 7.5 21-22.5 46.125-20.625 2.625-4.875 6-11.25-3.75-5.625-20.25-16.875S469.25 599 498.5 577.625s20.25 14.25 17.25 30S537.125 611 537.125 611c18-12 14.674-0.66 27.799-4.785S613.625 572 613.625 572c-44.625-24.375-16.5-27-9-32.625s-15.375-16.5-15.375-16.5c-9.375 9.375-10.875-0.375-16.875-3.75s-0.375-12-0.375-12c-31.031-4.875-24-37.5-23.625-45.375s-19.875-19.875-25.125-31.125S536.75 395 527 393.5s-19.5 36.75-72 22.5c-15.828-4.297-51-22.5-32.25-59.625s49.875 10.5 60.375 5.25-3-28.875-0.75-29.25 29.625-1.031 31.125-33 41.625-29.25 50.25-30 37.5 23.625 41.625 24.75S626 309.125 662 288.5s54.375-17.625 66.75-26.25 3.75-25.875 15.375-31.5 58.125 1.875 69.75-17.25-48-115.125-66.75-125.625S719.75 53.375 701 38s-45-34.406-69.75-49.125c-21.908-13.027-25.85-36.365-35.609-43.732C767.496-16.67999999999995 896 136.64999999999998 896 320 896 532.077 724.077 704 512 704zM602 343.625c-5.25-1.5-16.125-11.25-42.75 4.5s-45 12.75-47.25 15.375c0 0-2.25 6.375 9.375 7.5 23.871 2.311 54-22.125 60.75-22.5s10.125 6.75 22.125 2.883C616.25 347.52 607.25 345.125 602 343.625zM476.375 665.75c-2.615 1.902 2.166 4.092 5.016 7.875 1.645 2.186 0.425 5.815 2.484 7.875 5.625 5.625 33.375 13.5 27.949-1.875C506.4 664.25 480.5 662.75 476.375 665.75zM543.5 617c-9.375 0.375-31.443 2.707-27.375 6.75 15.844 15.75-6 20.25-19.5 21.375S477.5 653.75 484.25 654.5s33.75-0.375 38.25-4.125 28.875-13.5 30.375-20.625S552.875 616.625 543.5 617zM624.875 619.625c-7.5-6-45.24 21.529-52.5 27.75-31.5 27-48.375 18-54.99 22.5-6.617 4.5-4.26 10.5 5.865 19.5s38.625-3 55.125-4.875 35.625-14.625 36-29.781C614.75 639.564 632.375 625.625 624.875 619.625z" horiz-adv-x="1024" />
+<glyph glyph-name="graph" unicode="&#xf043;" d="M704 576H512v-640h192V576zM960 384H768v-448h192V384zM64-128V0h64v64H64V192h64v64H64V384h64v64H64V576h64v64H64V768h64V832H0v-1024h1024v64H64zM448 256H256v-320h192V256z" horiz-adv-x="1024" />
+<glyph glyph-name="heart" unicode="&#x2665;" d="M384-32c399 314 384 425 384 512s-72 192-192 192-192-128-192-128-72 128-192 128-192-105-192-192-15-198 384-512z" horiz-adv-x="768.199" />
+<glyph glyph-name="history" unicode="&#xf07e;" d="M448 768c-90.938 0-175.312-27.531-245.938-74.062L128 768v-256h256l-88 88c45.438 24.688 96.688 40 152 40 176.75 0 320-143.219 320-320 0-176.75-143.25-320-320-320-176.781 0-320 143.25-320 320 0 45.562 9.781 88.781 27 128H64v99.406C24.312 480.5 0 403.406 0 320c0-247.438 200.562-448 448-448 247.438 0 448 200.562 448 448C896 567.438 695.438 768 448 768zM447.031 1L512 64V256h128l64 64-64 64H512l-64 64L320 320l64-64v-192L447.031 1z" horiz-adv-x="896" />
+<glyph glyph-name="home" unicode="&#xf08d;" d="M192 256l64-384h192V192h128v-320h192l64 384L512 576 192 256zM832 448V704H704l0.312-128.312L512 768 0 256h128l384 384 384-384h128L832 448z" horiz-adv-x="1024" />
+<glyph glyph-name="horizontal-rule" unicode="&#xf070;" d="M63.938 384h128v-128h64V639.938h-64V448h-128V639.938H0V256h63.938V384zM639.875 256V384h-63.938v-128H639.875zM639.875 448V575.938h-63.938V448H639.875zM447.938 448V575.938h128v64h-192V256h64V384h128v64H447.938zM0 0h639.875V128H0V0z" horiz-adv-x="639.875" />
+<glyph glyph-name="hourglass" unicode="&#xf09e;" d="M571 320c118 85 197 240 197 384 0 71-172 128-384 128s-384-57-384-128c0-144 80-299 197-384-118-85-197-240-197-384 0-71 172-128 384-128s384 57 384 128c0 144-80 299-197 384z m-187 448c141 0 256-29 256-64s-115-64-256-64-256 29-256 64 115 64 256 64z m-64-706c-154-7-238-40-253-82 16 114 75 189 141 251 73 68 112 60 112 103v-273z m-105 352c-70 55-122 130-142 215 70-32 183-53 311-53s241 21 311 53c-20-85-72-160-142-215-24 17-70 34-169 34s-145-17-169-34z m233-352v273c0-43 39-35 112-103 66-62 125-138 141-251-14 41-99 75-253 82z" horiz-adv-x="768" />
+<glyph glyph-name="hubot" unicode="&#xf09d;" d="M512 768c-283 0-512-229-512-512 0 0 0-192 0-256s64-128 128-128 704 0 768 0 128 64 128 128 0 256 0 256c0 283-229 512-512 512z m96-768h-192c-18 0-32 14-32 32s14 32 32 32h192c18 0 32-14 32-32s-14-32-32-32z m288 128c0-32-32-64-64-64s-128 0-128 0c0 32-32 64-64 64s-224 0-256 0-64-32-64-64c0 0-96 0-128 0s-64 32-64 64 0 360 0 360c78 129 220 216 384 216s306-87 384-216c0 0 0-328 0-360z m-128 384c-32 0-480 0-512 0s-64-32-64-64 0-96 0-128 32-64 64-64 480 0 512 0 64 32 64 64 0 96 0 128-32 64-64 64z m0-128l-64-64h-128l-64 64-64-64h-128l-64 64v64h64l64-64 64 64h128l64-64 64 64h64v-64z" horiz-adv-x="1024" />
+<glyph glyph-name="inbox" unicode="&#xf0cf;" d="M704 640H64L0 256v-256h768V256L704 640zM576 256l-64-128H256l-64 128H79l49 320h512l49-320H576z" horiz-adv-x="768" />
+<glyph glyph-name="info" unicode="&#xf059;" d="M448 448c35 0 64 29 64 64s-29 64-64 64-64-29-64-64 29-64 64-64z m0 320c-247 0-448-201-448-448s201-448 448-448 448 201 448 448-201 448-448 448z m0-768c-177 0-320 143-320 320s143 320 320 320 320-143 320-320-143-320-320-320z m64 320c0 32-32 64-64 64s-32 0-64 0-64-32-64-64h64s0-160 0-192 32-64 64-64 32 0 64 0 64 32 64 64h-64s0 160 0 192z" horiz-adv-x="896" />
+<glyph glyph-name="issue-closed" unicode="&#xf028;" d="M704 515.969l-96-96L768 256l256 256-96 96L769.25 449.219 704 515.969zM512 0c-176.781 0-320 143.25-320 320 0 176.781 143.219 320 320 320 88.375 0 168.375-35.844 226.25-93.75l90.562 90.5C747.75 717.875 635.75 768 512 768 264.562 768 64 567.438 64 320c0-247.438 200.562-448 448-448 247.438 0 448 200.562 448 448L759.75 119.75C768.688 130.75 684.75 0 512 0zM576 576H448v-320h128V576zM448 64h128V192H448V64z" horiz-adv-x="1024" />
+<glyph glyph-name="issue-opened" unicode="&#xf026;" d="M448 768C200.562 768 0 567.438 0 320c0-247.438 200.562-448 448-448 247.438 0 448 200.562 448 448C896 567.438 695.438 768 448 768zM448 0c-176.781 0-320 143.25-320 320 0 176.781 143.219 320 320 320 176.75 0 320-143.219 320-320C768 143.25 624.75 0 448 0zM384 64h128V192H384V64zM384 256h128V576H384V256z" horiz-adv-x="896" />
+<glyph glyph-name="issue-reopened" unicode="&#xf027;" d="M639.125 64.75C585.75 24.625 520 0 448 0c-176.781 0-320 143.25-320 320 0 45.562 9.781 88.781 27 128H64v99.469C24.312 480.562 0 403.406 0 320c0-247.438 200.562-448 448-448 107.375 0 204.5 39.312 281.75 102.25L768-64V128H576L639.125 64.75zM384 64h128V192H384V64zM512 576H384v-320h128V576zM896 320c0 247.438-200.562 448-448 448-107.406 0-204.531-39.312-281.656-102.344L128 704v-192h192l-63.156 63.156C310.281 615.312 376 640 448 640c176.75 0 320-143.219 320-320 0-45.562-9.75-88.75-27-128h91v-99.5C871.688 159.438 896 236.5 896 320z" horiz-adv-x="896" />
+<glyph glyph-name="jersey" unicode="&#xf019;" d="M704 832h-192c0-32-33-64-97-64s-95 32-95 64h-192c0-128-2-384-128-384 0 0-1-544-1-576s32-64 64-64 672 0 704 0 64 32 64 64 0 576 0 576c-126 0-128 256-128 384z m-609-960c-16 0-31 10-31 32 0 32 0 480 0 480 119 64 128 192 128 384h64c0-96 32-191 160-192s160 96 160 192h64c0-186 32-276 64-339v-557s-593 0-609 0z m385 576l-32-32v-320l32-32h128l32 32v320l-32 32h-128z m96-320h-64v256h64v-256z m-352 320l-32-32v-320l32-32h128l32 32v320l-32 32h-128z m96-320h-64v256h64v-256z" horiz-adv-x="896" />
+<glyph glyph-name="jump-down" unicode="&#xf072;" d="M767.75 640H0.25L384 256.25 767.75 640zM0 128v-128h768V128H0z" horiz-adv-x="768" />
+<glyph glyph-name="jump-left" unicode="&#xf0a5;" d="M256.25 320L640-63.75v767.5L256.25 320zM0-64h128V704H0V-64z" horiz-adv-x="640" />
+<glyph glyph-name="jump-right" unicode="&#xf0a6;" d="M0-63.75L383.75 320 0 703.812V-63.75zM512 704v-768h128V704H512z" horiz-adv-x="640" />
+<glyph glyph-name="jump-up" unicode="&#xf073;" d="M0.188 0h767.5L384 383.75 0.188 0zM0 640v-128h768V640H0z" horiz-adv-x="768" />
+<glyph glyph-name="key" unicode="&#xf049;" d="M640.9 768.1c-141.4 0-256-114.6-256-256 0-19.6 2.2-38.6 6.4-56.9L0 64v-64l64-64h128l64 64v64h64v64h64v64h128l70.8 70.8c18.7-4.3 38.1-6.6 58.1-6.6 141.4 0 256 114.6 256 256S782.2 768.1 640.9 768.1zM384 320L64 0v64l320 320V320zM704 512c-35.3 0-64 28.7-64 64 0 35.3 28.7 64 64 64s64-28.7 64-64C768 540.7 739.3 512 704 512z" horiz-adv-x="896.9" />
+<glyph glyph-name="keyboard" unicode="&#xf00d;" d="M640 256h64V384h-64V256zM768 576h-64v-128h64V576zM640 576h-64v-128h64V576zM512 256h64V384h-64V256zM384 64h320V192H384V64zM768 256h128V576h-64v-192h-64V256zM256 64h64V192h-64V64zM768 64h128V192H768V64zM512 576h-64v-128h64V576zM192 384h-64v-128h64V384zM192 192h-64v-128h64V192zM0 704v-768h1024V704H0zM960 0H64V640h896V0zM384 256h64V384h-64V256zM256 576H128v-128h128V576zM384 576h-64v-128h64V576zM256 256h64V384h-64V256z" horiz-adv-x="1024" />
+<glyph glyph-name="law" unicode="&#xf0d8;" d="M514 640c34 1 61 28 62 62 1 37-29 67-66 66-34-1-61-28-62-62-1-37 29-67 66-66z m464-384h-18l-127 246c18 2 36 9 52 16 24 11 29 43 11 62l-1 1c-11 11-28 15-43 8-14-6-34-13-53-13-56 0-81 64-287 64s-231-64-287-64c-20 0-39 6-53 13-15 6-32 3-43-8l-1-1c-18-19-13-50 11-62 16-8 34-14 52-16l-127-246h-18c-8 0-14-7-13-15 11-64 92-113 191-113s180 49 191 113c1 8-5 15-13 15h-18l-127 245c83 7 127 49 191 49v-486c-35 0-64-29-64-64h-71c-28 0-57-29-57-64h512c0 35-29 64-71 64h-57c0 35-29 64-64 64v486c64 0 108-42 191-49l-127-245h-18c-8 0-14-7-13-15 11-64 92-113 191-113s180 49 191 113c1 8-5 15-13 15z m-658 0h-192l96 180 96-180z m384 0l96 180 96-180h-192z" horiz-adv-x="1024" />
+<glyph glyph-name="light-bulb" unicode="&#xf000;" d="M512 768c-176.731 0-320-143.269-320-320 0-104.69 50.278-197.633 128-256.015V0c0-35.346 28.653-64 64-64 0-35.346 28.653-64 64-64h128c35.347 0 64 28.654 64 64 35.347 0 64 28.654 64 64V191.985C781.722 250.36699999999996 832 343.31 832 448 832 624.731 688.731 768 512 768zM640 32c0-17.673-14.326-32-32-32H416c-17.674 0-32 14.327-32 32v32h256V32zM704 278.693c-33.234-33.03-64-42.389-64-124.041V128h-64V256l128 128v64l-64 64-64-64-64 64-64-64-64 64-64-64v-64l128-128v-128h-64v26.652c0 81.652-30.766 91.011-64 124.041C280.177 323.82 256 383.082 256 448c0 141.385 114.615 256 256 256s256-114.615 256-256C768 383.082 743.823 323.82 704 278.693zM512 256L384 384v64l64-64 64 64 64-64 64 64v-64L512 256z" horiz-adv-x="1024" />
+<glyph glyph-name="link" unicode="&#xf05c;" d="M768 576h-138c48-32 93-89 107-128h30c65 0 128-64 128-128s-65-128-128-128h-192c-63 0-128 64-128 128 0 23 7 45 18 64h-137c-5-21-8-42-8-64 0-128 127-256 255-256s65 0 193 0 256 128 256 256-128 256-256 256z m-481-384h-30c-65 0-128 64-128 128s65 128 128 128h192c63 0 128-64 128-128 0-23-7-45-18-64h137c5 21 8 42 8 64 0 128-127 256-255 256s-65 0-193 0-256-128-256-256 128-256 256-256h138c-48 32-93 89-107 128z" horiz-adv-x="1024" />
+<glyph glyph-name="link-external" unicode="&#xf07f;" d="M640 64H128V574.094L256 576V704H0v-768h768V256H640V64zM384 704l128-128L320 384l128-128 192 192 128-128V704H384z" horiz-adv-x="768" />
+<glyph glyph-name="list-ordered" unicode="&#xf062;" d="M320 256h448v128h-448v-128z m0-256h448v128h-448v-128z m0 640v-128h448v128h-448z m-241-256h78v256h-36l-85-23v-50l43 2v-185z m110-206c0 36-12 78-96 78-33 0-64-6-83-16l1-66c21 10 42 15 67 15s32-11 32-28c0-26-30-58-110-112v-50h192v67l-91-2c49 30 87 66 87 113l1 1z" horiz-adv-x="768" />
+<glyph glyph-name="list-unordered" unicode="&#xf061;" d="M0 256h128v128h-128v-128z m0 256h128v128h-128v-128z m0-512h128v128h-128v-128z m256 256h512v128h-512v-128z m0 256h512v128h-512v-128z m0-512h512v128h-512v-128z" horiz-adv-x="768" />
+<glyph glyph-name="location" unicode="&#xf060;" d="M320 832c-177 0-320-143-320-320s160-416 320-704c160 288 320 527 320 704s-143 320-320 320z m0-448c-71 0-128 57-128 128s57 128 128 128 128-57 128-128-57-128-128-128z" horiz-adv-x="640" />
+<glyph glyph-name="lock" unicode="&#xf06a;" d="M704 384c-32 0-64 0-64 0s0 64 0 192-128 256-256 256-256-128-256-256 0-192 0-192-32 0-64 0-64-32-64-64 0-416 0-448 32-64 64-64 608 0 640 0 64 32 64 64 0 416 0 448-32 64-64 64z m-192-128h-384v-64h384v-64h-384v-64h384v-64h-384v-64h384v-64h-448v448h448v-64z m0 128h-256s0 128 0 192 64 128 128 128 128-64 128-128 0-192 0-192z" horiz-adv-x="768" />
+<glyph glyph-name="logo-github" unicode="&#xf092;" d="M552.73 499.865H311.557c-6.205 0-11.25-5.045-11.25-11.297v-117.887c0-6.252 5.045-11.272 11.25-11.272h94.109v-146.542c0 0-21.145-7.057-79.496-7.057-68.914 0-165.156 25.244-165.156 236.795 0 211.642 100.197 239.491 194.307 239.491 81.465 0 116.514-14.304 138.869-21.241 7.01-2.203 13.404 4.831 13.404 11.105L534.543 785.87c0 2.912-1.041 6.417-4.262 8.785C521.186 801.048 465.865 832 326.168 832 165.133 832 0 763.513 0 434.243 0 105.02099999999996 189.051 56 348.381 56c131.883 0 212.021 56.314 212.021 56.314 3.268 1.801 3.6 6.395 3.6 8.479V488.568C563.955 494.773 558.887 499.865 552.73 499.865zM1772.381 803.866h-135.695c-6.252 0-11.271-5.044-11.271-11.296v-262.393h-211.619V792.57c0 6.252-5.068 11.296-11.178 11.296h-135.838c-6.111 0-11.084-5.044-11.084-11.296v-710.473c0-6.299 5.021-11.32 11.084-11.32h135.838c6.203 0 11.178 5.068 11.178 11.32V385.933h211.619l-0.475-303.883c0-6.3 5.021-11.272 11.084-11.272h135.885c6.252 0 11.131 5.068 11.131 11.272l0.473 710.521C1783.607 798.822 1778.539 803.866 1772.381 803.866zM714.949 787.763c-48.357 0-87.574-39.572-87.574-88.403 0-48.855 39.217-88.428 87.574-88.428s87.527 39.572 87.527 88.428C802.477 748.19 763.307 787.763 714.949 787.763zM792.861 559.874c0 6.205-5.02 11.344-11.131 11.344H646.32c-6.348 0-11.746-6.394-11.746-12.67 0 0 0-394.654 0-469.867 0-13.735 8.572-17.903 19.703-17.903 0 0 57.688 0 121.959 0 13.311 0 16.814 6.536 16.814 18.188-0.094 25.197-0.094 123.808-0.094 142.942C792.861 250.09500000000003 792.861 559.874 792.861 559.874zM2297.973 570.152h-134.701c-6.158 0-11.084-5.092-11.084-11.344v-348.31c0 0-34.244-25.197-82.934-25.197-48.547 0-61.525 22.024-61.525 69.719 0 47.553 0 303.835 0 303.835 0 6.252-5.068 11.345-11.131 11.345h-136.643c-6.252 0-11.178-5.093-11.178-11.345 0 0 0-185.521 0-326.807 0-141.284 78.766-175.906 186.99-175.906 88.854 0 160.609 49.115 160.609 49.115s3.363-25.766 5.068-28.844c1.422-3.078 5.447-6.158 9.852-6.158h86.58c6.158 0 11.178 5.069 11.178 11.321l0.379 477.278C2309.15 565.0609999999999 2304.129 570.152 2297.973 570.152zM2666.932 586.1610000000001c-76.539 0-128.592-34.148-128.592-34.148V792.57c0 6.252-5.068 11.296-11.131 11.296h-136.264c-6.109 0-11.131-5.044-11.131-11.296l-0.379-710.521c0-6.3 5.068-11.272 11.225-11.272 0 0 94.773 0 94.869 0 4.215 0 7.389 2.179 9.805 5.968 2.369 3.837 5.73 32.775 5.73 32.775s55.557-52.763 161.035-52.763c123.807 0 194.758 62.804 194.758 281.906C2856.859 557.482 2743.471 586.1610000000001 2666.932 586.1610000000001zM2613.791 185.77499999999998c-46.701 1.421-78.34 22.64-78.34 22.64v225.07c0 0 31.307 19.206 69.672 22.593 48.547 4.31 95.438-10.326 95.438-126.13C2700.322 207.94100000000003 2679.199 183.83399999999995 2613.791 185.77499999999998zM1185.125 188.33299999999997c-5.969 0-21.219-2.368-36.85-2.368-49.92 0-66.971 23.256-66.971 53.331 0 30.218 0 199.85 0 199.85h101.926c6.252 0 11.178 5.044 11.178 11.343v109.48c0.094 6.299-4.926 11.344-11.178 11.344h-101.926l-0.143 134.535c0 5.092-2.699 7.625-8.572 7.625H933.861c-5.352 0-8.336-2.391-8.336-7.578v-139.035c0 0-69.576-16.79-74.266-18.188-4.641-1.326-8.051-5.684-8.051-10.822v-87.408c0-6.252 5.068-11.344 11.178-11.344h71.139c0 0 0-91.34 0-210.222 0-156.109 109.553-171.455 183.439-171.455 33.723 0 74.076 10.988 80.848 13.356 4.074 1.421 6.395 5.637 6.395 10.136l0.047 96.101C1196.254 183.312 1190.998 188.428 1185.125 188.33299999999997z" horiz-adv-x="2856.857" />
+<glyph glyph-name="mail" unicode="&#xf03b;" d="M0 640v-640h896V640H0zM768 576L448 312 128 576H768zM64 512l252.031-191.625L64 128V512zM128 64l254 206.25L448 220l65.875 50.125L768 64H128zM832 128L579.625 320.062 832 512V128z" horiz-adv-x="896" />
+<glyph glyph-name="mail-read" unicode="&#xf03c;" d="M576 448H256v-64h320V448zM384 576H256v-64h128V576zM768 603.469V704H627.188L448 832 268.812 704H128v-100.531L0 512v-640h896V512L768 603.469zM192 640h512v-244.812L448 184 192 395.188V640zM64 384l252.031-191.625L64 0V384zM128-64l254 206.25L448 92l65.875 50.125L768-64H128zM832 0L579.625 192.062 832 384V0z" horiz-adv-x="896" />
+<glyph glyph-name="mail-reply" unicode="&#xf051;" d="M384 672l-384-288 384-288v192c111 0 329-61 384-280 0 291-196 451-384 472v192z" horiz-adv-x="768" />
+<glyph glyph-name="mark-github" unicode="&#xf00a;" d="M512 832C229.25 832 0 602.75 0 320c0-226.25 146.688-418.125 350.156-485.812 25.594-4.688 34.938 11.125 34.938 24.625 0 12.188-0.469 52.562-0.719 95.312C242-76.81200000000001 211.906 14.5 211.906 14.5c-23.312 59.125-56.844 74.875-56.844 74.875-46.531 31.75 3.53 31.125 3.53 31.125 51.406-3.562 78.47-52.75 78.47-52.75 45.688-78.25 119.875-55.625 149-42.5 4.654 33 17.904 55.625 32.5 68.375C304.906 106.56200000000001 185.344 150.5 185.344 346.688c0 55.938 19.969 101.562 52.656 137.406-5.219 13-22.844 65.094 5.062 135.562 0 0 42.938 13.75 140.812-52.5 40.812 11.406 84.594 17.031 128.125 17.219 43.5-0.188 87.312-5.875 128.188-17.281 97.688 66.312 140.688 52.5 140.688 52.5 28-70.531 10.375-122.562 5.125-135.5 32.812-35.844 52.625-81.469 52.625-137.406 0-196.688-119.75-240-233.812-252.688 18.438-15.875 34.75-47 34.75-94.75 0-68.438-0.688-123.625-0.688-140.5 0-13.625 9.312-29.562 35.25-24.562C877.438-98 1024 93.875 1024 320 1024 602.75 794.75 832 512 832z" horiz-adv-x="1024" />
+<glyph glyph-name="markdown" unicode="&#xf0c9;" d="M950.154 640H73.846C33.127 640 0 606.873 0 566.154v-492.308C0 33.125 33.127 0 73.846 0h876.308c40.721 0 73.846 33.125 73.846 73.846V566.154C1024 606.873 990.875 640 950.154 640zM576 128.125L448 128V320l-96-123.077L256 320v-192H128V512h128l96-128 96 128 128 0.125V128.125zM767.091 96.125L608 320h96V512h128v-192h96L767.091 96.125z" horiz-adv-x="1024" />
+<glyph glyph-name="megaphone" unicode="&#xf077;" d="M832 800c-130 0-124-130-704-128C57.344 672 0 557.375 0 416s57.344-256 128-256c22.781 0 43.188-0.5 64.188-0.875L256-128l192-32 64 96-45.125 203.125C709.375 102.875 733.75 32 832 32c106 0 192 172 192 384C1024 628.031 938 800 832 800zM197 349.062c-39.188 1.469-82.188 2.25-127.562 2.625C66 371.406 64 393.094 64 416c0 88.375 28.688 192 64 192 39.031-0.125 75 0.438 109 1.406C209.656 562.438 192 493.688 192 416 192 392.688 194.062 370.562 197 349.062zM261.312 346.062C258.125 368.312 256 391.625 256 416c0 79.5 18.438 149.5 46.906 196.219 155.156 8.312 251.906 28.469 319.031 50.188C593.625 595.531 576 510.344 576 416c0-40 3.875-78 9.5-114.312C513.344 320.375 412.812 337.406 261.312 346.062zM832 128c-12.125 0-23.688 5.062-34.812 12.125-15.25 67.312-83.438 418.344 117.438 494.188C942.125 581.5 960 503.812 960 416 960 257 902.625 128 832 128z" horiz-adv-x="1024" />
+<glyph glyph-name="mention" unicode="&#xf0be;" d="M466.697 732.899C238.66 760.898 31.1 598.735 3.102 370.698c-28-228.038 134.163-435.598 362.2-463.597 71.429-8.756 145.115 0.913 213.325 29.946l-0.016 0.032c24.404 10.357 35.788 38.538 25.431 62.939-10.359 24.403-38.538 35.787-62.94 25.43l-0.001 0.004c-52.472-22.339-109.15-29.799-164.1-23.067-175.413 21.538-300.153 181.2-278.616 356.613 21.538 175.413 181.199 300.154 356.613 278.616 175.412-21.538 300.154-181.199 278.617-356.612-4.309-35.083-21.542-55.725-61.6-55.725-42.5 0-64 45.889-64 81.222V432c0 26.51-21.49 48-48 48-9.699 0-18.72-2.887-26.269-7.833-25.684 20.259-57.437 33.87-94.349 38.402-105.246 12.923-201.045-61.924-213.967-167.17C212.508 238.15200000000004 287.354 142.35400000000004 392.6 129.43200000000002c57.379-7.045 116.216 14.707 157.871 53.13 24.959-28.124 59.866-47.624 100.121-52.567 87.707-10.769 167.537 51.602 178.307 139.309C856.898 497.34 694.734 704.899 466.697 732.899zM511.285 308.30100000000004c-6.462-52.623-54.361-90.047-106.985-83.585-52.623 6.461-90.046 54.36-83.585 106.984 6.461 52.623 54.361 90.046 106.984 83.585C480.322 408.823 517.746 360.924 511.285 308.30100000000004z" horiz-adv-x="832" />
+<glyph glyph-name="microscope" unicode="&#xf089;" d="M617-64c86.312 18.75 151 100 151 192 0 58.438-26.625 110.125-67.875 145.375C702.5 288.625 704 304.125 704 320c0 104.844-49.875 197.875-128 256l64 64v64l64 64L640 832l-64-64h-64L256 512l-128-64v-128l64-64h128l64 128 96 96c55.5-33.406 96-90.438 96-160-106.062 0-192-85.938-192-192H0v-64h192c19.125-14.25 42.062-22.125 64-32v-96H128L0-192h768L640-64H617zM512 128c0 35.375 28.625 64 64 64s64-28.625 64-64c0-35.312-28.625-64-64-64S512 92.68799999999999 512 128z" horiz-adv-x="768" />
+<glyph glyph-name="milestone" unicode="&#xf075;" d="M704 640H0v-256h704l128 128L704 640zM448 448H320V576h128V448zM448 832H320v-128h128V832zM320-192h128V320H320V-192z" horiz-adv-x="832" />
+<glyph glyph-name="mirror" unicode="&#xf024;" d="M320 512L128 320l192-192V256h384v-128l192 192L704 512v-128H320V512zM512 832L0 512v-704l512 256 512-256V512L512 832zM960-64L576 128v64H448v-64L64-64V448l384 256v-256h128V704l384-256V-64z" horiz-adv-x="1024" />
+<glyph glyph-name="mortar-board" unicode="&#xf0d7;" d="M501 244l-245 76s0-96 0-160 115-96 256-96 256 32 256 96 0 160 0 160l-245-76c-7-2-15-2-23 0h1z m18 409c-4 1-9 1-13 0l-489-152c-21-7-21-36 0-43l111-35v-113c-19-11-32-32-32-55 0-12 3-23 9-32-5-9-9-20-9-32v-165c0-35 128-35 128 0v165c0 12-3 23-9 32 5 9 9 20 9 32 0 24-13 44-32 55v93l313-98c4-1 9-1 13 0l489 152c21 7 21 36 0 43l-488 153z m-6-205c-35 0-64 14-64 32s29 32 64 32 64-14 64-32-29-32-64-32z" horiz-adv-x="1024" />
+<glyph glyph-name="move-down" unicode="&#xf0a8;" d="M640 512H448V832H192v-320H0l320-384L640 512zM0-192h640V0H0V-192z" horiz-adv-x="640" />
+<glyph glyph-name="move-left" unicode="&#xf074;" d="M0 0h192V640H0V0zM704 448V640L320 320l384-320V192h320V448H704z" horiz-adv-x="1024" />
+<glyph glyph-name="move-right" unicode="&#xf0a9;" d="M832 640v-640h192V640H832zM320 448H0v-256h320v-192l384 320L320 640V448z" horiz-adv-x="1024" />
+<glyph glyph-name="move-up" unicode="&#xf0a7;" d="M0 128h192v-320h256V128h192L320 512 0 128zM0 832v-192h640V832H0z" horiz-adv-x="640" />
+<glyph glyph-name="mute" unicode="&#xf080;" d="M128 448H0v-256h128l256-192h64V640h-64L128 448zM864 416l-64 64-96-96-96 96-63-63.5 95-96.5-96-96 64-64 96 96 96-96 64 64-96 96L864 416z" horiz-adv-x="896" />
+<glyph glyph-name="no-newline" unicode="&#xf09c;" d="M896 512v-128H768V512L576 320l192-192V256h192c0 0 64 0.375 64 64s0 192 0 192H896zM224 544C100.281 544 0 443.719 0 320c0-123.75 100.281-224 224-224s224 100.25 224 224C448 443.719 347.719 544 224 544zM96 320c0 70.656 57.344 128 128 128 18.75 0 36.406-4.219 52.469-11.531L107.531 267.5C100.219 283.625 96 301.25 96 320zM224 192c-18.75 0-36.406 4.25-52.469 11.5l168.938 168.969C347.781 356.406 352 338.75 352 320 352 249.375 294.656 192 224 192z" horiz-adv-x="1024" />
+<glyph glyph-name="octoface" unicode="&#xf008;" d="M940.812 554.312c8.25 20.219 35.375 101.75-8.562 211.906 0 0-67.375 21.312-219.875-82.906C648.5 700.875 579.875 703.5 512 703.5c-67.906 0-136.438-2.625-200.5-20.25C159.031 787.531 91.719 766.219 91.719 766.219 47.812 656 74.938 574.531 83.188 554.312 31.5 498.438 0 427.125 0 339.656 0 10.437999999999988 213.25-64 510.844-64 808.562-64 1024 10.437999999999988 1024 339.656 1024 427.125 992.5 498.438 940.812 554.312zM512-1c-211.406 0-382.781 9.875-382.781 214.688 0 48.938 24.062 94.595 65.344 132.312 68.75 62.969 185.281 29.688 317.438 29.688 132.25 0 248.625 33.281 317.438-29.625 41.312-37.78 65.438-83.312 65.438-132.312C894.875 8.875 723.375-1 512-1zM351.156 319.562c-42.469 0-76.906-51.062-76.906-114.188s34.438-114.312 76.906-114.312c42.375 0 76.812 51.188 76.812 114.312S393.531 319.562 351.156 319.562zM672.875 319.562C630.5 319.562 596 268.5 596 205.375s34.5-114.312 76.875-114.312 76.812 51.188 76.812 114.312C749.75 268.5 715.312 319.562 672.875 319.562z" horiz-adv-x="1024" />
+<glyph glyph-name="organization" unicode="&#xf037;" d="M768 448h-64H576h-64-64-64-64H192h-64C57.344 448 0 390.656 0 320v-64c0-47.25 25.844-88.062 64-110.25V-64h256v-128h256V-64h256V145.75c38.125 22.188 64 62.938 64 110.25v64C896 390.656 838.625 448 768 448zM256 0H128V256H64v64c0 35.312 28.688 64 64 64h81.719c-11-18.875-17.719-40.562-17.719-64v-128c0-47.25 25.844-88.062 64-110.25V0zM576 128V256h-64v-384H384V256h-64v-128c-35.312 0-64 28.625-64 64V320c0 35.312 28.688 64 64 64h256c35.375 0 64-28.688 64-64v-128C640 156.625 611.375 128 576 128zM832 256h-64v-256H640v81.75c38.125 22.188 64 62.938 64 110.25V320c0 23.438-6.75 45.125-17.75 64H768c35.375 0 64-28.688 64-64V256zM303.688 514.625C338.875 474.125 390.156 448 448 448c57.875 0 109.125 26.125 144.312 66.625C614.125 475.062 655.688 448 704 448c70.625 0 128 57.344 128 128s-57.375 128-128 128c-25.625 0-49.375-7.688-69.375-20.688C614.875 768.438 539.062 832 448 832S281.094 768.438 261.375 683.312C241.344 696.312 217.594 704 192 704c-70.656 0-128-57.344-128-128s57.344-128 128-128C240.312 448 281.844 475.062 303.688 514.625zM704 640c35.375 0 64-28.594 64-64s-28.625-64-64-64c-35.312 0-64 28.594-64 64S668.688 640 704 640zM448 768c70.625 0 128-57.344 128-128s-57.375-128-128-128c-70.656 0-128 57.344-128 128S377.344 768 448 768zM192 512c-35.312 0-64 28.594-64 64s28.688 64 64 64c35.406 0 64-28.594 64-64S227.406 512 192 512z" horiz-adv-x="896" />
+<glyph glyph-name="package" unicode="&#xf0c4;" d="M480 768L0 640v-576l480-128 480 128V640L480 768zM63.875 111.06600000000003L63.5 544l384.498-102.533 0.001-432.833L63.875 111.06600000000003zM63.5 608l160.254 42.734L640 539.735v-0.135l-160-42.667L63.5 608zM896.125 111.06600000000003L512.001 8.634000000000015l0.001 432.833L640 475.6v-156l128 34.135V509.733L896.5 544 896.125 111.06600000000003zM768 573.733v0.125L351.734 684.862 480 719.066 896.5 608 768 573.733z" horiz-adv-x="1024" />
+<glyph glyph-name="paintcan" unicode="&#xf0d1;" d="M384 832C171.923 832 0 660.077 0 448v-64c0-35.346 28.654-64 64-64v-320c0-70.692 143.269-128 320-128s320 57.308 320 128V320c35.346 0 64 28.654 64 64v64C768 660.077 596.077 832 384 832zM576 192v-32c0-17.673-14.327-32-32-32s-32 14.327-32 32v32c0 17.673-14.327 32-32 32s-32-14.327-32-32v-160c0-17.673-14.327-32-32-32s-32 14.327-32 32V160c0 17.673-14.327 32-32 32s-32-14.327-32-32v-32c0-35.346-28.654-64-64-64s-64 28.654-64 64v64c-35.346 0-64 28.654-64 64V371.193C186.382 340.108 279.318 320 384 320s197.618 20.108 256 51.193V256C640 220.654 611.346 192 576 192zM384 384c-107.433 0-199.393 26.474-237.372 64 37.979 37.526 129.939 64 237.372 64s199.393-26.474 237.372-64C583.393 410.474 491.433 384 384 384zM384 576c-176.62 0-319.816-57.236-319.996-127.867-0.001 0.001-0.002 0.001-0.003 0.002C64.075 624.804 207.314 768 384 768c176.731 0 320-143.269 320-320C704 518.692 560.731 576 384 576z" horiz-adv-x="768" />
+<glyph glyph-name="pencil" unicode="&#xf058;" d="M704 768L576 640l192-192 128 128L704 768zM0 64l0.688-192.562L192-128l512 512L512 576 0 64zM192-64H64V64h64v-64h64V-64z" horiz-adv-x="896" />
+<glyph glyph-name="person" unicode="&#xf018;" d="M448 640C448 746 362.062 832 256 832S64 746 64 640c0-106.062 85.938-192 192-192S448 533.938 448 640zM256 512c-70.656 0-128 57.344-128 128S185.344 768 256 768c70.625 0 128-57.344 128-128S326.625 512 256 512zM384 448H256 128C57.344 448 0 390.656 0 320v-128c0-70.625 57.344-128 128-128v-256h256V64c70.625 0 128 57.375 128 128V320C512 390.656 454.625 448 384 448zM448 192c0-35.375-28.625-64-64-64V256h-64v-384H192V256h-64v-128c-35.312 0-64 28.625-64 64V320c0 35.312 28.688 64 64 64h256c35.375 0 64-28.688 64-64V192z" horiz-adv-x="512" />
+<glyph glyph-name="pin" unicode="&#xf041;" d="M196 128l64-320 64 320c-20-2-43-3-64-3s-44 1-64 3z m254 299c-33 17-62 59-62 85v64c0 22 12 39 23 52 15 13 24 29 24 45 0 53-61 95-175 95s-175-42-175-95c0-16 9-32 24-45 11-13 23-30 23-52v-64c0-26-29-68-62-85-38-19-70-54-70-88 0-74 101-148 260-148s260 73 260 148c0 33-31 68-70 88z" horiz-adv-x="519.657" />
+<glyph glyph-name="playback-fast-forward" unicode="&#xf0bd;" d="M0 64l384 256L0 576V64zM768 320L384 576v-256-256L768 320z" horiz-adv-x="768" />
+<glyph glyph-name="playback-pause" unicode="&#xf0bb;" d="M0 0h192V640H0V0zM320 640v-640h192V640H320z" horiz-adv-x="512" />
+<glyph glyph-name="playback-play" unicode="&#xf0bf;" d="M0 640l512-320L0 0V640z" horiz-adv-x="512" />
+<glyph glyph-name="playback-rewind" unicode="&#xf0bc;" d="M384 320l384-256V576L384 320zM0 320l384-256V320 576L0 320z" horiz-adv-x="768" />
+<glyph glyph-name="plug" unicode="&#xf0d4;" d="M1003.386 627.336l-0.905 0.905c-24.744 24.744-64.861 24.744-89.605 0l-45.707-45.707-90.51 90.51 45.707 45.707c24.744 24.744 24.744 64.861 0 89.605l-0.905 0.905c-24.744 24.744-64.861 24.744-89.605 0l-47.973-47.973C621.76 802.446 537.237 795.66 482.502 740.926l-24.89-24.89c-109.011-109.011-121.948-277.692-38.854-400.892l-4.138-4.138c-62.392-62.392-62.484-163.493-0.275-225.999 12.41-12.469 12.642-33.327 0.121-45.683-12.509-12.343-32.655-12.292-45.101 0.153l-89.427 89.427c-62.637 62.638-164.63 63.747-227.299 1.141-62.542-62.479-62.562-163.829-0.058-226.332l8.763-8.763c24.744-24.744 64.861-24.744 89.605 0l0.905 0.905c24.744 24.744 24.744 64.861 0 89.605l-8.292 8.292c-12.329 12.329-13.085 32.418-1.098 45.081 12.437 13.138 33.174 13.353 45.882 0.645l89.328-89.328c62.92-62.92 165.504-63.814 228.081-0.553 61.793 62.468 61.65 163.161-0.431 225.451-12.55 12.592-12.777 32.866-0.207 45.437l4.151 4.151c123.2-83.095 291.881-70.158 400.892 38.854l24.89 24.89c54.734 54.735 61.52 139.258 20.362 201.382l47.973 47.973C1028.129 562.475 1028.129 602.593 1003.386 627.336zM889.796 333.632c-37.49-37.49-98.274-37.49-135.765 0L527.757 559.906c-37.49 37.49-37.49 98.274 0 135.765 29.556 29.556 73.585 35.804 109.269 18.759l-41.839-41.839c-24.744-24.744-24.744-64.861 0-89.604l0.905-0.905c24.744-24.744 64.861-24.744 89.605 0l45.707 45.707 90.51-90.51-45.707-45.707c-24.744-24.744-24.744-64.861 0-89.605l0.905-0.905c24.744-24.744 64.861-24.744 89.604 0l41.839 41.839C925.6 407.218 919.351 363.188 889.796 333.632z" horiz-adv-x="1024" />
+<glyph glyph-name="plus" unicode="&#xf05d;" d="M384 384V640H256v-256H0v-128h256v-256h128V256h256V384H384z" horiz-adv-x="640" />
+<glyph glyph-name="podium" unicode="&#xf0af;" d="M320 832c-32 0-64-32-64-64s0-64 0-64h-64l-192-192v-128h192l64-384-128-64v-64h512v64l-128 64 64 384h192v128l-192 192h-256v64s14 0 32 0 32 17 32 32-16 32-32 32 0 0-32 0z m0-832l-53 320h118l-1-320h-64z m-224 512l128 128h32v-64h64v64h224l128-128h-576z" horiz-adv-x="768" />
+<glyph glyph-name="primitive-dot" unicode="&#xf052;" d="M-0.088 320c0 141.5 114.5 256 256 256 141.438 0 256-114.5 256-256s-114.562-256-256-256C114.413 64-0.088 178.5-0.088 320z" horiz-adv-x="511.825" />
+<glyph glyph-name="primitive-square" unicode="&#xf053;" d="M512 64H0V576h512V64z" horiz-adv-x="512" />
+<glyph glyph-name="pulse" unicode="&#xf085;" d="M736 320.062L563.188 486.406 422.406 288 352 729.594 152.438 320.062H0V192h230.406L288 307.188l57.594-345.562L576 288l102.375-96H896V320.062H736z" horiz-adv-x="896" />
+<glyph glyph-name="puzzle" unicode="&#xf0c0;" d="M755.75 256.85c-13.95 9.96-28.52 16.59-43.47 19.92-8.84 1.69-18.06 2.33-27.57 1.81-8.99-0.5-17.56-1.68-25.69-3.52-6.1-1.69-12.22-3.89-18.35-6.59-18.18-8.02-33.89-18.12-46.79-30.33-12.22-12.9-22.32-28.62-30.34-46.79-2.7-6.12-4.9-12.24-6.59-18.34-1.84-8.14-3.03-16.7-3.52-25.69-0.52-9.51 0.12-18.73 1.81-27.57 3.33-14.95 9.96-29.52 19.92-43.47 3.89-5.44 8.08-10.4 12.56-14.88 20.06-20.03 45.83-30.7 75.42-34.11 8.92-1.02 18.12-1.68 26.53-4.48 5.12-1.7 9.16-4.08 12.08-7.02 6.65-6.6 7.63-16.1 2.5-27.24-3.15-6.84-7.7-13.45-12.96-18.84l-2.79-2.86c-3.93-3.92-6.41-6.4-7.05-7.04-3.13-3.16-6.1-6.15-9.06-9.15l-2.96-2.92c-10.52-10.58-21.09-21.12-31.66-31.65-22.76-22.82-45.57-45.58-68.38-68.36-7.5-7.5-15-15-22.5-22.49-3.46-3.45-7.07-6.38-10.78-8.79-1.8-1.22-3.49-2.24-5.18-3.16-19.6-9.89-41.43-5.92-59.24 11.88-5.4 5.4-10.62 10.62-15.85 15.84-30.25 30.25-60.48 60.52-90.77 90.73-8.59 8.57-17.13 17.08-25.68 25.59-6.12 6.09-12.67 11.85-19.56 17.06-5.72 4.33-11.59 7.56-17.46 9.73-21.16 7.32-41.41 2.01-54.67-13.26-3.81-4.8-7-10.47-9.39-16.94-3.43-9.26-4.6-19.47-5.9-29.36-4.9-37.53-25.8-68.43-55.98-82.65-7.48-3.65-15.49-6.29-23.9-7.78-7.95-1.41-15.95-1.71-23.85-1.04-26.61 1.35-49.48 13.09-68.51 32.57-1.68 1.67-2.1 2.09-2.51 2.51-19.48 19.02-31.22 41.9-32.57 68.5-0.68 7.9-0.37 15.9 1.04 23.85 1.49 8.41 4.13 16.43 7.78 23.9 14.22 30.18 45.13 51.07 82.65 55.97 9.89 1.29 20.1 2.47 29.36 5.9 6.94 2.56 12.96 6.05 17.97 10.23 14.54 13.15 19.59 32.63 12.84 52.34-2.78 7.35-6 13.22-10.33 18.94-5.21 6.88-10.97 13.43-17.06 19.55-8.51 8.55-17.03 17.09-25.55 25.63-26.92 26.98-53.84 53.88-80.75 80.78l-10.03 10.03c-5.22 5.22-10.45 10.45-15.26 15.27-18.39 18.4-22.35 40.22-12.46 59.82 0.92 1.69 1.94 3.37 3.08 5.05 2.49 3.84 5.42 7.45 8.87 10.91 7.49 7.5 14.99 15 22.49 22.5 22.77 22.81 45.54 45.62 68.36 68.38 10.53 10.57 21.06 21.14 31.65 31.66l2.92 2.96c2.99 2.97 5.99 5.93 8.98 8.9 0.8 0.81 3.28 3.29 7.2 7.22l2.86 2.79c5.39 5.26 12 9.8 18.84 12.96 11.14 5.13 20.63 4.15 27.24-2.5 2.94-2.92 5.32-6.96 7.02-12.08 2.79-8.41 3.45-17.61 4.48-26.53 3.41-29.59 14.08-55.35 34.11-75.41 4.49-4.48 9.44-8.67 14.88-12.56 13.95-9.96 28.52-16.59 43.47-19.92 8.84-1.69 18.06-2.33 27.57-1.81 8.99 0.5 17.56 1.68 25.69 3.52 6.1 1.69 12.22 3.89 18.35 6.59 18.18 8.02 33.89 18.12 46.79 30.33 12.22 12.9 22.32 28.62 30.34 46.79 2.7 6.12 4.9 12.24 6.59 18.34 1.84 8.14 3.03 16.7 3.52 25.69 0.52 9.51-0.12 18.73-1.81 27.57-3.33 14.95-9.96 29.52-19.92 43.47-3.89 5.44-8.08 10.4-12.56 14.88-20.06 20.03-45.83 30.7-75.42 34.11-8.92 1.02-18.12 1.68-26.53 4.48-5.12 1.7-9.16 4.08-12.08 7.02-6.65 6.6-7.63 16.1-2.5 27.24 3.15 6.84 7.7 13.45 12.96 18.84l2.79 2.86c3.93 3.92 6.41 6.4 7.05 7.04 3.13 3.16 6.1 6.15 9.06 9.15l2.96 2.92c10.52 10.58 21.09 21.12 31.66 31.65 22.76 22.82 45.57 45.58 68.38 68.35 7.5 7.5 15 15 22.5 22.49 3.46 3.45 7.07 6.38 10.78 8.79 1.8 1.22 3.49 2.24 5.18 3.16 19.6 9.89 41.43 5.92 59.24-11.88 5.4-5.4 10.62-10.62 15.85-15.84 30.25-30.25 60.48-60.52 90.77-90.73 8.59-8.57 17.13-17.08 25.68-25.59 6.12-6.09 12.67-11.85 19.56-17.06 5.72-4.33 11.59-7.56 17.46-9.73 21.16-7.32 41.41-2.01 54.67 13.26 3.81 4.8 7 10.47 9.39 16.94 3.43 9.26 4.6 19.47 5.9 29.36 4.9 37.53 25.8 68.43 55.98 82.65 7.48 3.65 15.49 6.28 23.9 7.78 7.95 1.41 15.95 1.71 23.85 1.04 26.61-1.35 49.48-13.09 68.51-32.57 1.68-1.67 2.1-2.09 2.51-2.51 19.48-19.02 31.22-41.9 32.57-68.5 0.68-7.9 
0.37-15.9-1.04-23.85-1.49-8.41-4.13-16.43-7.78-23.9-14.22-30.18-45.13-51.07-82.65-55.97-9.89-1.29-20.1-2.47-29.36-5.9-6.94-2.56-12.96-6.05-17.97-10.23-14.54-13.15-19.59-32.63-12.84-52.34 2.78-7.35 6-13.22 10.33-18.94 5.21-6.88 10.97-13.43 17.06-19.55 8.51-8.55 17.03-17.09 25.55-25.63 30.26-30.33 60.54-60.56 90.78-90.81 5.22-5.22 10.45-10.45 15.26-15.27 18.39-18.4 22.35-40.22 12.46-59.82-0.92-1.69-1.94-3.37-3.08-5.05-2.49-3.84-5.42-7.45-8.87-10.91-7.49-7.5-14.99-15-22.49-22.5-22.77-22.81-45.54-45.62-68.36-68.38-10.53-10.57-21.06-21.14-31.65-31.66l-2.92-2.96c-2.99-2.97-5.99-5.93-8.98-8.9-0.8-0.81-3.28-3.29-7.2-7.22l-2.86-2.79c-5.39-5.26-12-9.8-18.84-12.96-11.14-5.13-20.63-4.15-27.24 2.5-2.94 2.92-5.32 6.96-7.02 12.08-2.79 8.41-3.45 17.61-4.48 26.53-3.41 29.59-14.08 55.35-34.11 75.41C766.15 248.76999999999998 761.19 252.97000000000003 755.75 256.85z" horiz-adv-x="1024" />
+<glyph glyph-name="question" unicode="&#xf02c;" d="M448 64h128v128h-128v-128z m64 512c-96 0-192-96-192-192h128c0 32 32 64 64 64s64-32 64-64c0-64-128-64-128-128h128c64 22 128 64 128 160s-96 160-192 160z m0 256c-283 0-512-229-512-512s229-512 512-512 512 229 512 512-229 512-512 512z m0-896c-212 0-384 172-384 384s172 384 384 384 384-172 384-384-172-384-384-384z" horiz-adv-x="1024" />
+<glyph glyph-name="quote" unicode="&#xf063;" d="M0 320v-256h256V320H128c0 0 0 128 128 128V576C256 576 0 576 0 320zM640 448V576c0 0-256 0-256-256v-256h256V320H512C512 320 512 448 640 448z" horiz-adv-x="640" />
+<glyph glyph-name="radio-tower" unicode="&#xf030;" d="M306.838 441.261c15.868 16.306 15.868 42.731 0 59.037-20.521 21.116-30.643 48.417-30.705 76.124 0.062 27.77 10.183 55.039 30.705 76.186 15.868 16.337 15.868 42.764 0 59.069-7.934 8.184-18.272 12.275-28.706 12.275-10.371 0-20.804-4.029-28.738-12.213-36.266-37.297-54.633-86.433-54.57-135.317-0.062-48.792 18.305-97.927 54.57-135.161C265.262 424.955 290.97 424.955 306.838 441.261zM149.093 798.858c-8.121 8.309-18.68 12.463-29.3 12.463-10.558 0-21.179-4.154-29.237-12.463C30.8 737.509 0.751 656.856 0.813 576.422 0.751 496.081 30.8 415.272 90.494 353.985c16.181-16.618 42.356-16.618 58.537 0 16.118 16.587 16.118 43.513 0 60.067-43.7 44.98-65.44 103.456-65.44 162.368s21.74 117.449 65.44 162.368C165.149 755.439 165.149 782.365 149.093 798.858zM513.031 472.153c57.351 0 103.956 46.574 103.956 103.956 0 57.382-46.605 103.955-103.956 103.955-57.381 0-103.956-46.573-103.956-103.955C409.076 518.727 455.65 472.153 513.031 472.153zM933.539 798.233c-16.181 16.618-42.355 16.618-58.475 0-16.181-16.587-16.181-43.513 0-60.068 43.668-44.918 65.409-103.456 65.409-162.368 0-58.85-21.805-117.387-65.473-162.306-16.117-16.618-16.117-43.575 0.062-60.068 8.059-8.309 18.616-12.463 29.237-12.463 10.558 0 21.178 4.154 29.236 12.463 59.726 61.287 89.774 142.096 89.649 222.437C1023.313 656.138 993.264 736.947 933.539 798.233zM513.281 389.127L513.281 389.127c-26.489-0.062-53.04 6.466-77.091 19.429L235.057-127.59000000000003h95.209l54.819 63.973h255.891l53.977-63.973h95.272L589.124 408.431C565.384 395.655 539.395 389.127 513.281 389.127zM512.656 358.483L577.004 128.29999999999995H449.059L512.656 358.483zM385.086 0.3550000000000182l63.974 63.973h127.944l63.974-63.973H385.086zM717.194 710.958c-15.868-16.306-15.868-42.731 0-59.037 20.491-21.116 30.611-48.511 30.674-76.124-0.062-27.77-10.183-55.102-30.674-76.187-15.868-16.336-15.868-42.763 0-59.068 7.871-8.184 18.242-12.213 28.737-12.213 10.309 0 20.741 4.029 28.675 12.213 36.298 37.234 54.665 86.433 54.54 135.255 0.125 48.792-18.181 97.927-54.54 135.161C758.801 727.264 733.062 727.264 717.194 710.958z" horiz-adv-x="1024" />
+<glyph glyph-name="repo" unicode="&#xf001;" d="M320 576h-64v-64h64v64z m0 128h-64v-64h64v64z m384 128c-32 0-608 0-640 0s-64-32-64-64 0-736 0-768 32-64 64-64 128 0 128 0v-128l96 96 96-96v128s288 0 320 0 64 32 64 64 0 736 0 768-32 64-64 64z m0-800c0-16-15-32-32-32s-288 0-288 0v64h-192v-64s-79 0-96 0-32 17-32 32 0 96 0 96h640s0-80 0-96z m0 160h-512v576h513l-1-576z m-384 128h-64v-64h64v64z m0 128h-64v-64h64v64z" horiz-adv-x="768" />
+<glyph glyph-name="repo-clone" unicode="&#xf04c;" d="M320 448h-64v-64h64v64z m-128 320h256v64s-352 0-384 0-64-32-64-64 0-736 0-768 32-64 64-64 128 0 128 0v-128l96 96 96-96v128s286 0 320 0 64 32 64 64 0 192 0 192h-576v576z m512-640s0-79 0-96-14-32-32-32-288 0-288 0v64h-192v-64s-80 0-96 0-32 16-32 32 0 96 0 96h640z m-384 448h-64v-64h64v64z m-64-320h64v64h-64v-64z m704 576c-32 0-288 0-320 0s-64-32-64-64 0-352 0-384 32-64 64-64 64 0 64 0v-64l32 32 32-32v64s160 0 192 0 64 32 64 64 0 352 0 384-32 64-64 64z m-256-448s-15 0-32 0-32 15-32 32 0 32 0 32h64v-64z m256 32c0-16-15-32-32-32s-160 0-160 0v64h192s0-16 0-32z m0 96h-256v256h224s32 0 32-32 0-224 0-224z m-640 192h-64v-64h64v64z" horiz-adv-x="1024" />
+<glyph glyph-name="repo-force-push" unicode="&#xf04a;" d="M768 768c0 32-32 64-64 64s-608 0-640 0-64-32-64-64 0-768 0-768 0 32 0 0 32-64 64-64 128 0 128 0v-128l128 128v128h-128v-64s-79 0-96 0-32 15-32 32 0 96 0 96h256v64h-128v576h512v-576h-128v-64h128s0-80 0-96-15-32-32-32-96 0-96 0v-64s96 0 128 0 64 32 64 64 0 736 0 768z m-272-320h144l-192 256-192-256h144l-144-192h128v-448h128v448h128l-144 192z" horiz-adv-x="767.896" />
+<glyph glyph-name="repo-forked" unicode="&#xf002;" d="M768 704c0 71-57 128-128 128s-128-57-128-128c0-47 26-89 64-111v-106l-192-212-192 212v106c38 22 64 63 64 111 0 71-57 128-128 128s-128-57-128-128c0-47 26-89 64-111v-156l256-282v-109c-38-22-64-63-64-111 0-71 57-128 128-128s128 57 128 128c0 47-26 89-64 111v109l256 282v156c38 22 64 63 64 111z m-640 63c34 0 62-28 62-62s-28-62-62-62-62 28-62 62 28 62 62 62z m256-891c-34 0-62 28-62 62s28 62 62 62 62-28 62-62-28-62-62-62z m256 891c34 0 62-28 62-62s-28-62-62-62-62 28-62 62 28 62 62 62z" horiz-adv-x="768" />
+<glyph glyph-name="repo-pull" unicode="&#xf006;" d="M1024 512l-192 192v-128h-384v-128h384v-128l192 192z m-320-320h-512v576h512v-128h64s0 96 0 128-32 64-64 64-608 0-640 0-64-32-64-64 0-736 0-768 32-64 64-64 128 0 128 0v-128l96 96 96-96v128s288 0 320 0 64 32 64 64 0 384 0 384h-64v-192z m0-160c0-15-15-32-32-32s-288 0-288 0v64h-192v-64s-79 0-96 0-32 16-32 32 0 96 0 96h640s0-81 0-96z m-384 544h-64v-64h64v64z m0 128h-64v-64h64v64z m0-256h-64v-64h64v64z m-64-192h64v64h-64v-64z" horiz-adv-x="1024" />
+<glyph glyph-name="repo-push" unicode="&#xf005;" d="M448 512l-192-256h128v-448h128v448h128l-192 256z m-192 0h64v64h-64v-64z m64 192h-64v-64h64v64z m384 128c-32 0-608 0-640 0s-64-32-64-64 0-736 0-768 32-64 64-64 128 0 128 0v-128l128 128v128h-128v-64s-79 0-96 0-32 14-32 32 0 96 0 96h256v64h-128v576h513l-1-576h-128v-64h128s0-79 0-96-15-32-32-32-96 0-96 0v-64s96 0 128 0 64 32 64 64 0 736 0 768-32 64-64 64z" horiz-adv-x="768" />
+<glyph glyph-name="rocket" unicode="&#xf033;" d="M716.737 707.944c-71.926-41.686-148.041-96.13-218.436-166.555-45-45.031-81.213-88.78-110.39-129.778L209.538 378.65 0.047 169.00300000000004l186.818-5.815 131.562 131.562c-46.439-96.224-50.536-160.019-50.536-160.019l58.854-58.792c0 0 65.827 6.255 162.737 53.163L355.107-5.119000000000028l5.88-186.881 209.585 209.521 33.086 179.252c41.403 29.02 85.185 65.046 129.716 109.545 70.425 70.455 124.837 146.541 166.555 218.466-45.97 9.351-88.125 28.488-121.397 61.668C745.257 619.819 725.994 661.975 716.737 707.944zM786.161 745.157c5.004-45 19.952-81.274 44.78-105.98 24.769-24.985 60.98-39.902 106.138-44.844C1003.063 727.677 1023.953 832 1023.953 832S919.63 811.142 786.161 745.157z" horiz-adv-x="1024" />
+<glyph glyph-name="rss" unicode="&#xf034;" d="M128 192C57.344 192 0 134.625 0 64s57.344-128 128-128 128 57.375 128 128S198.656 192 128 192zM128 448c0 0-64-2-64-64s64-64 64-64c141.375 0 256-114.625 256-256 0 0 0-64 64-64s64 64 64 64C512 276 340.031 448 128 448zM128 704c0 0-64 0-64-64s64-64 64-64c282.75 0 512-229.25 512-512 0 0 0-64 64-64s64 64 64 64C768 417.406 481.5 704 128 704z" horiz-adv-x="768" />
+<glyph glyph-name="ruby" unicode="&#xf047;" d="M768 704H256L0 448l512-512 512 512L768 704zM128 448l192 192h384l192-192L512 64 128 448zM704 576H512v-448l320 320L704 576z" horiz-adv-x="1024" />
+<glyph glyph-name="screen-full" unicode="&#xf066;" d="M128 64h639.875V576H128V64zM255.938 448h384v-256h-384V448zM64 639.938h191.938v64H0V448h64V639.938zM64 192H0v-255.938h255.938V0H64V192zM639.938 703.938v-64h191.938V448h64V703.938H639.938zM831.875 0H639.938v-63.938h255.938V192h-64V0z" horiz-adv-x="895.875" />
+<glyph glyph-name="screen-normal" unicode="&#xf067;" d="M127.938 640.062H0v-64h191.938V768h-64V640.062zM0-0.06200000000001182h127.938V-128h64V63.93799999999999H0V-0.06200000000001182zM768.062 640.062V768h-64v-191.938H896v64H768.062zM704.062-128h64V-0.06200000000001182H896v64H704.062V-128zM192.062 128H704V512H192.062V128zM320 384h256v-128H320V384z" horiz-adv-x="896" />
+<glyph glyph-name="search" unicode="&#xf02e;" d="M960 0L710.875 249.125C746.438 307.188 768 374.844 768 448 768 660.031 596 832 384 832 171.969 832 0 660.031 0 448c0-212 171.969-384 384-384 73.156 0 140.812 21.562 198.875 57L832-128c17.5-17.5 46.5-17.375 64 0l64 64C977.5-46.5 977.5-17.5 960 0zM384 192c-141.375 0-256 114.625-256 256s114.625 256 256 256 256-114.625 256-256S525.375 192 384 192z" horiz-adv-x="973.125" />
+<glyph glyph-name="server" unicode="&#xf097;" d="M704 448h-640c-35 0-64-32-64-64v-128c0-32 32-64 64-64h640c32 0 64 32 64 64v128c0 32-32 64-64 64z m-576-192h-64v128h64v-128z m128 0h-64v128h64v-128z m128 0h-64v128h64v-128z m128 0h-64v128h64v-128z m192-128h-640c-35 0-64-32-64-64v-128c0-32 32-64 64-64h640c32 0 64 32 64 64v128c0 32-32 64-64 64z m-576-192h-64v128h64v-128z m128 0h-64v128h64v-128z m128 0h-64v128h64v-128z m128 0h-64v128h64v-128z m192 832h-640c-35 0-64-32-64-64v-128c0-32 32-64 64-64h640c32 0 64 32 64 64v128c0 32-32 64-64 64z m-576-192h-64v128h64v-128z m128 0h-64v128h64v-128z m128 0h-64v128h64v-128z m128 0h-64v128h64v-128z m192 64h-64v64h64v-64z" horiz-adv-x="768" />
+<glyph glyph-name="settings" unicode="&#xf07c;" d="M64-64h128V128H64V-64zM192 704H64v-320h128V704zM512 704H384v-128h128V704zM0 192h256V320H0V192zM384-64h128V320H384V-64zM320 384h256V512H320V384zM832 704H704v-384h128V704zM640 256v-128h256V256H640zM704-64h128V64H704V-64z" horiz-adv-x="896" />
+<glyph glyph-name="sign-in" unicode="&#xf036;" d="M640 256L640 384 896 384 896 512 640 512 640 640 448 496 448 640 192 768 704 768 704 576 768 576 768 832 64 832 64 0 448-192 448 0 768 0 768 320 704 320 704 64 448 64 448 400z" horiz-adv-x="896" />
+<glyph glyph-name="sign-out" unicode="&#xf032;" d="M640 64H384V640L128 768h512v-192h64V832H0v-832l384-192V0h320V320h-64V64zM1024 448L768 640v-128H512v-128h256v-128L1024 448z" horiz-adv-x="1024" />
+<glyph glyph-name="split" unicode="&#xf0c6;" d="M448 576l-256 256-192-192 311-300c15 81 43 136 133 230l5 6z m128 256l133-133-197-197c-99-99-128-162-128-309v-384h256v384c0 52 19 94 53 128l197 197 133-133v448h-448z" horiz-adv-x="1024" />
+<glyph glyph-name="squirrel" unicode="&#xf0b2;" d="M768 768c-141.385 0-256-83.75-256-186.875C512 457.25 544 387 512 192c0 288-177 405.783-256 405.783 3.266 32.17-30.955 42.217-30.955 42.217s-14-7.124-19.354-21.583c-17.231 20.053-36.154 17.54-36.154 17.54l-8.491-37.081c0 0-117.045-40.876-118.635-206.292C56 371 141.311 353.898 201.887 364.882c57.157-2.956 42.991-50.648 30.193-63.446C178.083 247.438 128 320 64 320s-64-64 0-64 64-64 192-64c-198-77 0-256 0-256h-64c-64 0-64-64-64-64s256 0 384 0c192 0 320 64 320 222.182 0 54.34-27.699 114.629-64 162.228C697.057 349.433 782.453 427.566 832 384s192-64 192 128C1024 653.385 909.385 768 768 768zM160 448c-17.674 0-32 14.327-32 32 0 17.674 14.326 32 32 32 17.673 0 32-14.326 32-32C192 462.327 177.673 448 160 448z" horiz-adv-x="1024" />
+<glyph glyph-name="star" unicode="&#xf02a;" d="M896 448l-313.5 40.781L448 768 313.469 488.781 0 448l230.469-208.875L171-63.93799999999999l277 148.812 277.062-148.812L665.5 239.125 896 448z" horiz-adv-x="896" />
+<glyph glyph-name="steps" unicode="&#xf0c7;" d="M136 768C60.89 768 0 667.71 0 544c0-68.83 17.02-141.84 34-254.54C47.3 201.16999999999996 79.67 128 136 128s94.08 48.79 94.08 137.97c0 30.37-24.97 78.75-26.08 120.03-2.02 74.46 49.93 104.17 49.93 173C253.93 682.71 211.1 768 136 768zM502.97 512c-75.1 0-117.93-85.29-117.93-209 0-68.83 51.95-98.54 49.93-173-1.109-41.28-26.08-89.66-26.08-120.03 0-89.18 37.75-137.97 94.08-137.97s88.7 73.17 102 161.46c16.98 112.7 34 185.71 34 254.54C638.97 411.71 578.08 512 502.97 512z" horiz-adv-x="640" />
+<glyph glyph-name="stop" unicode="&#xf08f;" d="M704 832H320L0 512v-384l320-320h384l320 320V512L704 832zM896 192L640-64H384L128 192V448l256 256h256l256-256V192zM448 256h128V576H448V256zM448 64h128V192H448V64z" horiz-adv-x="1024" />
+<glyph glyph-name="sync" unicode="&#xf087;" d="M655.461 358.531c11.875-81.719-13.062-167.781-76.812-230.594-94.188-92.938-239.5-104.375-346.375-34.562l74.875 73L31.96 204.75 70.367-64l84.031 80.5c150.907-111.25 364.938-100.75 502.063 34.562 79.5 78.438 115.75 182.562 111.25 285.312L655.461 358.531zM189.46 511.938c94.156 92.938 239.438 104.438 346.313 34.562l-75-72.969 275.188-38.406L697.586 704l-83.938-80.688C462.711 734.656 248.742 724.031 111.585 588.75 32.085 510.344-4.133 406.219 0.335 303.5l112.25-22.125C100.71 363.125 125.71 449.094 189.46 511.938z" horiz-adv-x="768.051" />
+<glyph glyph-name="tag" unicode="&#xf015;" d="M384 768H128L0 640v-256l512-512 384 384L384 768zM64 416V608l96 96h192l448-448L512-32 64 416zM448 512L256 320l256-256 192 192L448 512zM352 320l96 96 160-160-96-96L352 320zM320 544c0 53-43 96-96 96s-96-43-96-96 43-96 96-96S320 491 320 544zM224 512c-17.656 0-32 14.344-32 32s14.344 32 32 32 32-14.344 32-32S241.656 512 224 512z" horiz-adv-x="896" />
+<glyph glyph-name="telescope" unicode="&#xf088;" d="M76 409c32 8 229 59 229 59-1-6-2-19-2-19 0-71 49-128 128-128s128 59 128 128c0 11-8 22-19 32l49-3s7 2 31 8c-51-14-108 31-126 99s8 135 60 149c-24-6-31-8-31-8l-168-110c-34-9-55-46-46-80 2-9 7-17 12-23-7-12-12-26-15-40-27 1-51 19-59 46-9 34 11 69 45 78l-245-65c-34-9-54-43-45-77s41-54 73-46z m419-153h-128v-64l-320-320h128l192 128v-128h128v128l192-128h128l-320 320v64z m429 448c-18 68-70 110-122 96-69-18-98-28-186-51-51-14-79-80-61-148s74-115 125-102c87 23 117 33 186 51 51 14 76 85 58 154z m-70-90c-17-5-42 17-51 51s-4 66 13 70 42-17 51-51 4-66-13-70z" horiz-adv-x="929.875" />
+<glyph glyph-name="terminal" unicode="&#xf0c8;" d="M831 705H63c-35.35 0-64-28.65-64-64v-640c0-35.35 28.65-64 64-64h768c35.35 0 64 28.65 64 64V641C895 676.35 866.35 705 831 705zM127 257l128 128L127 513l64 64 192-192L191 193 127 257zM639 193H383v64h256V193z" horiz-adv-x="896" />
+<glyph glyph-name="three-bars" unicode="&#xf05e;" d="M0 640v-128h768v128h-768z m0-384h768v128h-768v-128z m0-256h768v128h-768v-128z" horiz-adv-x="768" />
+<glyph glyph-name="tools" unicode="&#xf031;" d="M286.547 366.984c16.843-16.812 81.716-85.279 81.716-85.279l35.968 37.093-56.373 58.248L456.072 491.98c0 0-48.842 47.623-27.468 28.655 20.438 75.903 1.812 160.589-55.842 220.243C315.608 800.064 234.392 819.47 161.425 799.096l123.653-127.715-32.53-125.309-121.06-33.438L7.898 640.3820000000001c-19.718-75.436-0.969-159.339 56.311-218.556C124.302 359.703 210.83 341.453 286.547 366.984zM698.815 242.769L549.694 95.46100000000001l245.932-254.805c20.062-20.812 46.498-31.188 72.872-31.188 26.25 0 52.624 10.375 72.811 31.188 40.249 41.624 40.249 108.997 0 150.62L698.815 242.769zM1023.681 670.162L867.06 832.001 405.387 354.703l56.373-58.248L185.425 10.839000000000055l-63.154-33.749-89.217-145.559 22.719-23.562 140.839 92.247 32.655 65.312 276.336 285.554 56.404-58.248L1023.681 670.162z" horiz-adv-x="1024" />
+<glyph glyph-name="trashcan" unicode="&#xf0d0;" d="M704 704H448c0 0 0 24.057 0 32 0 17.673-14.327 32-32 32s-32-14.327-32-32c0-17.673 0-32 0-32H128c-35.346 0-64-28.654-64-64v-64c0-35.346 28.654-64 64-64v-576c0-35.346 28.654-64 64-64h448c35.346 0 64 28.654 64 64V512c35.346 0 64 28.654 64 64v64C768 675.346 739.346 704 704 704zM640-32c0-17.673-14.327-32-32-32H224c-17.673 0-32 14.327-32 32V512h64v-480c0-17.673 14.327-32 32-32s32 14.327 32 32l0.387 480H384v-480c0-17.673 14.327-32 32-32s32 14.327 32 32l0.387 480h64L512 32c0-17.673 14.327-32 32-32s32 14.327 32 32V512h64V-32zM704 592c0-8.837-7.163-16-16-16H144c-8.837 0-16 7.163-16 16v32c0 8.837 7.163 16 16 16h544c8.837 0 16-7.163 16-16V592z" horiz-adv-x="768" />
+<glyph glyph-name="triangle-down" unicode="&#xf05b;" d="M0 448l383.75-383.75L767.5 448H0z" horiz-adv-x="767.5" />
+<glyph glyph-name="triangle-left" unicode="&#xf044;" d="M0 320.125l383.75-383.75v767.5L0 320.125z" horiz-adv-x="383.75" />
+<glyph glyph-name="triangle-right" unicode="&#xf05a;" d="M0.062 703.75L383.812 320 0.062-63.75V703.75z" horiz-adv-x="383.875" />
+<glyph glyph-name="triangle-up" unicode="&#xf0aa;" d="M383.75 576L0 192.25h767.5L383.75 576z" horiz-adv-x="767.5" />
+<glyph glyph-name="unfold" unicode="&#xf039;" d="M384 448h128V640h128L448 832 256 640h128V448zM576 576v-64h224L672 384H224L96 512h224v64H0v-63.999L160 352 0 192v-64h320v64H96l128 128h448l128-128H576v-64h320v64L736 352l160 160.001V576H576zM512 256H384v-192H256l192-192 192 192H512V256z" horiz-adv-x="896" />
+<glyph glyph-name="unmute" unicode="&#xf0ba;" d="M128 448H0v-256h128l256-192h64V640h-64L128 448zM538.51 410.51c-12.496 12.497-32.758 12.497-45.255 0-12.496-12.496-12.496-32.758 0-45.255 24.994-24.993 24.994-65.516 0-90.51-12.496-12.496-12.496-32.758 0-45.255 12.497-12.496 32.759-12.496 45.255 0C588.497 279.47900000000004 588.497 360.523 538.51 410.51zM629.02 501.019c-12.495 12.497-32.758 12.497-45.255 0-12.495-12.496-12.495-32.758 0-45.255 74.981-74.98 74.981-196.548 0-271.528-12.495-12.497-12.495-32.76 0-45.256 12.497-12.496 32.76-12.496 45.255 0C728.994 238.95399999999995 728.994 401.045 629.02 501.019zM719.529 591.529c-12.497 12.497-32.76 12.497-45.255 0-12.496-12.496-12.496-32.758 0-45.255 124.968-124.968 124.968-327.58 0-452.548-12.496-12.497-12.496-32.759 0-45.255 12.495-12.497 32.758-12.497 45.255 0C869.49 198.433 869.49 441.568 719.529 591.529z" horiz-adv-x="896" />
+<glyph glyph-name="versions" unicode="&#xf064;" d="M0 128h128v64H64V448h64v64H0V128zM384 640v-640h512V640H384zM768 128H512V512h256V128zM192 64h128v64h-64V512h64v64H192V64z" horiz-adv-x="896" />
+<glyph glyph-name="x" unicode="&#xf081;" d="M640 512L512 640 320 448 128 640 0 512l192-192L0 128l128-128 192 192 192-192 128 128L448 320 640 512z" horiz-adv-x="640" />
+<glyph glyph-name="zap" unicode="&#x26A1;" d="M640 384H384L576 832 0 256h256L64-192 640 384z" horiz-adv-x="640" />
+</font>
+</defs>
+</svg>
diff --git a/spec/public/octicons/octicons.ttf b/spec/public/octicons/octicons.ttf
new file mode 100644
index 0000000000..189ca2813d
--- /dev/null
+++ b/spec/public/octicons/octicons.ttf
Binary files differ
diff --git a/spec/public/octicons/octicons.woff b/spec/public/octicons/octicons.woff
new file mode 100644
index 0000000000..2b770e429f
--- /dev/null
+++ b/spec/public/octicons/octicons.woff
Binary files differ
diff --git a/spec/public/scripts/LICENSE-highlight b/spec/public/scripts/LICENSE-highlight
new file mode 100644
index 0000000000..fe2f67b162
--- /dev/null
+++ b/spec/public/scripts/LICENSE-highlight
@@ -0,0 +1,24 @@
+Copyright (c) 2006, Ivan Sagalaev
+All rights reserved.
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of highlight.js nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
+EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/spec/public/scripts/LICENSE-toc b/spec/public/scripts/LICENSE-toc
new file mode 100644
index 0000000000..4e236e8696
--- /dev/null
+++ b/spec/public/scripts/LICENSE-toc
@@ -0,0 +1,18 @@
+(The MIT License)
+Copyright (c) 2013 Greg Allen
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/spec/public/scripts/highlight.pack.js b/spec/public/scripts/highlight.pack.js
new file mode 100644
index 0000000000..bfeca09abb
--- /dev/null
+++ b/spec/public/scripts/highlight.pack.js
@@ -0,0 +1 @@
+var hljs=new function(){function j(v){return v.replace(/&/gm,"&amp;").replace(/</gm,"&lt;").replace(/>/gm,"&gt;")}function t(v){return v.nodeName.toLowerCase()}function h(w,x){var v=w&&w.exec(x);return v&&v.index==0}function r(w){var v=(w.className+" "+(w.parentNode?w.parentNode.className:"")).split(/\s+/);v=v.map(function(x){return x.replace(/^lang(uage)?-/,"")});return v.filter(function(x){return i(x)||/no(-?)highlight/.test(x)})[0]}function o(x,y){var v={};for(var w in x){v[w]=x[w]}if(y){for(var w in y){v[w]=y[w]}}return v}function u(x){var v=[];(function w(y,z){for(var A=y.firstChild;A;A=A.nextSibling){if(A.nodeType==3){z+=A.nodeValue.length}else{if(A.nodeType==1){v.push({event:"start",offset:z,node:A});z=w(A,z);if(!t(A).match(/br|hr|img|input/)){v.push({event:"stop",offset:z,node:A})}}}}return z})(x,0);return v}function q(w,y,C){var x=0;var F="";var z=[];function B(){if(!w.length||!y.length){return w.length?w:y}if(w[0].offset!=y[0].offset){return(w[0].offset<y[0].offset)?w:y}return y[0].event=="start"?w:y}function A(H){function G(I){return" "+I.nodeName+'="'+j(I.value)+'"'}F+="<"+t(H)+Array.prototype.map.call(H.attributes,G).join("")+">"}function E(G){F+="</"+t(G)+">"}function v(G){(G.event=="start"?A:E)(G.node)}while(w.length||y.length){var D=B();F+=j(C.substr(x,D[0].offset-x));x=D[0].offset;if(D==w){z.reverse().forEach(E);do{v(D.splice(0,1)[0]);D=B()}while(D==w&&D.length&&D[0].offset==x);z.reverse().forEach(A)}else{if(D[0].event=="start"){z.push(D[0].node)}else{z.pop()}v(D.splice(0,1)[0])}}return F+j(C.substr(x))}function m(y){function v(z){return(z&&z.source)||z}function w(A,z){return RegExp(v(A),"m"+(y.cI?"i":"")+(z?"g":""))}function x(D,C){if(D.compiled){return}D.compiled=true;D.k=D.k||D.bK;if(D.k){var z={};var E=function(G,F){if(y.cI){F=F.toLowerCase()}F.split(" ").forEach(function(H){var I=H.split("|");z[I[0]]=[G,I[1]?Number(I[1]):1]})};if(typeof D.k=="string"){E("keyword",D.k)}else{Object.keys(D.k).forEach(function(F){E(F,D.k[F])})}D.k=z}D.lR=w(D.l||/\b[A-Za-z0-9_]+\b/,true);if(C){if(D.bK){D.b="\\b("+D.bK.split(" ").join("|")+")\\b"}if(!D.b){D.b=/\B|\b/}D.bR=w(D.b);if(!D.e&&!D.eW){D.e=/\B|\b/}if(D.e){D.eR=w(D.e)}D.tE=v(D.e)||"";if(D.eW&&C.tE){D.tE+=(D.e?"|":"")+C.tE}}if(D.i){D.iR=w(D.i)}if(D.r===undefined){D.r=1}if(!D.c){D.c=[]}var B=[];D.c.forEach(function(F){if(F.v){F.v.forEach(function(G){B.push(o(F,G))})}else{B.push(F=="self"?D:F)}});D.c=B;D.c.forEach(function(F){x(F,D)});if(D.starts){x(D.starts,C)}var A=D.c.map(function(F){return F.bK?"\\.?("+F.b+")\\.?":F.b}).concat([D.tE,D.i]).map(v).filter(Boolean);D.t=A.length?w(A.join("|"),true):{exec:function(F){return null}}}x(y)}function c(T,L,J,R){function v(V,W){for(var U=0;U<W.c.length;U++){if(h(W.c[U].bR,V)){return W.c[U]}}}function z(V,U){if(h(V.eR,U)){return V}if(V.eW){return z(V.parent,U)}}function A(U,V){return !J&&h(V.iR,U)}function E(W,U){var V=M.cI?U[0].toLowerCase():U[0];return W.k.hasOwnProperty(V)&&W.k[V]}function w(aa,Y,X,W){var U=W?"":b.classPrefix,V='<span class="'+U,Z=X?"":"</span>";V+=aa+'">';return V+Y+Z}function N(){if(!I.k){return j(C)}var U="";var X=0;I.lR.lastIndex=0;var V=I.lR.exec(C);while(V){U+=j(C.substr(X,V.index-X));var W=E(I,V);if(W){H+=W[1];U+=w(W[0],j(V[0]))}else{U+=j(V[0])}X=I.lR.lastIndex;V=I.lR.exec(C)}return U+j(C.substr(X))}function F(){if(I.sL&&!f[I.sL]){return j(C)}var U=I.sL?c(I.sL,C,true,S):e(C);if(I.r>0){H+=U.r}if(I.subLanguageMode=="continuous"){S=U.top}return w(U.language,U.value,false,true)}function Q(){return I.sL!==undefined?F():N()}function P(W,V){var 
U=W.cN?w(W.cN,"",true):"";if(W.rB){D+=U;C=""}else{if(W.eB){D+=j(V)+U;C=""}else{D+=U;C=V}}I=Object.create(W,{parent:{value:I}})}function G(U,Y){C+=U;if(Y===undefined){D+=Q();return 0}var W=v(Y,I);if(W){D+=Q();P(W,Y);return W.rB?0:Y.length}var X=z(I,Y);if(X){var V=I;if(!(V.rE||V.eE)){C+=Y}D+=Q();do{if(I.cN){D+="</span>"}H+=I.r;I=I.parent}while(I!=X.parent);if(V.eE){D+=j(Y)}C="";if(X.starts){P(X.starts,"")}return V.rE?0:Y.length}if(A(Y,I)){throw new Error('Illegal lexeme "'+Y+'" for mode "'+(I.cN||"<unnamed>")+'"')}C+=Y;return Y.length||1}var M=i(T);if(!M){throw new Error('Unknown language: "'+T+'"')}m(M);var I=R||M;var S;var D="";for(var K=I;K!=M;K=K.parent){if(K.cN){D=w(K.cN,"",true)+D}}var C="";var H=0;try{var B,y,x=0;while(true){I.t.lastIndex=x;B=I.t.exec(L);if(!B){break}y=G(L.substr(x,B.index-x),B[0]);x=B.index+y}G(L.substr(x));for(var K=I;K.parent;K=K.parent){if(K.cN){D+="</span>"}}return{r:H,value:D,language:T,top:I}}catch(O){if(O.message.indexOf("Illegal")!=-1){return{r:0,value:j(L)}}else{throw O}}}function e(y,x){x=x||b.languages||Object.keys(f);var v={r:0,value:j(y)};var w=v;x.forEach(function(z){if(!i(z)){return}var A=c(z,y,false);A.language=z;if(A.r>w.r){w=A}if(A.r>v.r){w=v;v=A}});if(w.language){v.second_best=w}return v}function g(v){if(b.tabReplace){v=v.replace(/^((<[^>]+>|\t)+)/gm,function(w,z,y,x){return z.replace(/\t/g,b.tabReplace)})}if(b.useBR){v=v.replace(/\n/g,"<br>")}return v}function p(A){var B=r(A);if(/no(-?)highlight/.test(B)){return}var y;if(b.useBR){y=document.createElementNS("http://www.w3.org/1999/xhtml","div");y.innerHTML=A.innerHTML.replace(/\n/g,"").replace(/<br[ \/]*>/g,"\n")}else{y=A}var z=y.textContent;var v=B?c(B,z,true):e(z);var x=u(y);if(x.length){var w=document.createElementNS("http://www.w3.org/1999/xhtml","div");w.innerHTML=v.value;v.value=q(x,u(w),z)}v.value=g(v.value);A.innerHTML=v.value;A.className+=" hljs "+(!B&&v.language||"");A.result={language:v.language,re:v.r};if(v.second_best){A.second_best={language:v.second_best.language,re:v.second_best.r}}}var b={classPrefix:"hljs-",tabReplace:null,useBR:false,languages:undefined};function s(v){b=o(b,v)}function l(){if(l.called){return}l.called=true;var v=document.querySelectorAll("pre code");Array.prototype.forEach.call(v,p)}function a(){addEventListener("DOMContentLoaded",l,false);addEventListener("load",l,false)}var f={};var n={};function d(v,x){var w=f[v]=x(this);if(w.aliases){w.aliases.forEach(function(y){n[y]=v})}}function k(){return Object.keys(f)}function i(v){return 
f[v]||f[n[v]]}this.highlight=c;this.highlightAuto=e;this.fixMarkup=g;this.highlightBlock=p;this.configure=s;this.initHighlighting=l;this.initHighlightingOnLoad=a;this.registerLanguage=d;this.listLanguages=k;this.getLanguage=i;this.inherit=o;this.IR="[a-zA-Z][a-zA-Z0-9_]*";this.UIR="[a-zA-Z_][a-zA-Z0-9_]*";this.NR="\\b\\d+(\\.\\d+)?";this.CNR="(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)";this.BNR="\\b(0b[01]+)";this.RSR="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~";this.BE={b:"\\\\[\\s\\S]",r:0};this.ASM={cN:"string",b:"'",e:"'",i:"\\n",c:[this.BE]};this.QSM={cN:"string",b:'"',e:'"',i:"\\n",c:[this.BE]};this.PWM={b:/\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\b/};this.CLCM={cN:"comment",b:"//",e:"$",c:[this.PWM]};this.CBCM={cN:"comment",b:"/\\*",e:"\\*/",c:[this.PWM]};this.HCM={cN:"comment",b:"#",e:"$",c:[this.PWM]};this.NM={cN:"number",b:this.NR,r:0};this.CNM={cN:"number",b:this.CNR,r:0};this.BNM={cN:"number",b:this.BNR,r:0};this.CSSNM={cN:"number",b:this.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0};this.RM={cN:"regexp",b:/\//,e:/\/[gim]*/,i:/\n/,c:[this.BE,{b:/\[/,e:/\]/,r:0,c:[this.BE]}]};this.TM={cN:"title",b:this.IR,r:0};this.UTM={cN:"title",b:this.UIR,r:0}}();hljs.registerLanguage("scala",function(d){var b={cN:"annotation",b:"@[A-Za-z]+"};var c={cN:"string",b:'u?r?"""',e:'"""',r:10};var a={cN:"symbol",b:"'\\w[\\w\\d_]*(?!')"};var e={cN:"type",b:"\\b[A-Z][A-Za-z0-9_]*",r:0};var h={cN:"title",b:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,r:0};var i={cN:"class",bK:"class object trait type",e:/[:={\[(\n;]/,c:[{cN:"keyword",bK:"extends with",r:10},h]};var g={cN:"function",bK:"def val",e:/[:={\[(\n;]/,c:[h]};var f={cN:"javadoc",b:"/\\*\\*",e:"\\*/",c:[{cN:"javadoctag",b:"@[A-Za-z]+"}],r:10};return{k:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},c:[d.CLCM,d.CBCM,c,d.QSM,a,e,g,i,d.CNM,b]}}); \ No newline at end of file
diff --git a/spec/public/scripts/main.js b/spec/public/scripts/main.js
new file mode 100644
index 0000000000..f0509aba41
--- /dev/null
+++ b/spec/public/scripts/main.js
@@ -0,0 +1,57 @@
+function currentChapter() {
+ var path = document.location.pathname;
+ var idx = path.lastIndexOf("/") + 1;
+ var chap = path.substring(idx, idx + 2);
+ return parseInt(chap, 10);
+}
+
+function heading(i, heading, $heading) {
+ var currentLevel = parseInt(heading.tagName.substring(1));
+ var result = "";
+ if (currentLevel === this.headerLevel) {
+ this.headerCounts[this.headerLevel] += 1;
+ return "" + this.headerCounts[this.headerLevel] + " " + $heading.text();
+ } else if (currentLevel < this.headerLevel) {
+ while(currentLevel < this.headerLevel) {
+ this.headerCounts[this.headerLevel] = 1;
+ this.headerLevel -= 1;
+ }
+ this.headerCounts[this.headerLevel] += 1;
+ return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text();
+ } else {
+ while(currentLevel > this.headerLevel) {
+ this.headerLevel += 1;
+ this.headerCounts[this.headerLevel] = 1;
+ }
+ return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text();
+ }
+}
+
+$('#toc').toc(
+ {
+ 'selectors': 'h1,h2,h3',
+ 'smoothScrolling': false,
+ 'chapter': currentChapter(),
+ 'headerLevel': 1,
+ 'headerCounts': [-1, currentChapter() - 1, 1, 1],
+ 'headerText': heading
+ }
+);
+
+// no language auto-detect so that EBNF isn't detected as scala
+hljs.configure({
+ languages: []
+});
+
+// syntax highlighting after mathjax is loaded so that mathjax can be used in code blocks
+MathJax.Hub.Queue(function () {
+ hljs.initHighlighting();
+ $("pre nobr").addClass("fixws");
+})
+
+$("#chapters a").each(function (index) {
+ if (document.location.pathname.endsWith($(this).attr("href")))
+ $(this).addClass("chapter-active");
+ else
+ $(this).removeClass("chapter-active");
+});
diff --git a/spec/public/scripts/toc.js b/spec/public/scripts/toc.js
new file mode 100644
index 0000000000..070d7b7a93
--- /dev/null
+++ b/spec/public/scripts/toc.js
@@ -0,0 +1,128 @@
+/*!
+ * toc - jQuery Table of Contents Plugin
+ * v0.3.2
+ * http://projects.jga.me/toc/
+ * copyright Greg Allen 2014
+ * MIT License
+*/
+(function($) {
+var verboseIdCache = {};
+$.fn.toc = function(options) {
+ var self = this;
+ var opts = $.extend({}, jQuery.fn.toc.defaults, options);
+
+ var container = $(opts.container);
+ var headings = $(opts.selectors, container);
+ var headingOffsets = [];
+ var activeClassName = opts.activeClass;
+
+ var scrollTo = function(e, callback) {
+ $('li', self).removeClass(activeClassName);
+ $(e.target).parent().addClass(activeClassName);
+ };
+
+ //highlight on scroll
+ var timeout;
+ var highlightOnScroll = function(e) {
+ if (timeout) {
+ clearTimeout(timeout);
+ }
+ timeout = setTimeout(function() {
+ var top = $(window).scrollTop(),
+ highlighted, closest = Number.MAX_VALUE, index = 0;
+
+ for (var i = 0, c = headingOffsets.length; i < c; i++) {
+ var currentClosest = Math.abs(headingOffsets[i] - top);
+ if (currentClosest < closest) {
+ index = i;
+ closest = currentClosest;
+ }
+ }
+
+ $('li', self).removeClass(activeClassName);
+ highlighted = $('li:eq('+ index +')', self).addClass(activeClassName);
+ opts.onHighlight(highlighted);
+ }, 50);
+ };
+ if (opts.highlightOnScroll) {
+ $(window).bind('scroll', highlightOnScroll);
+ highlightOnScroll();
+ }
+
+ return this.each(function() {
+ //build TOC
+ var el = $(this);
+ var ul = $(opts.listType);
+
+ headings.each(function(i, heading) {
+ var $h = $(heading);
+ headingOffsets.push($h.offset().top - opts.highlightOffset);
+
+ var anchorName = opts.anchorName(i, heading, opts.prefix);
+
+ //add anchor
+ if(heading.id !== anchorName) {
+ var anchor = $('<span/>').attr('id', anchorName).insertBefore($h);
+ }
+
+ //build TOC item
+ var a = $('<a/>')
+ .text(opts.headerText(i, heading, $h))
+ .attr('href', '#' + anchorName)
+ .bind('click', function(e) {
+ $(window).unbind('scroll', highlightOnScroll);
+ scrollTo(e, function() {
+ $(window).bind('scroll', highlightOnScroll);
+ });
+ el.trigger('selected', $(this).attr('href'));
+ });
+
+ var li = $('<li/>')
+ .addClass(opts.itemClass(i, heading, $h, opts.prefix))
+ .append(a);
+
+ ul.append(li);
+ });
+ el.html(ul);
+ });
+};
+
+
+jQuery.fn.toc.defaults = {
+ container: 'body',
+ listType: '<ul/>',
+ selectors: 'h1,h2,h3',
+ prefix: 'toc',
+ activeClass: 'toc-active',
+ onHighlight: function() {},
+ highlightOnScroll: true,
+ highlightOffset: 100,
+ anchorName: function(i, heading, prefix) {
+ if(heading.id.length) {
+ return heading.id;
+ }
+
+ var candidateId = $(heading).text().replace(/[^a-z0-9]/ig, ' ').replace(/\s+/g, '-').toLowerCase();
+ if (verboseIdCache[candidateId]) {
+ var j = 2;
+
+ while(verboseIdCache[candidateId + j]) {
+ j++;
+ }
+ candidateId = candidateId + '-' + j;
+
+ }
+ verboseIdCache[candidateId] = true;
+
+ return prefix + '-' + candidateId;
+ },
+ headerText: function(i, heading, $heading) {
+ return $heading.text();
+ },
+ itemClass: function(i, heading, $heading, prefix) {
+ return prefix + '-' + $heading[0].tagName.toLowerCase();
+ }
+
+};
+
+})(jQuery);
diff --git a/spec/public/stylesheets/fonts.css b/spec/public/stylesheets/fonts.css
new file mode 100644
index 0000000000..36efb2bbd5
--- /dev/null
+++ b/spec/public/stylesheets/fonts.css
@@ -0,0 +1,73 @@
+@font-face {
+ font-family: 'Luxi Sans';
+ src: local('Luxi Sans Regular'),
+ url('../fonts/LuxiSans-Regular.woff') format('woff');
+ font-weight: normal;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'Luxi Sans';
+ src: local('Luxi Sans Bold'),
+ url('../fonts/LuxiSans-Bold.woff') format('woff');
+ font-weight: bold;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'Luxi Mono';
+ src: local('Luxi Mono Regular'),
+ url('../fonts/LuxiMono-Regular.woff') format('woff');
+ font-weight: normal;
+ font-style: normal;
+}
+@font-face {
+ font-family: 'Luxi Mono';
+ src: local('Luxi Mono Oblique'),
+ url('../fonts/LuxiMono-BoldOblique.woff') format('woff');
+ font-weight: normal;
+ font-style: oblique;
+}
+@font-face {
+ font-family: 'Luxi Mono';
+ src: local('Luxi Mono Bold'),
+ url('../fonts/LuxiMono-Bold.woff') format('woff');
+ font-weight: bold;
+ font-style: normal;
+}
+@font-face {
+ font-family: 'Luxi Mono';
+ src: local('Luxi Mono Bold Oblique'),
+ url('../fonts/LuxiMono-BoldOblique.woff') format('woff');
+ font-weight: bold;
+ font-style: oblique;
+}
+
+@font-face {
+ font-family: 'Heuristica';
+ src: local('Heuristica Regular'),
+ url('../fonts/Heuristica-Regular.woff') format('woff');
+ font-weight: normal;
+ font-style: normal;
+}
+@font-face {
+ font-family: 'Heuristica';
+ src: local('Heuristica Italic'),
+ url('../fonts/Heuristica-RegularItalic.woff') format('woff');
+ font-weight: normal;
+ font-style: italic;
+}
+@font-face {
+ font-family: 'Heuristica';
+ src: local('Heuristica Bold'),
+ url('../fonts/Heuristica-Bold.woff') format('woff');
+ font-weight: bold;
+ font-style: normal;
+}
+@font-face {
+ font-family: 'Heuristica';
+ src: local('Heuristica Bold Italic'),
+ url('../fonts/Heuristica-BoldItalic.woff') format('woff');
+ font-weight: bold;
+ font-style: italic;
+}
diff --git a/spec/public/stylesheets/print.css b/spec/public/stylesheets/print.css
new file mode 100644
index 0000000000..3fbc5596c0
--- /dev/null
+++ b/spec/public/stylesheets/print.css
@@ -0,0 +1,15 @@
+/* This removes a few things from screen.css for printing */
+
+body {
+ padding: 0px;
+ margin: 0.5em;
+}
+
+.anchor, #navigation, .to_top {
+ display: none;
+}
+
+#content-container {
+ width: 100%;
+ float: none;
+}
diff --git a/spec/public/stylesheets/screen-small.css b/spec/public/stylesheets/screen-small.css
new file mode 100644
index 0000000000..674db7c490
--- /dev/null
+++ b/spec/public/stylesheets/screen-small.css
@@ -0,0 +1,57 @@
+body {
+ padding: 0px;
+ margin: 0px;
+}
+aside.left {
+ position: relative;
+ margin: 0px auto;
+ overflow: visible;
+ height: inherit;
+ margin-bottom: 40px;
+ background-color: #073642;
+}
+header {
+ position: relative;
+ height: inherit;
+ min-height: 32px;
+}
+main {
+ max-width: 1000px;
+ min-width: 600px;
+ margin: 0 auto;
+}
+
+#chapters a {
+ font-size: 14px;
+ max-height: 32px;
+ padding: 4px 8px;
+ white-space: nowrap;
+ display: inline-block;
+}
+#chapters > #github {
+ padding: 14px;
+}
+
+#toc {
+ overflow: visible;
+}
+#toc .toc-active {
+ background: inherit;
+}
+#toc .toc-h1 {
+ display: inherit;
+}
+#toc .toc-h1 a {
+ padding-left: 10px;
+ color: #FFFFFF;
+ background: #72D0EB;
+}
+#toc .toc-h2 a {
+ padding-left: 30px;
+}
+#toc .toc-h3 a {
+ padding-left: 50px;
+}
+#toc a {
+ font-size: 14px;
+}
diff --git a/spec/public/stylesheets/screen-toc.css b/spec/public/stylesheets/screen-toc.css
new file mode 100644
index 0000000000..7a04bd00f9
--- /dev/null
+++ b/spec/public/stylesheets/screen-toc.css
@@ -0,0 +1,37 @@
+body {
+ padding: 0px;
+ margin: 0px;
+}
+header {
+ height: 96px;
+ padding: 0px;
+ width: 100%;
+ position: relative;
+ color: #FFFFFF;
+}
+#header-main {
+ height: 68px;
+ line-height: 1.2;
+ font-size: 32px;
+}
+#header-sub {
+ padding-left: 64px;
+ height: 28px;
+ background-color:#72D0EB;
+ vertical-align: middle;
+}
+#scala-logo {
+ padding: 10px;
+}
+#title {
+ vertical-align: middle;
+}
+#github {
+ height: 40px;
+ padding: 14px;
+ float: right;
+ font-size: 0px;
+}
+li {
+ margin: 5px;
+}
diff --git a/spec/public/stylesheets/screen.css b/spec/public/stylesheets/screen.css
index 725eb0b3f3..fdddba0b45 100644
--- a/spec/public/stylesheets/screen.css
+++ b/spec/public/stylesheets/screen.css
@@ -1,53 +1,81 @@
/* from https://gist.github.com/andyferra/2554919 */
body {
- font-family: Helvetica, arial, sans-serif;
- font-size: 14px;
+ font-family:Heuristica,Georgia,serif;
+ color: #222222;
line-height: 1.6;
- padding-top: 10px;
+
padding-bottom: 10px;
background-color: white;
- padding: 30px;
+ padding-left: 30px;
}
-body > *:first-child {
+#content-container > *:first-child {
margin-top: 0 !important;
}
-body > *:last-child {
+#content-container > *:last-child {
margin-bottom: 0 !important;
}
a {
- color: #4183C4;
+ color: #08C;
+ text-decoration: none;
+}
+a:hover, a:focus {
+
}
a.absent {
color: #cc0000;
}
a.anchor {
display: block;
- padding-left: 30px;
- margin-left: -30px;
+ margin-left: -35px;
+ padding-left: 10px;
cursor: pointer;
position: absolute;
top: 0;
left: 0;
bottom: 0;
+ color: black;
+ width: 35px; height: 100%;
+}
+
+a.anchor span {
+ vertical-align: middle;
}
h1, h2, h3, h4, h5, h6 {
- margin: 20px 0 10px;
+ margin: 30px 0 0px;
padding: 0;
+ /* Fix anchor position due to header */
+ padding-top: 32px;
+ margin-top: -32px;
font-weight: bold;
-webkit-font-smoothing: antialiased;
cursor: text;
position: relative;
}
+h1, h2 {
+ font-weight: normal;
+}
+
h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor {
- background: url("../../images/modules/styleguide/para.png") no-repeat 10px center;
text-decoration: none;
}
+h1:hover a.anchor span, h2:hover a.anchor span, h3:hover a.anchor span, h4:hover a.anchor span, h5:hover a.anchor span, h6:hover a.anchor span {
+ display: inline-block;
+}
+
+h1 a.anchor span, h2 a.anchor span, h3 a.anchor span, h4 a.anchor span, h5 a.anchor span, h6 a.anchor span {
+ display: none;
+}
+
+h1 a.anchor:hover span, h2 a.anchor:hover span, h3 a.anchor:hover span, h4 a.anchor:hover span, h5 a.anchor:hover span, h6 a.anchor:hover span {
+ display: inline-block;
+}
+
h1 tt, h1 code {
font-size: inherit;
}
@@ -79,7 +107,6 @@ h1 {
h2 {
font-size: 24px;
- border-bottom: 1px solid #cccccc;
color: black;
}
@@ -101,7 +128,7 @@ h6 {
}
p, blockquote, ul, ol, dl, li, table, pre {
- margin: 15px 0;
+ margin: 5px 0 15px;
-moz-font-feature-settings: "onum";
-ms-font-feature-settings: "onum";
-webkit-font-feature-settings: "onum";
@@ -109,7 +136,7 @@ p, blockquote, ul, ol, dl, li, table, pre {
}
hr {
- background: transparent url("../../images/modules/pulls/dirty-shade.png") repeat-x 0 0;
+ background: transparent repeat-x 0 0;
border: 0 none;
color: #cccccc;
height: 4px;
@@ -191,7 +218,7 @@ dl dd > :last-child {
blockquote {
border-left: 4px solid #dddddd;
padding: 0 15px;
- color: #777777;
+ color: #222222;
}
blockquote > :first-child {
margin-top: 0;
@@ -199,31 +226,34 @@ blockquote > :first-child {
blockquote > :last-child {
margin-bottom: 0;
}
+blockquote:before {
+ content: "Example";
+ color: #777777;
+ font-size: 14px;
+ font-weight: bold;
+}
table {
padding: 0;
+ margin: 0;
+ border: none;
+ border-collapse: collapse;
}
table tr {
- border-top: 1px solid #cccccc;
background-color: white;
- margin: 0;
- padding: 0;
}
table tr:nth-child(2n) {
background-color: #f8f8f8;
}
table tr th {
+ background-color: #EAEAEA;
font-weight: bold;
- border: 1px solid #cccccc;
text-align: left;
- margin: 0;
- padding: 6px 13px;
+ padding: 5px 13px;
}
table tr td {
- border: 1px solid #cccccc;
text-align: left;
- margin: 0;
- padding: 6px 13px;
+ padding: 5px 13px;
}
table tr th :first-child, table tr td :first-child {
margin-top: 0;
@@ -311,11 +341,14 @@ span.float-right > span {
text-align: right;
}
+pre, code, tt {
+ font:14px "Luxi Mono", 'andale mono', 'lucida console', monospace;
+ line-height:1.5;
+}
+
.highlight pre {
- border: 1px solid #eaeaea;
- background-color: #f8f8f8;
+ background-color: #F8F8F8;
border-radius: 3px;
- line-height: 19px;
overflow: auto;
padding: 6px 10px;
white-space: nowrap;
@@ -327,6 +360,144 @@ code {
margin: 0;
padding: 0;
white-space: pre;
- font-size: 16px;
}
+aside.left {
+ height: 100%;
+ position: fixed;
+ direction: rtl;
+ overflow: auto;
+ left: 0px;
+ width: 320px;
+ bottom: -32px;
+ font-family: "Luxi Sans", serif;
+ background-color: #073642;
+}
+
+aside.left > nav {
+ direction: ltr;
+ top: 32px;
+ padding-bottom: 32px;
+}
+
+article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary {
+ display: block;
+}
+
+audio, canvas, img, svg, video {
+ vertical-align: middle;
+}
+
+audio, canvas, progress, video {
+ display: inline-block;
+ vertical-align: baseline;
+}
+
+main {
+ position: relative;
+ top: 32px;
+ margin: 0 0 0 320px;
+ padding: 0px 32px;
+ max-width: 800px;
+ min-width: 800px;
+ min-height: 580px;
+ background-color: #FFF;
+}
+
+header {
+ position: fixed;
+ top: 0px;
+ left: 0px;
+ height: 32px;
+ width: 100%;
+ background-color: #002B36;
+ margin: 0px 0px;
+ padding: 0px 0px;
+ font-family: "Luxi Sans", serif;
+ font-weight: bold;
+ z-index: 10;
+ overflow: hidden;
+ text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15);
+}
+
+#chapters a {
+ color: #FFFFFF;
+ text-decoration: none;
+ font-size: 0.63vw;
+ padding: 100% 8px;
+}
+
+#chapters a:hover, #chapters a:focus, #github:hover, #github:focus {
+ background: #DC322F;
+ -webkit-transition: background .2s ease-in;
+ -moz-transition: background .2s ease-in;
+ -ms-transition: background .2s ease-in;
+ -o-transition: background .2s ease-in;
+ transition: background .2s ease-in;
+}
+
+#chapters a.chapter-active {
+ background: #72D0EB;
+}
+
+
+#toc ul {
+ margin: 0;
+ padding: 0;
+ list-style: none;
+}
+
+#toc li {
+ margin: 0;
+ padding: 0;
+}
+
+#toc a {
+ color: #FFFFFF; /*#073642;*/
+ font-weight: bold;
+ font-size: 12px;
+ display: block;
+ text-shadow: 1px 1px 0px rgba(0, 43, 54, 0.15);
+}
+
+#toc a:hover, #toc a:focus {
+ background: #DC322F;
+ text-decoration: none;
+ -webkit-transition: background .2s ease-in;
+ -moz-transition: background .2s ease-in;
+ -ms-transition: background .2s ease-in;
+ -o-transition: background .2s ease-in;
+ transition: background .2s ease-in;
+}
+
+#toc .toc-h1 {
+ display: none;
+}
+
+#toc .toc-h2 a {
+ padding-left: 10px;
+}
+
+#toc .toc-h3 a {
+ padding-left: 30px;
+}
+
+#toc .toc-active {
+ background: #72D0EB;
+}
+
+#toc .toc-active a {
+ color: #FFFFFF;
+}
+
+#chapters > #github {
+ padding: 0px;
+ float: right;
+}
+
+.hljs{
+ background: #f8f8f8;
+}
+/* proper rendering of MathJax into highlighted code blocks */
+.fixws { white-space: pre; }
+.fixws .math { white-space: nowrap; }
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 75160fa18f..293335f720 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -205,7 +205,7 @@ object Actor extends Combinators {
* Actions in `f` have to contain the rest of the computation of `self`,
* as this method will never return.
*
- * A common method of continuting the computation is to send a message
+ * A common method of continuing the computation is to send a message
* to another actor:
* {{{
* react {
diff --git a/src/actors/scala/actors/LinkedQueue.java b/src/actors/scala/actors/LinkedQueue.java
index 796f428cf5..3f7b93c386 100644
--- a/src/actors/scala/actors/LinkedQueue.java
+++ b/src/actors/scala/actors/LinkedQueue.java
@@ -22,7 +22,7 @@ package scala.actors;
* and takes when the queue is not empty.
* Normally a put and a take can proceed simultaneously.
* (Although it does not allow multiple concurrent puts or takes.)
- * This class tends to perform more efficently than
+ * This class tends to perform more efficiently than
* other Channel implementations in producer/consumer
* applications.
* <p>[<a href="http://gee.cs.oswego.edu/dl/classes/EDU/oswego/cs/dl/util/concurrent/intro.html"> Introduction to this package. </a>]
diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala
index 9949b36181..2cb03544f2 100644
--- a/src/actors/scala/actors/remote/Proxy.scala
+++ b/src/actors/scala/actors/remote/Proxy.scala
@@ -84,7 +84,7 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net
}
// Proxy is private[remote], but these classes are public and use it in a public
-// method signature. That makes the only method they have non-overriddable.
+// method signature. That makes the only method they have non-overridable.
// So I made them final, which seems appropriate anyway.
final class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
diff --git a/src/actors/scala/actors/threadpool/AbstractCollection.java b/src/actors/scala/actors/threadpool/AbstractCollection.java
index f3dc1e1292..195a0064ab 100644
--- a/src/actors/scala/actors/threadpool/AbstractCollection.java
+++ b/src/actors/scala/actors/threadpool/AbstractCollection.java
@@ -1,6 +1,6 @@
/*
* Written by Dawid Kurzyniec, based on public domain code written by Doug Lea
- * and publictly available documentation, and released to the public domain, as
+ * and publicly available documentation, and released to the public domain, as
* explained at http://creativecommons.org/licenses/publicdomain
*/
diff --git a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
index 9a4a4fb71c..02e9bbe297 100644
--- a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
+++ b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
@@ -135,7 +135,7 @@ public class ExecutorCompletionService implements CompletionService {
* @param completionQueue the queue to use as the completion queue
* normally one dedicated for use by this service. This queue is
* treated as unbounded -- failed attempted <tt>Queue.add</tt>
- * operations for completed taskes cause them not to be
+ * operations for completed tasks cause them not to be
* retrievable.
* @throws NullPointerException if executor or completionQueue are <tt>null</tt>
*/
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
index 437af77c7a..914d242100 100644
--- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
+++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
@@ -20,13 +20,13 @@ import scala.actors.threadpool.helpers.*;
*
* <p>The order of entry
* to the read and write lock is unspecified, subject to reentrancy
- * constraints. A nonfair lock that is continously contended may
+ * constraints. A nonfair lock that is continuously contended may
* indefinitely postpone one or more reader or writer threads, but
* will normally have higher throughput than a fair lock.
* <p>
*
* DEPARTURE FROM java.util.concurrent: this implementation impose
- * a writer-preferrence and thus its acquisition order may be different
+ * a writer-preference and thus its acquisition order may be different
* than in java.util.concurrent.
*
* <li><b>Reentrancy</b>
diff --git a/src/asm/README b/src/asm/README
new file mode 100644
index 0000000000..58d555acde
--- /dev/null
+++ b/src/asm/README
@@ -0,0 +1,37 @@
+Version 5.0.3, SVN r1748, tags/ASM_5_0_3
+
+Git SVN repo: https://github.com/lrytz/asm
+ - git svn howto: https://github.com/lrytz/asm/issues/1
+
+Upgrading ASM
+-------------
+
+Check the commit history of src/asm: https://github.com/scala/scala/commits/2.11.x/src/asm.
+Find the previous commit that upgraded ASM and take a look at its commit message. It should
+be a squashed version of a pull request that shows the precise procedure followed for the
+last upgrade.
+
+Start by deleting all source files in src/asm/ and copying in the ones from the latest ASM release.
+
+Excluded Files (don't copy):
+ - package.html files
+ - org/objectweb/asm/commons, but keep CodeSizeEvaluator.java
+ - org/objectweb/asm/optimizer
+ - org/objectweb/asm/xml
+
+Re-packaging and cosmetic changes:
+ - convert line endings (there are some CRLF)
+ find src/asm/scala/tools/asm -name '*.java' | xargs dos2unix
+ - change package clauses
+ find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/package org\.objectweb\.asm/package scala.tools.asm/'
+ - update imports
+ find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/import org\.objectweb\.asm/import scala.tools.asm/'
+ - update @links, @associates
+ find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/@link org\.objectweb\.asm/@link scala.tools.asm/'
+ find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/@associates org\.objectweb\.asm/@associates scala.tools.asm/'
+ - remove trailing whitespace
+ find src/asm/scala/tools/asm -name '*.java' | xargs sed -i '' -e 's/[ ]*$//'
+
+Include the actual changes that we have in our repository:
+ - Include the commits labelled [asm-cherry-pick] in the non-squashed PR of the previous upgrade
+ - Include the changes that were added to src/asm since the last upgrade and label them [asm-cherry-pick]
diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
index c806ca71e8..abcaf1d6d1 100644
--- a/src/asm/scala/tools/asm/AnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
@@ -41,7 +41,7 @@ public abstract class AnnotationVisitor {
/**
* The ASM API version implemented by this visitor. The value of this field
- * must be one of {@link Opcodes#ASM4}.
+ * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected final int api;
@@ -56,7 +56,7 @@ public abstract class AnnotationVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public AnnotationVisitor(final int api) {
this(api, null);
@@ -67,13 +67,13 @@ public abstract class AnnotationVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param av
* the annotation visitor to which this visitor must delegate
* method calls. May be null.
*/
public AnnotationVisitor(final int api, final AnnotationVisitor av) {
- if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4 && api != Opcodes.ASM5) {
throw new IllegalArgumentException();
}
this.api = api;
diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
index 8eb5b2ef48..6de74ce041 100644
--- a/src/asm/scala/tools/asm/AnnotationWriter.java
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
@@ -104,7 +104,7 @@ final class AnnotationWriter extends AnnotationVisitor {
*/
AnnotationWriter(final ClassWriter cw, final boolean named,
final ByteVector bv, final ByteVector parent, final int offset) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
this.cw = cw;
this.named = named;
this.bv = bv;
@@ -315,4 +315,57 @@ final class AnnotationWriter extends AnnotationVisitor {
}
}
}
+
+ /**
+ * Puts the given type reference and type path into the given bytevector.
+ * LOCAL_VARIABLE and RESOURCE_VARIABLE target types are not supported.
+ *
+ * @param typeRef
+ * a reference to the annotated type. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param out
+ * where the type reference and type path must be put.
+ */
+ static void putTarget(int typeRef, TypePath typePath, ByteVector out) {
+ switch (typeRef >>> 24) {
+ case 0x00: // CLASS_TYPE_PARAMETER
+ case 0x01: // METHOD_TYPE_PARAMETER
+ case 0x16: // METHOD_FORMAL_PARAMETER
+ out.putShort(typeRef >>> 16);
+ break;
+ case 0x13: // FIELD
+ case 0x14: // METHOD_RETURN
+ case 0x15: // METHOD_RECEIVER
+ out.putByte(typeRef >>> 24);
+ break;
+ case 0x47: // CAST
+ case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT
+ case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT
+ out.putInt(typeRef);
+ break;
+ // case 0x10: // CLASS_EXTENDS
+ // case 0x11: // CLASS_TYPE_PARAMETER_BOUND
+ // case 0x12: // METHOD_TYPE_PARAMETER_BOUND
+ // case 0x17: // THROWS
+ // case 0x42: // EXCEPTION_PARAMETER
+ // case 0x43: // INSTANCEOF
+ // case 0x44: // NEW
+ // case 0x45: // CONSTRUCTOR_REFERENCE
+ // case 0x46: // METHOD_REFERENCE
+ default:
+ out.put12(typeRef >>> 24, (typeRef & 0xFFFF00) >> 8);
+ break;
+ }
+ if (typePath == null) {
+ out.putByte(0);
+ } else {
+ int length = typePath.b[typePath.offset] * 2 + 1;
+ out.putByteArray(typePath.b, typePath.offset, length);
+ }
+ }
}
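A minimal sketch (not part of this patch) of the encoding that putTarget unpacks: the target_type sits in the top byte of typeRef, and for the sorts handled by the first case the index sits in the byte below it, which is why that case emits typeRef >>> 16 as a short.

    // Sketch only: packing a METHOD_FORMAL_PARAMETER (0x16) target the way the
    // switch above expects it. The top byte is the target_type, the next byte
    // the formal parameter index.
    class TargetEncodingSketch {
        public static void main(String[] args) {
            int parameterIndex = 2;
            int typeRef = (0x16 << 24) | (parameterIndex << 16);
            // putTarget's first case writes exactly these two bytes as a short.
            System.out.println(Integer.toHexString(typeRef >>> 16)); // prints 1602
        }
    }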
diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
index 2bc63eb384..3bca7af12a 100644
--- a/src/asm/scala/tools/asm/ByteVector.java
+++ b/src/asm/scala/tools/asm/ByteVector.java
@@ -204,11 +204,14 @@ public class ByteVector {
* automatically enlarged if necessary.
*
* @param s
- * a String.
+ * a String whose UTF8 encoded length must be less than 65536.
* @return this byte vector.
*/
public ByteVector putUTF8(final String s) {
int charLength = s.length();
+ if (charLength > 65535) {
+ throw new IllegalArgumentException();
+ }
int len = length;
if (len + 2 + charLength > data.length) {
enlarge(2 + charLength);
@@ -227,38 +230,68 @@ public class ByteVector {
if (c >= '\001' && c <= '\177') {
data[len++] = (byte) c;
} else {
- int byteLength = i;
- for (int j = i; j < charLength; ++j) {
- c = s.charAt(j);
- if (c >= '\001' && c <= '\177') {
- byteLength++;
- } else if (c > '\u07FF') {
- byteLength += 3;
- } else {
- byteLength += 2;
- }
- }
- data[length] = (byte) (byteLength >>> 8);
- data[length + 1] = (byte) byteLength;
- if (length + 2 + byteLength > data.length) {
- length = len;
- enlarge(2 + byteLength);
- data = this.data;
- }
- for (int j = i; j < charLength; ++j) {
- c = s.charAt(j);
- if (c >= '\001' && c <= '\177') {
- data[len++] = (byte) c;
- } else if (c > '\u07FF') {
- data[len++] = (byte) (0xE0 | c >> 12 & 0xF);
- data[len++] = (byte) (0x80 | c >> 6 & 0x3F);
- data[len++] = (byte) (0x80 | c & 0x3F);
- } else {
- data[len++] = (byte) (0xC0 | c >> 6 & 0x1F);
- data[len++] = (byte) (0x80 | c & 0x3F);
- }
- }
- break;
+ length = len;
+ return encodeUTF8(s, i, 65535);
+ }
+ }
+ length = len;
+ return this;
+ }
+
+ /**
+ * Puts an UTF8 string into this byte vector. The byte vector is
+ * automatically enlarged if necessary. The string length is encoded in two
+ * bytes before the encoded characters, if there is space for that (i.e. if
+ * this.length - i - 2 >= 0).
+ *
+ * @param s
+ * the String to encode.
+ * @param i
+ * the index of the first character to encode. The previous
+ * characters are supposed to have already been encoded, using
+ * only one byte per character.
+ * @param maxByteLength
+ * the maximum byte length of the encoded string, including the
+ * already encoded characters.
+ * @return this byte vector.
+ */
+ ByteVector encodeUTF8(final String s, int i, int maxByteLength) {
+ int charLength = s.length();
+ int byteLength = i;
+ char c;
+ for (int j = i; j < charLength; ++j) {
+ c = s.charAt(j);
+ if (c >= '\001' && c <= '\177') {
+ byteLength++;
+ } else if (c > '\u07FF') {
+ byteLength += 3;
+ } else {
+ byteLength += 2;
+ }
+ }
+ if (byteLength > maxByteLength) {
+ throw new IllegalArgumentException();
+ }
+ int start = length - i - 2;
+ if (start >= 0) {
+ data[start] = (byte) (byteLength >>> 8);
+ data[start + 1] = (byte) byteLength;
+ }
+ if (length + byteLength - i > data.length) {
+ enlarge(byteLength - i);
+ }
+ int len = length;
+ for (int j = i; j < charLength; ++j) {
+ c = s.charAt(j);
+ if (c >= '\001' && c <= '\177') {
+ data[len++] = (byte) c;
+ } else if (c > '\u07FF') {
+ data[len++] = (byte) (0xE0 | c >> 12 & 0xF);
+ data[len++] = (byte) (0x80 | c >> 6 & 0x3F);
+ data[len++] = (byte) (0x80 | c & 0x3F);
+ } else {
+ data[len++] = (byte) (0xC0 | c >> 6 & 0x1F);
+ data[len++] = (byte) (0x80 | c & 0x3F);
}
}
length = len;
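For reference, a small standalone sketch (not part of this patch) of the length rule encodeUTF8 applies before copying characters: one byte for \u0001 through \u007F, three bytes above \u07FF, two bytes otherwise, matching the loop above.

    // Sketch only: the modified-UTF-8 byte length that encodeUTF8 computes.
    class Utf8LengthSketch {
        static int byteLength(String s) {
            int byteLength = 0;
            for (int j = 0; j < s.length(); ++j) {
                char c = s.charAt(j);
                if (c >= '\001' && c <= '\177') {
                    byteLength += 1;      // plain ASCII, one byte
                } else if (c > '\u07FF') {
                    byteLength += 3;      // three-byte sequence (incl. surrogate halves)
                } else {
                    byteLength += 2;      // two-byte sequence (incl. '\u0000')
                }
            }
            return byteLength;
        }
        public static void main(String[] args) {
            System.out.println(byteLength("Scala\u00e9")); // 5 + 2 = 7
        }
    }

putUTF8 rejects strings whose encoded length exceeds 65535, since the length prefix is only two bytes.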
diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
index cc655c1b62..8b0e12cb04 100644
--- a/src/asm/scala/tools/asm/ClassReader.java
+++ b/src/asm/scala/tools/asm/ClassReader.java
@@ -166,7 +166,7 @@ public class ClassReader {
public ClassReader(final byte[] b, final int off, final int len) {
this.b = b;
// checks the class version
- if (readShort(off + 6) > Opcodes.V1_7) {
+ if (readShort(off + 6) > Opcodes.V1_8) {
throw new IllegalArgumentException();
}
// parses the constant pool
@@ -557,6 +557,8 @@ public class ClassReader {
String enclosingDesc = null;
int anns = 0;
int ianns = 0;
+ int tanns = 0;
+ int itanns = 0;
int innerClasses = 0;
Attribute attributes = null;
@@ -581,6 +583,9 @@ public class ClassReader {
} else if (ANNOTATIONS
&& "RuntimeVisibleAnnotations".equals(attrName)) {
anns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleTypeAnnotations".equals(attrName)) {
+ tanns = u + 8;
} else if ("Deprecated".equals(attrName)) {
access |= Opcodes.ACC_DEPRECATED;
} else if ("Synthetic".equals(attrName)) {
@@ -592,6 +597,9 @@ public class ClassReader {
} else if (ANNOTATIONS
&& "RuntimeInvisibleAnnotations".equals(attrName)) {
ianns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleTypeAnnotations".equals(attrName)) {
+ itanns = u + 8;
} else if ("BootstrapMethods".equals(attrName)) {
int[] bootstrapMethods = new int[readUnsignedShort(u + 8)];
for (int j = 0, v = u + 10; j < bootstrapMethods.length; j++) {
@@ -626,7 +634,7 @@ public class ClassReader {
enclosingDesc);
}
- // visits the class annotations
+ // visits the class annotations and type annotations
if (ANNOTATIONS && anns != 0) {
for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
v = readAnnotationValues(v + 2, c, true,
@@ -639,6 +647,22 @@ public class ClassReader {
classVisitor.visitAnnotation(readUTF8(v, c), false));
}
}
+ if (ANNOTATIONS && tanns != 0) {
+ for (int i = readUnsignedShort(tanns), v = tanns + 2; i > 0; --i) {
+ v = readAnnotationTarget(context, v);
+ v = readAnnotationValues(v + 2, c, true,
+ classVisitor.visitTypeAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && itanns != 0) {
+ for (int i = readUnsignedShort(itanns), v = itanns + 2; i > 0; --i) {
+ v = readAnnotationTarget(context, v);
+ v = readAnnotationValues(v + 2, c, true,
+ classVisitor.visitTypeAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), false));
+ }
+ }
// visits the attributes
while (attributes != null) {
@@ -697,6 +721,8 @@ public class ClassReader {
String signature = null;
int anns = 0;
int ianns = 0;
+ int tanns = 0;
+ int itanns = 0;
Object value = null;
Attribute attributes = null;
@@ -718,8 +744,14 @@ public class ClassReader {
&& "RuntimeVisibleAnnotations".equals(attrName)) {
anns = u + 8;
} else if (ANNOTATIONS
+ && "RuntimeVisibleTypeAnnotations".equals(attrName)) {
+ tanns = u + 8;
+ } else if (ANNOTATIONS
&& "RuntimeInvisibleAnnotations".equals(attrName)) {
ianns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleTypeAnnotations".equals(attrName)) {
+ itanns = u + 8;
} else {
Attribute attr = readAttribute(context.attrs, attrName, u + 8,
readInt(u + 4), c, -1, null);
@@ -739,7 +771,7 @@ public class ClassReader {
return u;
}
- // visits the field annotations
+ // visits the field annotations and type annotations
if (ANNOTATIONS && anns != 0) {
for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
v = readAnnotationValues(v + 2, c, true,
@@ -752,6 +784,22 @@ public class ClassReader {
fv.visitAnnotation(readUTF8(v, c), false));
}
}
+ if (ANNOTATIONS && tanns != 0) {
+ for (int i = readUnsignedShort(tanns), v = tanns + 2; i > 0; --i) {
+ v = readAnnotationTarget(context, v);
+ v = readAnnotationValues(v + 2, c, true,
+ fv.visitTypeAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && itanns != 0) {
+ for (int i = readUnsignedShort(itanns), v = itanns + 2; i > 0; --i) {
+ v = readAnnotationTarget(context, v);
+ v = readAnnotationValues(v + 2, c, true,
+ fv.visitTypeAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), false));
+ }
+ }
// visits the field attributes
while (attributes != null) {
@@ -782,9 +830,9 @@ public class ClassReader {
final Context context, int u) {
// reads the method declaration
char[] c = context.buffer;
- int access = readUnsignedShort(u);
- String name = readUTF8(u + 2, c);
- String desc = readUTF8(u + 4, c);
+ context.access = readUnsignedShort(u);
+ context.name = readUTF8(u + 2, c);
+ context.desc = readUTF8(u + 4, c);
u += 6;
// reads the method attributes
@@ -792,8 +840,11 @@ public class ClassReader {
int exception = 0;
String[] exceptions = null;
String signature = null;
+ int methodParameters = 0;
int anns = 0;
int ianns = 0;
+ int tanns = 0;
+ int itanns = 0;
int dann = 0;
int mpanns = 0;
int impanns = 0;
@@ -818,24 +869,32 @@ public class ClassReader {
} else if (SIGNATURES && "Signature".equals(attrName)) {
signature = readUTF8(u + 8, c);
} else if ("Deprecated".equals(attrName)) {
- access |= Opcodes.ACC_DEPRECATED;
+ context.access |= Opcodes.ACC_DEPRECATED;
} else if (ANNOTATIONS
&& "RuntimeVisibleAnnotations".equals(attrName)) {
anns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleTypeAnnotations".equals(attrName)) {
+ tanns = u + 8;
} else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
dann = u + 8;
} else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC
+ context.access |= Opcodes.ACC_SYNTHETIC
| ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
} else if (ANNOTATIONS
&& "RuntimeInvisibleAnnotations".equals(attrName)) {
ianns = u + 8;
} else if (ANNOTATIONS
+ && "RuntimeInvisibleTypeAnnotations".equals(attrName)) {
+ itanns = u + 8;
+ } else if (ANNOTATIONS
&& "RuntimeVisibleParameterAnnotations".equals(attrName)) {
mpanns = u + 8;
} else if (ANNOTATIONS
&& "RuntimeInvisibleParameterAnnotations".equals(attrName)) {
impanns = u + 8;
+ } else if ("MethodParameters".equals(attrName)) {
+ methodParameters = u + 8;
} else {
Attribute attr = readAttribute(context.attrs, attrName, u + 8,
readInt(u + 4), c, -1, null);
@@ -849,8 +908,8 @@ public class ClassReader {
u += 2;
// visits the method declaration
- MethodVisitor mv = classVisitor.visitMethod(access, name, desc,
- signature, exceptions);
+ MethodVisitor mv = classVisitor.visitMethod(context.access,
+ context.name, context.desc, signature, exceptions);
if (mv == null) {
return u;
}
@@ -894,6 +953,13 @@ public class ClassReader {
}
}
+ // visit the method parameters
+ if (methodParameters != 0) {
+ for (int i = b[methodParameters] & 0xFF, v = methodParameters + 1; i > 0; --i, v = v + 4) {
+ mv.visitParameter(readUTF8(v, c), readUnsignedShort(v + 2));
+ }
+ }
+
// visits the method annotations
if (ANNOTATIONS && dann != 0) {
AnnotationVisitor dv = mv.visitAnnotationDefault();
@@ -914,11 +980,27 @@ public class ClassReader {
mv.visitAnnotation(readUTF8(v, c), false));
}
}
+ if (ANNOTATIONS && tanns != 0) {
+ for (int i = readUnsignedShort(tanns), v = tanns + 2; i > 0; --i) {
+ v = readAnnotationTarget(context, v);
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitTypeAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && itanns != 0) {
+ for (int i = readUnsignedShort(itanns), v = itanns + 2; i > 0; --i) {
+ v = readAnnotationTarget(context, v);
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitTypeAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), false));
+ }
+ }
if (ANNOTATIONS && mpanns != 0) {
- readParameterAnnotations(mpanns, desc, c, true, mv);
+ readParameterAnnotations(mv, context, mpanns, true);
}
if (ANNOTATIONS && impanns != 0) {
- readParameterAnnotations(impanns, desc, c, false, mv);
+ readParameterAnnotations(mv, context, impanns, false);
}
// visits the method attributes
@@ -931,9 +1013,6 @@ public class ClassReader {
// visits the method code
if (code != 0) {
- context.access = access;
- context.name = name;
- context.desc = desc;
mv.visitCode();
readCode(mv, context, code);
}
@@ -966,7 +1045,7 @@ public class ClassReader {
// reads the bytecode to find the labels
int codeStart = u;
int codeEnd = u + codeLength;
- Label[] labels = new Label[codeLength + 2];
+ Label[] labels = context.labels = new Label[codeLength + 2];
readLabel(codeLength + 1, labels);
while (u < codeEnd) {
int offset = u - codeStart;
@@ -1049,6 +1128,12 @@ public class ClassReader {
u += 2;
// reads the code attributes
+ int[] tanns = null; // start index of each visible type annotation
+ int[] itanns = null; // start index of each invisible type annotation
+ int tann = 0; // current index in tanns array
+ int itann = 0; // current index in itanns array
+ int ntoff = -1; // next visible type annotation code offset
+ int nitoff = -1; // next invisible type annotation code offset
int varTable = 0;
int varTypeTable = 0;
boolean zip = true;
@@ -1089,6 +1174,16 @@ public class ClassReader {
v += 4;
}
}
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleTypeAnnotations".equals(attrName)) {
+ tanns = readTypeAnnotations(mv, context, u + 8, true);
+ ntoff = tanns.length == 0 || readByte(tanns[0]) < 0x43 ? -1
+ : readUnsignedShort(tanns[0] + 1);
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleTypeAnnotations".equals(attrName)) {
+ itanns = readTypeAnnotations(mv, context, u + 8, false);
+ nitoff = itanns.length == 0 || readByte(itanns[0]) < 0x43 ? -1
+ : readUnsignedShort(itanns[0] + 1);
} else if (FRAMES && "StackMapTable".equals(attrName)) {
if ((context.flags & SKIP_FRAMES) == 0) {
stackMap = u + 10;
@@ -1211,7 +1306,7 @@ public class ClassReader {
}
}
if (frameCount > 0) {
- stackMap = readFrame(stackMap, zip, unzip, labels, frame);
+ stackMap = readFrame(stackMap, zip, unzip, frame);
--frameCount;
} else {
frame = null;
@@ -1310,6 +1405,7 @@ public class ClassReader {
case ClassWriter.FIELDORMETH_INSN:
case ClassWriter.ITFMETH_INSN: {
int cpIndex = items[readUnsignedShort(u + 1)];
+ boolean itf = b[cpIndex - 1] == ClassWriter.IMETH;
String iowner = readClass(cpIndex, c);
cpIndex = items[readUnsignedShort(cpIndex + 2)];
String iname = readUTF8(cpIndex, c);
@@ -1317,7 +1413,7 @@ public class ClassReader {
if (opcode < Opcodes.INVOKEVIRTUAL) {
mv.visitFieldInsn(opcode, iowner, iname, idesc);
} else {
- mv.visitMethodInsn(opcode, iowner, iname, idesc);
+ mv.visitMethodInsn(opcode, iowner, iname, idesc, itf);
}
if (opcode == Opcodes.INVOKEINTERFACE) {
u += 5;
@@ -1358,6 +1454,29 @@ public class ClassReader {
u += 4;
break;
}
+
+ // visit the instruction annotations, if any
+ while (tanns != null && tann < tanns.length && ntoff <= offset) {
+ if (ntoff == offset) {
+ int v = readAnnotationTarget(context, tanns[tann]);
+ readAnnotationValues(v + 2, c, true,
+ mv.visitInsnAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), true));
+ }
+ ntoff = ++tann >= tanns.length || readByte(tanns[tann]) < 0x43 ? -1
+ : readUnsignedShort(tanns[tann] + 1);
+ }
+ while (itanns != null && itann < itanns.length && nitoff <= offset) {
+ if (nitoff == offset) {
+ int v = readAnnotationTarget(context, itanns[itann]);
+ readAnnotationValues(v + 2, c, true,
+ mv.visitInsnAnnotation(context.typeRef,
+ context.typePath, readUTF8(v, c), false));
+ }
+ nitoff = ++itann >= itanns.length
+ || readByte(itanns[itann]) < 0x43 ? -1
+ : readUnsignedShort(itanns[itann] + 1);
+ }
}
if (labels[codeLength] != null) {
mv.visitLabel(labels[codeLength]);
@@ -1397,6 +1516,32 @@ public class ClassReader {
}
}
+ // visits the local variables type annotations
+ if (tanns != null) {
+ for (int i = 0; i < tanns.length; ++i) {
+ if ((readByte(tanns[i]) >> 1) == (0x40 >> 1)) {
+ int v = readAnnotationTarget(context, tanns[i]);
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitLocalVariableAnnotation(context.typeRef,
+ context.typePath, context.start,
+ context.end, context.index, readUTF8(v, c),
+ true));
+ }
+ }
+ }
+ if (itanns != null) {
+ for (int i = 0; i < itanns.length; ++i) {
+ if ((readByte(itanns[i]) >> 1) == (0x40 >> 1)) {
+ int v = readAnnotationTarget(context, itanns[i]);
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitLocalVariableAnnotation(context.typeRef,
+ context.typePath, context.start,
+ context.end, context.index, readUTF8(v, c),
+ false));
+ }
+ }
+ }
+
// visits the code attributes
while (attributes != null) {
Attribute attr = attributes.next;
@@ -1410,24 +1555,175 @@ public class ClassReader {
}
/**
+ * Parses a type annotation table to find the labels, and to visit the try
+ * catch block annotations.
+ *
+ * @param u
+ * the start offset of a type annotation table.
+ * @param mv
+ * the method visitor to be used to visit the try catch block
+ * annotations.
+ * @param context
+ * information about the class being parsed.
+ * @param visible
+ * if the type annotation table to parse contains runtime visible
+ * annotations.
+ * @return the start offset of each type annotation in the parsed table.
+ */
+ private int[] readTypeAnnotations(final MethodVisitor mv,
+ final Context context, int u, boolean visible) {
+ char[] c = context.buffer;
+ int[] offsets = new int[readUnsignedShort(u)];
+ u += 2;
+ for (int i = 0; i < offsets.length; ++i) {
+ offsets[i] = u;
+ int target = readInt(u);
+ switch (target >>> 24) {
+ case 0x00: // CLASS_TYPE_PARAMETER
+ case 0x01: // METHOD_TYPE_PARAMETER
+ case 0x16: // METHOD_FORMAL_PARAMETER
+ u += 2;
+ break;
+ case 0x13: // FIELD
+ case 0x14: // METHOD_RETURN
+ case 0x15: // METHOD_RECEIVER
+ u += 1;
+ break;
+ case 0x40: // LOCAL_VARIABLE
+ case 0x41: // RESOURCE_VARIABLE
+ for (int j = readUnsignedShort(u + 1); j > 0; --j) {
+ int start = readUnsignedShort(u + 3);
+ int length = readUnsignedShort(u + 5);
+ readLabel(start, context.labels);
+ readLabel(start + length, context.labels);
+ u += 6;
+ }
+ u += 3;
+ break;
+ case 0x47: // CAST
+ case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT
+ case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT
+ u += 4;
+ break;
+ // case 0x10: // CLASS_EXTENDS
+ // case 0x11: // CLASS_TYPE_PARAMETER_BOUND
+ // case 0x12: // METHOD_TYPE_PARAMETER_BOUND
+ // case 0x17: // THROWS
+ // case 0x42: // EXCEPTION_PARAMETER
+ // case 0x43: // INSTANCEOF
+ // case 0x44: // NEW
+ // case 0x45: // CONSTRUCTOR_REFERENCE
+ // case 0x46: // METHOD_REFERENCE
+ default:
+ u += 3;
+ break;
+ }
+ int pathLength = readByte(u);
+ if ((target >>> 24) == 0x42) {
+ TypePath path = pathLength == 0 ? null : new TypePath(b, u);
+ u += 1 + 2 * pathLength;
+ u = readAnnotationValues(u + 2, c, true,
+ mv.visitTryCatchAnnotation(target, path,
+ readUTF8(u, c), visible));
+ } else {
+ u = readAnnotationValues(u + 3 + 2 * pathLength, c, true, null);
+ }
+ }
+ return offsets;
+ }
+
+ /**
+ * Parses the header of a type annotation to extract its target_type and
+ * target_path (the result is stored in the given context), and returns the
+ * start offset of the rest of the type_annotation structure (i.e. the
+ * offset to the type_index field, which is followed by
+ * num_element_value_pairs and then the name,value pairs).
+ *
+ * @param context
+ * information about the class being parsed. This is where the
+ * extracted target_type and target_path must be stored.
+ * @param u
+ * the start offset of a type_annotation structure.
+ * @return the start offset of the rest of the type_annotation structure.
+ */
+ private int readAnnotationTarget(final Context context, int u) {
+ int target = readInt(u);
+ switch (target >>> 24) {
+ case 0x00: // CLASS_TYPE_PARAMETER
+ case 0x01: // METHOD_TYPE_PARAMETER
+ case 0x16: // METHOD_FORMAL_PARAMETER
+ target &= 0xFFFF0000;
+ u += 2;
+ break;
+ case 0x13: // FIELD
+ case 0x14: // METHOD_RETURN
+ case 0x15: // METHOD_RECEIVER
+ target &= 0xFF000000;
+ u += 1;
+ break;
+ case 0x40: // LOCAL_VARIABLE
+ case 0x41: { // RESOURCE_VARIABLE
+ target &= 0xFF000000;
+ int n = readUnsignedShort(u + 1);
+ context.start = new Label[n];
+ context.end = new Label[n];
+ context.index = new int[n];
+ u += 3;
+ for (int i = 0; i < n; ++i) {
+ int start = readUnsignedShort(u);
+ int length = readUnsignedShort(u + 2);
+ context.start[i] = readLabel(start, context.labels);
+ context.end[i] = readLabel(start + length, context.labels);
+ context.index[i] = readUnsignedShort(u + 4);
+ u += 6;
+ }
+ break;
+ }
+ case 0x47: // CAST
+ case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT
+ case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT
+ target &= 0xFF0000FF;
+ u += 4;
+ break;
+ // case 0x10: // CLASS_EXTENDS
+ // case 0x11: // CLASS_TYPE_PARAMETER_BOUND
+ // case 0x12: // METHOD_TYPE_PARAMETER_BOUND
+ // case 0x17: // THROWS
+ // case 0x42: // EXCEPTION_PARAMETER
+ // case 0x43: // INSTANCEOF
+ // case 0x44: // NEW
+ // case 0x45: // CONSTRUCTOR_REFERENCE
+ // case 0x46: // METHOD_REFERENCE
+ default:
+ target &= (target >>> 24) < 0x43 ? 0xFFFFFF00 : 0xFF000000;
+ u += 3;
+ break;
+ }
+ int pathLength = readByte(u);
+ context.typeRef = target;
+ context.typePath = pathLength == 0 ? null : new TypePath(b, u);
+ return u + 1 + 2 * pathLength;
+ }
+
+ /**
* Reads parameter annotations and makes the given visitor visit them.
*
+ * @param mv
+ * the visitor that must visit the annotations.
+ * @param context
+ * information about the class being parsed.
* @param v
* start offset in {@link #b b} of the annotations to be read.
- * @param desc
- * the method descriptor.
- * @param buf
- * buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or {@link #readConst
- * readConst}.
* @param visible
* <tt>true</tt> if the annotations to be read are visible at
* runtime.
- * @param mv
- * the visitor that must visit the annotations.
*/
- private void readParameterAnnotations(int v, final String desc,
- final char[] buf, final boolean visible, final MethodVisitor mv) {
+ private void readParameterAnnotations(final MethodVisitor mv,
+ final Context context, int v, final boolean visible) {
int i;
int n = b[v++] & 0xFF;
// workaround for a bug in javac (javac compiler generates a parameter
@@ -1436,7 +1732,7 @@ public class ClassReader {
// equal to the number of parameters in the method descriptor - which
// includes the synthetic parameters added by the compiler). This work-
// around supposes that the synthetic parameters are the first ones.
- int synthetics = Type.getArgumentTypes(desc).length - n;
+ int synthetics = Type.getArgumentTypes(context.desc).length - n;
AnnotationVisitor av;
for (i = 0; i < synthetics; ++i) {
// virtual annotation to detect synthetic parameters in MethodWriter
@@ -1445,12 +1741,13 @@ public class ClassReader {
av.visitEnd();
}
}
+ char[] c = context.buffer;
for (; i < n + synthetics; ++i) {
int j = readUnsignedShort(v);
v += 2;
for (; j > 0; --j) {
- av = mv.visitParameterAnnotation(i, readUTF8(v, buf), visible);
- v = readAnnotationValues(v + 2, buf, true, av);
+ av = mv.visitParameterAnnotation(i, readUTF8(v, c), visible);
+ v = readAnnotationValues(v + 2, c, true, av);
}
}
}
@@ -1729,17 +2026,14 @@ public class ClassReader {
* if the stack map frame at stackMap is compressed or not.
* @param unzip
* if the stack map frame must be uncompressed.
- * @param labels
- * the labels of the method currently being parsed, indexed by
- * their offset. A new label for the parsed stack map frame is
- * stored in this array if it does not already exist.
* @param frame
* where the parsed stack map frame must be stored.
* @return the offset of the first byte following the parsed frame.
*/
private int readFrame(int stackMap, boolean zip, boolean unzip,
- Label[] labels, Context frame) {
+ Context frame) {
char[] c = frame.buffer;
+ Label[] labels = frame.labels;
int tag;
int delta;
if (zip) {
diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
index 3fc364d5e5..48dc2ca6ae 100644
--- a/src/asm/scala/tools/asm/ClassVisitor.java
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
@@ -33,8 +33,9 @@ package scala.tools.asm;
* A visitor to visit a Java class. The methods of this class must be called in
* the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
* <tt>visitOuterClass</tt> ] ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> | <tt>visitField</tt> |
- * <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
+ * <tt>visitTypeAnnotation</tt> | <tt>visitAttribute</tt> )* (
+ * <tt>visitInnerClass</tt> | <tt>visitField</tt> | <tt>visitMethod</tt> )*
+ * <tt>visitEnd</tt>.
*
* @author Eric Bruneton
*/
@@ -42,7 +43,7 @@ public abstract class ClassVisitor {
/**
* The ASM API version implemented by this visitor. The value of this field
- * must be one of {@link Opcodes#ASM4}.
+ * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected final int api;
@@ -57,7 +58,7 @@ public abstract class ClassVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public ClassVisitor(final int api) {
this(api, null);
@@ -68,13 +69,13 @@ public abstract class ClassVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param cv
* the class visitor to which this visitor must delegate method
* calls. May be null.
*/
public ClassVisitor(final int api, final ClassVisitor cv) {
- if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4 && api != Opcodes.ASM5) {
throw new IllegalArgumentException();
}
this.api = api;
@@ -169,6 +170,39 @@ public abstract class ClassVisitor {
}
/**
+ * Visits an annotation on a type in the class signature.
+ *
+ * @param typeRef
+ * a reference to the annotated type. The sort of this type
+ * reference must be {@link TypeReference#CLASS_TYPE_PARAMETER
+ * CLASS_TYPE_PARAMETER},
+ * {@link TypeReference#CLASS_TYPE_PARAMETER_BOUND
+ * CLASS_TYPE_PARAMETER_BOUND} or
+ * {@link TypeReference#CLASS_EXTENDS CLASS_EXTENDS}. See
+ * {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (cv != null) {
+ return cv.visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+ return null;
+ }
+
+ /**
* Visits a non standard attribute of the class.
*
* @param attr
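A minimal sketch (not part of this patch) of a client visitor for the new callback, assuming the repackaged scala.tools.asm names added by this upgrade: the constructor must pass Opcodes.ASM5, which the relaxed api check above now accepts, and visitTypeAnnotation can then be overridden.

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.TypePath;

    // Sketch only: logs class-level type annotations and delegates to the next visitor.
    class TypeAnnotationLogger extends ClassVisitor {
        TypeAnnotationLogger(ClassVisitor next) {
            super(Opcodes.ASM5, next); // must be ASM5: the base visitTypeAnnotation rejects older api values
        }

        @Override
        public AnnotationVisitor visitTypeAnnotation(int typeRef, TypePath typePath,
                String desc, boolean visible) {
            System.out.println("type annotation " + desc
                    + " on target 0x" + Integer.toHexString(typeRef >>> 24));
            return super.visitTypeAnnotation(typeRef, typePath, desc, visible);
        }
    }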
diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
index 93ed7313c7..5c2de3f982 100644
--- a/src/asm/scala/tools/asm/ClassWriter.java
+++ b/src/asm/scala/tools/asm/ClassWriter.java
@@ -417,6 +417,16 @@ public class ClassWriter extends ClassVisitor {
private AnnotationWriter ianns;
/**
+ * The runtime visible type annotations of this class.
+ */
+ private AnnotationWriter tanns;
+
+ /**
+ * The runtime invisible type annotations of this class.
+ */
+ private AnnotationWriter itanns;
+
+ /**
* The non standard attributes of this class.
*/
private Attribute attrs;
@@ -477,12 +487,12 @@ public class ClassWriter extends ClassVisitor {
* <tt>true</tt> if the maximum stack size and number of local variables
* must be automatically computed.
*/
- private final boolean computeMaxs;
+ private boolean computeMaxs;
/**
* <tt>true</tt> if the stack map frames must be recomputed from scratch.
*/
- private final boolean computeFrames;
+ private boolean computeFrames;
/**
* <tt>true</tt> if the stack map tables of this class are invalid. The
@@ -595,7 +605,7 @@ public class ClassWriter extends ClassVisitor {
* {@link #COMPUTE_FRAMES}.
*/
public ClassWriter(final int flags) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
index = 1;
pool = new ByteVector();
items = new Item[256];
@@ -677,7 +687,8 @@ public class ClassWriter extends ClassVisitor {
sourceFile = newUTF8(file);
}
if (debug != null) {
- sourceDebug = new ByteVector().putUTF8(debug);
+ sourceDebug = new ByteVector().encodeUTF8(debug, 0,
+ Integer.MAX_VALUE);
}
}
@@ -711,6 +722,29 @@ public class ClassWriter extends ClassVisitor {
}
@Override
+ public final AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, final String desc, final boolean visible) {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write target_type and target_info
+ AnnotationWriter.putTarget(typeRef, typePath, bv);
+ // write type, and reserve space for values count
+ bv.putShort(newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(this, true, bv, bv,
+ bv.length - 2);
+ if (visible) {
+ aw.next = tanns;
+ tanns = aw;
+ } else {
+ aw.next = itanns;
+ itanns = aw;
+ }
+ return aw;
+ }
+
+ @Override
public final void visitAttribute(final Attribute attr) {
attr.next = attrs;
attrs = attr;
@@ -722,11 +756,29 @@ public class ClassWriter extends ClassVisitor {
if (innerClasses == null) {
innerClasses = new ByteVector();
}
- ++innerClassesCount;
- innerClasses.putShort(name == null ? 0 : newClass(name));
- innerClasses.putShort(outerName == null ? 0 : newClass(outerName));
- innerClasses.putShort(innerName == null ? 0 : newUTF8(innerName));
- innerClasses.putShort(access);
+ // Sec. 4.7.6 of the JVMS states "Every CONSTANT_Class_info entry in the
+ // constant_pool table which represents a class or interface C that is
+ // not a package member must have exactly one corresponding entry in the
+ // classes array". To avoid duplicates we keep track in the intVal field
+ // of the Item of each CONSTANT_Class_info entry C whether an inner
+ // class entry has already been added for C (this field is unused for
+ // class entries, and changing its value does not change the hashcode
+ // and equality tests). If so we store the index of this inner class
+ // entry (plus one) in intVal. This hack allows duplicate detection in
+ // O(1) time.
+ Item nameItem = newClassItem(name);
+ if (nameItem.intVal == 0) {
+ ++innerClassesCount;
+ innerClasses.putShort(nameItem.index);
+ innerClasses.putShort(outerName == null ? 0 : newClass(outerName));
+ innerClasses.putShort(innerName == null ? 0 : newUTF8(innerName));
+ innerClasses.putShort(access);
+ nameItem.intVal = innerClassesCount;
+ } else {
+ // Compare the inner classes entry nameItem.intVal - 1 with the
+ // arguments of this method and throw an exception if there is a
+ // difference?
+ }
}
@Override
@@ -795,7 +847,7 @@ public class ClassWriter extends ClassVisitor {
}
if (sourceDebug != null) {
++attributeCount;
- size += sourceDebug.length + 4;
+ size += sourceDebug.length + 6;
newUTF8("SourceDebugExtension");
}
if (enclosingMethodOwner != 0) {
@@ -831,6 +883,16 @@ public class ClassWriter extends ClassVisitor {
size += 8 + ianns.getSize();
newUTF8("RuntimeInvisibleAnnotations");
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ ++attributeCount;
+ size += 8 + tanns.getSize();
+ newUTF8("RuntimeVisibleTypeAnnotations");
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ ++attributeCount;
+ size += 8 + itanns.getSize();
+ newUTF8("RuntimeInvisibleTypeAnnotations");
+ }
if (attrs != null) {
attributeCount += attrs.getCount();
size += attrs.getSize(this, null, 0, -1, -1);
@@ -874,9 +936,9 @@ public class ClassWriter extends ClassVisitor {
out.putShort(newUTF8("SourceFile")).putInt(2).putShort(sourceFile);
}
if (sourceDebug != null) {
- int len = sourceDebug.length - 2;
+ int len = sourceDebug.length;
out.putShort(newUTF8("SourceDebugExtension")).putInt(len);
- out.putByteArray(sourceDebug.data, 2, len);
+ out.putByteArray(sourceDebug.data, 0, len);
}
if (enclosingMethodOwner != 0) {
out.putShort(newUTF8("EnclosingMethod")).putInt(4);
@@ -904,13 +966,34 @@ public class ClassWriter extends ClassVisitor {
out.putShort(newUTF8("RuntimeInvisibleAnnotations"));
ianns.put(out);
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ out.putShort(newUTF8("RuntimeVisibleTypeAnnotations"));
+ tanns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ out.putShort(newUTF8("RuntimeInvisibleTypeAnnotations"));
+ itanns.put(out);
+ }
if (attrs != null) {
attrs.put(this, null, 0, -1, -1, out);
}
if (invalidFrames) {
- ClassWriter cw = new ClassWriter(COMPUTE_FRAMES);
- new ClassReader(out.data).accept(cw, ClassReader.SKIP_FRAMES);
- return cw.toByteArray();
+ anns = null;
+ ianns = null;
+ attrs = null;
+ innerClassesCount = 0;
+ innerClasses = null;
+ bootstrapMethodsCount = 0;
+ bootstrapMethods = null;
+ firstField = null;
+ lastField = null;
+ firstMethod = null;
+ lastMethod = null;
+ computeMaxs = false;
+ computeFrames = true;
+ invalidFrames = false;
+ new ClassReader(out.data).accept(this, ClassReader.SKIP_FRAMES);
+ return toByteArray();
}
return out.data;
}
@@ -1577,7 +1660,7 @@ public class ClassWriter extends ClassVisitor {
/**
* Returns the common super type of the two given types. The default
- * implementation of this method <i>loads<i> the two given classes and uses
+ * implementation of this method <i>loads</i> the two given classes and uses
* the java.lang.Class methods to find the common super class. It can be
* overridden to compute this common super type in other ways, in particular
* without actually loading any class, or to take into account the class
@@ -1664,6 +1747,15 @@ public class ClassWriter extends ClassVisitor {
}
/**
+ * Finds the item whose index is `index`.
+ */
+ public Item findItemByIndex(int index) {
+ int i = 0;
+ while (i < items.length && (items[i] == null || items[i].index != index)) i++;
+ return i < items.length ? items[i] : null; // null when no item matches, see the caller's null check
+ }
+
+ /**
* Puts one byte and two shorts into the constant pool.
*
* @param b
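
Note: the visitInnerClass change above deduplicates InnerClasses entries, so repeated calls for the same nested class now produce a single table entry. A small illustration (class names are made up):

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;

    class InnerClassDedupDemo {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            cw.visit(Opcodes.V1_7, Opcodes.ACC_PUBLIC, "p/Outer", null,
                    "java/lang/Object", null);
            // Both calls refer to the same nested class; only one InnerClasses
            // entry is emitted thanks to the intVal bookkeeping above.
            cw.visitInnerClass("p/Outer$Inner", "p/Outer", "Inner", Opcodes.ACC_PUBLIC);
            cw.visitInnerClass("p/Outer$Inner", "p/Outer", "Inner", Opcodes.ACC_PUBLIC);
            cw.visitEnd();
            byte[] bytes = cw.toByteArray();
        }
    }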
diff --git a/src/asm/scala/tools/asm/Context.java b/src/asm/scala/tools/asm/Context.java
index 7b3a2ad9dd..24546969e3 100644
--- a/src/asm/scala/tools/asm/Context.java
+++ b/src/asm/scala/tools/asm/Context.java
@@ -73,11 +73,46 @@ class Context {
String desc;
/**
+ * The label objects, indexed by bytecode offset, of the method currently
+ * being parsed (only bytecode offsets for which a label is needed have a
+ * non null associated Label object).
+ */
+ Label[] labels;
+
+ /**
+ * The target of the type annotation currently being parsed.
+ */
+ int typeRef;
+
+ /**
+ * The path of the type annotation currently being parsed.
+ */
+ TypePath typePath;
+
+ /**
* The offset of the latest stack map frame that has been parsed.
*/
int offset;
/**
+ * The labels corresponding to the start of the local variable ranges in the
+ * local variable type annotation currently being parsed.
+ */
+ Label[] start;
+
+ /**
+ * The labels corresponding to the end of the local variable ranges in the
+ * local variable type annotation currently being parsed.
+ */
+ Label[] end;
+
+ /**
+ * The local variable indices for each local variable range in the local
+ * variable type annotation currently being parsed.
+ */
+ int[] index;
+
+ /**
* The encoding of the latest stack map frame that has been parsed.
*/
int mode;
diff --git a/src/asm/scala/tools/asm/CustomAttr.java b/src/asm/scala/tools/asm/CustomAttr.java
index 22b5d287b7..5ecfd283d0 100644
--- a/src/asm/scala/tools/asm/CustomAttr.java
+++ b/src/asm/scala/tools/asm/CustomAttr.java
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
+ * Copyright 2005-2012 LAMP/EPFL
*/
package scala.tools.asm;
diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
index 9171f331e5..708c1d322e 100644
--- a/src/asm/scala/tools/asm/FieldVisitor.java
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
@@ -31,8 +31,8 @@ package scala.tools.asm;
/**
* A visitor to visit a Java field. The methods of this class must be called in
- * the following order: ( <tt>visitAnnotation</tt> | <tt>visitAttribute</tt> )*
- * <tt>visitEnd</tt>.
+ * the following order: ( <tt>visitAnnotation</tt> |
+ * <tt>visitTypeAnnotation</tt> | <tt>visitAttribute</tt> )* <tt>visitEnd</tt>.
*
* @author Eric Bruneton
*/
@@ -40,7 +40,7 @@ public abstract class FieldVisitor {
/**
* The ASM API version implemented by this visitor. The value of this field
- * must be one of {@link Opcodes#ASM4}.
+ * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected final int api;
@@ -55,7 +55,7 @@ public abstract class FieldVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public FieldVisitor(final int api) {
this(api, null);
@@ -66,13 +66,13 @@ public abstract class FieldVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param fv
* the field visitor to which this visitor must delegate method
* calls. May be null.
*/
public FieldVisitor(final int api, final FieldVisitor fv) {
- if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4 && api != Opcodes.ASM5) {
throw new IllegalArgumentException();
}
this.api = api;
@@ -97,6 +97,35 @@ public abstract class FieldVisitor {
}
/**
+ * Visits an annotation on the type of the field.
+ *
+ * @param typeRef
+ * a reference to the annotated type. The sort of this type
+ * reference must be {@link TypeReference#FIELD FIELD}. See
+ * {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (fv != null) {
+ return fv.visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+ return null;
+ }
+
+ /**
* Visits a non standard attribute of the field.
*
* @param attr
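
Note: a hedged sketch of exercising the new FIELD target from a generated class; the annotation descriptor is hypothetical and the snippet assumes the TypeReference helper class introduced alongside this ASM update:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.TypeReference;

    class FieldTypeAnnotationDemo {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            cw.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "p/C", null,
                    "java/lang/Object", null);
            FieldVisitor fv = cw.visitField(Opcodes.ACC_PRIVATE, "name",
                    "Ljava/lang/String;", null, null);
            int typeRef = TypeReference.newTypeReference(TypeReference.FIELD).getValue();
            // Emits a RuntimeVisibleTypeAnnotations attribute on the field
            // (annotation descriptor is illustrative).
            fv.visitTypeAnnotation(typeRef, null, "Lp/NotNull;", true);
            fv.visitEnd();
            cw.visitEnd();
        }
    }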
diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
index 02c6059b91..e640a8d406 100644
--- a/src/asm/scala/tools/asm/FieldWriter.java
+++ b/src/asm/scala/tools/asm/FieldWriter.java
@@ -81,6 +81,17 @@ final class FieldWriter extends FieldVisitor {
private AnnotationWriter ianns;
/**
+ * The runtime visible type annotations of this field. May be <tt>null</tt>.
+ */
+ private AnnotationWriter tanns;
+
+ /**
+ * The runtime invisible type annotations of this field. May be
+ * <tt>null</tt>.
+ */
+ private AnnotationWriter itanns;
+
+ /**
* The non standard attributes of this field. May be <tt>null</tt>.
*/
private Attribute attrs;
@@ -107,7 +118,7 @@ final class FieldWriter extends FieldVisitor {
*/
FieldWriter(final ClassWriter cw, final int access, final String name,
final String desc, final String signature, final Object value) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
if (cw.firstField == null) {
cw.firstField = this;
} else {
@@ -151,6 +162,29 @@ final class FieldWriter extends FieldVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write target_type and target_info
+ AnnotationWriter.putTarget(typeRef, typePath, bv);
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
+ bv.length - 2);
+ if (visible) {
+ aw.next = tanns;
+ tanns = aw;
+ } else {
+ aw.next = itanns;
+ itanns = aw;
+ }
+ return aw;
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
attr.next = attrs;
attrs = attr;
@@ -198,6 +232,14 @@ final class FieldWriter extends FieldVisitor {
cw.newUTF8("RuntimeInvisibleAnnotations");
size += 8 + ianns.getSize();
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ cw.newUTF8("RuntimeVisibleTypeAnnotations");
+ size += 8 + tanns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ cw.newUTF8("RuntimeInvisibleTypeAnnotations");
+ size += 8 + itanns.getSize();
+ }
if (attrs != null) {
size += attrs.getSize(cw, null, 0, -1, -1);
}
@@ -237,6 +279,12 @@ final class FieldWriter extends FieldVisitor {
if (ClassReader.ANNOTATIONS && ianns != null) {
++attributeCount;
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ ++attributeCount;
+ }
if (attrs != null) {
attributeCount += attrs.getCount();
}
@@ -266,6 +314,14 @@ final class FieldWriter extends FieldVisitor {
out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
ianns.put(out);
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
+ tanns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
+ itanns.put(out);
+ }
if (attrs != null) {
attrs.put(cw, null, 0, -1, -1, out);
}
diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
index bcc3e8450b..85ad3269ab 100644
--- a/src/asm/scala/tools/asm/Frame.java
+++ b/src/asm/scala/tools/asm/Frame.java
@@ -70,8 +70,8 @@ final class Frame {
* stack types. VALUE depends on KIND. For LOCAL types, it is an index in
* the input local variable types. For STACK types, it is a position
* relatively to the top of input frame stack. For BASE types, it is either
- * one of the constants defined in FrameVisitor, or for OBJECT and
- * UNINITIALIZED types, a tag and an index in the type table.
+ * one of the constants defined below, or for OBJECT and UNINITIALIZED
+ * types, a tag and an index in the type table.
*
* Output frames can contain types of any kind and with a positive or
* negative dimension (and even unassigned types, represented by 0 - which
@@ -1417,6 +1417,7 @@ final class Frame {
// if t is the NULL type, merge(u,t)=u, so there is no change
return false;
} else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) {
+ // if t and u have the same dimension and same base kind
if ((u & BASE_KIND) == OBJECT) {
// if t is also a reference type, and if u and t have the
// same dimension merge(u,t) = dim(t) | common parent of the
@@ -1425,13 +1426,21 @@ final class Frame {
| cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE);
} else {
// if u and t are array types, but not with the same element
- // type, merge(u,t)=java/lang/Object
- v = OBJECT | cw.addType("java/lang/Object");
+ // type, merge(u,t) = dim(u) - 1 | java/lang/Object
+ int vdim = ELEMENT_OF + (u & DIM);
+ v = vdim | OBJECT | cw.addType("java/lang/Object");
}
} else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) {
- // if t is any other reference or array type,
- // merge(u,t)=java/lang/Object
- v = OBJECT | cw.addType("java/lang/Object");
+ // if t is any other reference or array type, the merged type
+ // is min(udim, tdim) | java/lang/Object, where udim is the
+ // array dimension of u, minus 1 if u is an array type with a
+ // primitive element type (and similarly for tdim).
+ int tdim = (((t & DIM) == 0 || (t & BASE_KIND) == OBJECT) ? 0
+ : ELEMENT_OF) + (t & DIM);
+ int udim = (((u & DIM) == 0 || (u & BASE_KIND) == OBJECT) ? 0
+ : ELEMENT_OF) + (u & DIM);
+ v = Math.min(tdim, udim) | OBJECT
+ | cw.addType("java/lang/Object");
} else {
// if t is any other type, merge(u,t)=TOP
v = TOP;
diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
index 5dd06a54b9..cf12bb7613 100644
--- a/src/asm/scala/tools/asm/Handle.java
+++ b/src/asm/scala/tools/asm/Handle.java
@@ -49,7 +49,8 @@ public final class Handle {
final int tag;
/**
- * The internal name of the field or method designed by this handle.
+ * The internal name of the class that owns the field or method designated
+ * by this handle.
*/
final String owner;
@@ -76,8 +77,8 @@ public final class Handle {
* {@link Opcodes#H_NEWINVOKESPECIAL} or
* {@link Opcodes#H_INVOKEINTERFACE}.
* @param owner
- * the internal name of the field or method designed by this
- * handle.
+ * the internal name of the class that owns the field or method
+ * designated by this handle.
* @param name
* the name of the field or method designated by this handle.
* @param desc
@@ -106,9 +107,11 @@ public final class Handle {
}
/**
- * Returns the internal name of the field or method designed by this handle.
+ * Returns the internal name of the class that owns the field or method
+ * designated by this handle.
*
- * @return the internal name of the field or method designed by this handle.
+ * @return the internal name of the class that owns the field or method
+ * designated by this handle.
*/
public String getOwner() {
return owner;
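
Note: as the clarified javadoc above says, 'owner' is the declaring class, not the member itself. For instance, a bootstrap handle for java.lang.invoke.LambdaMetafactory.metafactory could be built as below (a sketch using the standard metafactory descriptor):

    import scala.tools.asm.Handle;
    import scala.tools.asm.Opcodes;

    class BsmHandles {
        static final Handle METAFACTORY = new Handle(
            Opcodes.H_INVOKESTATIC,
            "java/lang/invoke/LambdaMetafactory",   // owner: the declaring class
            "metafactory",                          // name of the method
            "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                + "Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;"
                + "Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)"
                + "Ljava/lang/invoke/CallSite;");
    }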
diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
index 94195a1082..4693f5ae99 100644
--- a/src/asm/scala/tools/asm/Item.java
+++ b/src/asm/scala/tools/asm/Item.java
@@ -208,9 +208,10 @@ final class Item {
this.strVal2 = strVal2;
this.strVal3 = strVal3;
switch (type) {
+ case ClassWriter.CLASS:
+ this.intVal = 0; // intVal of a class must be zero, see visitInnerClass
case ClassWriter.UTF8:
case ClassWriter.STR:
- case ClassWriter.CLASS:
case ClassWriter.MTYPE:
case ClassWriter.TYPE_NORMAL:
hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
index 5d5529ce74..22b6798fb5 100644
--- a/src/asm/scala/tools/asm/Label.java
+++ b/src/asm/scala/tools/asm/Label.java
@@ -473,7 +473,7 @@ public class Label {
void addToSubroutine(final long id, final int nbSubroutines) {
if ((status & VISITED) == 0) {
status |= VISITED;
- srcAndRefPositions = new int[(nbSubroutines - 1) / 32 + 1];
+ srcAndRefPositions = new int[nbSubroutines / 32 + 1];
}
srcAndRefPositions[(int) (id >>> 32)] |= (int) id;
}
@@ -545,7 +545,7 @@ public class Label {
}
// ------------------------------------------------------------------------
- // Overriden Object methods
+ // Overridden Object methods
// ------------------------------------------------------------------------
/**
diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
index e43ca97823..bddc325020 100644
--- a/src/asm/scala/tools/asm/MethodVisitor.java
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
@@ -31,18 +31,24 @@ package scala.tools.asm;
/**
* A visitor to visit a Java method. The methods of this class must be called in
- * the following order: [ <tt>visitAnnotationDefault</tt> ] (
- * <tt>visitAnnotation</tt> | <tt>visitParameterAnnotation</tt> |
- * <tt>visitAttribute</tt> )* [ <tt>visitCode</tt> ( <tt>visitFrame</tt> |
- * <tt>visit</tt><i>X</i>Insn</tt> | <tt>visitLabel</tt> |
- * <tt>visitTryCatchBlock</tt> | <tt>visitLocalVariable</tt> |
+ * the following order: ( <tt>visitParameter</tt> )* [
+ * <tt>visitAnnotationDefault</tt> ] ( <tt>visitAnnotation</tt> |
+ * <tt>visitTypeAnnotation</tt> | <tt>visitAttribute</tt> )* [
+ * <tt>visitCode</tt> ( <tt>visitFrame</tt> | <tt>visit<i>X</i>Insn</tt> |
+ * <tt>visitLabel</tt> | <tt>visitInsnAnnotation</tt> |
+ * <tt>visitTryCatchBlock</tt> | <tt>visitTryCatchBlockAnnotation</tt> |
+ * <tt>visitLocalVariable</tt> | <tt>visitLocalVariableAnnotation</tt> |
* <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ] <tt>visitEnd</tt>. In
- * addition, the <tt>visit</tt><i>X</i>Insn</tt> and <tt>visitLabel</tt> methods
- * must be called in the sequential order of the bytecode instructions of the
- * visited code, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the
- * labels passed as arguments have been visited, and the
- * <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt> methods must be
- * called <i>after</i> the labels passed as arguments have been visited.
+ * addition, the <tt>visit<i>X</i>Insn</tt> and <tt>visitLabel</tt> methods must
+ * be called in the sequential order of the bytecode instructions of the visited
+ * code, <tt>visitInsnAnnotation</tt> must be called <i>after</i> the annotated
+ * instruction, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the
+ * labels passed as arguments have been visited,
+ * <tt>visitTryCatchBlockAnnotation</tt> must be called <i>after</i> the
+ * corresponding try catch block has been visited, and the
+ * <tt>visitLocalVariable</tt>, <tt>visitLocalVariableAnnotation</tt> and
+ * <tt>visitLineNumber</tt> methods must be called <i>after</i> the labels
+ * passed as arguments have been visited.
*
* @author Eric Bruneton
*/
@@ -50,7 +56,7 @@ public abstract class MethodVisitor {
/**
* The ASM API version implemented by this visitor. The value of this field
- * must be one of {@link Opcodes#ASM4}.
+ * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected final int api;
@@ -65,7 +71,7 @@ public abstract class MethodVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public MethodVisitor(final int api) {
this(api, null);
@@ -76,13 +82,13 @@ public abstract class MethodVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param mv
* the method visitor to which this visitor must delegate method
* calls. May be null.
*/
public MethodVisitor(final int api, final MethodVisitor mv) {
- if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4 && api != Opcodes.ASM5) {
throw new IllegalArgumentException();
}
this.api = api;
@@ -90,10 +96,29 @@ public abstract class MethodVisitor {
}
// -------------------------------------------------------------------------
- // Annotations and non standard attributes
+ // Parameters, annotations and non standard attributes
// -------------------------------------------------------------------------
/**
+ * Visits a parameter of this method.
+ *
+ * @param name
+ * parameter name or null if none is provided.
+ * @param access
+ * the parameter's access flags, only <tt>ACC_FINAL</tt>,
+ * <tt>ACC_SYNTHETIC</tt> and/or <tt>ACC_MANDATED</tt> are
+ * allowed (see {@link Opcodes}).
+ */
+ public void visitParameter(String name, int access) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (mv != null) {
+ mv.visitParameter(name, access);
+ }
+ }
+
+ /**
* Visits the default value of this annotation interface method.
*
* @return a visitor to visit the actual default value of this
@@ -128,6 +153,42 @@ public abstract class MethodVisitor {
}
/**
+ * Visits an annotation on a type in the method signature.
+ *
+ * @param typeRef
+ * a reference to the annotated type. The sort of this type
+ * reference must be {@link TypeReference#METHOD_TYPE_PARAMETER
+ * METHOD_TYPE_PARAMETER},
+ * {@link TypeReference#METHOD_TYPE_PARAMETER_BOUND
+ * METHOD_TYPE_PARAMETER_BOUND},
+ * {@link TypeReference#METHOD_RETURN METHOD_RETURN},
+ * {@link TypeReference#METHOD_RECEIVER METHOD_RECEIVER},
+ * {@link TypeReference#METHOD_FORMAL_PARAMETER
+ * METHOD_FORMAL_PARAMETER} or {@link TypeReference#THROWS
+ * THROWS}. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (mv != null) {
+ return mv.visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+ return null;
+ }
+
+ /**
* Visits an annotation of a parameter of this method.
*
* @param parameter
@@ -201,9 +262,11 @@ public abstract class MethodVisitor {
* <li>{@link Opcodes#F_CHOP} representing frame with current locals are the
* same as the locals in the previous frame, except that the last 1-3 locals
* are absent and with the empty stack (<code>nLocals</code> is 1, 2 or 3).</li>
- * <li>{@link Opcodes#F_FULL} representing complete frame data.</li></li>
+ * <li>{@link Opcodes#F_FULL} representing complete frame data.</li>
+ * </ul>
+ * </li>
* </ul>
- * </ul> <br>
+ * <br>
* In both cases the first frame, corresponding to the method's parameters
* and access flags, is implicit and must not be visited. Also, it is
* illegal to visit two or more frames for the same code location (i.e., at
@@ -376,14 +439,53 @@ public abstract class MethodVisitor {
* @param desc
* the method's descriptor (see {@link Type Type}).
*/
+ @Deprecated
public void visitMethodInsn(int opcode, String owner, String name,
String desc) {
+ if (api >= Opcodes.ASM5) {
+ boolean itf = opcode == Opcodes.INVOKEINTERFACE;
+ visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
if (mv != null) {
mv.visitMethodInsn(opcode, owner, name, desc);
}
}
/**
+ * Visits a method instruction. A method instruction is an instruction that
+ * invokes a method.
+ *
+ * @param opcode
+ * the opcode of the method instruction to be visited. This opcode
+ * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner
+ * the internal name of the method's owner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param itf
+ * if the method's owner class is an interface.
+ */
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc, boolean itf) {
+ if (api < Opcodes.ASM5) {
+ if (itf != (opcode == Opcodes.INVOKEINTERFACE)) {
+ throw new IllegalArgumentException(
+ "INVOKESPECIAL/STATIC on interfaces require ASM 5");
+ }
+ visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc, itf);
+ }
+ }
+
+ /**
* Visits an invokedynamic instruction.
*
* @param name
@@ -558,6 +660,48 @@ public abstract class MethodVisitor {
}
}
+ /**
+ * Visits an annotation on an instruction. This method must be called just
+ * <i>after</i> the annotated instruction. It can be called several times
+ * for the same instruction.
+ *
+ * @param typeRef
+ * a reference to the annotated type. The sort of this type
+ * reference must be {@link TypeReference#INSTANCEOF INSTANCEOF},
+ * {@link TypeReference#NEW NEW},
+ * {@link TypeReference#CONSTRUCTOR_REFERENCE
+ * CONSTRUCTOR_REFERENCE}, {@link TypeReference#METHOD_REFERENCE
+ * METHOD_REFERENCE}, {@link TypeReference#CAST CAST},
+ * {@link TypeReference#CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT},
+ * {@link TypeReference#METHOD_INVOCATION_TYPE_ARGUMENT
+ * METHOD_INVOCATION_TYPE_ARGUMENT},
+ * {@link TypeReference#CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or
+ * {@link TypeReference#METHOD_REFERENCE_TYPE_ARGUMENT
+ * METHOD_REFERENCE_TYPE_ARGUMENT}. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitInsnAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (mv != null) {
+ return mv.visitInsnAnnotation(typeRef, typePath, desc, visible);
+ }
+ return null;
+ }
+
// -------------------------------------------------------------------------
// Exceptions table entries, debug information, max stack and max locals
// -------------------------------------------------------------------------
@@ -587,6 +731,38 @@ public abstract class MethodVisitor {
}
/**
+ * Visits an annotation on an exception handler type. This method must be
+ * called <i>after</i> the {@link #visitTryCatchBlock} for the annotated
+ * exception handler. It can be called several times for the same exception
+ * handler.
+ *
+ * @param typeRef
+ * a reference to the annotated type. The sort of this type
+ * reference must be {@link TypeReference#EXCEPTION_PARAMETER
+ * EXCEPTION_PARAMETER}. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (mv != null) {
+ return mv.visitTryCatchAnnotation(typeRef, typePath, desc, visible);
+ }
+ return null;
+ }
+
+ /**
* Visits a local variable declaration.
*
* @param name
@@ -617,6 +793,48 @@ public abstract class MethodVisitor {
}
/**
+ * Visits an annotation on a local variable type.
+ *
+ * @param typeRef
+ * a reference to the annotated type. The sort of this type
+ * reference must be {@link TypeReference#LOCAL_VARIABLE
+ * LOCAL_VARIABLE} or {@link TypeReference#RESOURCE_VARIABLE
+ * RESOURCE_VARIABLE}. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param start
+ * the first instructions corresponding to the continuous ranges
+ * that make the scope of this local variable (inclusive).
+ * @param end
+ * the last instructions corresponding to the continuous ranges
+ * that make the scope of this local variable (exclusive). This
+ * array must have the same size as the 'start' array.
+ * @param index
+ * the local variable's index in each range. This array must have
+ * the same size as the 'start' array.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
+ TypePath typePath, Label[] start, Label[] end, int[] index,
+ String desc, boolean visible) {
+ if (api < Opcodes.ASM5) {
+ throw new RuntimeException();
+ }
+ if (mv != null) {
+ return mv.visitLocalVariableAnnotation(typeRef, typePath, start,
+ end, index, desc, visible);
+ }
+ return null;
+ }
+
+ /**
* Visits a line number declaration.
*
* @param line
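
Note: the new visitMethodInsn overload above carries an explicit 'itf' flag so that Java 8 static and private interface methods can be emitted; the old 4-argument form could only infer it for INVOKEINTERFACE. A hedged sketch of emitting a call to a static interface method:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class InterfaceCallEmitter {
        // mv is assumed to be a MethodVisitor created at the ASM5 API level.
        static void emitNaturalOrder(MethodVisitor mv) {
            // Comparator.naturalOrder() is a static method declared on an interface,
            // so itf must be true even though the opcode is INVOKESTATIC.
            mv.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Comparator",
                    "naturalOrder", "()Ljava/util/Comparator;", true);
            mv.visitInsn(Opcodes.POP);
        }
    }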
diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
index 87acab17c9..9c72ead61d 100644
--- a/src/asm/scala/tools/asm/MethodWriter.java
+++ b/src/asm/scala/tools/asm/MethodWriter.java
@@ -37,7 +37,7 @@ package scala.tools.asm;
* @author Eric Bruneton
* @author Eugene Kuleshov
*/
-class MethodWriter extends MethodVisitor {
+public class MethodWriter extends MethodVisitor {
/**
* Pseudo access flag used to denote constructors.
@@ -192,6 +192,18 @@ class MethodWriter extends MethodVisitor {
private AnnotationWriter ianns;
/**
+ * The runtime visible type annotations of this method. May be <tt>null</tt>
+ * .
+ */
+ private AnnotationWriter tanns;
+
+ /**
+ * The runtime invisible type annotations of this method. May be
+ * <tt>null</tt>.
+ */
+ private AnnotationWriter itanns;
+
+ /**
* The runtime visible parameter annotations of this method. May be
* <tt>null</tt>.
*/
@@ -223,11 +235,19 @@ class MethodWriter extends MethodVisitor {
*/
private int maxStack;
+ public int getMaxStack() {
+ return maxStack;
+ }
+
/**
* Maximum number of local variables for this method.
*/
private int maxLocals;
+ public int getMaxLocals() {
+ return maxLocals;
+ }
+
/**
* Number of local variables in the current stack map frame.
*/
@@ -283,6 +303,16 @@ class MethodWriter extends MethodVisitor {
private Handler lastHandler;
/**
+ * Number of entries in the MethodParameters attribute.
+ */
+ private int methodParametersCount;
+
+ /**
+ * The MethodParameters attribute.
+ */
+ private ByteVector methodParameters;
+
+ /**
* Number of entries in the LocalVariableTable attribute.
*/
private int localVarCount;
@@ -313,6 +343,21 @@ class MethodWriter extends MethodVisitor {
private ByteVector lineNumber;
/**
+ * The start offset of the last visited instruction.
+ */
+ private int lastCodeOffset;
+
+ /**
+ * The runtime visible type annotations of the code. May be <tt>null</tt>.
+ */
+ private AnnotationWriter ctanns;
+
+ /**
+ * The runtime invisible type annotations of the code. May be <tt>null</tt>.
+ */
+ private AnnotationWriter ictanns;
+
+ /**
* The non standard attributes of the method's code.
*/
private Attribute cattrs;
@@ -416,7 +461,7 @@ class MethodWriter extends MethodVisitor {
final String desc, final String signature,
final String[] exceptions, final boolean computeMaxs,
final boolean computeFrames) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
if (cw.firstMethod == null) {
cw.firstMethod = this;
} else {
@@ -462,6 +507,16 @@ class MethodWriter extends MethodVisitor {
// ------------------------------------------------------------------------
@Override
+ public void visitParameter(String name, int access) {
+ if (methodParameters == null) {
+ methodParameters = new ByteVector();
+ }
+ ++methodParametersCount;
+ methodParameters.putShort((name == null) ? 0 : cw.newUTF8(name))
+ .putShort(access);
+ }
+
+ @Override
public AnnotationVisitor visitAnnotationDefault() {
if (!ClassReader.ANNOTATIONS) {
return null;
@@ -491,6 +546,29 @@ class MethodWriter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write target_type and target_info
+ AnnotationWriter.putTarget(typeRef, typePath, bv);
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
+ bv.length - 2);
+ if (visible) {
+ aw.next = tanns;
+ tanns = aw;
+ } else {
+ aw.next = itanns;
+ itanns = aw;
+ }
+ return aw;
+ }
+
+ @Override
public AnnotationVisitor visitParameterAnnotation(final int parameter,
final String desc, final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
@@ -642,6 +720,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitInsn(final int opcode) {
+ lastCodeOffset = code.length;
// adds the instruction to the bytecode of the method
code.putByte(opcode);
// update currentBlock
@@ -667,6 +746,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitIntInsn(final int opcode, final int operand) {
+ lastCodeOffset = code.length;
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
if (compute == FRAMES) {
@@ -691,6 +771,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitVarInsn(final int opcode, final int var) {
+ lastCodeOffset = code.length;
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
if (compute == FRAMES) {
@@ -749,6 +830,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitTypeInsn(final int opcode, final String type) {
+ lastCodeOffset = code.length;
Item i = cw.newClassItem(type);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -771,6 +853,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitFieldInsn(final int opcode, final String owner,
final String name, final String desc) {
+ lastCodeOffset = code.length;
Item i = cw.newFieldItem(owner, name, desc);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -808,8 +891,8 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitMethodInsn(final int opcode, final String owner,
- final String name, final String desc) {
- boolean itf = opcode == Opcodes.INVOKEINTERFACE;
+ final String name, final String desc, final boolean itf) {
+ lastCodeOffset = code.length;
Item i = cw.newMethodItem(owner, name, desc, itf);
int argSize = i.intVal;
// Label currentBlock = this.currentBlock;
@@ -847,7 +930,7 @@ class MethodWriter extends MethodVisitor {
}
}
// adds the instruction to the bytecode of the method
- if (itf) {
+ if (opcode == Opcodes.INVOKEINTERFACE) {
if (argSize == 0) {
argSize = Type.getArgumentsAndReturnSizes(desc);
i.intVal = argSize;
@@ -861,6 +944,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitInvokeDynamicInsn(final String name, final String desc,
final Handle bsm, final Object... bsmArgs) {
+ lastCodeOffset = code.length;
Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
int argSize = i.intVal;
// Label currentBlock = this.currentBlock;
@@ -900,6 +984,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitJumpInsn(final int opcode, final Label label) {
+ lastCodeOffset = code.length;
Label nextInsn = null;
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -1045,6 +1130,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitLdcInsn(final Object cst) {
+ lastCodeOffset = code.length;
Item i = cw.newConstItem(cst);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -1078,6 +1164,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitIincInsn(final int var, final int increment) {
+ lastCodeOffset = code.length;
if (currentBlock != null) {
if (compute == FRAMES) {
currentBlock.frame.execute(Opcodes.IINC, var, null, null);
@@ -1102,6 +1189,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitTableSwitchInsn(final int min, final int max,
final Label dflt, final Label... labels) {
+ lastCodeOffset = code.length;
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.TABLESWITCH);
@@ -1118,6 +1206,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
final Label[] labels) {
+ lastCodeOffset = code.length;
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.LOOKUPSWITCH);
@@ -1160,6 +1249,7 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ lastCodeOffset = code.length;
Item i = cw.newClassItem(desc);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -1176,6 +1266,30 @@ class MethodWriter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitInsnAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write target_type and target_info
+ typeRef = (typeRef & 0xFF0000FF) | (lastCodeOffset << 8);
+ AnnotationWriter.putTarget(typeRef, typePath, bv);
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
+ bv.length - 2);
+ if (visible) {
+ aw.next = ctanns;
+ ctanns = aw;
+ } else {
+ aw.next = ictanns;
+ ictanns = aw;
+ }
+ return aw;
+ }
+
+ @Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
++handlerCount;
@@ -1194,6 +1308,29 @@ class MethodWriter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write target_type and target_info
+ AnnotationWriter.putTarget(typeRef, typePath, bv);
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
+ bv.length - 2);
+ if (visible) {
+ aw.next = ctanns;
+ ctanns = aw;
+ } else {
+ aw.next = ictanns;
+ ictanns = aw;
+ }
+ return aw;
+ }
+
+ @Override
public void visitLocalVariable(final String name, final String desc,
final String signature, final Label start, final Label end,
final int index) {
@@ -1226,6 +1363,41 @@ class MethodWriter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
+ TypePath typePath, Label[] start, Label[] end, int[] index,
+ String desc, boolean visible) {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write target_type and target_info
+ bv.putByte(typeRef >>> 24).putShort(start.length);
+ for (int i = 0; i < start.length; ++i) {
+ bv.putShort(start[i].position)
+ .putShort(end[i].position - start[i].position)
+ .putShort(index[i]);
+ }
+ if (typePath == null) {
+ bv.putByte(0);
+ } else {
+ int length = typePath.b[typePath.offset] * 2 + 1;
+ bv.putByteArray(typePath.b, typePath.offset, length);
+ }
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
+ bv.length - 2);
+ if (visible) {
+ aw.next = ctanns;
+ ctanns = aw;
+ } else {
+ aw.next = ictanns;
+ ictanns = aw;
+ }
+ return aw;
+ }
+
+ @Override
public void visitLineNumber(final int line, final Label start) {
if (lineNumber == null) {
lineNumber = new ByteVector();
@@ -1237,6 +1409,14 @@ class MethodWriter extends MethodVisitor {
@Override
public void visitMaxs(final int maxStack, final int maxLocals) {
+ if (resize) {
+ // replaces the temporary jump opcodes introduced by Label.resolve.
+ if (ClassReader.RESIZE) {
+ resizeInstructions();
+ } else {
+ throw new RuntimeException("Method code too large!");
+ }
+ }
if (ClassReader.FRAMES && compute == FRAMES) {
// completes the control flow graph with exception handler blocks
Handler handler = firstHandler;
@@ -1794,43 +1974,43 @@ class MethodWriter extends MethodVisitor {
stackMap.putByte(v);
}
} else {
- StringBuffer buf = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
d >>= 28;
while (d-- > 0) {
- buf.append('[');
+ sb.append('[');
}
if ((t & Frame.BASE_KIND) == Frame.OBJECT) {
- buf.append('L');
- buf.append(cw.typeTable[t & Frame.BASE_VALUE].strVal1);
- buf.append(';');
+ sb.append('L');
+ sb.append(cw.typeTable[t & Frame.BASE_VALUE].strVal1);
+ sb.append(';');
} else {
switch (t & 0xF) {
case 1:
- buf.append('I');
+ sb.append('I');
break;
case 2:
- buf.append('F');
+ sb.append('F');
break;
case 3:
- buf.append('D');
+ sb.append('D');
break;
case 9:
- buf.append('Z');
+ sb.append('Z');
break;
case 10:
- buf.append('B');
+ sb.append('B');
break;
case 11:
- buf.append('C');
+ sb.append('C');
break;
case 12:
- buf.append('S');
+ sb.append('S');
break;
default:
- buf.append('J');
+ sb.append('J');
}
}
- stackMap.putByte(7).putShort(cw.newClass(buf.toString()));
+ stackMap.putByte(7).putShort(cw.newClass(sb.toString()));
}
}
}
@@ -1858,22 +2038,12 @@ class MethodWriter extends MethodVisitor {
if (classReaderOffset != 0) {
return 6 + classReaderLength;
}
- if (resize) {
- // replaces the temporary jump opcodes introduced by Label.resolve.
- if (ClassReader.RESIZE) {
- resizeInstructions();
- } else {
- throw new RuntimeException("Method code too large!");
- }
- }
int size = 8;
if (code.length > 0) {
if (code.length > 65536) {
String nameString = "";
- int i = 0;
- // find item that corresponds to the index of our name
- while (i < cw.items.length && (cw.items[i] == null || cw.items[i].index != name)) i++;
- if (cw.items[i] != null) nameString = cw.items[i].strVal1 +"'s ";
+ Item nameItem = cw.findItemByIndex(name);
+ if (nameItem != null) nameString = nameItem.strVal1 +"'s ";
throw new RuntimeException("Method "+ nameString +"code too large!");
}
cw.newUTF8("Code");
@@ -1895,6 +2065,14 @@ class MethodWriter extends MethodVisitor {
cw.newUTF8(zip ? "StackMapTable" : "StackMap");
size += 8 + stackMap.length;
}
+ if (ClassReader.ANNOTATIONS && ctanns != null) {
+ cw.newUTF8("RuntimeVisibleTypeAnnotations");
+ size += 8 + ctanns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && ictanns != null) {
+ cw.newUTF8("RuntimeInvisibleTypeAnnotations");
+ size += 8 + ictanns.getSize();
+ }
if (cattrs != null) {
size += cattrs.getSize(cw, code.data, code.length, maxStack,
maxLocals);
@@ -1920,6 +2098,10 @@ class MethodWriter extends MethodVisitor {
cw.newUTF8(signature);
size += 8;
}
+ if (methodParameters != null) {
+ cw.newUTF8("MethodParameters");
+ size += 7 + methodParameters.length;
+ }
if (ClassReader.ANNOTATIONS && annd != null) {
cw.newUTF8("AnnotationDefault");
size += 6 + annd.length;
@@ -1932,6 +2114,14 @@ class MethodWriter extends MethodVisitor {
cw.newUTF8("RuntimeInvisibleAnnotations");
size += 8 + ianns.getSize();
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ cw.newUTF8("RuntimeVisibleTypeAnnotations");
+ size += 8 + tanns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ cw.newUTF8("RuntimeInvisibleTypeAnnotations");
+ size += 8 + itanns.getSize();
+ }
if (ClassReader.ANNOTATIONS && panns != null) {
cw.newUTF8("RuntimeVisibleParameterAnnotations");
size += 7 + 2 * (panns.length - synthetics);
@@ -1988,6 +2178,9 @@ class MethodWriter extends MethodVisitor {
if (ClassReader.SIGNATURES && signature != null) {
++attributeCount;
}
+ if (methodParameters != null) {
+ ++attributeCount;
+ }
if (ClassReader.ANNOTATIONS && annd != null) {
++attributeCount;
}
@@ -1997,6 +2190,12 @@ class MethodWriter extends MethodVisitor {
if (ClassReader.ANNOTATIONS && ianns != null) {
++attributeCount;
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ ++attributeCount;
+ }
if (ClassReader.ANNOTATIONS && panns != null) {
++attributeCount;
}
@@ -2021,6 +2220,12 @@ class MethodWriter extends MethodVisitor {
if (stackMap != null) {
size += 8 + stackMap.length;
}
+ if (ClassReader.ANNOTATIONS && ctanns != null) {
+ size += 8 + ctanns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && ictanns != null) {
+ size += 8 + ictanns.getSize();
+ }
if (cattrs != null) {
size += cattrs.getSize(cw, code.data, code.length, maxStack,
maxLocals);
@@ -2050,6 +2255,12 @@ class MethodWriter extends MethodVisitor {
if (stackMap != null) {
++attributeCount;
}
+ if (ClassReader.ANNOTATIONS && ctanns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && ictanns != null) {
+ ++attributeCount;
+ }
if (cattrs != null) {
attributeCount += cattrs.getCount();
}
@@ -2075,6 +2286,14 @@ class MethodWriter extends MethodVisitor {
out.putInt(stackMap.length + 2).putShort(frameCount);
out.putByteArray(stackMap.data, 0, stackMap.length);
}
+ if (ClassReader.ANNOTATIONS && ctanns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
+ ctanns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && ictanns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
+ ictanns.put(out);
+ }
if (cattrs != null) {
cattrs.put(cw, code.data, code.length, maxLocals, maxStack, out);
}
@@ -2100,6 +2319,12 @@ class MethodWriter extends MethodVisitor {
out.putShort(cw.newUTF8("Signature")).putInt(2)
.putShort(cw.newUTF8(signature));
}
+ if (methodParameters != null) {
+ out.putShort(cw.newUTF8("MethodParameters"));
+ out.putInt(methodParameters.length + 1).putByte(
+ methodParametersCount);
+ out.putByteArray(methodParameters.data, 0, methodParameters.length);
+ }
if (ClassReader.ANNOTATIONS && annd != null) {
out.putShort(cw.newUTF8("AnnotationDefault"));
out.putInt(annd.length);
@@ -2113,6 +2338,14 @@ class MethodWriter extends MethodVisitor {
out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
ianns.put(out);
}
+ if (ClassReader.ANNOTATIONS && tanns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
+ tanns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && itanns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
+ itanns.put(out);
+ }
if (ClassReader.ANNOTATIONS && panns != null) {
out.putShort(cw.newUTF8("RuntimeVisibleParameterAnnotations"));
AnnotationWriter.put(panns, synthetics, out);
@@ -2464,49 +2697,50 @@ class MethodWriter extends MethodVisitor {
}
}
- // recomputes the stack map frames
- if (frameCount > 0) {
- if (compute == FRAMES) {
- frameCount = 0;
- stackMap = null;
- previousFrame = null;
- frame = null;
- Frame f = new Frame();
- f.owner = labels;
- Type[] args = Type.getArgumentTypes(descriptor);
- f.initInputFrame(cw, access, args, maxLocals);
- visitFrame(f);
- Label l = labels;
- while (l != null) {
- /*
- * here we need the original label position. getNewOffset
- * must therefore never have been called for this label.
- */
- u = l.position - 3;
- if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) {
- getNewOffset(allIndexes, allSizes, l);
- // TODO update offsets in UNINITIALIZED values
- visitFrame(l.frame);
- }
- l = l.successor;
- }
- } else {
+ // updates the stack map frame labels
+ if (compute == FRAMES) {
+ Label l = labels;
+ while (l != null) {
/*
- * Resizing an existing stack map frame table is really hard.
- * Not only the table must be parsed to update the offets, but
- * new frames may be needed for jump instructions that were
- * inserted by this method. And updating the offsets or
- * inserting frames can change the format of the following
- * frames, in case of packed frames. In practice the whole table
- * must be recomputed. For this the frames are marked as
- * potentially invalid. This will cause the whole class to be
- * reread and rewritten with the COMPUTE_FRAMES option (see the
- * ClassWriter.toByteArray method). This is not very efficient
- * but is much easier and requires much less code than any other
- * method I can think of.
+ * Detects the labels that are just after an IF instruction that
+ * has been resized with the IFNOT GOTO_W pattern. These labels
+ * are now the target of a jump instruction (the IFNOT
+ * instruction). Note that we need the original label position
+ * here. getNewOffset must therefore never have been called for
+ * this label.
*/
- cw.invalidFrames = true;
+ u = l.position - 3;
+ if (u >= 0 && resize[u]) {
+ l.status |= Label.TARGET;
+ }
+ getNewOffset(allIndexes, allSizes, l);
+ l = l.successor;
}
+ // Update the offsets in the uninitialized types
+ for (i = 0; i < cw.typeTable.length; ++i) {
+ Item item = cw.typeTable[i];
+ if (item != null && item.type == ClassWriter.TYPE_UNINIT) {
+ item.intVal = getNewOffset(allIndexes, allSizes, 0,
+ item.intVal);
+ }
+ }
+ // The stack map frames are not serialized yet, so we don't need
+ // to update them. They will be serialized in visitMaxs.
+ } else if (frameCount > 0) {
+ /*
+ * Resizing an existing stack map frame table is really hard. Not
+ * only the table must be parsed to update the offsets, but new
+ * frames may be needed for jump instructions that were inserted by
+ * this method. And updating the offsets or inserting frames can
+ * change the format of the following frames, in case of packed
+ * frames. In practice the whole table must be recomputed. For this
+ * the frames are marked as potentially invalid. This will cause the
+ * whole class to be reread and rewritten with the COMPUTE_FRAMES
+ * option (see the ClassWriter.toByteArray method). This is not very
+ * efficient but is much easier and requires much less code than any
+ * other method I can think of.
+ */
+ cw.invalidFrames = true;
}
// updates the exception handler block labels
Handler h = firstHandler;
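
Note: MethodWriter now records visitParameter calls in a MethodParameters attribute (sized and written in the hunks above). A minimal sketch of emitting it while generating a method (class and parameter names are made up; constructor omitted for brevity):

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    class MethodParametersDemo {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
            cw.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "p/D", null,
                    "java/lang/Object", null);
            MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "id", "(I)I", null, null);
            mv.visitParameter("value", Opcodes.ACC_FINAL); // one MethodParameters entry
            mv.visitCode();
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(0, 0); // recomputed because of COMPUTE_MAXS
            mv.visitEnd();
            cw.visitEnd();
            byte[] bytes = cw.toByteArray();
        }
    }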
diff --git a/src/asm/scala/tools/asm/Opcodes.java b/src/asm/scala/tools/asm/Opcodes.java
index 809e5ae590..24eaffa717 100644
--- a/src/asm/scala/tools/asm/Opcodes.java
+++ b/src/asm/scala/tools/asm/Opcodes.java
@@ -46,6 +46,7 @@ public interface Opcodes {
// ASM API versions
int ASM4 = 4 << 16 | 0 << 8 | 0;
+ int ASM5 = 5 << 16 | 0 << 8 | 0;
// versions
@@ -56,6 +57,7 @@ public interface Opcodes {
int V1_5 = 0 << 16 | 49;
int V1_6 = 0 << 16 | 50;
int V1_7 = 0 << 16 | 51;
+ int V1_8 = 0 << 16 | 52;
// access flags
@@ -63,7 +65,7 @@ public interface Opcodes {
int ACC_PRIVATE = 0x0002; // class, field, method
int ACC_PROTECTED = 0x0004; // class, field, method
int ACC_STATIC = 0x0008; // field, method
- int ACC_FINAL = 0x0010; // class, field, method
+ int ACC_FINAL = 0x0010; // class, field, method, parameter
int ACC_SUPER = 0x0020; // class
int ACC_SYNCHRONIZED = 0x0020; // method
int ACC_VOLATILE = 0x0040; // field
@@ -74,9 +76,10 @@ public interface Opcodes {
int ACC_INTERFACE = 0x0200; // class
int ACC_ABSTRACT = 0x0400; // class, method
int ACC_STRICT = 0x0800; // method
- int ACC_SYNTHETIC = 0x1000; // class, field, method
+ int ACC_SYNTHETIC = 0x1000; // class, field, method, parameter
int ACC_ANNOTATION = 0x2000; // class
int ACC_ENUM = 0x4000; // class(?) field inner
+ int ACC_MANDATED = 0x8000; // parameter
// ASM specific pseudo access flags
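
Note: the new constants follow the existing packing schemes: API versions put the ASM major version in the upper bytes, while class file versions are minor << 16 | major. A quick check (illustrative only):

    import scala.tools.asm.Opcodes;

    class VersionPackingDemo {
        public static void main(String[] args) {
            System.out.println(Opcodes.ASM5 == (5 << 16)); // true: 0x00050000
            System.out.println(Opcodes.V1_8 == 52);        // true: Java 8 class file major version
        }
    }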
diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
index 7821a492e6..c8f0048588 100644
--- a/src/asm/scala/tools/asm/Type.java
+++ b/src/asm/scala/tools/asm/Type.java
@@ -401,8 +401,8 @@ public class Type {
* @return the size of the arguments of the method (plus one for the
* implicit this argument), argSize, and the size of its return
* value, retSize, packed into a single int i =
- * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal to
- * <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
+ * <tt>(argSize &lt;&lt; 2) | retSize</tt> (argSize is therefore equal to
+ * <tt>i &gt;&gt; 2</tt>, and retSize to <tt>i &amp; 0x03</tt>).
*/
public static int getArgumentsAndReturnSizes(final String desc) {
int n = 1;
@@ -556,11 +556,11 @@ public class Type {
case DOUBLE:
return "double";
case ARRAY:
- StringBuffer b = new StringBuffer(getElementType().getClassName());
+ StringBuilder sb = new StringBuilder(getElementType().getClassName());
for (int i = getDimensions(); i > 0; --i) {
- b.append("[]");
+ sb.append("[]");
}
- return b.toString();
+ return sb.toString();
case OBJECT:
return new String(buf, off, len).replace('/', '.');
default:
@@ -606,9 +606,10 @@ public class Type {
*
* @return the size of the arguments (plus one for the implicit this
* argument), argSize, and the size of the return value, retSize,
- * packed into a single int i = <tt>(argSize << 2) | retSize</tt>
- * (argSize is therefore equal to <tt>i >> 2</tt>, and retSize to
- * <tt>i & 0x03</tt>).
+ * packed into a single
+ * int i = <tt>(argSize &lt;&lt; 2) | retSize</tt>
+ * (argSize is therefore equal to <tt>i &gt;&gt; 2</tt>,
+ * and retSize to <tt>i &amp; 0x03</tt>).
*/
public int getArgumentsAndReturnSizes() {
return getArgumentsAndReturnSizes(getDescriptor());
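
Note: the corrected javadoc above describes the packed encoding; a short worked example of unpacking it, following that formula:

    import scala.tools.asm.Type;

    class ArgSizesDemo {
        public static void main(String[] args) {
            int packed = Type.getArgumentsAndReturnSizes("(IJ)D");
            int argSize = packed >> 2;   // 4: implicit this (1) + int (1) + long (2)
            int retSize = packed & 0x03; // 2: double occupies two slots
            System.out.println(argSize + " " + retSize);
        }
    }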
diff --git a/src/asm/scala/tools/asm/TypePath.java b/src/asm/scala/tools/asm/TypePath.java
new file mode 100644
index 0000000000..d4c6f0d857
--- /dev/null
+++ b/src/asm/scala/tools/asm/TypePath.java
@@ -0,0 +1,193 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2013 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
+/**
+ * The path to a type argument, wildcard bound, array element type, or static
+ * inner type within an enclosing type.
+ *
+ * @author Eric Bruneton
+ */
+public class TypePath {
+
+ /**
+ * A type path step that steps into the element type of an array type. See
+ * {@link #getStep getStep}.
+ */
+ public final static int ARRAY_ELEMENT = 0;
+
+ /**
+ * A type path step that steps into the nested type of a class type. See
+ * {@link #getStep getStep}.
+ */
+ public final static int INNER_TYPE = 1;
+
+ /**
+ * A type path step that steps into the bound of a wildcard type. See
+ * {@link #getStep getStep}.
+ */
+ public final static int WILDCARD_BOUND = 2;
+
+ /**
+ * A type path step that steps into a type argument of a generic type. See
+ * {@link #getStep getStep}.
+ */
+ public final static int TYPE_ARGUMENT = 3;
+
+ /**
+ * The byte array where the path is stored, in Java class file format.
+ */
+ byte[] b;
+
+ /**
+ * The offset of the first byte of the type path in 'b'.
+ */
+ int offset;
+
+ /**
+ * Creates a new type path.
+ *
+ * @param b
+ * the byte array containing the type path in Java class file
+ * format.
+ * @param offset
+ * the offset of the first byte of the type path in 'b'.
+ */
+ TypePath(byte[] b, int offset) {
+ this.b = b;
+ this.offset = offset;
+ }
+
+ /**
+ * Returns the length of this path.
+ *
+ * @return the length of this path.
+ */
+ public int getLength() {
+ return b[offset];
+ }
+
+ /**
+ * Returns the value of the given step of this path.
+ *
+ * @param index
+ * an index between 0 and {@link #getLength()}, exclusive.
+ * @return {@link #ARRAY_ELEMENT ARRAY_ELEMENT}, {@link #INNER_TYPE
+ * INNER_TYPE}, {@link #WILDCARD_BOUND WILDCARD_BOUND}, or
+ * {@link #TYPE_ARGUMENT TYPE_ARGUMENT}.
+ */
+ public int getStep(int index) {
+ return b[offset + 2 * index + 1];
+ }
+
+ /**
+ * Returns the index of the type argument that the given step is stepping
+ * into. This method should only be used for steps whose value is
+ * {@link #TYPE_ARGUMENT TYPE_ARGUMENT}.
+ *
+ * @param index
+ * an index between 0 and {@link #getLength()}, exclusive.
+ * @return the index of the type argument that the given step is stepping
+ * into.
+ */
+ public int getStepArgument(int index) {
+ return b[offset + 2 * index + 2];
+ }
+
+ /**
+ * Converts a type path in string form, in the format used by
+ * {@link #toString()}, into a TypePath object.
+ *
+ * @param typePath
+ * a type path in string form, in the format used by
+ * {@link #toString()}. May be null or empty.
+ * @return the corresponding TypePath object, or null if the path is empty.
+ */
+ public static TypePath fromString(final String typePath) {
+ if (typePath == null || typePath.length() == 0) {
+ return null;
+ }
+ int n = typePath.length();
+ ByteVector out = new ByteVector(n);
+ out.putByte(0);
+ for (int i = 0; i < n;) {
+ char c = typePath.charAt(i++);
+ if (c == '[') {
+ out.put11(ARRAY_ELEMENT, 0);
+ } else if (c == '.') {
+ out.put11(INNER_TYPE, 0);
+ } else if (c == '*') {
+ out.put11(WILDCARD_BOUND, 0);
+ } else if (c >= '0' && c <= '9') {
+ int typeArg = c - '0';
+ while (i < n && (c = typePath.charAt(i)) >= '0' && c <= '9') {
+ typeArg = typeArg * 10 + c - '0';
+ i += 1;
+ }
+ out.put11(TYPE_ARGUMENT, typeArg);
+ }
+ }
+ out.data[0] = (byte) (out.length / 2);
+ return new TypePath(out.data, 0);
+ }
+
+ /**
+ * Returns a string representation of this type path. {@link #ARRAY_ELEMENT
+ * ARRAY_ELEMENT} steps are represented with '[', {@link #INNER_TYPE
+ * INNER_TYPE} steps with '.', {@link #WILDCARD_BOUND WILDCARD_BOUND} steps
+ * with '*' and {@link #TYPE_ARGUMENT TYPE_ARGUMENT} steps with their type
+ * argument index in decimal form.
+ */
+ @Override
+ public String toString() {
+ int length = getLength();
+ StringBuilder result = new StringBuilder(length * 2);
+ for (int i = 0; i < length; ++i) {
+ switch (getStep(i)) {
+ case ARRAY_ELEMENT:
+ result.append('[');
+ break;
+ case INNER_TYPE:
+ result.append('.');
+ break;
+ case WILDCARD_BOUND:
+ result.append('*');
+ break;
+ case TYPE_ARGUMENT:
+ result.append(getStepArgument(i));
+ break;
+ default:
+ result.append('_');
+ }
+ }
+ return result.toString();
+ }
+}
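A minimal round-trip sketch (not part of this diff) of the string format used by fromString and toString above, where '[' is ARRAY_ELEMENT, '.' is INNER_TYPE, '*' is WILDCARD_BOUND and a decimal number is a TYPE_ARGUMENT index:

    TypePath path = TypePath.fromString("[.1*");
    // path.getLength()        == 4
    // path.getStep(0)         == TypePath.ARRAY_ELEMENT
    // path.getStep(2)         == TypePath.TYPE_ARGUMENT
    // path.getStepArgument(2) == 1
    // path.toString()         equals "[.1*"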
diff --git a/src/asm/scala/tools/asm/TypeReference.java b/src/asm/scala/tools/asm/TypeReference.java
new file mode 100644
index 0000000000..118b0f6529
--- /dev/null
+++ b/src/asm/scala/tools/asm/TypeReference.java
@@ -0,0 +1,452 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2013 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
+/**
+ * A reference to a type appearing in a class, field or method declaration, or
+ * on an instruction. Such a reference designates the part of the class where
+ * the referenced type is appearing (e.g. an 'extends', 'implements' or 'throws'
+ * clause, a 'new' instruction, a 'catch' clause, a type cast, a local variable
+ * declaration, etc).
+ *
+ * @author Eric Bruneton
+ */
+public class TypeReference {
+
+ /**
+ * The sort of type references that target a type parameter of a generic
+ * class. See {@link #getSort getSort}.
+ */
+ public final static int CLASS_TYPE_PARAMETER = 0x00;
+
+ /**
+ * The sort of type references that target a type parameter of a generic
+ * method. See {@link #getSort getSort}.
+ */
+ public final static int METHOD_TYPE_PARAMETER = 0x01;
+
+ /**
+ * The sort of type references that target the super class of a class or one
+ * of the interfaces it implements. See {@link #getSort getSort}.
+ */
+ public final static int CLASS_EXTENDS = 0x10;
+
+ /**
+ * The sort of type references that target a bound of a type parameter of a
+ * generic class. See {@link #getSort getSort}.
+ */
+ public final static int CLASS_TYPE_PARAMETER_BOUND = 0x11;
+
+ /**
+ * The sort of type references that target a bound of a type parameter of a
+ * generic method. See {@link #getSort getSort}.
+ */
+ public final static int METHOD_TYPE_PARAMETER_BOUND = 0x12;
+
+ /**
+ * The sort of type references that target the type of a field. See
+ * {@link #getSort getSort}.
+ */
+ public final static int FIELD = 0x13;
+
+ /**
+ * The sort of type references that target the return type of a method. See
+ * {@link #getSort getSort}.
+ */
+ public final static int METHOD_RETURN = 0x14;
+
+ /**
+ * The sort of type references that target the receiver type of a method.
+ * See {@link #getSort getSort}.
+ */
+ public final static int METHOD_RECEIVER = 0x15;
+
+ /**
+ * The sort of type references that target the type of a formal parameter of
+ * a method. See {@link #getSort getSort}.
+ */
+ public final static int METHOD_FORMAL_PARAMETER = 0x16;
+
+ /**
+ * The sort of type references that target the type of an exception declared
+ * in the throws clause of a method. See {@link #getSort getSort}.
+ */
+ public final static int THROWS = 0x17;
+
+ /**
+ * The sort of type references that target the type of a local variable in a
+ * method. See {@link #getSort getSort}.
+ */
+ public final static int LOCAL_VARIABLE = 0x40;
+
+ /**
+ * The sort of type references that target the type of a resource variable
+ * in a method. See {@link #getSort getSort}.
+ */
+ public final static int RESOURCE_VARIABLE = 0x41;
+
+ /**
+ * The sort of type references that target the type of the exception of a
+ * 'catch' clause in a method. See {@link #getSort getSort}.
+ */
+ public final static int EXCEPTION_PARAMETER = 0x42;
+
+ /**
+ * The sort of type references that target the type declared in an
+ * 'instanceof' instruction. See {@link #getSort getSort}.
+ */
+ public final static int INSTANCEOF = 0x43;
+
+ /**
+ * The sort of type references that target the type of the object created by
+ * a 'new' instruction. See {@link #getSort getSort}.
+ */
+ public final static int NEW = 0x44;
+
+ /**
+ * The sort of type references that target the receiver type of a
+ * constructor reference. See {@link #getSort getSort}.
+ */
+ public final static int CONSTRUCTOR_REFERENCE = 0x45;
+
+ /**
+ * The sort of type references that target the receiver type of a method
+ * reference. See {@link #getSort getSort}.
+ */
+ public final static int METHOD_REFERENCE = 0x46;
+
+ /**
+ * The sort of type references that target the type declared in an explicit
+ * or implicit cast instruction. See {@link #getSort getSort}.
+ */
+ public final static int CAST = 0x47;
+
+ /**
+ * The sort of type references that target a type parameter of a generic
+ * constructor in a constructor call. See {@link #getSort getSort}.
+ */
+ public final static int CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT = 0x48;
+
+ /**
+ * The sort of type references that target a type parameter of a generic
+ * method in a method call. See {@link #getSort getSort}.
+ */
+ public final static int METHOD_INVOCATION_TYPE_ARGUMENT = 0x49;
+
+ /**
+ * The sort of type references that target a type parameter of a generic
+ * constructor in a constructor reference. See {@link #getSort getSort}.
+ */
+ public final static int CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT = 0x4A;
+
+ /**
+ * The sort of type references that target a type parameter of a generic
+ * method in a method reference. See {@link #getSort getSort}.
+ */
+ public final static int METHOD_REFERENCE_TYPE_ARGUMENT = 0x4B;
+
+ /**
+ * The type reference value in Java class file format.
+ */
+ private int value;
+
+ /**
+ * Creates a new TypeReference.
+ *
+ * @param typeRef
+ * the int encoded value of the type reference, as received in a
+ * visit method related to type annotations, like
+ * visitTypeAnnotation.
+ */
+ public TypeReference(int typeRef) {
+ this.value = typeRef;
+ }
+
+ /**
+ * Returns a type reference of the given sort.
+ *
+ * @param sort
+ * {@link #FIELD FIELD}, {@link #METHOD_RETURN METHOD_RETURN},
+ * {@link #METHOD_RECEIVER METHOD_RECEIVER},
+ * {@link #LOCAL_VARIABLE LOCAL_VARIABLE},
+ * {@link #RESOURCE_VARIABLE RESOURCE_VARIABLE},
+ * {@link #INSTANCEOF INSTANCEOF}, {@link #NEW NEW},
+ * {@link #CONSTRUCTOR_REFERENCE CONSTRUCTOR_REFERENCE}, or
+ * {@link #METHOD_REFERENCE METHOD_REFERENCE}.
+ * @return a type reference of the given sort.
+ */
+ public static TypeReference newTypeReference(int sort) {
+ return new TypeReference(sort << 24);
+ }
+
+ /**
+ * Returns a reference to a type parameter of a generic class or method.
+ *
+ * @param sort
+ * {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER} or
+ * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER}.
+ * @param paramIndex
+ * the type parameter index.
+ * @return a reference to the given generic class or method type parameter.
+ */
+ public static TypeReference newTypeParameterReference(int sort,
+ int paramIndex) {
+ return new TypeReference((sort << 24) | (paramIndex << 16));
+ }
+
+ /**
+ * Returns a reference to a type parameter bound of a generic class or
+ * method.
+ *
+ * @param sort
+ * {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER} or
+ * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER}.
+ * @param paramIndex
+ * the type parameter index.
+ * @param boundIndex
+ * the type bound index within the above type parameters.
+ * @return a reference to the given generic class or method type parameter
+ * bound.
+ */
+ public static TypeReference newTypeParameterBoundReference(int sort,
+ int paramIndex, int boundIndex) {
+ return new TypeReference((sort << 24) | (paramIndex << 16)
+ | (boundIndex << 8));
+ }
+
+ /**
+ * Returns a reference to the super class or to an interface of the
+ * 'implements' clause of a class.
+ *
+ * @param itfIndex
+ * the index of an interface in the 'implements' clause of a
+ * class, or -1 to reference the super class of the class.
+ * @return a reference to the given super type of a class.
+ */
+ public static TypeReference newSuperTypeReference(int itfIndex) {
+ itfIndex &= 0xFFFF;
+ return new TypeReference((CLASS_EXTENDS << 24) | (itfIndex << 8));
+ }
+
+ /**
+ * Returns a reference to the type of a formal parameter of a method.
+ *
+ * @param paramIndex
+ * the formal parameter index.
+ *
+ * @return a reference to the type of the given method formal parameter.
+ */
+ public static TypeReference newFormalParameterReference(int paramIndex) {
+ return new TypeReference((METHOD_FORMAL_PARAMETER << 24)
+ | (paramIndex << 16));
+ }
+
+ /**
+ * Returns a reference to the type of an exception, in a 'throws' clause of
+ * a method.
+ *
+ * @param exceptionIndex
+ * the index of an exception in a 'throws' clause of a method.
+ *
+ * @return a reference to the type of the given exception.
+ */
+ public static TypeReference newExceptionReference(int exceptionIndex) {
+ return new TypeReference((THROWS << 24) | (exceptionIndex << 8));
+ }
+
+ /**
+ * Returns a reference to the type of the exception declared in a 'catch'
+ * clause of a method.
+ *
+ * @param tryCatchBlockIndex
+ * the index of a try catch block (using the order in which they
+ * are visited with visitTryCatchBlock).
+ *
+ * @return a reference to the type of the given exception.
+ */
+ public static TypeReference newTryCatchReference(int tryCatchBlockIndex) {
+ return new TypeReference((EXCEPTION_PARAMETER << 24)
+ | (tryCatchBlockIndex << 8));
+ }
+
+ /**
+ * Returns a reference to the type of a type argument in a constructor or
+ * method call or reference.
+ *
+ * @param sort
+ * {@link #CAST CAST},
+ * {@link #CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT},
+ * {@link #METHOD_INVOCATION_TYPE_ARGUMENT
+ * METHOD_INVOCATION_TYPE_ARGUMENT},
+ * {@link #CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or
+ * {@link #METHOD_REFERENCE_TYPE_ARGUMENT
+ * METHOD_REFERENCE_TYPE_ARGUMENT}.
+ * @param argIndex
+ * the type argument index.
+ *
+ * @return a reference to the type of the given type argument.
+ */
+ public static TypeReference newTypeArgumentReference(int sort, int argIndex) {
+ return new TypeReference((sort << 24) | argIndex);
+ }
+
+ /**
+ * Returns the sort of this type reference.
+ *
+ * @return {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER},
+ * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER},
+ * {@link #CLASS_EXTENDS CLASS_EXTENDS},
+ * {@link #CLASS_TYPE_PARAMETER_BOUND CLASS_TYPE_PARAMETER_BOUND},
+ * {@link #METHOD_TYPE_PARAMETER_BOUND METHOD_TYPE_PARAMETER_BOUND},
+ * {@link #FIELD FIELD}, {@link #METHOD_RETURN METHOD_RETURN},
+ * {@link #METHOD_RECEIVER METHOD_RECEIVER},
+ * {@link #METHOD_FORMAL_PARAMETER METHOD_FORMAL_PARAMETER},
+ * {@link #THROWS THROWS}, {@link #LOCAL_VARIABLE LOCAL_VARIABLE},
+ * {@link #RESOURCE_VARIABLE RESOURCE_VARIABLE},
+ * {@link #EXCEPTION_PARAMETER EXCEPTION_PARAMETER},
+ * {@link #INSTANCEOF INSTANCEOF}, {@link #NEW NEW},
+ * {@link #CONSTRUCTOR_REFERENCE CONSTRUCTOR_REFERENCE},
+ * {@link #METHOD_REFERENCE METHOD_REFERENCE}, {@link #CAST CAST},
+ * {@link #CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT},
+ * {@link #METHOD_INVOCATION_TYPE_ARGUMENT
+ * METHOD_INVOCATION_TYPE_ARGUMENT},
+ * {@link #CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or
+ * {@link #METHOD_REFERENCE_TYPE_ARGUMENT
+ * METHOD_REFERENCE_TYPE_ARGUMENT}.
+ */
+ public int getSort() {
+ return value >>> 24;
+ }
+
+ /**
+ * Returns the index of the type parameter referenced by this type
+ * reference. This method must only be used for type references whose sort
+ * is {@link #CLASS_TYPE_PARAMETER CLASS_TYPE_PARAMETER},
+ * {@link #METHOD_TYPE_PARAMETER METHOD_TYPE_PARAMETER},
+ * {@link #CLASS_TYPE_PARAMETER_BOUND CLASS_TYPE_PARAMETER_BOUND} or
+ * {@link #METHOD_TYPE_PARAMETER_BOUND METHOD_TYPE_PARAMETER_BOUND}.
+ *
+ * @return a type parameter index.
+ */
+ public int getTypeParameterIndex() {
+ return (value & 0x00FF0000) >> 16;
+ }
+
+ /**
+ * Returns the index of the type parameter bound, within the type parameter
+ * {@link #getTypeParameterIndex}, referenced by this type reference. This
+ * method must only be used for type references whose sort is
+ * {@link #CLASS_TYPE_PARAMETER_BOUND CLASS_TYPE_PARAMETER_BOUND} or
+ * {@link #METHOD_TYPE_PARAMETER_BOUND METHOD_TYPE_PARAMETER_BOUND}.
+ *
+ * @return a type parameter bound index.
+ */
+ public int getTypeParameterBoundIndex() {
+ return (value & 0x0000FF00) >> 8;
+ }
+
+ /**
+ * Returns the index of the "super type" of a class that is referenced by
+ * this type reference. This method must only be used for type references
+ * whose sort is {@link #CLASS_EXTENDS CLASS_EXTENDS}.
+ *
+ * @return the index of an interface in the 'implements' clause of a class,
+ * or -1 if this type reference references the type of the super
+ * class.
+ */
+ public int getSuperTypeIndex() {
+ return (short) ((value & 0x00FFFF00) >> 8);
+ }
+
+ /**
+ * Returns the index of the formal parameter whose type is referenced by
+ * this type reference. This method must only be used for type references
+ * whose sort is {@link #METHOD_FORMAL_PARAMETER METHOD_FORMAL_PARAMETER}.
+ *
+ * @return a formal parameter index.
+ */
+ public int getFormalParameterIndex() {
+ return (value & 0x00FF0000) >> 16;
+ }
+
+ /**
+ * Returns the index of the exception, in a 'throws' clause of a method,
+ * whose type is referenced by this type reference. This method must only be
+ * used for type references whose sort is {@link #THROWS THROWS}.
+ *
+ * @return the index of an exception in the 'throws' clause of a method.
+ */
+ public int getExceptionIndex() {
+ return (value & 0x00FFFF00) >> 8;
+ }
+
+ /**
+ * Returns the index of the try catch block (using the order in which they
+ * are visited with visitTryCatchBlock), whose 'catch' type is referenced by
+ * this type reference. This method must only be used for type references
+ * whose sort is {@link #EXCEPTION_PARAMETER EXCEPTION_PARAMETER} .
+ *
+ * @return the index of an exception in the 'throws' clause of a method.
+ */
+ public int getTryCatchBlockIndex() {
+ return (value & 0x00FFFF00) >> 8;
+ }
+
+ /**
+ * Returns the index of the type argument referenced by this type reference.
+ * This method must only be used for type references whose sort is
+ * {@link #CAST CAST}, {@link #CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT},
+ * {@link #METHOD_INVOCATION_TYPE_ARGUMENT METHOD_INVOCATION_TYPE_ARGUMENT},
+ * {@link #CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or
+ * {@link #METHOD_REFERENCE_TYPE_ARGUMENT METHOD_REFERENCE_TYPE_ARGUMENT}.
+ *
+ * @return a type parameter index.
+ */
+ public int getTypeArgumentIndex() {
+ return value & 0xFF;
+ }
+
+ /**
+ * Returns the int encoded value of this type reference, suitable for use in
+ * visit methods related to type annotations, like visitTypeAnnotation.
+ *
+ * @return the int encoded value of this type reference.
+ */
+ public int getValue() {
+ return value;
+ }
+}
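A minimal sketch (not part of this diff) of the bit layout implied by the factory methods above: the sort occupies the top byte of the encoded value, the type parameter index the next byte, and the bound index the byte below that:

    TypeReference ref = TypeReference.newTypeParameterBoundReference(
            TypeReference.CLASS_TYPE_PARAMETER_BOUND, 2, 1);
    // ref.getSort()                    == TypeReference.CLASS_TYPE_PARAMETER_BOUND (0x11)
    // ref.getTypeParameterIndex()      == 2
    // ref.getTypeParameterBoundIndex() == 1
    // ref.getValue()                   == (0x11 << 24) | (2 << 16) | (1 << 8)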
diff --git a/src/asm/scala/tools/asm/commons/CodeSizeEvaluator.java b/src/asm/scala/tools/asm/commons/CodeSizeEvaluator.java
new file mode 100644
index 0000000000..80c07bdae0
--- /dev/null
+++ b/src/asm/scala/tools/asm/commons/CodeSizeEvaluator.java
@@ -0,0 +1,238 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.commons;
+
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link MethodVisitor} that can be used to approximate method size.
+ *
+ * @author Eugene Kuleshov
+ */
+public class CodeSizeEvaluator extends MethodVisitor implements Opcodes {
+
+ private int minSize;
+
+ private int maxSize;
+
+ public CodeSizeEvaluator(final MethodVisitor mv) {
+ this(Opcodes.ASM5, mv);
+ }
+
+ protected CodeSizeEvaluator(final int api, final MethodVisitor mv) {
+ super(api, mv);
+ }
+
+ public int getMinSize() {
+ return this.minSize;
+ }
+
+ public int getMaxSize() {
+ return this.maxSize;
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ minSize += 1;
+ maxSize += 1;
+ if (mv != null) {
+ mv.visitInsn(opcode);
+ }
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ if (opcode == SIPUSH) {
+ minSize += 3;
+ maxSize += 3;
+ } else {
+ minSize += 2;
+ maxSize += 2;
+ }
+ if (mv != null) {
+ mv.visitIntInsn(opcode, operand);
+ }
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ if (var < 4 && opcode != RET) {
+ minSize += 1;
+ maxSize += 1;
+ } else if (var >= 256) {
+ minSize += 4;
+ maxSize += 4;
+ } else {
+ minSize += 2;
+ maxSize += 2;
+ }
+ if (mv != null) {
+ mv.visitVarInsn(opcode, var);
+ }
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ minSize += 3;
+ maxSize += 3;
+ if (mv != null) {
+ mv.visitTypeInsn(opcode, type);
+ }
+ }
+
+ @Override
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
+ minSize += 3;
+ maxSize += 3;
+ if (mv != null) {
+ mv.visitFieldInsn(opcode, owner, name, desc);
+ }
+ }
+
+ @Deprecated
+ @Override
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
+ if (api >= Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc,
+ opcode == Opcodes.INVOKEINTERFACE);
+ }
+
+ @Override
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
+ if (api < Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc, itf);
+ }
+
+ private void doVisitMethodInsn(int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
+ if (opcode == INVOKEINTERFACE) {
+ minSize += 5;
+ maxSize += 5;
+ } else {
+ minSize += 3;
+ maxSize += 3;
+ }
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc, itf);
+ }
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
+ minSize += 5;
+ maxSize += 5;
+ if (mv != null) {
+ mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ }
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ minSize += 3;
+ if (opcode == GOTO || opcode == JSR) {
+ maxSize += 5;
+ } else {
+ maxSize += 8;
+ }
+ if (mv != null) {
+ mv.visitJumpInsn(opcode, label);
+ }
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ if (cst instanceof Long || cst instanceof Double) {
+ minSize += 3;
+ maxSize += 3;
+ } else {
+ minSize += 2;
+ maxSize += 3;
+ }
+ if (mv != null) {
+ mv.visitLdcInsn(cst);
+ }
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ if (var > 255 || increment > 127 || increment < -128) {
+ minSize += 6;
+ maxSize += 6;
+ } else {
+ minSize += 3;
+ maxSize += 3;
+ }
+ if (mv != null) {
+ mv.visitIincInsn(var, increment);
+ }
+ }
+
+ @Override
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
+ minSize += 13 + labels.length * 4;
+ maxSize += 16 + labels.length * 4;
+ if (mv != null) {
+ mv.visitTableSwitchInsn(min, max, dflt, labels);
+ }
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
+ minSize += 9 + keys.length * 8;
+ maxSize += 12 + keys.length * 8;
+ if (mv != null) {
+ mv.visitLookupSwitchInsn(dflt, keys, labels);
+ }
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ minSize += 4;
+ maxSize += 4;
+ if (mv != null) {
+ mv.visitMultiANewArrayInsn(desc, dims);
+ }
+ }
+}
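A minimal usage sketch (not part of this diff) for CodeSizeEvaluator; the delegate MethodVisitor may be null, as the null checks above allow, so instructions can be fed in directly and the size bounds read afterwards:

    CodeSizeEvaluator cse = new CodeSizeEvaluator(null);
    cse.visitVarInsn(Opcodes.ALOAD, 0);                       // 1 byte (var < 4)
    cse.visitMethodInsn(Opcodes.INVOKEVIRTUAL,
            "java/lang/Object", "hashCode", "()I", false);    // 3 bytes
    cse.visitInsn(Opcodes.IRETURN);                           // 1 byte
    int min = cse.getMinSize();                               // 5
    int max = cse.getMaxSize();                               // 5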
diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
index f38f81f53b..1e16bd3f7c 100644
--- a/src/asm/scala/tools/asm/signature/SignatureVisitor.java
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
@@ -73,7 +73,7 @@ public abstract class SignatureVisitor {
/**
* The ASM API version implemented by this visitor. The value of this field
- * must be one of {@link Opcodes#ASM4}.
+ * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected final int api;
@@ -82,9 +82,12 @@ public abstract class SignatureVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public SignatureVisitor(final int api) {
+ if (api != Opcodes.ASM4 && api != Opcodes.ASM5) {
+ throw new IllegalArgumentException();
+ }
this.api = api;
}
diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
index ebf4fe07b4..65756eee51 100644
--- a/src/asm/scala/tools/asm/signature/SignatureWriter.java
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
@@ -66,7 +66,7 @@ public class SignatureWriter extends SignatureVisitor {
* Constructs a new {@link SignatureWriter} object.
*/
public SignatureWriter() {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
}
// ------------------------------------------------------------------------
diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
index 411eead3c7..2ce0c8b6ee 100644
--- a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
@@ -29,6 +29,7 @@
*/
package scala.tools.asm.tree;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -128,6 +129,28 @@ public abstract class AbstractInsnNode {
protected int opcode;
/**
+ * The runtime visible type annotations of this instruction. This field is
+ * only used for real instructions (i.e. not for labels, frames, or line
+ * number nodes). This list is a list of {@link TypeAnnotationNode} objects.
+ * May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label visible
+ */
+ public List<TypeAnnotationNode> visibleTypeAnnotations;
+
+ /**
+ * The runtime invisible type annotations of this instruction. This field is
+ * only used for real instructions (i.e. not for labels, frames, or line
+ * number nodes). This list is a list of {@link TypeAnnotationNode} objects.
+ * May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label invisible
+ */
+ public List<TypeAnnotationNode> invisibleTypeAnnotations;
+
+ /**
* Previous instruction in the list to which this instruction belongs.
*/
AbstractInsnNode prev;
@@ -204,6 +227,29 @@ public abstract class AbstractInsnNode {
public abstract void accept(final MethodVisitor cv);
/**
+ * Makes the given visitor visit the annotations of this instruction.
+ *
+ * @param mv
+ * a method visitor.
+ */
+ protected final void acceptAnnotations(final MethodVisitor mv) {
+ int n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations
+ .size();
+ for (int i = 0; i < n; ++i) {
+ TypeAnnotationNode an = visibleTypeAnnotations.get(i);
+ an.accept(mv.visitInsnAnnotation(an.typeRef, an.typePath, an.desc,
+ true));
+ }
+ n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations
+ .size();
+ for (int i = 0; i < n; ++i) {
+ TypeAnnotationNode an = invisibleTypeAnnotations.get(i);
+ an.accept(mv.visitInsnAnnotation(an.typeRef, an.typePath, an.desc,
+ false));
+ }
+ }
+
+ /**
* Returns a copy of this instruction.
*
* @param labels
@@ -245,4 +291,36 @@ public abstract class AbstractInsnNode {
}
return clones;
}
+
+ /**
+ * Clones the annotations of the given instruction into this instruction.
+ *
+ * @param insn
+ * the source instruction.
+ * @return this instruction.
+ */
+ protected final AbstractInsnNode cloneAnnotations(
+ final AbstractInsnNode insn) {
+ if (insn.visibleTypeAnnotations != null) {
+ this.visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>();
+ for (int i = 0; i < insn.visibleTypeAnnotations.size(); ++i) {
+ TypeAnnotationNode src = insn.visibleTypeAnnotations.get(i);
+ TypeAnnotationNode ann = new TypeAnnotationNode(src.typeRef,
+ src.typePath, src.desc);
+ src.accept(ann);
+ this.visibleTypeAnnotations.add(ann);
+ }
+ }
+ if (insn.invisibleTypeAnnotations != null) {
+ this.invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>();
+ for (int i = 0; i < insn.invisibleTypeAnnotations.size(); ++i) {
+ TypeAnnotationNode src = insn.invisibleTypeAnnotations.get(i);
+ TypeAnnotationNode ann = new TypeAnnotationNode(src.typeRef,
+ src.typePath, src.desc);
+ src.accept(ann);
+ this.invisibleTypeAnnotations.add(ann);
+ }
+ }
+ return this;
+ }
}
diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
index 1f4beef9f7..b8d5988066 100644
--- a/src/asm/scala/tools/asm/tree/AnnotationNode.java
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
@@ -67,9 +67,14 @@ public class AnnotationNode extends AnnotationVisitor {
*
* @param desc
* the class descriptor of the annotation class.
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public AnnotationNode(final String desc) {
- this(Opcodes.ASM4, desc);
+ this(Opcodes.ASM5, desc);
+ if (getClass() != AnnotationNode.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -77,7 +82,7 @@ public class AnnotationNode extends AnnotationVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param desc
* the class descriptor of the annotation class.
*/
@@ -93,7 +98,7 @@ public class AnnotationNode extends AnnotationVisitor {
* where the visited values must be stored.
*/
AnnotationNode(final List<Object> values) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
this.values = values;
}
@@ -166,7 +171,8 @@ public class AnnotationNode extends AnnotationVisitor {
* versions of the ASM API than the given version.
*
* @param api
- * an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * an ASM API version. Must be one of {@link Opcodes#ASM4} or
+ * {@link Opcodes#ASM5}.
*/
public void check(final int api) {
// nothing to do
diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
index c3d999985a..304b4ec9f5 100644
--- a/src/asm/scala/tools/asm/tree/ClassNode.java
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
@@ -39,6 +39,7 @@ import scala.tools.asm.ClassVisitor;
import scala.tools.asm.FieldVisitor;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
/**
* A node that represents a class.
@@ -133,6 +134,24 @@ public class ClassNode extends ClassVisitor {
public List<AnnotationNode> invisibleAnnotations;
/**
+ * The runtime visible type annotations of this class. This list is a list
+ * of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label visible
+ */
+ public List<TypeAnnotationNode> visibleTypeAnnotations;
+
+ /**
+ * The runtime invisible type annotations of this class. This list is a list
+ * of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label invisible
+ */
+ public List<TypeAnnotationNode> invisibleTypeAnnotations;
+
+ /**
* The non standard attributes of this class. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
@@ -168,9 +187,15 @@ public class ClassNode extends ClassVisitor {
* Constructs a new {@link ClassNode}. <i>Subclasses must not use this
* constructor</i>. Instead, they must use the {@link #ClassNode(int)}
* version.
+ *
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public ClassNode() {
- this(Opcodes.ASM4);
+ this(Opcodes.ASM5);
+ if (getClass() != ClassNode.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -178,7 +203,7 @@ public class ClassNode extends ClassVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public ClassNode(final int api) {
super(api);
@@ -239,6 +264,24 @@ public class ClassNode extends ClassVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc);
+ if (visible) {
+ if (visibleTypeAnnotations == null) {
+ visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
+ }
+ visibleTypeAnnotations.add(an);
+ } else {
+ if (invisibleTypeAnnotations == null) {
+ invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
+ }
+ invisibleTypeAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
if (attrs == null) {
attrs = new ArrayList<Attribute>(1);
@@ -286,10 +329,26 @@ public class ClassNode extends ClassVisitor {
* API than the given version.
*
* @param api
- * an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * an ASM API version. Must be one of {@link Opcodes#ASM4} or
+ * {@link Opcodes#ASM5}.
*/
public void check(final int api) {
- // nothing to do
+ if (api == Opcodes.ASM4) {
+ if (visibleTypeAnnotations != null
+ && visibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (invisibleTypeAnnotations != null
+ && invisibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ for (FieldNode f : fields) {
+ f.check(api);
+ }
+ for (MethodNode m : methods) {
+ m.check(api);
+ }
+ }
}
/**
@@ -323,6 +382,19 @@ public class ClassNode extends ClassVisitor {
AnnotationNode an = invisibleAnnotations.get(i);
an.accept(cv.visitAnnotation(an.desc, false));
}
+ n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ TypeAnnotationNode an = visibleTypeAnnotations.get(i);
+ an.accept(cv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc,
+ true));
+ }
+ n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations
+ .size();
+ for (i = 0; i < n; ++i) {
+ TypeAnnotationNode an = invisibleTypeAnnotations.get(i);
+ an.accept(cv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc,
+ false));
+ }
n = attrs == null ? 0 : attrs.size();
for (i = 0; i < n; ++i) {
cv.visitAttribute(attrs.get(i));
diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
index 0c94f18adf..c027de109b 100644
--- a/src/asm/scala/tools/asm/tree/FieldInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
@@ -97,12 +97,14 @@ public class FieldInsnNode extends AbstractInsnNode {
}
@Override
- public void accept(final MethodVisitor cv) {
- cv.visitFieldInsn(opcode, owner, name, desc);
+ public void accept(final MethodVisitor mv) {
+ mv.visitFieldInsn(opcode, owner, name, desc);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new FieldInsnNode(opcode, owner, name, desc);
+ return new FieldInsnNode(opcode, owner, name, desc)
+ .cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
index 61b614ec59..3fb14dac4f 100644
--- a/src/asm/scala/tools/asm/tree/FieldNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
@@ -37,6 +37,7 @@ import scala.tools.asm.Attribute;
import scala.tools.asm.ClassVisitor;
import scala.tools.asm.FieldVisitor;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
/**
* A node that represents a field.
@@ -92,6 +93,24 @@ public class FieldNode extends FieldVisitor {
public List<AnnotationNode> invisibleAnnotations;
/**
+ * The runtime visible type annotations of this field. This list is a list
+ * of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label visible
+ */
+ public List<TypeAnnotationNode> visibleTypeAnnotations;
+
+ /**
+ * The runtime invisible type annotations of this field. This list is a list
+ * of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label invisible
+ */
+ public List<TypeAnnotationNode> invisibleTypeAnnotations;
+
+ /**
* The non standard attributes of this field. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
@@ -120,20 +139,24 @@ public class FieldNode extends FieldVisitor {
* <tt>null</tt> if the field does not have an initial value,
* must be an {@link Integer}, a {@link Float}, a {@link Long}, a
* {@link Double} or a {@link String}.
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public FieldNode(final int access, final String name, final String desc,
final String signature, final Object value) {
- this(Opcodes.ASM4, access, name, desc, signature, value);
+ this(Opcodes.ASM5, access, name, desc, signature, value);
+ if (getClass() != FieldNode.class) {
+ throw new IllegalStateException();
+ }
}
/**
* Constructs a new {@link FieldNode}. <i>Subclasses must not use this
- * constructor</i>. Instead, they must use the
- * {@link #FieldNode(int, int, String, String, String, Object)} version.
+ * constructor</i>.
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param access
* the field's access flags (see
* {@link scala.tools.asm.Opcodes}). This parameter also
@@ -184,6 +207,24 @@ public class FieldNode extends FieldVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc);
+ if (visible) {
+ if (visibleTypeAnnotations == null) {
+ visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
+ }
+ visibleTypeAnnotations.add(an);
+ } else {
+ if (invisibleTypeAnnotations == null) {
+ invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
+ }
+ invisibleTypeAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
if (attrs == null) {
attrs = new ArrayList<Attribute>(1);
@@ -206,10 +247,20 @@ public class FieldNode extends FieldVisitor {
* API than the given version.
*
* @param api
- * an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * an ASM API version. Must be one of {@link Opcodes#ASM4} or
+ * {@link Opcodes#ASM5}.
*/
public void check(final int api) {
- // nothing to do
+ if (api == Opcodes.ASM4) {
+ if (visibleTypeAnnotations != null
+ && visibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (invisibleTypeAnnotations != null
+ && invisibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ }
}
/**
@@ -234,6 +285,19 @@ public class FieldNode extends FieldVisitor {
AnnotationNode an = invisibleAnnotations.get(i);
an.accept(fv.visitAnnotation(an.desc, false));
}
+ n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ TypeAnnotationNode an = visibleTypeAnnotations.get(i);
+ an.accept(fv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc,
+ true));
+ }
+ n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations
+ .size();
+ for (i = 0; i < n; ++i) {
+ TypeAnnotationNode an = invisibleTypeAnnotations.get(i);
+ an.accept(fv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc,
+ false));
+ }
n = attrs == null ? 0 : attrs.size();
for (i = 0; i < n; ++i) {
fv.visitAttribute(attrs.get(i));
diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
index f9adf2e38c..c37ac91c27 100644
--- a/src/asm/scala/tools/asm/tree/IincInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
@@ -73,10 +73,11 @@ public class IincInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitIincInsn(var, incr);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new IincInsnNode(var, incr);
+ return new IincInsnNode(var, incr).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
index b1e2d97c6f..e808712e78 100644
--- a/src/asm/scala/tools/asm/tree/InsnList.java
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -100,7 +100,7 @@ public class InsnList {
* the index of the instruction that must be returned.
* @return the instruction whose index is given.
* @throws IndexOutOfBoundsException
- * if (index < 0 || index >= size()).
+ * if (index &lt; 0 || index &gt;= size()).
*/
public AbstractInsnNode get(final int index) {
if (index < 0 || index >= size) {
@@ -535,6 +535,8 @@ public class InsnList {
AbstractInsnNode prev;
+ AbstractInsnNode remove;
+
InsnListIterator(int index) {
if (index == size()) {
next = null;
@@ -556,12 +558,22 @@ public class InsnList {
AbstractInsnNode result = next;
prev = result;
next = result.next;
+ remove = result;
return result;
}
public void remove() {
- InsnList.this.remove(prev);
- prev = prev.prev;
+ if (remove != null) {
+ if (remove == next) {
+ next = next.next;
+ } else {
+ prev = prev.prev;
+ }
+ InsnList.this.remove(remove);
+ remove = null;
+ } else {
+ throw new IllegalStateException();
+ }
}
public boolean hasPrevious() {
@@ -572,6 +584,7 @@ public class InsnList {
AbstractInsnNode result = prev;
next = result;
prev = result.prev;
+ remove = result;
return result;
}
@@ -598,6 +611,7 @@ public class InsnList {
public void add(Object o) {
InsnList.this.insertBefore(next, (AbstractInsnNode) o);
prev = (AbstractInsnNode) o;
+ remove = null;
}
public void set(Object o) {
diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
index 4d5288cafa..f5313929ee 100644
--- a/src/asm/scala/tools/asm/tree/InsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
@@ -78,10 +78,11 @@ public class InsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitInsn(opcode);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new InsnNode(opcode);
+ return new InsnNode(opcode).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
index e0aeed4bc8..6bbe8d845c 100644
--- a/src/asm/scala/tools/asm/tree/IntInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
@@ -78,10 +78,11 @@ public class IntInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitIntInsn(opcode, operand);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new IntInsnNode(opcode, operand);
+ return new IntInsnNode(opcode, operand).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
index 7ee84b875b..0f85e60291 100644
--- a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
@@ -91,10 +91,12 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs);
+ return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs)
+ .cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
index 81e1e09deb..8b8a769204 100644
--- a/src/asm/scala/tools/asm/tree/JumpInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
@@ -86,10 +86,12 @@ public class JumpInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitJumpInsn(opcode, label.getLabel());
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new JumpInsnNode(opcode, clone(label, labels));
+ return new JumpInsnNode(opcode, clone(label, labels))
+ .cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
index 4e328f9b39..1cc850bb31 100644
--- a/src/asm/scala/tools/asm/tree/LdcInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
@@ -69,10 +69,11 @@ public class LdcInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitLdcInsn(cst);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new LdcInsnNode(cst);
+ return new LdcInsnNode(cst).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java b/src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java
new file mode 100644
index 0000000000..d05b808171
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LocalVariableAnnotationNode.java
@@ -0,0 +1,157 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
+import scala.tools.asm.TypeReference;
+
+/**
+ * A node that represents a type annotation on a local or resource variable.
+ *
+ * @author Eric Bruneton
+ */
+public class LocalVariableAnnotationNode extends TypeAnnotationNode {
+
+ /**
+ * The first instructions corresponding to the continuous ranges that make
+ * the scope of this local variable (inclusive). Must not be <tt>null</tt>.
+ */
+ public List<LabelNode> start;
+
+ /**
+ * The last instructions corresponding to the continuous ranges that make
+ * the scope of this local variable (exclusive). This list must have the
+ * same size as the 'start' list. Must not be <tt>null</tt>.
+ */
+ public List<LabelNode> end;
+
+ /**
+ * The local variable's index in each range. This list must have the same
+ * size as the 'start' list. Must not be <tt>null</tt>.
+ */
+ public List<Integer> index;
+
+ /**
+ * Constructs a new {@link LocalVariableAnnotationNode}. <i>Subclasses must
+ * not use this constructor</i>. Instead, they must use the
+ * {@link #LocalVariableAnnotationNode(int, TypePath, LabelNode[], LabelNode[], int[], String)}
+ * version.
+ *
+ * @param typeRef
+ * a reference to the annotated type. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param start
+ * the first instructions corresponding to the continuous ranges
+ * that make the scope of this local variable (inclusive).
+ * @param end
+ * the last instructions corresponding to the continuous ranges
+ * that make the scope of this local variable (exclusive). This
+ * array must have the same size as the 'start' array.
+ * @param index
+ * the local variable's index in each range. This array must have
+ * the same size as the 'start' array.
+ * @param desc
+ * the class descriptor of the annotation class.
+ */
+ public LocalVariableAnnotationNode(int typeRef, TypePath typePath,
+ LabelNode[] start, LabelNode[] end, int[] index, String desc) {
+ this(Opcodes.ASM5, typeRef, typePath, start, end, index, desc);
+ }
+
+ /**
+ * Constructs a new {@link LocalVariableAnnotationNode}.
+ *
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
+ * @param typeRef
+ * a reference to the annotated type. See {@link TypeReference}.
+ * @param start
+ * the first instructions corresponding to the continuous ranges
+ * that make the scope of this local variable (inclusive).
+ * @param end
+ * the last instructions corresponding to the continuous ranges
+ * that make the scope of this local variable (exclusive). This
+ * array must have the same size as the 'start' array.
+ * @param index
+ * the local variable's index in each range. This array must have
+ * the same size as the 'start' array.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ */
+ public LocalVariableAnnotationNode(int api, int typeRef, TypePath typePath,
+ LabelNode[] start, LabelNode[] end, int[] index, String desc) {
+ super(api, typeRef, typePath, desc);
+ this.start = new ArrayList<LabelNode>(start.length);
+ this.start.addAll(Arrays.asList(start));
+ this.end = new ArrayList<LabelNode>(end.length);
+ this.end.addAll(Arrays.asList(end));
+ this.index = new ArrayList<Integer>(index.length);
+ for (int i : index) {
+ this.index.add(i);
+ }
+ }
+
+ /**
+ * Makes the given visitor visit this type annotation.
+ *
+ * @param mv
+ * the visitor that must visit this annotation.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ */
+ public void accept(final MethodVisitor mv, boolean visible) {
+ Label[] start = new Label[this.start.size()];
+ Label[] end = new Label[this.end.size()];
+ int[] index = new int[this.index.size()];
+ for (int i = 0; i < start.length; ++i) {
+ start[i] = this.start.get(i).getLabel();
+ end[i] = this.end.get(i).getLabel();
+ index[i] = this.index.get(i);
+ }
+ accept(mv.visitLocalVariableAnnotation(typeRef, typePath, start, end,
+ index, desc, visible));
+ }
+}
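A minimal sketch (not part of this diff) of constructing such a node for a local variable in slot 1; startLabel, endLabel and the annotation descriptor are hypothetical placeholders:

    LocalVariableAnnotationNode lva = new LocalVariableAnnotationNode(
            TypeReference.newTypeReference(TypeReference.LOCAL_VARIABLE).getValue(),
            null,                                  // annotates the type as a whole
            new LabelNode[] { startLabel },        // range start (hypothetical label)
            new LabelNode[] { endLabel },          // range end (hypothetical label)
            new int[] { 1 },                       // local variable index
            "Lorg/example/NonNull;");              // hypothetical annotation descriptor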
diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
index d2479b4814..7db2f53ff4 100644
--- a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
@@ -105,6 +105,7 @@ public class LookupSwitchInsnNode extends AbstractInsnNode {
labels[i] = this.labels.get(i).getLabel();
}
mv.visitLookupSwitchInsn(dflt.getLabel(), keys, labels);
+ acceptAnnotations(mv);
}
@Override
@@ -112,6 +113,6 @@ public class LookupSwitchInsnNode extends AbstractInsnNode {
LookupSwitchInsnNode clone = new LookupSwitchInsnNode(clone(dflt,
labels), null, clone(this.labels, labels));
clone.keys.addAll(keys);
- return clone;
+ return clone.cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
index bf09f556d8..30c7854646 100644
--- a/src/asm/scala/tools/asm/tree/MethodInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
@@ -32,6 +32,7 @@ package scala.tools.asm.tree;
import java.util.Map;
import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
/**
* A node that represents a method instruction. A method instruction is an
@@ -44,6 +45,7 @@ public class MethodInsnNode extends AbstractInsnNode {
/**
* The internal name of the method's owner class (see
* {@link scala.tools.asm.Type#getInternalName() getInternalName}).
+ * For methods of arrays, e.g., clone(), the array type descriptor.
*/
public String owner;
@@ -58,6 +60,11 @@ public class MethodInsnNode extends AbstractInsnNode {
public String desc;
/**
+ * Whether the method's owner class is an interface.
+ */
+ public boolean itf;
+
+ /**
* Constructs a new {@link MethodInsnNode}.
*
* @param opcode
@@ -73,12 +80,37 @@ public class MethodInsnNode extends AbstractInsnNode {
* @param desc
* the method's descriptor (see {@link scala.tools.asm.Type}).
*/
+ @Deprecated
public MethodInsnNode(final int opcode, final String owner,
final String name, final String desc) {
+ this(opcode, owner, name, desc, opcode == Opcodes.INVOKEINTERFACE);
+ }
+
+ /**
+ * Constructs a new {@link MethodInsnNode}.
+ *
+ * @param opcode
+ * the opcode of the method instruction to be constructed. This
+ * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner
+ * the internal name of the method's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link scala.tools.asm.Type}).
+ * @param itf
+ * if the method's owner class is an interface.
+ */
+ public MethodInsnNode(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
super(opcode);
this.owner = owner;
this.name = name;
this.desc = desc;
+ this.itf = itf;
}
/**
@@ -99,11 +131,11 @@ public class MethodInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
- mv.visitMethodInsn(opcode, owner, name, desc);
+ mv.visitMethodInsn(opcode, owner, name, desc, itf);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new MethodInsnNode(opcode, owner, name, desc);
+ return new MethodInsnNode(opcode, owner, name, desc, itf);
}
}
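
A short sketch of the constructor change, assuming standard scala.tools.asm imports; the call targets are only examples:

    // The 5-argument constructor makes the interface flag explicit.
    MethodInsnNode virtualCall = new MethodInsnNode(
            Opcodes.INVOKEVIRTUAL, "java/lang/Object", "hashCode", "()I", false);
    MethodInsnNode interfaceCall = new MethodInsnNode(
            Opcodes.INVOKEINTERFACE, "java/lang/Runnable", "run", "()V", true);
    // The deprecated 4-argument form infers the flag from the opcode,
    // i.e. itf == (opcode == Opcodes.INVOKEINTERFACE).
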
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
index a161600edb..3dec50e02c 100644
--- a/src/asm/scala/tools/asm/tree/MethodNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -41,6 +41,7 @@ import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
import scala.tools.asm.Type;
+import scala.tools.asm.TypePath;
/**
* A node that represents a method.
@@ -78,6 +79,11 @@ public class MethodNode extends MethodVisitor {
public List<String> exceptions;
/**
+ * The method parameter info (access flags and name).
+ */
+ public List<ParameterNode> parameters;
+
+ /**
* The runtime visible annotations of this method. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
@@ -96,6 +102,24 @@ public class MethodNode extends MethodVisitor {
public List<AnnotationNode> invisibleAnnotations;
/**
+ * The runtime visible type annotations of this method. This list is a list
+ * of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label visible
+ */
+ public List<TypeAnnotationNode> visibleTypeAnnotations;
+
+ /**
+ * The runtime invisible type annotations of this method. This list is a
+ * list of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label invisible
+ */
+ public List<TypeAnnotationNode> invisibleTypeAnnotations;
+
+ /**
* The non standard attributes of this method. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
@@ -167,6 +191,22 @@ public class MethodNode extends MethodVisitor {
public List<LocalVariableNode> localVariables;
/**
+ * The visible local variable annotations of this method. This list is a
+ * list of {@link LocalVariableAnnotationNode} objects. May be <tt>null</tt>
+ *
+ * @associates scala.tools.asm.tree.LocalVariableAnnotationNode
+ */
+ public List<LocalVariableAnnotationNode> visibleLocalVariableAnnotations;
+
+ /**
+ * The invisible local variable annotations of this method. This list is a
+ * list of {@link LocalVariableAnnotationNode} objects. May be <tt>null</tt>
+ *
+ * @associates scala.tools.asm.tree.LocalVariableAnnotationNode
+ */
+ public List<LocalVariableAnnotationNode> invisibleLocalVariableAnnotations;
+
+ /**
* If the accept method has been called on this object.
*/
private boolean visited;
@@ -175,9 +215,15 @@ public class MethodNode extends MethodVisitor {
* Constructs an uninitialized {@link MethodNode}. <i>Subclasses must not
* use this constructor</i>. Instead, they must use the
* {@link #MethodNode(int)} version.
+ *
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public MethodNode() {
- this(Opcodes.ASM4);
+ this(Opcodes.ASM5);
+ if (getClass() != MethodNode.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -185,7 +231,7 @@ public class MethodNode extends MethodVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
public MethodNode(final int api) {
super(api);
@@ -211,10 +257,15 @@ public class MethodNode extends MethodVisitor {
* the internal names of the method's exception classes (see
* {@link Type#getInternalName() getInternalName}). May be
* <tt>null</tt>.
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public MethodNode(final int access, final String name, final String desc,
final String signature, final String[] exceptions) {
- this(Opcodes.ASM4, access, name, desc, signature, exceptions);
+ this(Opcodes.ASM5, access, name, desc, signature, exceptions);
+ if (getClass() != MethodNode.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -222,7 +273,7 @@ public class MethodNode extends MethodVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param access
* the method's access flags (see {@link Opcodes}). This
* parameter also indicates if the method is synthetic and/or
@@ -263,6 +314,15 @@ public class MethodNode extends MethodVisitor {
// ------------------------------------------------------------------------
@Override
+ public void visitParameter(String name, int access) {
+ if (parameters == null) {
+ parameters = new ArrayList<ParameterNode>(5);
+ }
+ parameters.add(new ParameterNode(name, access));
+ }
+
+ @Override
+ @SuppressWarnings("serial")
public AnnotationVisitor visitAnnotationDefault() {
return new AnnotationNode(new ArrayList<Object>(0) {
@Override
@@ -292,6 +352,24 @@ public class MethodNode extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc);
+ if (visible) {
+ if (visibleTypeAnnotations == null) {
+ visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
+ }
+ visibleTypeAnnotations.add(an);
+ } else {
+ if (invisibleTypeAnnotations == null) {
+ invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
+ }
+ invisibleTypeAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
public AnnotationVisitor visitParameterAnnotation(final int parameter,
final String desc, final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
@@ -365,13 +443,28 @@ public class MethodNode extends MethodVisitor {
instructions.add(new FieldInsnNode(opcode, owner, name, desc));
}
+ @Deprecated
@Override
- public void visitMethodInsn(final int opcode, final String owner,
- final String name, final String desc) {
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc) {
+ if (api >= Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
instructions.add(new MethodInsnNode(opcode, owner, name, desc));
}
@Override
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc, boolean itf) {
+ if (api < Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ instructions.add(new MethodInsnNode(opcode, owner, name, desc, itf));
+ }
+
+ @Override
public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
Object... bsmArgs) {
instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs));
@@ -417,6 +510,33 @@ public class MethodNode extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitInsnAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ // Finds the last real instruction, i.e. the instruction targeted by
+ // this annotation.
+ AbstractInsnNode insn = instructions.getLast();
+ while (insn.getOpcode() == -1) {
+ insn = insn.getPrevious();
+ }
+ // Adds the annotation to this instruction.
+ TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc);
+ if (visible) {
+ if (insn.visibleTypeAnnotations == null) {
+ insn.visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(
+ 1);
+ }
+ insn.visibleTypeAnnotations.add(an);
+ } else {
+ if (insn.invisibleTypeAnnotations == null) {
+ insn.invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(
+ 1);
+ }
+ insn.invisibleTypeAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start),
@@ -424,6 +544,27 @@ public class MethodNode extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ TryCatchBlockNode tcb = tryCatchBlocks.get((typeRef & 0x00FFFF00) >> 8);
+ TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc);
+ if (visible) {
+ if (tcb.visibleTypeAnnotations == null) {
+ tcb.visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(
+ 1);
+ }
+ tcb.visibleTypeAnnotations.add(an);
+ } else {
+ if (tcb.invisibleTypeAnnotations == null) {
+ tcb.invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(
+ 1);
+ }
+ tcb.invisibleTypeAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
public void visitLocalVariable(final String name, final String desc,
final String signature, final Label start, final Label end,
final int index) {
@@ -432,6 +573,29 @@ public class MethodNode extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
+ TypePath typePath, Label[] start, Label[] end, int[] index,
+ String desc, boolean visible) {
+ LocalVariableAnnotationNode an = new LocalVariableAnnotationNode(
+ typeRef, typePath, getLabelNodes(start), getLabelNodes(end),
+ index, desc);
+ if (visible) {
+ if (visibleLocalVariableAnnotations == null) {
+ visibleLocalVariableAnnotations = new ArrayList<LocalVariableAnnotationNode>(
+ 1);
+ }
+ visibleLocalVariableAnnotations.add(an);
+ } else {
+ if (invisibleLocalVariableAnnotations == null) {
+ invisibleLocalVariableAnnotations = new ArrayList<LocalVariableAnnotationNode>(
+ 1);
+ }
+ invisibleLocalVariableAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
public void visitLineNumber(final int line, final Label start) {
instructions.add(new LineNumberNode(line, getLabelNode(start)));
}
@@ -494,10 +658,57 @@ public class MethodNode extends MethodVisitor {
* versions of the ASM API than the given version.
*
* @param api
- * an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * an ASM API version. Must be one of {@link Opcodes#ASM4} or
+ * {@link Opcodes#ASM5}.
*/
public void check(final int api) {
- // nothing to do
+ if (api == Opcodes.ASM4) {
+ if (visibleTypeAnnotations != null
+ && visibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (invisibleTypeAnnotations != null
+ && invisibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ int n = tryCatchBlocks == null ? 0 : tryCatchBlocks.size();
+ for (int i = 0; i < n; ++i) {
+ TryCatchBlockNode tcb = tryCatchBlocks.get(i);
+ if (tcb.visibleTypeAnnotations != null
+ && tcb.visibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (tcb.invisibleTypeAnnotations != null
+ && tcb.invisibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ }
+ for (int i = 0; i < instructions.size(); ++i) {
+ AbstractInsnNode insn = instructions.get(i);
+ if (insn.visibleTypeAnnotations != null
+ && insn.visibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (insn.invisibleTypeAnnotations != null
+ && insn.invisibleTypeAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (insn instanceof MethodInsnNode) {
+ boolean itf = ((MethodInsnNode) insn).itf;
+ if (itf != (insn.opcode == Opcodes.INVOKEINTERFACE)) {
+ throw new RuntimeException();
+ }
+ }
+ }
+ if (visibleLocalVariableAnnotations != null
+ && visibleLocalVariableAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ if (invisibleLocalVariableAnnotations != null
+ && invisibleLocalVariableAnnotations.size() > 0) {
+ throw new RuntimeException();
+ }
+ }
}
/**
@@ -523,8 +734,14 @@ public class MethodNode extends MethodVisitor {
* a method visitor.
*/
public void accept(final MethodVisitor mv) {
- // visits the method attributes
+ // visits the method parameters
int i, j, n;
+ n = parameters == null ? 0 : parameters.size();
+ for (i = 0; i < n; i++) {
+ ParameterNode parameter = parameters.get(i);
+ mv.visitParameter(parameter.name, parameter.access);
+ }
+ // visits the method attributes
if (annotationDefault != null) {
AnnotationVisitor av = mv.visitAnnotationDefault();
AnnotationNode.accept(av, null, annotationDefault);
@@ -542,6 +759,19 @@ public class MethodNode extends MethodVisitor {
AnnotationNode an = invisibleAnnotations.get(i);
an.accept(mv.visitAnnotation(an.desc, false));
}
+ n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ TypeAnnotationNode an = visibleTypeAnnotations.get(i);
+ an.accept(mv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc,
+ true));
+ }
+ n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations
+ .size();
+ for (i = 0; i < n; ++i) {
+ TypeAnnotationNode an = invisibleTypeAnnotations.get(i);
+ an.accept(mv.visitTypeAnnotation(an.typeRef, an.typePath, an.desc,
+ false));
+ }
n = visibleParameterAnnotations == null ? 0
: visibleParameterAnnotations.length;
for (i = 0; i < n; ++i) {
@@ -579,6 +809,7 @@ public class MethodNode extends MethodVisitor {
// visits try catch blocks
n = tryCatchBlocks == null ? 0 : tryCatchBlocks.size();
for (i = 0; i < n; ++i) {
+ tryCatchBlocks.get(i).updateIndex(i);
tryCatchBlocks.get(i).accept(mv);
}
// visits instructions
@@ -588,6 +819,17 @@ public class MethodNode extends MethodVisitor {
for (i = 0; i < n; ++i) {
localVariables.get(i).accept(mv);
}
+ // visits local variable annotations
+ n = visibleLocalVariableAnnotations == null ? 0
+ : visibleLocalVariableAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ visibleLocalVariableAnnotations.get(i).accept(mv, true);
+ }
+ n = invisibleLocalVariableAnnotations == null ? 0
+ : invisibleLocalVariableAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ invisibleLocalVariableAnnotations.get(i).accept(mv, false);
+ }
// visits maxs
mv.visitMaxs(maxStack, maxLocals);
visited = true;
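
As a rough usage sketch (not part of the change itself), the new lists on MethodNode are created lazily by the corresponding visit methods; the method name and the annotation descriptor below are illustrative:

    MethodNode mn = new MethodNode(Opcodes.ASM5, Opcodes.ACC_PUBLIC, "accept",
            "(Ljava/lang/Object;)V", null, null);
    // The new fields stay null until the first matching visit call.
    assert mn.parameters == null && mn.visibleTypeAnnotations == null;
    mn.visitParameter("value", 0);
    mn.visitTypeAnnotation(
            TypeReference.newTypeReference(TypeReference.METHOD_RETURN).getValue(),
            null, "Lorg/example/Anno;", true);
    assert mn.parameters.size() == 1 && mn.visibleTypeAnnotations.size() == 1;
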
diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
index fe5e8832b3..a8339a20b5 100644
--- a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
@@ -73,11 +73,12 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitMultiANewArrayInsn(desc, dims);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new MultiANewArrayInsnNode(desc, dims);
+ return new MultiANewArrayInsnNode(desc, dims).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/ParameterNode.java b/src/asm/scala/tools/asm/tree/ParameterNode.java
new file mode 100644
index 0000000000..a3e55d5629
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/ParameterNode.java
@@ -0,0 +1,76 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a method parameter's access flags and name.
+ *
+ * @author Remi Forax
+ */
+public class ParameterNode {
+ /**
+ * The parameter's name.
+ */
+ public String name;
+
+ /**
+ * The parameter's access flags (see {@link scala.tools.asm.Opcodes}).
+ * Valid values are <tt>ACC_FINAL</tt>, <tt>ACC_SYNTHETIC</tt> and
+ * <tt>ACC_MANDATED</tt>.
+ */
+ public int access;
+
+ /**
+ * Constructs a new {@link ParameterNode}.
+ *
+ * @param access
+ * The parameter's access flags. Valid values are
+ * <tt>ACC_FINAL</tt>, <tt>ACC_SYNTHETIC</tt> and/or
+ * <tt>ACC_MANDATED</tt> (see {@link scala.tools.asm.Opcodes}).
+ * @param name
+ * the parameter's name.
+ */
+ public ParameterNode(final String name, final int access) {
+ this.name = name;
+ this.access = access;
+ }
+
+ /**
+ * Makes the given visitor visit this parameter declaration.
+ *
+ * @param mv
+ * a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ mv.visitParameter(name, access);
+ }
+}
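
A tiny sketch, again assuming the scala.tools.asm imports; the mandated outer-instance parameter name is just an example of what a compiler might emit:

    ParameterNode p = new ParameterNode("this$0", Opcodes.ACC_FINAL | Opcodes.ACC_MANDATED);
    MethodNode sink = new MethodNode(Opcodes.ASM5);
    p.accept(sink);   // forwards to MethodVisitor.visitParameter(name, access)
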
diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
index 9b3c2a3437..fb17b9e2e9 100644
--- a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
@@ -103,11 +103,12 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
labels[i] = this.labels.get(i).getLabel();
}
mv.visitTableSwitchInsn(min, max, dflt.getLabel(), labels);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new TableSwitchInsnNode(min, max, clone(dflt, labels), clone(
- this.labels, labels));
+ this.labels, labels)).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
index ab4fa97c34..c639b9aa8b 100644
--- a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
@@ -29,6 +29,8 @@
*/
package scala.tools.asm.tree;
+import java.util.List;
+
import scala.tools.asm.MethodVisitor;
/**
@@ -60,6 +62,26 @@ public class TryCatchBlockNode {
public String type;
/**
+ * The runtime visible type annotations on the exception handler type. This
+ * list is a list of {@link TypeAnnotationNode} objects. May be
+ * <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label visible
+ */
+ public List<TypeAnnotationNode> visibleTypeAnnotations;
+
+ /**
+ * The runtime invisible type annotations on the exception handler type.
+ * This list is a list of {@link TypeAnnotationNode} objects. May be
+ * <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.TypeAnnotationNode
+ * @label invisible
+ */
+ public List<TypeAnnotationNode> invisibleTypeAnnotations;
+
+ /**
* Constructs a new {@link TryCatchBlockNode}.
*
* @param start
@@ -82,6 +104,29 @@ public class TryCatchBlockNode {
}
/**
+ * Updates the index of this try catch block in the method's list of try
+ * catch block nodes. This index may be stored in the 'target' field of the
+ * type annotations of this block.
+ *
+ * @param index
+ * the new index of this try catch block in the method's list of
+ * try catch block nodes.
+ */
+ public void updateIndex(final int index) {
+ int newTypeRef = 0x42000000 | (index << 8);
+ if (visibleTypeAnnotations != null) {
+ for (TypeAnnotationNode tan : visibleTypeAnnotations) {
+ tan.typeRef = newTypeRef;
+ }
+ }
+ if (invisibleTypeAnnotations != null) {
+ for (TypeAnnotationNode tan : invisibleTypeAnnotations) {
+ tan.typeRef = newTypeRef;
+ }
+ }
+ }
+
+ /**
* Makes the given visitor visit this try catch block.
*
* @param mv
@@ -90,5 +135,19 @@ public class TryCatchBlockNode {
public void accept(final MethodVisitor mv) {
mv.visitTryCatchBlock(start.getLabel(), end.getLabel(),
handler == null ? null : handler.getLabel(), type);
+ int n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations
+ .size();
+ for (int i = 0; i < n; ++i) {
+ TypeAnnotationNode an = visibleTypeAnnotations.get(i);
+ an.accept(mv.visitTryCatchAnnotation(an.typeRef, an.typePath,
+ an.desc, true));
+ }
+ n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations
+ .size();
+ for (int i = 0; i < n; ++i) {
+ TypeAnnotationNode an = invisibleTypeAnnotations.get(i);
+ an.accept(mv.visitTryCatchAnnotation(an.typeRef, an.typePath,
+ an.desc, false));
+ }
}
}
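
The typeRef rewritten by updateIndex encodes the EXCEPTION_PARAMETER target sort (0x42) in the top byte and the try-catch block index in bits 23..8, matching the decoding in MethodNode.visitTryCatchAnnotation. A small sketch of that layout:

    int index = 3;
    int typeRef = 0x42000000 | (index << 8);             // same formula as updateIndex
    assert (typeRef >>> 24) == TypeReference.EXCEPTION_PARAMETER;
    assert ((typeRef & 0x00FFFF00) >> 8) == index;       // decoding used by visitTryCatchAnnotation
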
diff --git a/src/asm/scala/tools/asm/tree/TypeAnnotationNode.java b/src/asm/scala/tools/asm/tree/TypeAnnotationNode.java
new file mode 100644
index 0000000000..73b29624f7
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TypeAnnotationNode.java
@@ -0,0 +1,100 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
+import scala.tools.asm.TypeReference;
+
+/**
+ * A node that represents a type annotation.
+ *
+ * @author Eric Bruneton
+ */
+public class TypeAnnotationNode extends AnnotationNode {
+
+ /**
+ * A reference to the annotated type. See {@link TypeReference}.
+ */
+ public int typeRef;
+
+ /**
+ * The path to the annotated type argument, wildcard bound, array element
+ * type, or static outer type within the referenced type. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ */
+ public TypePath typePath;
+
+ /**
+ * Constructs a new {@link TypeAnnotationNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #TypeAnnotationNode(int, int, TypePath, String)} version.
+ *
+ * @param typeRef
+ * a reference to the annotated type. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
+ */
+ public TypeAnnotationNode(final int typeRef, final TypePath typePath,
+ final String desc) {
+ this(Opcodes.ASM5, typeRef, typePath, desc);
+ if (getClass() != TypeAnnotationNode.class) {
+ throw new IllegalStateException();
+ }
+ }
+
+ /**
+ * Constructs a new {@link TypeAnnotationNode}.
+ *
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
+ * @param typeRef
+ * a reference to the annotated type. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ */
+ public TypeAnnotationNode(final int api, final int typeRef,
+ final TypePath typePath, final String desc) {
+ super(api, desc);
+ this.typeRef = typeRef;
+ this.typePath = typePath;
+ }
+}
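
A minimal construction sketch; the descriptor is made up, and the short constructor is for direct instantiation only (a subclass calling it gets an IllegalStateException and must use the api-taking variant instead):

    int fieldRef = TypeReference.newTypeReference(TypeReference.FIELD).getValue();
    TypeAnnotationNode an = new TypeAnnotationNode(fieldRef, null, "Lorg/example/NotNull;");
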
diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
index 3210dd60e6..401400c3cb 100644
--- a/src/asm/scala/tools/asm/tree/TypeInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
@@ -81,10 +81,11 @@ public class TypeInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitTypeInsn(opcode, desc);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new TypeInsnNode(opcode, desc);
+ return new TypeInsnNode(opcode, desc).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
index 5dd9ef6726..685e4fce2c 100644
--- a/src/asm/scala/tools/asm/tree/VarInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
@@ -84,10 +84,11 @@ public class VarInsnNode extends AbstractInsnNode {
@Override
public void accept(final MethodVisitor mv) {
mv.visitVarInsn(opcode, var);
+ acceptAnnotations(mv);
}
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new VarInsnNode(opcode, var);
+ return new VarInsnNode(opcode, var).cloneAnnotations(this);
}
}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
index 0134555f10..ff840aabde 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -375,7 +375,7 @@ public class Analyzer<V extends Value> implements Opcodes {
* instruction of the method. The size of the returned array is
* equal to the number of instructions (and labels) of the method. A
* given frame is <tt>null</tt> if the corresponding instruction
- * cannot be reached, or if an error occured during the analysis of
+ * cannot be reached, or if an error occurred during the analysis of
* the method.
*/
public Frame<V>[] getFrames() {
@@ -435,7 +435,7 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Creates a control flow graph edge. The default implementation of this
- * method does nothing. It can be overriden in order to construct the
+ * method does nothing. It can be overridden in order to construct the
* control flow graph of a method (this method is called by the
* {@link #analyze analyze} method during its visit of the method's code).
*
diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
index 5e3f51f21a..52b2a11d6f 100644
--- a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
@@ -37,6 +37,7 @@ import scala.tools.asm.tree.AbstractInsnNode;
* @author Bing Ran
* @author Eric Bruneton
*/
+@SuppressWarnings("serial")
public class AnalyzerException extends Exception {
public final AbstractInsnNode node;
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
index 8d6653c1c5..7d0b7b0694 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
@@ -53,7 +53,7 @@ public class BasicInterpreter extends Interpreter<BasicValue> implements
Opcodes {
public BasicInterpreter() {
- super(ASM4);
+ super(ASM5);
}
protected BasicInterpreter(final int api) {
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
index 71666edb74..b852f20acf 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
@@ -47,7 +47,7 @@ import scala.tools.asm.tree.MethodInsnNode;
public class BasicVerifier extends BasicInterpreter {
public BasicVerifier() {
- super(ASM4);
+ super(ASM5);
}
protected BasicVerifier(final int api) {
diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
index 0d92edc4d6..0b7f4ba53b 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Frame.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
@@ -134,6 +134,15 @@ public class Frame<V extends Value> {
}
/**
+ * Returns the maximum stack size of this frame.
+ *
+ * @return the maximum stack size of this frame.
+ */
+ public int getMaxStackSize() {
+ return values.length - locals;
+ }
+
+ /**
* Returns the value of the given local variable.
*
* @param i
@@ -716,14 +725,14 @@ public class Frame<V extends Value> {
*/
@Override
public String toString() {
- StringBuffer b = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
for (int i = 0; i < getLocals(); ++i) {
- b.append(getLocal(i));
+ sb.append(getLocal(i));
}
- b.append(' ');
+ sb.append(' ');
for (int i = 0; i < getStackSize(); ++i) {
- b.append(getStack(i).toString());
+ sb.append(getStack(i).toString());
}
- return b.toString();
+ return sb.toString();
}
}
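
A hedged sketch of how the new getMaxStackSize() accessor can be combined with the analysis package (it assumes the scala.tools.asm.tree and scala.tools.asm.tree.analysis imports; owner and method node are whatever the caller already has):

    static int maxStackCapacity(String owner, MethodNode mn) throws AnalyzerException {
        Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(new BasicInterpreter());
        analyzer.analyze(owner, mn);
        int max = 0;
        for (Frame<BasicValue> f : analyzer.getFrames()) {
            if (f != null) {
                // getMaxStackSize() is the frame's stack capacity, not the current stack depth.
                max = Math.max(max, f.getMaxStackSize());
            }
        }
        return max;
    }
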
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
index 56f4bedc00..00fe6c8bff 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -82,7 +82,7 @@ public abstract class Interpreter<V extends Value> {
* the bytecode instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V newOperation(AbstractInsnNode insn)
throws AnalyzerException;
@@ -101,7 +101,7 @@ public abstract class Interpreter<V extends Value> {
* @return the result of the interpretation of the given instruction. The
* returned value must be <tt>equal</tt> to the given value.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V copyOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -122,7 +122,7 @@ public abstract class Interpreter<V extends Value> {
* the argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V unaryOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -146,7 +146,7 @@ public abstract class Interpreter<V extends Value> {
* the second argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
throws AnalyzerException;
@@ -167,7 +167,7 @@ public abstract class Interpreter<V extends Value> {
* the third argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V ternaryOperation(AbstractInsnNode insn, V value1,
V value2, V value3) throws AnalyzerException;
@@ -185,7 +185,7 @@ public abstract class Interpreter<V extends Value> {
* the arguments of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V naryOperation(AbstractInsnNode insn,
List<? extends V> values) throws AnalyzerException;
@@ -203,7 +203,7 @@ public abstract class Interpreter<V extends Value> {
* @param expected
* the expected return type of the analyzed method.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract void returnOperation(AbstractInsnNode insn, V value,
V expected) throws AnalyzerException;
diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
index eaecd057ea..a345981f36 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
@@ -107,7 +107,7 @@ public class SimpleVerifier extends BasicVerifier {
public SimpleVerifier(final Type currentClass,
final Type currentSuperClass,
final List<Type> currentClassInterfaces, final boolean isInterface) {
- this(ASM4, currentClass, currentSuperClass, currentClassInterfaces,
+ this(ASM5, currentClass, currentSuperClass, currentClassInterfaces,
isInterface);
}
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
index a68086c073..7d739d3df9 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
@@ -50,7 +50,7 @@ public class SourceInterpreter extends Interpreter<SourceValue> implements
Opcodes {
public SourceInterpreter() {
- super(ASM4);
+ super(ASM5);
}
protected SourceInterpreter(final int api) {
diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
index 7e6b223853..521e07541b 100644
--- a/src/asm/scala/tools/asm/util/ASMifier.java
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
@@ -40,6 +40,7 @@ import scala.tools.asm.Handle;
import scala.tools.asm.Label;
import scala.tools.asm.Opcodes;
import scala.tools.asm.Type;
+import scala.tools.asm.TypePath;
/**
* A {@link Printer} that prints the ASM code to generate the classes it visits.
@@ -83,9 +84,15 @@ public class ASMifier extends Printer {
* Constructs a new {@link ASMifier}. <i>Subclasses must not use this
* constructor</i>. Instead, they must use the
* {@link #ASMifier(int, String, int)} version.
+ *
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public ASMifier() {
- this(Opcodes.ASM4, "cw", 0);
+ this(Opcodes.ASM5, "cw", 0);
+ if (getClass() != ASMifier.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -93,7 +100,7 @@ public class ASMifier extends Printer {
*
* @param api
* the ASM API version implemented by this class. Must be one of
- * {@link Opcodes#ASM4}.
+ * {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param name
* the name of the visitor variable in the produced code.
* @param id
@@ -170,7 +177,6 @@ public class ASMifier extends Printer {
}
text.add("import java.util.*;\n");
text.add("import scala.tools.asm.*;\n");
- text.add("import scala.tools.asm.attrs.*;\n");
text.add("public class " + simpleName + "Dump implements Opcodes {\n\n");
text.add("public static byte[] dump () throws Exception {\n\n");
text.add("ClassWriter cw = new ClassWriter(0);\n");
@@ -261,6 +267,12 @@ public class ASMifier extends Printer {
}
@Override
+ public ASMifier visitClassTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public void visitClassAttribute(final Attribute attr) {
visitAttribute(attr);
}
@@ -423,6 +435,12 @@ public class ASMifier extends Printer {
}
@Override
+ public ASMifier visitFieldTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public void visitFieldAttribute(final Attribute attr) {
visitAttribute(attr);
}
@@ -439,6 +457,16 @@ public class ASMifier extends Printer {
// ------------------------------------------------------------------------
@Override
+ public void visitParameter(String parameterName, int access) {
+ buf.setLength(0);
+ buf.append(name).append(".visitParameter(");
+ appendString(buf, parameterName);
+ buf.append(", ");
+ appendAccess(access);
+ text.add(buf.append(");\n").toString());
+ }
+
+ @Override
public ASMifier visitAnnotationDefault() {
buf.setLength(0);
buf.append("{\n").append("av0 = ").append(name)
@@ -457,6 +485,12 @@ public class ASMifier extends Printer {
}
@Override
+ public ASMifier visitMethodTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public ASMifier visitParameterAnnotation(final int parameter,
final String desc, final boolean visible) {
buf.setLength(0);
@@ -582,9 +616,30 @@ public class ASMifier extends Printer {
text.add(buf.toString());
}
+ @Deprecated
@Override
public void visitMethodInsn(final int opcode, final String owner,
final String name, final String desc) {
+ if (api >= Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc,
+ opcode == Opcodes.INVOKEINTERFACE);
+ }
+
+ @Override
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
+ if (api < Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc, itf);
+ }
+
+ private void doVisitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
buf.setLength(0);
buf.append(this.name).append(".visitMethodInsn(")
.append(OPCODES[opcode]).append(", ");
@@ -593,6 +648,8 @@ public class ASMifier extends Printer {
appendConstant(name);
buf.append(", ");
appendConstant(desc);
+ buf.append(", ");
+ buf.append(itf ? "true" : "false");
buf.append(");\n");
text.add(buf.toString());
}
@@ -711,6 +768,13 @@ public class ASMifier extends Printer {
}
@Override
+ public ASMifier visitInsnAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ return visitTypeAnnotation("visitInsnAnnotation", typeRef, typePath,
+ desc, visible);
+ }
+
+ @Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
buf.setLength(0);
@@ -730,6 +794,13 @@ public class ASMifier extends Printer {
}
@Override
+ public ASMifier visitTryCatchAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ return visitTypeAnnotation("visitTryCatchAnnotation", typeRef,
+ typePath, desc, visible);
+ }
+
+ @Override
public void visitLocalVariable(final String name, final String desc,
final String signature, final Label start, final Label end,
final int index) {
@@ -749,6 +820,39 @@ public class ASMifier extends Printer {
}
@Override
+ public Printer visitLocalVariableAnnotation(int typeRef, TypePath typePath,
+ Label[] start, Label[] end, int[] index, String desc,
+ boolean visible) {
+ buf.setLength(0);
+ buf.append("{\n").append("av0 = ").append(name)
+ .append(".visitLocalVariableAnnotation(");
+ buf.append(typeRef);
+ buf.append(", TypePath.fromString(\"").append(typePath).append("\"), ");
+ buf.append("new Label[] {");
+ for (int i = 0; i < start.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendLabel(start[i]);
+ }
+ buf.append(" }, new Label[] {");
+ for (int i = 0; i < end.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendLabel(end[i]);
+ }
+ buf.append(" }, new int[] {");
+ for (int i = 0; i < index.length; ++i) {
+ buf.append(i == 0 ? " " : ", ").append(index[i]);
+ }
+ buf.append(" }, ");
+ appendConstant(desc);
+ buf.append(", ").append(visible).append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
public void visitLineNumber(final int line, final Label start) {
buf.setLength(0);
buf.append(name).append(".visitLineNumber(").append(line).append(", ");
@@ -789,6 +893,28 @@ public class ASMifier extends Printer {
return a;
}
+ public ASMifier visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ return visitTypeAnnotation("visitTypeAnnotation", typeRef, typePath,
+ desc, visible);
+ }
+
+ public ASMifier visitTypeAnnotation(final String method, final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ buf.setLength(0);
+ buf.append("{\n").append("av0 = ").append(name).append(".")
+ .append(method).append("(");
+ buf.append(typeRef);
+ buf.append(", TypePath.fromString(\"").append(typePath).append("\"), ");
+ appendConstant(desc);
+ buf.append(", ").append(visible).append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
public void visitAttribute(final Attribute attr) {
buf.setLength(0);
buf.append("// ATTRIBUTE ").append(attr.type).append('\n');
@@ -809,7 +935,7 @@ public class ASMifier extends Printer {
// ------------------------------------------------------------------------
protected ASMifier createASMifier(final String name, final int id) {
- return new ASMifier(Opcodes.ASM4, name, id);
+ return new ASMifier(Opcodes.ASM5, name, id);
}
/**
@@ -950,6 +1076,13 @@ public class ASMifier extends Printer {
buf.append("ACC_DEPRECATED");
first = false;
}
+ if ((access & Opcodes.ACC_MANDATED) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_MANDATED");
+ first = false;
+ }
if (first) {
buf.append('0');
}
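
For illustration only: with the interface flag now emitted, the generated text for a call site reads roughly like the following line (owner, name and descriptor are examples):

    mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "hashCode", "()I", false);
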
diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
index f00a8f04a2..70441d1df4 100644
--- a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
@@ -49,7 +49,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
}
CheckAnnotationAdapter(final AnnotationVisitor av, final boolean named) {
- super(Opcodes.ASM4, av);
+ super(Opcodes.ASM5, av);
this.named = named;
}
@@ -70,7 +70,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
}
if (value instanceof Type) {
int sort = ((Type) value).getSort();
- if (sort != Type.OBJECT && sort != Type.ARRAY) {
+ if (sort == Type.METHOD) {
throw new IllegalArgumentException("Invalid annotation value");
}
}
diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
index 0bfa143a95..88afdb0441 100644
--- a/src/asm/scala/tools/asm/util/CheckClassAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
@@ -46,6 +46,8 @@ import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
import scala.tools.asm.Type;
+import scala.tools.asm.TypePath;
+import scala.tools.asm.TypeReference;
import scala.tools.asm.tree.ClassNode;
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
@@ -91,9 +93,9 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* insnNumber locals : stack):
*
* <pre>
- * scala.tools.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
- * at scala.tools.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
- * at scala.tools.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
+ * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
+ * at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
+ * at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
* ...
* remove()V
* 00000 LinkedBlockingQueue$Itr . . . . . . . . :
@@ -106,7 +108,7 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* 00071 LinkedBlockingQueue$Itr <b>.</b> I . . . . . . :
* ILOAD 1
* 00072 <b>?</b>
- * INVOKESPECIAL java/lang/Integer.<init> (I)V
+ * INVOKESPECIAL java/lang/Integer.&lt;init&gt; (I)V
* ...
* </pre>
*
@@ -215,7 +217,7 @@ public class CheckClassAdapter extends ClassVisitor {
List<Type> interfaces = new ArrayList<Type>();
for (Iterator<String> i = cn.interfaces.iterator(); i.hasNext();) {
- interfaces.add(Type.getObjectType(i.next().toString()));
+ interfaces.add(Type.getObjectType(i.next()));
}
for (int i = 0; i < methods.size(); ++i) {
@@ -267,26 +269,26 @@ public class CheckClassAdapter extends ClassVisitor {
for (int j = 0; j < method.instructions.size(); ++j) {
method.instructions.get(j).accept(mv);
- StringBuffer s = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
Frame<BasicValue> f = frames[j];
if (f == null) {
- s.append('?');
+ sb.append('?');
} else {
for (int k = 0; k < f.getLocals(); ++k) {
- s.append(getShortName(f.getLocal(k).toString()))
+ sb.append(getShortName(f.getLocal(k).toString()))
.append(' ');
}
- s.append(" : ");
+ sb.append(" : ");
for (int k = 0; k < f.getStackSize(); ++k) {
- s.append(getShortName(f.getStack(k).toString()))
+ sb.append(getShortName(f.getStack(k).toString()))
.append(' ');
}
}
- while (s.length() < method.maxStack + method.maxLocals + 1) {
- s.append(' ');
+ while (sb.length() < method.maxStack + method.maxLocals + 1) {
+ sb.append(' ');
}
pw.print(Integer.toString(j + 100000).substring(1));
- pw.print(" " + s + " : " + t.text.get(t.text.size() - 1));
+ pw.print(" " + sb + " : " + t.text.get(t.text.size() - 1));
}
for (int j = 0; j < method.tryCatchBlocks.size(); ++j) {
method.tryCatchBlocks.get(j).accept(mv);
@@ -328,9 +330,14 @@ public class CheckClassAdapter extends ClassVisitor {
* <tt>false</tt> to not perform any data flow check (see
* {@link CheckMethodAdapter}). This option requires valid
* maxLocals and maxStack values.
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) {
- this(Opcodes.ASM4, cv, checkDataFlow);
+ this(Opcodes.ASM5, cv, checkDataFlow);
+ if (getClass() != CheckClassAdapter.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -338,7 +345,7 @@ public class CheckClassAdapter extends ClassVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param cv
* the class visitor to which this adapter must delegate calls.
* @param checkDataFlow
@@ -440,7 +447,15 @@ public class CheckClassAdapter extends ClassVisitor {
CheckMethodAdapter.checkInternalName(outerName, "outer class name");
}
if (innerName != null) {
- CheckMethodAdapter.checkIdentifier(innerName, "inner class name");
+ int start = 0;
+ while (start < innerName.length()
+ && Character.isDigit(innerName.charAt(start))) {
+ start++;
+ }
+ if (start == 0 || start < innerName.length()) {
+ CheckMethodAdapter.checkIdentifier(innerName, start, -1,
+ "inner class name");
+ }
}
checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
@@ -517,6 +532,23 @@ public class CheckClassAdapter extends ClassVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ checkState();
+ int sort = typeRef >>> 24;
+ if (sort != TypeReference.CLASS_TYPE_PARAMETER
+ && sort != TypeReference.CLASS_TYPE_PARAMETER_BOUND
+ && sort != TypeReference.CLASS_EXTENDS) {
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(sort));
+ }
+ checkTypeRefAndPath(typeRef, typePath);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitTypeAnnotation(typeRef,
+ typePath, desc, visible));
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
checkState();
if (attr == null) {
@@ -661,6 +693,77 @@ public class CheckClassAdapter extends ClassVisitor {
}
/**
+ * Checks the reference to a type in a type annotation.
+ *
+ * @param typeRef
+ * a reference to an annotated type.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ */
+ static void checkTypeRefAndPath(int typeRef, TypePath typePath) {
+ int mask = 0;
+ switch (typeRef >>> 24) {
+ case TypeReference.CLASS_TYPE_PARAMETER:
+ case TypeReference.METHOD_TYPE_PARAMETER:
+ case TypeReference.METHOD_FORMAL_PARAMETER:
+ mask = 0xFFFF0000;
+ break;
+ case TypeReference.FIELD:
+ case TypeReference.METHOD_RETURN:
+ case TypeReference.METHOD_RECEIVER:
+ case TypeReference.LOCAL_VARIABLE:
+ case TypeReference.RESOURCE_VARIABLE:
+ case TypeReference.INSTANCEOF:
+ case TypeReference.NEW:
+ case TypeReference.CONSTRUCTOR_REFERENCE:
+ case TypeReference.METHOD_REFERENCE:
+ mask = 0xFF000000;
+ break;
+ case TypeReference.CLASS_EXTENDS:
+ case TypeReference.CLASS_TYPE_PARAMETER_BOUND:
+ case TypeReference.METHOD_TYPE_PARAMETER_BOUND:
+ case TypeReference.THROWS:
+ case TypeReference.EXCEPTION_PARAMETER:
+ mask = 0xFFFFFF00;
+ break;
+ case TypeReference.CAST:
+ case TypeReference.CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT:
+ case TypeReference.METHOD_INVOCATION_TYPE_ARGUMENT:
+ case TypeReference.CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT:
+ case TypeReference.METHOD_REFERENCE_TYPE_ARGUMENT:
+ mask = 0xFF0000FF;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(typeRef >>> 24));
+ }
+ if ((typeRef & ~mask) != 0) {
+ throw new IllegalArgumentException("Invalid type reference 0x"
+ + Integer.toHexString(typeRef));
+ }
+ if (typePath != null) {
+ for (int i = 0; i < typePath.getLength(); ++i) {
+ int step = typePath.getStep(i);
+ if (step != TypePath.ARRAY_ELEMENT
+ && step != TypePath.INNER_TYPE
+ && step != TypePath.TYPE_ARGUMENT
+ && step != TypePath.WILDCARD_BOUND) {
+ throw new IllegalArgumentException(
+ "Invalid type path step " + i + " in " + typePath);
+ }
+ if (step != TypePath.TYPE_ARGUMENT
+ && typePath.getStepArgument(i) != 0) {
+ throw new IllegalArgumentException(
+ "Invalid type path step argument for step " + i
+ + " in " + typePath);
+ }
+ }
+ }
+ }
+
+ /**
* Checks the formal type parameters of a class or method signature.
*
* @param signature
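
A small sketch of the new check on class-level type annotations; everything below the adapter setup is illustrative, and the last call is expected to throw because METHOD_RETURN is not a valid target sort on a class:

    ClassWriter cw = new ClassWriter(0);
    CheckClassAdapter checker = new CheckClassAdapter(cw, false);
    checker.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "org/example/C",
            null, "java/lang/Object", null);
    int badRef = TypeReference.newTypeReference(TypeReference.METHOD_RETURN).getValue();
    checker.visitTypeAnnotation(badRef, null, "Lorg/example/Anno;", true);   // IllegalArgumentException
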
diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
index 4657605936..e682df47af 100644
--- a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
@@ -33,6 +33,8 @@ import scala.tools.asm.AnnotationVisitor;
import scala.tools.asm.Attribute;
import scala.tools.asm.FieldVisitor;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
+import scala.tools.asm.TypeReference;
/**
* A {@link FieldVisitor} that checks that its methods are properly used.
@@ -48,9 +50,14 @@ public class CheckFieldAdapter extends FieldVisitor {
*
* @param fv
* the field visitor to which this adapter must delegate calls.
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public CheckFieldAdapter(final FieldVisitor fv) {
- this(Opcodes.ASM4, fv);
+ this(Opcodes.ASM5, fv);
+ if (getClass() != CheckFieldAdapter.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -58,7 +65,7 @@ public class CheckFieldAdapter extends FieldVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param fv
* the field visitor to which this adapter must delegate calls.
*/
@@ -75,6 +82,21 @@ public class CheckFieldAdapter extends FieldVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ checkEnd();
+ int sort = typeRef >>> 24;
+ if (sort != TypeReference.FIELD) {
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(sort));
+ }
+ CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitTypeAnnotation(typeRef,
+ typePath, desc, visible));
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
checkEnd();
if (attr == null) {
diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
index 9da01c9d6e..131dfa5e5b 100644
--- a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
@@ -46,6 +46,8 @@ import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
import scala.tools.asm.Type;
+import scala.tools.asm.TypePath;
+import scala.tools.asm.TypeReference;
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.BasicValue;
@@ -390,10 +392,15 @@ public class CheckMethodAdapter extends MethodVisitor {
* the method visitor to which this adapter must delegate calls.
* @param labels
* a map of already visited labels (in other methods).
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public CheckMethodAdapter(final MethodVisitor mv,
final Map<Label, Integer> labels) {
- this(Opcodes.ASM4, mv, labels);
+ this(Opcodes.ASM5, mv, labels);
+ if (getClass() != CheckMethodAdapter.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -434,7 +441,7 @@ public class CheckMethodAdapter extends MethodVisitor {
public CheckMethodAdapter(final int access, final String name,
final String desc, final MethodVisitor cmv,
final Map<Label, Integer> labels) {
- this(new MethodNode(access, name, desc, null, null) {
+ this(new MethodNode(Opcodes.ASM5, access, name, desc, null, null) {
@Override
public void visitEnd() {
Analyzer<BasicValue> a = new Analyzer<BasicValue>(
@@ -462,6 +469,16 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
+ public void visitParameter(String name, int access) {
+ if (name != null) {
+ checkUnqualifiedName(version, name, "name");
+ }
+ CheckClassAdapter.checkAccess(access, Opcodes.ACC_FINAL
+ + Opcodes.ACC_MANDATED + Opcodes.ACC_SYNTHETIC);
+ super.visitParameter(name, access);
+ }
+
+ @Override
public AnnotationVisitor visitAnnotation(final String desc,
final boolean visible) {
checkEndMethod();
@@ -470,6 +487,26 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ checkEndMethod();
+ int sort = typeRef >>> 24;
+ if (sort != TypeReference.METHOD_TYPE_PARAMETER
+ && sort != TypeReference.METHOD_TYPE_PARAMETER_BOUND
+ && sort != TypeReference.METHOD_RETURN
+ && sort != TypeReference.METHOD_RECEIVER
+ && sort != TypeReference.METHOD_FORMAL_PARAMETER
+ && sort != TypeReference.THROWS) {
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(sort));
+ }
+ CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitTypeAnnotation(typeRef,
+ typePath, desc, visible));
+ }
+
+ @Override
public AnnotationVisitor visitAnnotationDefault() {
checkEndMethod();
return new CheckAnnotationAdapter(super.visitAnnotationDefault(), false);
@@ -647,9 +684,30 @@ public class CheckMethodAdapter extends MethodVisitor {
++insnCount;
}
+ @Deprecated
@Override
- public void visitMethodInsn(final int opcode, final String owner,
- final String name, final String desc) {
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc) {
+ if (api >= Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc,
+ opcode == Opcodes.INVOKEINTERFACE);
+ }
+
+ @Override
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc, boolean itf) {
+ if (api < Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc, itf);
+ }
+
+ private void doVisitMethodInsn(int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
checkStartCode();
checkEndCode();
checkOpcode(opcode, 5);
@@ -658,7 +716,21 @@ public class CheckMethodAdapter extends MethodVisitor {
}
checkInternalName(owner, "owner");
checkMethodDesc(desc);
- super.visitMethodInsn(opcode, owner, name, desc);
+ if (opcode == Opcodes.INVOKEVIRTUAL && itf) {
+ throw new IllegalArgumentException(
+ "INVOKEVIRTUAL can't be used with interfaces");
+ }
+ if (opcode == Opcodes.INVOKEINTERFACE && !itf) {
+ throw new IllegalArgumentException(
+ "INVOKEINTERFACE can't be used with classes");
+ }
+ // Calling super.visitMethodInsn requires calling the correct version
+ // depending on this.api (otherwise infinite loops can occur). To
+ // simplify and to make it easier to automatically remove the backward
+ // compatibility code, we inline the code of the overridden method here.
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc, itf);
+ }
++insnCount;
}
@@ -797,6 +869,29 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitInsnAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ checkStartCode();
+ checkEndCode();
+ int sort = typeRef >>> 24;
+ if (sort != TypeReference.INSTANCEOF && sort != TypeReference.NEW
+ && sort != TypeReference.CONSTRUCTOR_REFERENCE
+ && sort != TypeReference.METHOD_REFERENCE
+ && sort != TypeReference.CAST
+ && sort != TypeReference.CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
+ && sort != TypeReference.METHOD_INVOCATION_TYPE_ARGUMENT
+ && sort != TypeReference.CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
+ && sort != TypeReference.METHOD_REFERENCE_TYPE_ARGUMENT) {
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(sort));
+ }
+ CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitInsnAnnotation(typeRef,
+ typePath, desc, visible));
+ }
+
+ @Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
checkStartCode();
@@ -821,6 +916,22 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTryCatchAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ checkStartCode();
+ checkEndCode();
+ int sort = typeRef >>> 24;
+ if (sort != TypeReference.EXCEPTION_PARAMETER) {
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(sort));
+ }
+ CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitTryCatchAnnotation(
+ typeRef, typePath, desc, visible));
+ }
+
+ @Override
public void visitLocalVariable(final String name, final String desc,
final String signature, final Label start, final Label end,
final int index) {
@@ -841,6 +952,40 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
+ TypePath typePath, Label[] start, Label[] end, int[] index,
+ String desc, boolean visible) {
+ checkStartCode();
+ checkEndCode();
+ int sort = typeRef >>> 24;
+ if (sort != TypeReference.LOCAL_VARIABLE
+ && sort != TypeReference.RESOURCE_VARIABLE) {
+ throw new IllegalArgumentException("Invalid type reference sort 0x"
+ + Integer.toHexString(sort));
+ }
+ CheckClassAdapter.checkTypeRefAndPath(typeRef, typePath);
+ checkDesc(desc, false);
+ if (start == null || end == null || index == null
+ || end.length != start.length || index.length != start.length) {
+ throw new IllegalArgumentException(
+ "Invalid start, end and index arrays (must be non null and of identical length");
+ }
+ for (int i = 0; i < start.length; ++i) {
+ checkLabel(start[i], true, "start label");
+ checkLabel(end[i], true, "end label");
+ checkUnsignedShort(index[i], "Invalid variable index");
+ int s = labels.get(start[i]).intValue();
+ int e = labels.get(end[i]).intValue();
+ if (e < s) {
+ throw new IllegalArgumentException(
+ "Invalid start and end labels (end must be greater than start)");
+ }
+ }
+ return super.visitLocalVariableAnnotation(typeRef, typePath, start,
+ end, index, desc, visible);
+ }
+
+ @Override
public void visitLineNumber(final int line, final Label start) {
checkStartCode();
checkEndCode();
@@ -1202,7 +1347,7 @@ public class CheckMethodAdapter extends MethodVisitor {
checkIdentifier(name, begin, slash, null);
begin = slash + 1;
} while (slash != max);
- } catch (IllegalArgumentException _) {
+ } catch (IllegalArgumentException unused) {
throw new IllegalArgumentException(
"Invalid "
+ msg
@@ -1280,7 +1425,7 @@ public class CheckMethodAdapter extends MethodVisitor {
}
try {
checkInternalName(desc, start + 1, index, null);
- } catch (IllegalArgumentException _) {
+ } catch (IllegalArgumentException unused) {
throw new IllegalArgumentException("Invalid descriptor: "
+ desc);
}
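
For orientation (illustrative only; the class name and empty label map below are placeholders), the effect of the itf consistency check introduced in doVisitMethodInsn above is that the five-argument visitMethodInsn rejects mismatched flags:

    import java.util.HashMap;
    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckMethodAdapter;

    public class ItfFlagDemo {
        public static void main(String[] args) {
            // A CheckMethodAdapter without a delegate only validates the events it sees.
            MethodVisitor mv =
                new CheckMethodAdapter(null, new HashMap<Label, Integer>());
            mv.visitCode();
            // Accepted: interface target flagged as such.
            mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/lang/Runnable", "run", "()V", true);
            // Rejected with IllegalArgumentException: INVOKEINTERFACE with itf == false.
            mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/lang/Runnable", "run", "()V", false);
        }
    }
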
diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
index e69302b8a6..54c9033c90 100644
--- a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
@@ -113,7 +113,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
* <tt>null</tt>.
*/
public CheckSignatureAdapter(final int type, final SignatureVisitor sv) {
- this(Opcodes.ASM4, type, sv);
+ this(Opcodes.ASM5, type, sv);
}
/**
@@ -121,7 +121,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
* @param type
* the type of signature to be checked. See
* {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
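
A usage sketch (the signature string and class name are assumed for illustration, not taken from the patch): the adapter is normally driven by a SignatureReader to validate a generic signature, and now defaults to the ASM5 API as changed above.

    import scala.tools.asm.signature.SignatureReader;
    import scala.tools.asm.util.CheckSignatureAdapter;

    public class SignatureCheckDemo {
        public static void main(String[] args) {
            // A plausible class signature: one type parameter T, extends Object,
            // implements Comparable<T>. A malformed signature would make the
            // adapter throw while the reader walks it.
            String sig = "<T:Ljava/lang/Object;>Ljava/lang/Object;Ljava/lang/Comparable<TT;>;";
            new SignatureReader(sig).accept(
                new CheckSignatureAdapter(CheckSignatureAdapter.CLASS_SIGNATURE, null));
            System.out.println("signature accepted");
        }
    }
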
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
index 86e0f9e122..773f129ad9 100644
--- a/src/asm/scala/tools/asm/util/Printer.java
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -37,6 +37,7 @@ import scala.tools.asm.Attribute;
import scala.tools.asm.Handle;
import scala.tools.asm.Label;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
/**
* An abstract converter from visit events to text.
@@ -116,7 +117,7 @@ public abstract class Printer {
/**
* The ASM API version implemented by this class. The value of this field
- * must be one of {@link Opcodes#ASM4}.
+ * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected final int api;
@@ -175,6 +176,15 @@ public abstract class Printer {
final boolean visible);
/**
+ * Class type annotation. See
+ * {@link scala.tools.asm.ClassVisitor#visitTypeAnnotation}.
+ */
+ public Printer visitClassTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Class attribute. See
* {@link scala.tools.asm.ClassVisitor#visitAttribute}.
*/
@@ -249,6 +259,15 @@ public abstract class Printer {
final boolean visible);
/**
+ * Field type annotation. See
+ * {@link scala.tools.asm.FieldVisitor#visitTypeAnnotation}.
+ */
+ public Printer visitFieldTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Field attribute. See
* {@link scala.tools.asm.FieldVisitor#visitAttribute}.
*/
@@ -264,6 +283,14 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
+ * Method parameter. See
+ * {@link scala.tools.asm.MethodVisitor#visitParameter(String, int)}.
+ */
+ public void visitParameter(String name, int access) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Method default annotation. See
* {@link scala.tools.asm.MethodVisitor#visitAnnotationDefault}.
*/
@@ -277,6 +304,15 @@ public abstract class Printer {
final boolean visible);
/**
+ * Method type annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitTypeAnnotation}.
+ */
+ public Printer visitMethodTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Method parameter annotation. See
* {@link scala.tools.asm.MethodVisitor#visitParameterAnnotation}.
*/
@@ -336,8 +372,33 @@ public abstract class Printer {
* Method instruction. See
* {@link scala.tools.asm.MethodVisitor#visitMethodInsn}.
*/
- public abstract void visitMethodInsn(final int opcode, final String owner,
- final String name, final String desc);
+ @Deprecated
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
+ if (api >= Opcodes.ASM5) {
+ boolean itf = opcode == Opcodes.INVOKEINTERFACE;
+ visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitMethodInsn}.
+ */
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
+ if (api < Opcodes.ASM5) {
+ if (itf != (opcode == Opcodes.INVOKEINTERFACE)) {
+ throw new IllegalArgumentException(
+ "INVOKESPECIAL/STATIC on interfaces require ASM 5");
+ }
+ visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
+ throw new RuntimeException("Must be overridden");
+ }
/**
* Method instruction. See
@@ -391,6 +452,15 @@ public abstract class Printer {
final int dims);
/**
+ * Instruction type annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitInsnAnnotation}.
+ */
+ public Printer visitInsnAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Method exception handler. See
* {@link scala.tools.asm.MethodVisitor#visitTryCatchBlock}.
*/
@@ -398,6 +468,15 @@ public abstract class Printer {
final Label handler, final String type);
/**
+ * Try catch block type annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitTryCatchAnnotation}.
+ */
+ public Printer visitTryCatchAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Method debug info. See
* {@link scala.tools.asm.MethodVisitor#visitLocalVariable}.
*/
@@ -406,6 +485,16 @@ public abstract class Printer {
final Label end, final int index);
/**
+ * Local variable type annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitLocalVariableAnnotation}.
+ */
+ public Printer visitLocalVariableAnnotation(final int typeRef,
+ final TypePath typePath, final Label[] start, final Label[] end,
+ final int[] index, final String desc, final boolean visible) {
+ throw new RuntimeException("Must be overridden");
+ }
+
+ /**
* Method debug info. See
* {@link scala.tools.asm.MethodVisitor#visitLineNumber}.
*/
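
The paired deprecated/new visitMethodInsn overrides above all follow the same backward-compatibility contract: a visitor written against the ASM4 API only overrides the four-argument callback, and the five-argument entry point added in ASM5 has to route calls back to it (and vice versa) without looping. An illustrative sketch, with a made-up visitor name:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Hypothetical ASM4-era visitor: it only knows the four-argument callback.
    class LegacyCallCounter extends MethodVisitor {
        int calls;

        LegacyCallCounter(MethodVisitor mv) {
            super(Opcodes.ASM4, mv);
        }

        @Override
        public void visitMethodInsn(int opcode, String owner, String name, String desc) {
            calls++; // counts every method instruction it is told about
            super.visitMethodInsn(opcode, owner, name, desc);
        }
    }

When code built against ASM5 calls the five-argument visitMethodInsn on such a visitor, the base MethodVisitor sees api < ASM5 and redirects to the deprecated overload, so calls is still incremented; the api checks in Printer, Textifier and the trace/check adapters in this commit preserve exactly that redirection while avoiding the mutual recursion mentioned in the CheckMethodAdapter comment.
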
diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
index a5c4f6779e..373e46f5ed 100644
--- a/src/asm/scala/tools/asm/util/Textifier.java
+++ b/src/asm/scala/tools/asm/util/Textifier.java
@@ -40,6 +40,8 @@ import scala.tools.asm.Handle;
import scala.tools.asm.Label;
import scala.tools.asm.Opcodes;
import scala.tools.asm.Type;
+import scala.tools.asm.TypePath;
+import scala.tools.asm.TypeReference;
import scala.tools.asm.signature.SignatureReader;
/**
@@ -135,15 +137,26 @@ public class Textifier extends Printer {
*/
protected Map<Label, String> labelNames;
+ /**
+ * Class access flags
+ */
+ private int access;
+
private int valueNumber = 0;
/**
* Constructs a new {@link Textifier}. <i>Subclasses must not use this
* constructor</i>. Instead, they must use the {@link #Textifier(int)}
* version.
+ *
+ * @throws IllegalStateException
+ * If a subclass calls this constructor.
*/
public Textifier() {
- this(Opcodes.ASM4);
+ this(Opcodes.ASM5);
+ if (getClass() != Textifier.class) {
+ throw new IllegalStateException();
+ }
}
/**
@@ -151,7 +164,7 @@ public class Textifier extends Printer {
*
* @param api
* the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
*/
protected Textifier(final int api) {
super(api);
@@ -208,6 +221,7 @@ public class Textifier extends Printer {
public void visit(final int version, final int access, final String name,
final String signature, final String superName,
final String[] interfaces) {
+ this.access = access;
int major = version & 0xFFFF;
int minor = version >>> 16;
buf.setLength(0);
@@ -294,6 +308,13 @@ public class Textifier extends Printer {
}
@Override
+ public Printer visitClassTypeAnnotation(int typeRef, TypePath typePath,
+ String desc, boolean visible) {
+ text.add("\n");
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public void visitClassAttribute(final Attribute attr) {
text.add("\n");
visitAttribute(attr);
@@ -393,7 +414,7 @@ public class Textifier extends Printer {
}
buf.append(tab);
- appendAccess(access);
+ appendAccess(access & ~Opcodes.ACC_VOLATILE);
if ((access & Opcodes.ACC_NATIVE) != 0) {
buf.append("native ");
}
@@ -403,6 +424,11 @@ public class Textifier extends Printer {
if ((access & Opcodes.ACC_BRIDGE) != 0) {
buf.append("bridge ");
}
+ if ((this.access & Opcodes.ACC_INTERFACE) != 0
+ && (access & Opcodes.ACC_ABSTRACT) == 0
+ && (access & Opcodes.ACC_STATIC) == 0) {
+ buf.append("default ");
+ }
buf.append(name);
appendDescriptor(METHOD_DESCRIPTOR, desc);
@@ -617,6 +643,12 @@ public class Textifier extends Printer {
}
@Override
+ public Printer visitFieldTypeAnnotation(int typeRef, TypePath typePath,
+ String desc, boolean visible) {
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public void visitFieldAttribute(final Attribute attr) {
visitAttribute(attr);
}
@@ -630,6 +662,16 @@ public class Textifier extends Printer {
// ------------------------------------------------------------------------
@Override
+ public void visitParameter(final String name, final int access) {
+ buf.setLength(0);
+ buf.append(tab2).append("// parameter ");
+ appendAccess(access);
+ buf.append(' ').append((name == null) ? "<no name>" : name)
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
public Textifier visitAnnotationDefault() {
text.add(tab2 + "default=");
Textifier t = createTextifier();
@@ -645,6 +687,12 @@ public class Textifier extends Printer {
}
@Override
+ public Printer visitMethodTypeAnnotation(int typeRef, TypePath typePath,
+ String desc, boolean visible) {
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public Textifier visitParameterAnnotation(final int parameter,
final String desc, final boolean visible) {
buf.setLength(0);
@@ -761,9 +809,30 @@ public class Textifier extends Printer {
text.add(buf.toString());
}
+ @Deprecated
@Override
public void visitMethodInsn(final int opcode, final String owner,
final String name, final String desc) {
+ if (api >= Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc,
+ opcode == Opcodes.INVOKEINTERFACE);
+ }
+
+ @Override
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
+ if (api < Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ doVisitMethodInsn(opcode, owner, name, desc, itf);
+ }
+
+ private void doVisitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc, final boolean itf) {
buf.setLength(0);
buf.append(tab2).append(OPCODES[opcode]).append(' ');
appendDescriptor(INTERNAL_NAME, owner);
@@ -781,26 +850,35 @@ public class Textifier extends Printer {
buf.append(name);
appendDescriptor(METHOD_DESCRIPTOR, desc);
buf.append(" [");
+ buf.append('\n');
+ buf.append(tab3);
appendHandle(bsm);
+ buf.append('\n');
buf.append(tab3).append("// arguments:");
if (bsmArgs.length == 0) {
buf.append(" none");
} else {
- buf.append('\n').append(tab3);
+ buf.append('\n');
for (int i = 0; i < bsmArgs.length; i++) {
+ buf.append(tab3);
Object cst = bsmArgs[i];
if (cst instanceof String) {
Printer.appendString(buf, (String) cst);
} else if (cst instanceof Type) {
- buf.append(((Type) cst).getDescriptor()).append(".class");
+ Type type = (Type) cst;
+ if (type.getSort() == Type.METHOD) {
+ appendDescriptor(METHOD_DESCRIPTOR, type.getDescriptor());
+ } else {
+ buf.append(type.getDescriptor()).append(".class");
+ }
} else if (cst instanceof Handle) {
appendHandle((Handle) cst);
} else {
buf.append(cst);
}
- buf.append(", ");
+ buf.append(", \n");
}
- buf.setLength(buf.length() - 2);
+ buf.setLength(buf.length() - 3);
}
buf.append('\n');
buf.append(tab2).append("]\n");
@@ -890,6 +968,12 @@ public class Textifier extends Printer {
}
@Override
+ public Printer visitInsnAnnotation(int typeRef, TypePath typePath,
+ String desc, boolean visible) {
+ return visitTypeAnnotation(typeRef, typePath, desc, visible);
+ }
+
+ @Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
buf.setLength(0);
@@ -906,6 +990,25 @@ public class Textifier extends Printer {
}
@Override
+ public Printer visitTryCatchAnnotation(int typeRef, TypePath typePath,
+ String desc, boolean visible) {
+ buf.setLength(0);
+ buf.append(tab2).append("TRYCATCHBLOCK @");
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ buf.setLength(0);
+ buf.append(") : ");
+ appendTypeReference(typeRef);
+ buf.append(", ").append(typePath);
+ buf.append(visible ? "\n" : " // invisible\n");
+ text.add(buf.toString());
+ return t;
+ }
+
+ @Override
public void visitLocalVariable(final String name, final String desc,
final String signature, final Label start, final Label end,
final int index) {
@@ -932,6 +1035,33 @@ public class Textifier extends Printer {
}
@Override
+ public Printer visitLocalVariableAnnotation(int typeRef, TypePath typePath,
+ Label[] start, Label[] end, int[] index, String desc,
+ boolean visible) {
+ buf.setLength(0);
+ buf.append(tab2).append("LOCALVARIABLE @");
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ buf.setLength(0);
+ buf.append(") : ");
+ appendTypeReference(typeRef);
+ buf.append(", ").append(typePath);
+ for (int i = 0; i < start.length; ++i) {
+ buf.append(" [ ");
+ appendLabel(start[i]);
+ buf.append(" - ");
+ appendLabel(end[i]);
+ buf.append(" - ").append(index[i]).append(" ]");
+ }
+ buf.append(visible ? "\n" : " // invisible\n");
+ text.add(buf.toString());
+ return t;
+ }
+
+ @Override
public void visitLineNumber(final int line, final Label start) {
buf.setLength(0);
buf.append(tab2).append("LINENUMBER ").append(line).append(' ');
@@ -981,6 +1111,39 @@ public class Textifier extends Printer {
}
/**
+ * Prints a disassembled view of the given type annotation.
+ *
+ * @param typeRef
+ * a reference to the annotated type. See {@link TypeReference}.
+ * @param typePath
+ * the path to the annotated type argument, wildcard bound, array
+ * element type, or static inner type within 'typeRef'. May be
+ * <tt>null</tt> if the annotation targets 'typeRef' as a whole.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values.
+ */
+ public Textifier visitTypeAnnotation(final int typeRef,
+ final TypePath typePath, final String desc, final boolean visible) {
+ buf.setLength(0);
+ buf.append(tab).append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ buf.setLength(0);
+ buf.append(") : ");
+ appendTypeReference(typeRef);
+ buf.append(", ").append(typePath);
+ buf.append(visible ? "\n" : " // invisible\n");
+ text.add(buf.toString());
+ return t;
+ }
+
+ /**
* Prints a disassembled view of the given attribute.
*
* @param attr
@@ -1061,10 +1224,10 @@ public class Textifier extends Printer {
* a handle, non null.
*/
protected void appendHandle(final Handle h) {
- buf.append('\n').append(tab3);
int tag = h.getTag();
buf.append("// handle kind 0x").append(Integer.toHexString(tag))
.append(" : ");
+ boolean isMethodHandle = false;
switch (tag) {
case Opcodes.H_GETFIELD:
buf.append("GETFIELD");
@@ -1080,18 +1243,23 @@ public class Textifier extends Printer {
break;
case Opcodes.H_INVOKEINTERFACE:
buf.append("INVOKEINTERFACE");
+ isMethodHandle = true;
break;
case Opcodes.H_INVOKESPECIAL:
buf.append("INVOKESPECIAL");
+ isMethodHandle = true;
break;
case Opcodes.H_INVOKESTATIC:
buf.append("INVOKESTATIC");
+ isMethodHandle = true;
break;
case Opcodes.H_INVOKEVIRTUAL:
buf.append("INVOKEVIRTUAL");
+ isMethodHandle = true;
break;
case Opcodes.H_NEWINVOKESPECIAL:
buf.append("NEWINVOKESPECIAL");
+ isMethodHandle = true;
break;
}
buf.append('\n');
@@ -1099,9 +1267,13 @@ public class Textifier extends Printer {
appendDescriptor(INTERNAL_NAME, h.getOwner());
buf.append('.');
buf.append(h.getName());
- buf.append('(');
+ if (!isMethodHandle) {
+ buf.append('(');
+ }
appendDescriptor(HANDLE_DESCRIPTOR, h.getDesc());
- buf.append(')').append('\n');
+ if (!isMethodHandle) {
+ buf.append(')');
+ }
}
/**
@@ -1145,6 +1317,9 @@ public class Textifier extends Printer {
if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
buf.append("synthetic ");
}
+ if ((access & Opcodes.ACC_MANDATED) != 0) {
+ buf.append("mandated ");
+ }
if ((access & Opcodes.ACC_ENUM) != 0) {
buf.append("enum ");
}
@@ -1156,6 +1331,90 @@ public class Textifier extends Printer {
}
}
+ private void appendTypeReference(final int typeRef) {
+ TypeReference ref = new TypeReference(typeRef);
+ switch (ref.getSort()) {
+ case TypeReference.CLASS_TYPE_PARAMETER:
+ buf.append("CLASS_TYPE_PARAMETER ").append(
+ ref.getTypeParameterIndex());
+ break;
+ case TypeReference.METHOD_TYPE_PARAMETER:
+ buf.append("METHOD_TYPE_PARAMETER ").append(
+ ref.getTypeParameterIndex());
+ break;
+ case TypeReference.CLASS_EXTENDS:
+ buf.append("CLASS_EXTENDS ").append(ref.getSuperTypeIndex());
+ break;
+ case TypeReference.CLASS_TYPE_PARAMETER_BOUND:
+ buf.append("CLASS_TYPE_PARAMETER_BOUND ")
+ .append(ref.getTypeParameterIndex()).append(", ")
+ .append(ref.getTypeParameterBoundIndex());
+ break;
+ case TypeReference.METHOD_TYPE_PARAMETER_BOUND:
+ buf.append("METHOD_TYPE_PARAMETER_BOUND ")
+ .append(ref.getTypeParameterIndex()).append(", ")
+ .append(ref.getTypeParameterBoundIndex());
+ break;
+ case TypeReference.FIELD:
+ buf.append("FIELD");
+ break;
+ case TypeReference.METHOD_RETURN:
+ buf.append("METHOD_RETURN");
+ break;
+ case TypeReference.METHOD_RECEIVER:
+ buf.append("METHOD_RECEIVER");
+ break;
+ case TypeReference.METHOD_FORMAL_PARAMETER:
+ buf.append("METHOD_FORMAL_PARAMETER ").append(
+ ref.getFormalParameterIndex());
+ break;
+ case TypeReference.THROWS:
+ buf.append("THROWS ").append(ref.getExceptionIndex());
+ break;
+ case TypeReference.LOCAL_VARIABLE:
+ buf.append("LOCAL_VARIABLE");
+ break;
+ case TypeReference.RESOURCE_VARIABLE:
+ buf.append("RESOURCE_VARIABLE");
+ break;
+ case TypeReference.EXCEPTION_PARAMETER:
+ buf.append("EXCEPTION_PARAMETER ").append(
+ ref.getTryCatchBlockIndex());
+ break;
+ case TypeReference.INSTANCEOF:
+ buf.append("INSTANCEOF");
+ break;
+ case TypeReference.NEW:
+ buf.append("NEW");
+ break;
+ case TypeReference.CONSTRUCTOR_REFERENCE:
+ buf.append("CONSTRUCTOR_REFERENCE");
+ break;
+ case TypeReference.METHOD_REFERENCE:
+ buf.append("METHOD_REFERENCE");
+ break;
+ case TypeReference.CAST:
+ buf.append("CAST ").append(ref.getTypeArgumentIndex());
+ break;
+ case TypeReference.CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT:
+ buf.append("CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT ").append(
+ ref.getTypeArgumentIndex());
+ break;
+ case TypeReference.METHOD_INVOCATION_TYPE_ARGUMENT:
+ buf.append("METHOD_INVOCATION_TYPE_ARGUMENT ").append(
+ ref.getTypeArgumentIndex());
+ break;
+ case TypeReference.CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT:
+ buf.append("CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT ").append(
+ ref.getTypeArgumentIndex());
+ break;
+ case TypeReference.METHOD_REFERENCE_TYPE_ARGUMENT:
+ buf.append("METHOD_REFERENCE_TYPE_ARGUMENT ").append(
+ ref.getTypeArgumentIndex());
+ break;
+ }
+ }
+
private void appendFrameTypes(final int n, final Object[] o) {
for (int i = 0; i < n; ++i) {
if (i > 0) {
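
To see the Textifier changes above in practice (parameter names, the mandated and default modifiers, type annotations, and the reworked invokedynamic/handle layout), the printer is normally driven through TraceClassVisitor; a minimal sketch, with an arbitrary class picked for illustration:

    import java.io.PrintWriter;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.TraceClassVisitor;

    public class Disassemble {
        public static void main(String[] args) throws Exception {
            // Reads the class from the classpath and prints the textual listing
            // produced by the Textifier behind the TraceClassVisitor.
            ClassReader reader = new ClassReader("java.lang.Runnable");
            reader.accept(new TraceClassVisitor(new PrintWriter(System.out)), 0);
        }
    }
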
diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
index 33e7cf0b26..7a9dbfef06 100644
--- a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
@@ -47,7 +47,7 @@ public final class TraceAnnotationVisitor extends AnnotationVisitor {
}
public TraceAnnotationVisitor(final AnnotationVisitor av, final Printer p) {
- super(Opcodes.ASM4, av);
+ super(Opcodes.ASM5, av);
this.p = p;
}
diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
index ff7a017482..842d286672 100644
--- a/src/asm/scala/tools/asm/util/TraceClassVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
@@ -37,6 +37,7 @@ import scala.tools.asm.ClassVisitor;
import scala.tools.asm.FieldVisitor;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
/**
* A {@link ClassVisitor} that prints the classes it visits with a
@@ -130,7 +131,7 @@ public final class TraceClassVisitor extends ClassVisitor {
*/
public TraceClassVisitor(final ClassVisitor cv, final Printer p,
final PrintWriter pw) {
- super(Opcodes.ASM4, cv);
+ super(Opcodes.ASM5, cv);
this.pw = pw;
this.p = p;
}
@@ -166,6 +167,16 @@ public final class TraceClassVisitor extends ClassVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ Printer p = this.p.visitClassTypeAnnotation(typeRef, typePath, desc,
+ visible);
+ AnnotationVisitor av = cv == null ? null : cv.visitTypeAnnotation(
+ typeRef, typePath, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
p.visitClassAttribute(attr);
super.visitAttribute(attr);
diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
index 9547a70008..1d0743a424 100644
--- a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
@@ -33,6 +33,7 @@ import scala.tools.asm.AnnotationVisitor;
import scala.tools.asm.Attribute;
import scala.tools.asm.FieldVisitor;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
/**
* A {@link FieldVisitor} that prints the fields it visits with a
@@ -49,7 +50,7 @@ public final class TraceFieldVisitor extends FieldVisitor {
}
public TraceFieldVisitor(final FieldVisitor fv, final Printer p) {
- super(Opcodes.ASM4, fv);
+ super(Opcodes.ASM5, fv);
this.p = p;
}
@@ -63,6 +64,16 @@ public final class TraceFieldVisitor extends FieldVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ Printer p = this.p.visitFieldTypeAnnotation(typeRef, typePath, desc,
+ visible);
+ AnnotationVisitor av = fv == null ? null : fv.visitTypeAnnotation(
+ typeRef, typePath, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
p.visitFieldAttribute(attr);
super.visitAttribute(attr);
diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
index 9034567c8f..db5f051003 100644
--- a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
@@ -35,6 +35,7 @@ import scala.tools.asm.Handle;
import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
+import scala.tools.asm.TypePath;
/**
* A {@link MethodVisitor} that prints the methods it visits with a
@@ -51,11 +52,17 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
public TraceMethodVisitor(final MethodVisitor mv, final Printer p) {
- super(Opcodes.ASM4, mv);
+ super(Opcodes.ASM5, mv);
this.p = p;
}
@Override
+ public void visitParameter(String name, int access) {
+ p.visitParameter(name, access);
+ super.visitParameter(name, access);
+ }
+
+ @Override
public AnnotationVisitor visitAnnotation(final String desc,
final boolean visible) {
Printer p = this.p.visitMethodAnnotation(desc, visible);
@@ -65,6 +72,16 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTypeAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ Printer p = this.p.visitMethodTypeAnnotation(typeRef, typePath, desc,
+ visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitTypeAnnotation(
+ typeRef, typePath, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
public void visitAttribute(final Attribute attr) {
p.visitMethodAttribute(attr);
super.visitAttribute(attr);
@@ -130,11 +147,31 @@ public final class TraceMethodVisitor extends MethodVisitor {
super.visitFieldInsn(opcode, owner, name, desc);
}
+ @Deprecated
@Override
- public void visitMethodInsn(final int opcode, final String owner,
- final String name, final String desc) {
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc) {
+ if (api >= Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc);
+ return;
+ }
p.visitMethodInsn(opcode, owner, name, desc);
- super.visitMethodInsn(opcode, owner, name, desc);
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc);
+ }
+ }
+
+ @Override
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc, boolean itf) {
+ if (api < Opcodes.ASM5) {
+ super.visitMethodInsn(opcode, owner, name, desc, itf);
+ return;
+ }
+ p.visitMethodInsn(opcode, owner, name, desc, itf);
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc, itf);
+ }
}
@Override
@@ -189,6 +226,16 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitInsnAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ Printer p = this.p
+ .visitInsnAnnotation(typeRef, typePath, desc, visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitInsnAnnotation(
+ typeRef, typePath, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
p.visitTryCatchBlock(start, end, handler, type);
@@ -196,6 +243,16 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
+ TypePath typePath, String desc, boolean visible) {
+ Printer p = this.p.visitTryCatchAnnotation(typeRef, typePath, desc,
+ visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitTryCatchAnnotation(
+ typeRef, typePath, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
public void visitLocalVariable(final String name, final String desc,
final String signature, final Label start, final Label end,
final int index) {
@@ -204,6 +261,18 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
+ public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
+ TypePath typePath, Label[] start, Label[] end, int[] index,
+ String desc, boolean visible) {
+ Printer p = this.p.visitLocalVariableAnnotation(typeRef, typePath,
+ start, end, index, desc, visible);
+ AnnotationVisitor av = mv == null ? null : mv
+ .visitLocalVariableAnnotation(typeRef, typePath, start, end,
+ index, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
public void visitLineNumber(final int line, final Label start) {
p.visitLineNumber(line, start);
super.visitLineNumber(line, start);
diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
index 1e23c7ef1a..f99ec2b0c2 100644
--- a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
@@ -75,13 +75,13 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
private String separator = "";
public TraceSignatureVisitor(final int access) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
isInterface = (access & Opcodes.ACC_INTERFACE) != 0;
this.declaration = new StringBuffer();
}
private TraceSignatureVisitor(final StringBuffer buf) {
- super(Opcodes.ASM4);
+ super(Opcodes.ASM5);
this.declaration = buf;
}
diff --git a/src/build/bnd/scala-actors.bnd b/src/build/bnd/scala-actors.bnd
index 8d0555777f..69885fc2bf 100644
--- a/src/build/bnd/scala-actors.bnd
+++ b/src/build/bnd/scala-actors.bnd
@@ -3,3 +3,5 @@ Bundle-SymbolicName: org.scala-lang.scala-actors
ver: @VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
+Import-Package: scala.*;version="${range;[==,=+);${ver}}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-compiler-doc.bnd b/src/build/bnd/scala-compiler-doc.bnd
index 4910e5fcb0..9d6d0304d1 100644
--- a/src/build/bnd/scala-compiler-doc.bnd
+++ b/src/build/bnd/scala-compiler-doc.bnd
@@ -3,4 +3,5 @@ Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-doc_@SCALA_BINARY_VER
ver: @SCALA_COMPILER_DOC_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
-Import-Package: *
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-compiler-interactive.bnd b/src/build/bnd/scala-compiler-interactive.bnd
index 34d2f2956d..07e3de35b0 100644
--- a/src/build/bnd/scala-compiler-interactive.bnd
+++ b/src/build/bnd/scala-compiler-interactive.bnd
@@ -3,4 +3,5 @@ Bundle-SymbolicName: org.scala-lang.modules.scala-compiler-interactive_@SCALA_BI
ver: @SCALA_COMPILER_INTERACTIVE_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
-Import-Package: *
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd
index dc30513db4..2bd24d780d 100644
--- a/src/build/bnd/scala-compiler.bnd
+++ b/src/build/bnd/scala-compiler.bnd
@@ -5,4 +5,8 @@ Bundle-Version: ${ver}
Export-Package: *;version=${ver}
Import-Package: jline.*;resolution:=optional, \
org.apache.tools.ant.*;resolution:=optional, \
+ scala.util.parsing.*;version="${range;[====,====];@PARSER_COMBINATORS_VERSION@}";resolution:=optional, \
+ scala.xml.*;version="${range;[====,====];@XML_VERSION@}";resolution:=optional, \
+ scala.*;version="${range;[==,=+);${ver}}", \
*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-continuations-library.bnd b/src/build/bnd/scala-continuations-library.bnd
index bb505b60a9..b36718cc5b 100644
--- a/src/build/bnd/scala-continuations-library.bnd
+++ b/src/build/bnd/scala-continuations-library.bnd
@@ -1,5 +1,7 @@
Bundle-Name: Scala Delimited Continuations Library
Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-library
-ver: @VERSION@
+ver: @CONTINUATIONS_LIBRARY_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-continuations-plugin.bnd b/src/build/bnd/scala-continuations-plugin.bnd
index cd66614a22..2f2464b452 100644
--- a/src/build/bnd/scala-continuations-plugin.bnd
+++ b/src/build/bnd/scala-continuations-plugin.bnd
@@ -1,5 +1,7 @@
Bundle-Name: Scala Delimited Continuations Compiler Plugin
Bundle-SymbolicName: org.scala-lang.plugins.scala-continuations-plugin
-ver: @VERSION@
+ver: @CONTINUATIONS_PLUGIN_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-library.bnd b/src/build/bnd/scala-library.bnd
index 03aff45672..7eb4fa4b2a 100644
--- a/src/build/bnd/scala-library.bnd
+++ b/src/build/bnd/scala-library.bnd
@@ -4,3 +4,4 @@ ver: @VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
Import-Package: sun.misc;resolution:=optional, *
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-parser-combinators.bnd b/src/build/bnd/scala-parser-combinators.bnd
index 6ffc3b2760..ef8646cbd0 100644
--- a/src/build/bnd/scala-parser-combinators.bnd
+++ b/src/build/bnd/scala-parser-combinators.bnd
@@ -1,5 +1,7 @@
Bundle-Name: Scala Parser Combinators Library
Bundle-SymbolicName: org.scala-lang.modules.scala-parser-combinators
-ver: @VERSION@
+ver: @PARSER_COMBINATORS_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-reflect.bnd b/src/build/bnd/scala-reflect.bnd
index 6cda346d3a..e4bc54e52e 100644
--- a/src/build/bnd/scala-reflect.bnd
+++ b/src/build/bnd/scala-reflect.bnd
@@ -3,4 +3,7 @@ Bundle-SymbolicName: org.scala-lang.scala-reflect
ver: @VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
-Import-Package: scala.tools.nsc;resolution:=optional, *
+Import-Package: scala.*;version="${range;[==,=+);${ver}}", \
+ scala.tools.nsc;resolution:=optional;version="${range;[==,=+);${ver}}", \
+ *
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/bnd/scala-swing.bnd b/src/build/bnd/scala-swing.bnd
index 7cccb1343b..f8b50baa91 100644
--- a/src/build/bnd/scala-swing.bnd
+++ b/src/build/bnd/scala-swing.bnd
@@ -1,5 +1,7 @@
Bundle-Name: Scala Swing
Bundle-SymbolicName: org.scala-lang.modules.scala-swing
-ver: @VERSION@
+ver: @SCALA_SWING_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6,JavaSE-1.7
diff --git a/src/build/bnd/scala-xml.bnd b/src/build/bnd/scala-xml.bnd
index 5d64c05e65..01bf0144eb 100644
--- a/src/build/bnd/scala-xml.bnd
+++ b/src/build/bnd/scala-xml.bnd
@@ -1,5 +1,7 @@
Bundle-Name: Scala XML Library
Bundle-SymbolicName: org.scala-lang.modules.scala-xml
-ver: @VERSION@
+ver: @XML_VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
+Import-Package: scala.*;version="${range;[==,=+);@VERSION@}",*
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6, JavaSE-1.7
diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml
index 22a24dea21..9477e14285 100644
--- a/src/build/maven/scala-dist-pom.xml
+++ b/src/build/maven/scala-dist-pom.xml
@@ -40,6 +40,11 @@
<version>@VERSION@</version>
</dependency>
<dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scalap</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
<groupId>org.scala-lang.plugins</groupId>
<!-- plugins are fully cross-versioned. But, we don't publish with 2.11.0-SNAPSHOT, instead use full version of the last non-snapshot version -->
<artifactId>scala-continuations-plugin_@SCALA_FULL_VERSION@</artifactId>
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
index 1413065a27..b8384851da 100644
--- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -12,7 +12,7 @@ abstract class DefaultMacroCompiler extends Resolvers
import treeInfo._
import definitions._
val runDefinitions = currentRun.runDefinitions
- import runDefinitions.{Predef_???, _}
+ import runDefinitions.Predef_???
val typer: global.analyzer.Typer
val context = typer.context
@@ -53,7 +53,7 @@ abstract class DefaultMacroCompiler extends Resolvers
(EmptyTree, TermName(""), Nil)
}
val bundleImplRef = MacroImplRefCompiler(
- atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(maybeBundleRef, List(List(Ident(Predef_???)))), methName), targs)),
+ atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(maybeBundleRef, List(List(Literal(Constant(null))))), methName), targs)),
isImplBundle = true
)
val vanillaResult = tryCompile(vanillaImplRef)
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
index cc4508e696..98fd091e9c 100644
--- a/src/compiler/scala/reflect/macros/compiler/Errors.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -11,7 +11,6 @@ trait Errors extends Traces {
import analyzer._
import definitions._
import treeInfo._
- import typer.TyperErrorGen._
import typer.infer.InferErrorGen._
import runDefinitions._
def globalSettings = global.settings
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
index 4484c234aa..d3f49390ea 100644
--- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -1,18 +1,12 @@
package scala.reflect.macros
package compiler
-import scala.reflect.internal.Flags._
-import scala.reflect.macros.TypecheckException
-
trait Resolvers {
self: DefaultMacroCompiler =>
import global._
import analyzer._
- import definitions._
import treeInfo._
- import gen._
- import runDefinitions._
trait Resolver {
self: MacroImplRefCompiler =>
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
index a146818ae3..fc932f2b18 100644
--- a/src/compiler/scala/reflect/macros/compiler/Validators.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -9,7 +9,7 @@ trait Validators {
import global._
import analyzer._
import definitions._
- import runDefinitions.{Predef_???, _}
+ import runDefinitions.Predef_???
trait Validator {
self: MacroImplRefCompiler =>
diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
index df7aa4d2be..7088058145 100644
--- a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -12,5 +12,5 @@ trait Infrastructure {
def compilerSettings: List[String] = universe.settings.recreateArgs
- def classPath: List[java.net.URL] = global.classPath.asURLs
+ def classPath: List[java.net.URL] = global.classPath.asURLs.toList
}
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index 88cfea8157..f4584f3627 100644
--- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -9,12 +9,15 @@ trait Parsers {
def parse(code: String) = {
val sreporter = new StoreReporter()
- val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
- val parser = newUnitParser(unit)
- val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
- sreporter.infos.foreach {
- case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
- }
- tree
+ val oldReporter = global.reporter
+ try {
+ global.reporter = sreporter
+ val parser = newUnitParser(new CompilationUnit(newSourceFile(code, "<macro>")))
+ val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
+ sreporter.infos.foreach {
+ case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
+ }
+ tree
+ } finally global.reporter = oldReporter
}
}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index bddc42d1f9..961c41dab5 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -54,14 +54,10 @@ trait Helpers {
*
* @see Metalevels.scala for more information and examples about metalevels
*/
- def increaseMetalevel(pre: Type, tp: Type): Type = {
- val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
-
+ def increaseMetalevel(pre: Type, tp: Type): Type =
transparentShallowTransform(RepeatedParamClass, tp) {
case tp => typeRef(pre, MacroContextExprClass, List(tp))
}
- }
/** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged.
*/
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala
index 68cc728eb3..6fa6b9b37a 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Holes.scala
@@ -1,4 +1,4 @@
-package scala.tools.reflect
+package scala.reflect
package quasiquotes
import scala.collection.{immutable, mutable}
@@ -132,7 +132,7 @@ trait Holes { self: Quasiquotes =>
private def mapF(tree: Tree, f: Tree => Tree): Tree =
if (f(Ident(TermName("x"))) equalsStructure Ident(TermName("x"))) tree
else {
- val x: TermName = c.freshName()
+ val x = TermName(c.freshName())
// q"$tree.map { $x => ${f(Ident(x))} }"
Apply(Select(tree, nme.map),
Function(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree) :: Nil,
@@ -187,7 +187,7 @@ trait Holes { self: Quasiquotes =>
lazy val tree =
tptopt.map { tpt =>
val TypeDef(_, _, _, typedTpt) =
- try c.typeCheck(TypeDef(NoMods, TypeName("T"), Nil, tpt))
+ try c.typecheck(TypeDef(NoMods, TypeName("T"), Nil, tpt))
catch { case TypecheckException(pos, msg) => c.abort(pos.asInstanceOf[c.Position], msg) }
val tpe = typedTpt.tpe
val (iterableRank, _) = stripIterable(tpe)
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
index b68022afd9..97ec7dbfc3 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
@@ -1,4 +1,4 @@
-package scala.tools.reflect
+package scala.reflect
package quasiquotes
import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
@@ -69,9 +69,16 @@ trait Parsers { self: Quasiquotes =>
override def makeTupleType(trees: List[Tree]): Tree = TupleTypePlaceholder(trees)
// q"{ $x }"
- override def makeBlock(stats: List[Tree]): Tree = stats match {
- case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head)
- case _ => super.makeBlock(stats)
+ override def makeBlock(stats: List[Tree]): Tree = method match {
+ case nme.apply =>
+ stats match {
+ // we don't want to eagerly flatten trees with placeholders as they
+ // might have to be wrapped into a block depending on their value
+ case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head)
+ case _ => gen.mkBlock(stats, doFlatten = true)
+ }
+ case nme.unapply => gen.mkBlock(stats, doFlatten = false)
+ case other => global.abort("unreachable")
}
// tq"$a => $b"
@@ -83,7 +90,7 @@ trait Parsers { self: Quasiquotes =>
case _ => super.makePatDef(mods, pat, rhs)
}
}
- import treeBuilder.{global => _, unit => _, _}
+ import treeBuilder.{global => _, unit => _}
// q"def foo($x)"
override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef =
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala
index b287971815..a5b42f8a1f 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala
@@ -1,4 +1,4 @@
-package scala.tools.reflect
+package scala.reflect
package quasiquotes
import java.util.UUID.randomUUID
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala
index b33069181c..72e6000e9f 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala
@@ -1,4 +1,4 @@
-package scala.tools.reflect
+package scala.reflect
package quasiquotes
import scala.reflect.macros.runtime.Context
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
index 95113d5b00..e753c9787a 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
@@ -1,4 +1,4 @@
-package scala.tools.reflect
+package scala.reflect
package quasiquotes
import java.lang.UnsupportedOperationException
@@ -8,7 +8,6 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
import global.build._
- import global.treeInfo._
import global.definitions._
import Rank._
import universeTypes._
@@ -248,7 +247,7 @@ trait Reifiers { self: Quasiquotes =>
hole.tree
case Placeholder(hole: UnapplyHole) => hole.treeNoUnlift
case FreshName(prefix) if prefix != nme.QUASIQUOTE_NAME_PREFIX =>
- def fresh() = c.freshName[TermName](nme.QUASIQUOTE_NAME_PREFIX)
+ def fresh() = c.freshName(TermName(nme.QUASIQUOTE_NAME_PREFIX))
def introduceName() = { val n = fresh(); nameMap(name) += n; n}
def result(n: Name) = if (isReifyingExpressions) Ident(n) else Bind(n, Ident(nme.WILDCARD))
if (isReifyingPatterns) result(introduceName())
@@ -318,7 +317,7 @@ trait Reifiers { self: Quasiquotes =>
* Reification of non-trivial list is done in two steps:
*
* 1. split the list into groups where every placeholder is always
- * put in a group of it's own and all subsquent non-holeMap are
+ * put in a group of its own and all subsequent non-holeMap are
* grouped together; element is considered to be a placeholder if it's
* in the domain of the fill function;
*
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index b1cc797389..a3e0f02dcc 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -21,7 +21,6 @@ abstract class Reifier extends States
import global._
import definitions._
private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
val typer: global.analyzer.Typer
val universe: Tree
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index 093c2bee22..0863ee38f9 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -79,8 +79,7 @@ abstract class Taggers {
try materializer
catch {
case ReificationException(pos, msg) =>
- c.error(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
- EmptyTree
+ c.abort(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
case UnexpectedReificationException(pos, err, cause) if cause != null =>
throw cause
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 52ddcb154b..e41fbf042a 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -39,7 +39,7 @@ trait GenSymbols {
else if (sym.isModuleClass)
if (sym.sourceModule.isLocatable) Select(Select(reify(sym.sourceModule), nme.asModule), nme.moduleClass)
else reifySymDef(sym)
- else if (sym.isPackage)
+ else if (sym.hasPackageFlag)
mirrorMirrorCall(nme.staticPackage, reify(sym.fullName))
else if (sym.isLocatable) {
/* This is a fancy conundrum that stems from the fact that Scala allows
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 743fe131c4..f34d75140b 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -153,7 +153,7 @@ trait GenTrees {
else mirrorCall(nme.Ident, reify(name))
case Select(qual, name) =>
- if (qual.symbol != null && qual.symbol.isPackage) {
+ if (qual.symbol != null && qual.symbol.hasPackageFlag) {
mirrorBuildCall(nme.mkIdent, reify(sym))
} else {
val effectiveName = if (sym != null && sym != NoSymbol) sym.name else name
@@ -199,7 +199,7 @@ trait GenTrees {
}
}
else tree match {
- case Select(qual, name) if !qual.symbol.isPackage =>
+ case Select(qual, name) if !qual.symbol.hasPackageFlag =>
if (reifyDebug) println(s"reifying Select($qual, $name)")
mirrorCall(nme.Select, reify(qual), reify(name))
case SelectFromTypeTree(qual, name) =>
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 4512b2cb6f..b5b0f93750 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,10 +5,6 @@ trait GenUtils {
self: Reifier =>
import global._
- import treeInfo._
- import definitions._
- private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
def reifyList(xs: List[Any]): Tree =
mkList(xs map reify)
@@ -19,7 +15,7 @@ trait GenUtils {
def reifyProduct(prefix: String, elements: List[Any]): Tree = {
// reflection would be more robust, but, hey, this is a hot path
if (prefix.startsWith("Tuple")) scalaFactoryCall(prefix, (elements map reify).toList: _*)
- else mirrorCall(prefix, (elements map reify): _*)
+ else mirrorCall(TermName(prefix), (elements map reify): _*)
}
// helper functions
@@ -53,16 +49,16 @@ trait GenUtils {
call("" + nme.MIRROR_PREFIX + name, args: _*)
def mirrorFactoryCall(value: Product, args: Tree*): Tree =
- mirrorFactoryCall(value.productPrefix, args: _*)
+ mirrorFactoryCall(TermName(value.productPrefix), args: _*)
def mirrorFactoryCall(prefix: TermName, args: Tree*): Tree =
- mirrorCall("" + prefix, args: _*)
+ mirrorCall(TermName("" + prefix), args: _*)
def scalaFactoryCall(name: TermName, args: Tree*): Tree =
call(s"scala.$name.apply", args: _*)
def scalaFactoryCall(name: String, args: Tree*): Tree =
- scalaFactoryCall(name: TermName, args: _*)
+ scalaFactoryCall(TermName(name), args: _*)
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
diff --git a/src/compiler/scala/tools/ant/FastScalac.scala b/src/compiler/scala/tools/ant/FastScalac.scala
index c3eb9eef9c..6f0a30aa9d 100644
--- a/src/compiler/scala/tools/ant/FastScalac.scala
+++ b/src/compiler/scala/tools/ant/FastScalac.scala
@@ -15,7 +15,7 @@ import org.apache.tools.ant.types.Path
import scala.tools.nsc.Settings
import scala.tools.nsc.io.File
import scala.tools.nsc.settings.FscSettings
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.internal.util.ScalaClassLoader
/** An Ant task to compile with the fast Scala compiler (`fsc`).
*
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 1747405f03..13bf0ef4c6 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -97,7 +97,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
@@ -479,7 +479,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Tests if a file exists and prints a warning in case it doesn't. Always
* returns the file, even if it doesn't exist.
- * @param file A file to test for existance.
+ * @param file A file to test for existence.
* @return The same file. */
protected def existing(file: File): File = {
if (!file.exists)
diff --git a/src/compiler/scala/tools/ant/sabbus/Compiler.scala b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
index 65cd9f41c2..81cd1f3196 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
@@ -12,7 +12,7 @@ package scala.tools.ant.sabbus
import java.io.File
import java.net.URL
import java.lang.reflect.InvocationTargetException
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.internal.util.ScalaClassLoader
class Compiler(classpath: Array[URL], val settings: Settings)
{
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 595b45ae51..cde827ba54 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -16,7 +16,7 @@ import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
import org.apache.tools.ant.BuildException
import scala.tools.nsc.io
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.internal.util.ScalaClassLoader
/** An Ant task to compile with the new Scala compiler (NSC).
*
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f58223a39e..6e91a2a202 100755
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -86,10 +86,14 @@ fi
TOOL_CLASSPATH="@classpath@"
if [[ -z "$TOOL_CLASSPATH" ]]; then
for ext in "$SCALA_HOME"/lib/* ; do
- if [[ -z "$TOOL_CLASSPATH" ]]; then
- TOOL_CLASSPATH="$ext"
- else
- TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ file_extension="${ext##*.}"
+ # SI-8967 Only consider directories and files named '*.jar'
+ if [[ -d "$ext" || $file_extension == "jar" ]]; then
+ if [[ -z "$TOOL_CLASSPATH" ]]; then
+ TOOL_CLASSPATH="$ext"
+ else
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ fi
fi
done
fi
@@ -105,9 +109,6 @@ if [[ -n "$cygwin" ]]; then
JAVA_HOME="$(cygpath --$format "$JAVA_HOME")"
fi
TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")"
-elif [[ -n "$mingw" ]]; then
- SCALA_HOME="$(cmd //c echo "$SCALA_HOME")"
- TOOL_CLASSPATH="$(cmd //c echo "$TOOL_CLASSPATH")"
fi
if [[ -n "$cygwin$mingw" ]]; then
@@ -127,23 +128,6 @@ fi
declare -a java_args
declare -a scala_args
-# default to the boot classpath for speed, except on cygwin/mingw because
-# JLine on Windows requires a custom DLL to be loaded.
-unset usebootcp
-if [[ -z "$cygwin$mingw" ]]; then
- usebootcp="true"
-fi
-
-# If using the boot classpath, also pass an empty classpath
-# to java to suppress "." from materializing.
-classpathArgs () {
- if [[ -n $usebootcp ]]; then
- echo "-Xbootclasspath/a:$TOOL_CLASSPATH -classpath \"\""
- else
- echo "-classpath $TOOL_CLASSPATH"
- fi
-}
-
# SI-8358, SI-8368 -- the default should really be false,
# but I don't want to flip the default during 2.11's RC cycle
OVERRIDE_USEJAVACP="-Dscala.usejavacp=true"
@@ -153,8 +137,8 @@ while [[ $# -gt 0 ]]; do
-D*)
# pass to scala as well: otherwise we lose it sometimes when we
# need it, e.g. communicating with a server compiler.
- java_args=("${java_args[@@]}" "$1")
- scala_args=("${scala_args[@@]}" "$1")
+ java_args+=("$1")
+ scala_args+=("$1")
# respect user-supplied -Dscala.usejavacp
case "$1" in -Dscala.usejavacp*) OVERRIDE_USEJAVACP="";; esac
shift
@@ -162,8 +146,8 @@ while [[ $# -gt 0 ]]; do
-J*)
# as with -D, pass to scala even though it will almost
# never be used.
- java_args=("${java_args[@@]}" "${1:2}")
- scala_args=("${scala_args[@@]}" "$1")
+ java_args+=("${1:2}")
+ scala_args+=("$1")
shift
;;
-toolcp)
@@ -183,7 +167,7 @@ while [[ $# -gt 0 ]]; do
shift
;;
*)
- scala_args=("${scala_args[@@]}" "$1")
+ scala_args+=("$1")
shift
;;
esac
@@ -196,6 +180,23 @@ if [[ -z "$JAVACMD" && -n "$JAVA_HOME" && -x "$JAVA_HOME/bin/java" ]]; then
JAVACMD="$JAVA_HOME/bin/java"
fi
+declare -a classpath_args
+
+# default to the boot classpath for speed, except on cygwin/mingw because
+# JLine on Windows requires a custom DLL to be loaded.
+unset usebootcp
+if [[ -z "$cygwin$mingw" ]]; then
+ usebootcp="true"
+fi
+
+# If using the boot classpath, also pass an empty classpath
+# to java to suppress "." from materializing.
+if [[ -n $usebootcp ]]; then
+ classpath_args=("-Xbootclasspath/a:$TOOL_CLASSPATH" -classpath "\"\"")
+else
+ classpath_args=(-classpath "$TOOL_CLASSPATH")
+fi
+
# note that variables which may intentionally be empty must not
# be quoted: otherwise an empty string will appear as a command line
# argument, and java will think that is the program to run.
@@ -203,7 +204,7 @@ execCommand \
"${JAVACMD:=java}" \
$JAVA_OPTS \
"${java_args[@@]}" \
- $(classpathArgs) \
+ "${classpath_args[@@]}" \
-Dscala.home="$SCALA_HOME" \
$OVERRIDE_USEJAVACP \
"$EMACS_OPT" \
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index cf0e003f10..50e44fb669 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -128,7 +128,7 @@ if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for %%f in ("!_SCALA_HOME!\lib\*.jar") do call :add_cpath "%%f"
for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
)
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 842851b4f6..e78589908c 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -111,8 +111,8 @@ import scala.language.implicitConversions"""
" */"),
Op(">>", "/**\n" +
- " * Returns this value bit-shifted left by the specified number of bits,\n" +
- " * filling in the right bits with the same value as the left-most bit of this.\n" +
+ " * Returns this value bit-shifted right by the specified number of bits,\n" +
+ " * filling in the left bits with the same value as the left-most bit of this.\n" +
" * The effect of this is to retain the sign of the value.\n" +
" * @example {{{\n" +
" * -21 >> 3 == -3\n" +
diff --git a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala
new file mode 100644
index 0000000000..2faf6c6272
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc
+
+import scala.io.StdIn.readLine
+
+/**
+ * Simple application to check the amount of memory used by the chosen classpath representation.
+ * It allows us to create many scalac-like calls based on the specified parameters, where each main retains its Global.
+ * An additional tool (e.g. a profiler) is needed to measure the memory consumption itself.
+ */
+object ClassPathMemoryConsumptionTester {
+
+ private class TestSettings extends Settings {
+ val requiredInstances = IntSetting("-requiredInstances",
+ "Determine how many times classpath should be loaded", 10, Some((1, 10000)), (_: String) => None)
+ }
+
+ private class MainRetainsGlobal extends scala.tools.nsc.MainClass {
+ var retainedGlobal: Global = _
+ override def doCompile(compiler: Global) {
+ retainedGlobal = compiler
+ super.doCompile(compiler)
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ if (args contains "-help") usage()
+ else doTest(args)
+ }
+
+ private def doTest(args: Array[String]) = {
+ val settings = loadSettings(args.toList)
+
+ val mains = (1 to settings.requiredInstances.value) map (_ => new MainRetainsGlobal)
+
+ // we need original settings without additional params to be able to use them later
+ val baseArgs = argsWithoutRequiredInstances(args)
+
+ println(s"Loading classpath ${settings.requiredInstances.value} times")
+ val startTime = System.currentTimeMillis()
+
+ mains map (_.process(baseArgs))
+
+ val elapsed = System.currentTimeMillis() - startTime
+ println(s"Operation finished - elapsed $elapsed ms")
+ println("Memory consumption can be now measured")
+
+ var textFromStdIn = ""
+ while (textFromStdIn.toLowerCase != "exit")
+ textFromStdIn = readLine("Type 'exit' to close application: ")
+ }
+
+ /**
+ * Prints usage information
+ */
+ private def usage(): Unit =
+ println( """Use classpath and sourcepath options like in the case of e.g. 'scala' command.
+ | There's also one additional option:
+ | -requiredInstances <int value> Determine how many times classpath should be loaded
+ """.stripMargin.trim)
+
+ private def loadSettings(args: List[String]) = {
+ val settings = new TestSettings()
+ settings.processArguments(args, processAll = true)
+ if (settings.classpath.isDefault)
+ settings.classpath.value = sys.props("java.class.path")
+ settings
+ }
+
+ private def argsWithoutRequiredInstances(args: Array[String]) = {
+ val instancesIndex = args.indexOf("-requiredInstances")
+ if (instancesIndex == -1) args
+ else args.dropRight(args.length - instancesIndex) ++ args.drop(instancesIndex + 2)
+ }
+}
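A worked example of the argument-stripping logic in argsWithoutRequiredInstances above; the classpath and file name are illustrative only:

    object ArgsStrippingCheck extends App {
      val args = Array("-cp", "lib/scala-library.jar", "-requiredInstances", "50", "Foo.scala")
      val i    = args.indexOf("-requiredInstances")
      // Drop the option and its value, keep everything before and after it.
      val base = if (i == -1) args else args.dropRight(args.length - i) ++ args.drop(i + 2)
      println(base.mkString(" "))   // -cp lib/scala-library.jar Foo.scala
    }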
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index c2caed70a0..6be1fda1b5 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
-import scala.tools.nsc.reporters.Reporter
trait CompilationUnits { global: Global =>
@@ -26,9 +25,9 @@ trait CompilationUnits { global: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- implicit val fresh: FreshNameCreator = new FreshNameCreator
- def freshTermName(prefix: String = "x$") = global.freshTermName(prefix)
- def freshTypeName(prefix: String) = global.freshTypeName(prefix)
+ implicit val fresh: FreshNameCreator = new FreshNameCreator
+ def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX) = global.freshTermName(prefix)
+ def freshTypeName(prefix: String) = global.freshTypeName(prefix)
/** the content of the compilation unit in tree form */
var body: Tree = EmptyTree
@@ -123,31 +122,20 @@ trait CompilationUnits { global: Global =>
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
- def reporter = global.reporter
+ @deprecated("Call global.reporter.echo directly instead.", "2.11.2")
+ final def echo(pos: Position, msg: String): Unit = reporter.echo(pos, msg)
+ @deprecated("Call global.reporter.error (or typer.context.error) directly instead.", "2.11.2")
+ final def error(pos: Position, msg: String): Unit = reporter.error(pos, msg)
+ @deprecated("Call global.reporter.warning (or typer.context.warning) directly instead.", "2.11.2")
+ final def warning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
- def echo(pos: Position, msg: String) =
- reporter.echo(pos, msg)
+ @deprecated("Call global.currentRun.reporting.deprecationWarning directly instead.", "2.11.2")
+ final def deprecationWarning(pos: Position, msg: String): Unit = currentRun.reporting.deprecationWarning(pos, msg)
+ @deprecated("Call global.currentRun.reporting.uncheckedWarning directly instead.", "2.11.2")
+ final def uncheckedWarning(pos: Position, msg: String): Unit = currentRun.reporting.uncheckedWarning(pos, msg)
- def error(pos: Position, msg: String) =
- reporter.error(pos, msg)
-
- def warning(pos: Position, msg: String) =
- reporter.warning(pos, msg)
-
- def deprecationWarning(pos: Position, msg: String) =
- currentRun.deprecationWarnings0.warn(pos, msg)
-
- def uncheckedWarning(pos: Position, msg: String) =
- currentRun.uncheckedWarnings0.warn(pos, msg)
-
- def inlinerWarning(pos: Position, msg: String) =
- currentRun.inlinerWarnings.warn(pos, msg)
-
- def incompleteInputError(pos: Position, msg:String) =
- reporter.incompleteInputError(pos, msg)
-
- def comment(pos: Position, msg: String) =
- reporter.comment(pos, msg)
+ @deprecated("This method will be removed. It does nothing.", "2.11.2")
+ final def comment(pos: Position, msg: String): Unit = {}
/** Is this about a .java source file? */
lazy val isJava = source.file.name.endsWith(".java")
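Per the deprecation messages above, unit-level reporting calls migrate to the reporter or the per-run reporting object. A minimal sketch of a call site after the migration, assuming a `global` instance and a position/message in scope:

    import scala.tools.nsc.Global
    import scala.reflect.internal.util.Position

    object ReportingMigration {
      // Sketch only: the replacements named in the deprecation messages above.
      def reportBoth(global: Global)(pos: Position, msg: String): Unit = {
        global.reporter.warning(pos, msg)                          // was unit.warning(pos, msg)
        global.currentRun.reporting.deprecationWarning(pos, msg)   // was unit.deprecationWarning(pos, msg)
      }
    }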
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 3017d8c9cc..f259504473 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -43,8 +43,8 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
info(vmArgs.mkString("[VM arguments: ", " ", "]"))
val socket =
- if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown)
- else Some(compileSocket.getSocket(settings.server.value))
+ if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value)
+ else compileSocket.getSocket(settings.server.value)
socket match {
case Some(sock) => compileOnServer(sock, fscArgs)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 6f068e179c..aa02957a6c 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -6,8 +6,9 @@
package scala.tools.nsc
import java.io.PrintStream
+import io.Directory
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.FakePos //Position
+import scala.reflect.internal.util.{FakePos, Position}
import scala.tools.util.SocketServer
import settings.FscSettings
@@ -19,7 +20,7 @@ import settings.FscSettings
* @author Martin Odersky
* @version 1.0
*/
-class StandardCompileServer extends SocketServer {
+class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) {
lazy val compileSocket: CompileSocket = CompileSocket
private var compiler: Global = null
@@ -37,7 +38,7 @@ class StandardCompileServer extends SocketServer {
/** Create a new compiler instance */
def newGlobal(settings: Settings, reporter: Reporter) =
new Global(settings, reporter) {
- override def inform(msg: String) = out.println(msg)
+ override def inform(pos: Position, msg: String) = out.println(msg)
}
override def timeout() {
@@ -152,6 +153,7 @@ class StandardCompileServer extends SocketServer {
clearCompiler()
case ex: Throwable =>
warn("Compile server encountered fatal condition: " + ex)
+ reporter.error(null, "Compile server encountered fatal condition: " + ex.getMessage)
shutdown = true
throw ex
}
@@ -165,12 +167,12 @@ class StandardCompileServer extends SocketServer {
}
-object CompileServer extends StandardCompileServer {
+object CompileServer {
/** A directory holding redirected output */
- private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
+ //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def createRedirect(filename: String) =
- new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+ private def createRedirect(dir: Directory, filename: String) =
+ new PrintStream((dir / filename).createFile().bufferedOutput())
def main(args: Array[String]) =
execute(() => (), args)
@@ -186,21 +188,33 @@ object CompileServer extends StandardCompileServer {
*/
def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
+ var port = 0
+ val i = args.indexOf("-p")
+ if (i >= 0 && args.length > i + 1) {
+ scala.util.control.Exception.ignoring(classOf[NumberFormatException]) {
+ port = args(i + 1).toInt
+ }
+ }
+
+ // Create instance rather than extend to pass a port parameter.
+ val server = new StandardCompileServer(port)
+ val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory()
+
if (debug) {
- echo("Starting CompileServer on port " + port)
- echo("Redirect dir is " + redirectDir)
+ server.echo("Starting CompileServer on port " + server.port)
+ server.echo("Redirect dir is " + redirectDir)
}
- Console.withErr(createRedirect("scala-compile-server-err.log")) {
- Console.withOut(createRedirect("scala-compile-server-out.log")) {
- Console.err.println("...starting server on socket "+port+"...")
+ Console.withErr(createRedirect(redirectDir, "scala-compile-server-err.log")) {
+ Console.withOut(createRedirect(redirectDir, "scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+server.port+"...")
Console.err.flush()
- compileSocket setPort port
+ server.compileSocket setPort server.port
startupCallback()
- run()
+ server.run()
- compileSocket deletePort port
+ server.compileSocket deletePort server.port
}
}
}
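With the changes above, the server can be pinned to a port via a "-p" argument parsed in execute(). A minimal sketch of launching it programmatically; the port number is illustrative:

    import scala.tools.nsc.CompileServer

    object FixedPortServer extends App {
      // "-v" enables the debug echo, "-p 32834" fixes the listening port (0 would pick a free one).
      // The call runs the server loop until it shuts down.
      CompileServer.execute(() => (), Array("-v", "-p", "32834"))
    }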
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index c4f06b59ec..27a14141fa 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -32,7 +32,8 @@ trait HasCompileSocket {
if (isErrorMessage(line))
noErrors = false
- compileSocket.echo(line)
+ // be consistent with scalac: everything goes to stderr
+ compileSocket.warn(line)
loop()
}
try loop()
@@ -45,6 +46,9 @@ trait HasCompileSocket {
class CompileSocket extends CompileOutputCommon {
protected lazy val compileClient: StandardCompileClient = CompileClient
def verbose = compileClient.verbose
+
+ /* Fixes the port on which the server is started; 0 yields some free port */
+ var fixPort = 0
/** The prefix of the port identification file, which is followed
* by the port number.
@@ -63,7 +67,7 @@ class CompileSocket extends CompileOutputCommon {
/** The class name of the scala compile server */
protected val serverClass = "scala.tools.nsc.CompileServer"
- protected def serverClassArgs = if (verbose) List("-v") else Nil // debug
+ protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil)
/** A temporary directory to use */
val tmpDir = {
@@ -103,9 +107,14 @@ class CompileSocket extends CompileOutputCommon {
def portFile(port: Int) = portsDir / File(port.toString)
/** Poll for a server port number; return -1 if none exists yet */
- private def pollPort(): Int = portsDir.list.toList match {
+ private def pollPort(): Int = if (fixPort > 0) {
+ if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1
+ } else portsDir.list.toList match {
case Nil => -1
- case x :: xs => try x.name.toInt finally xs foreach (_.delete())
+ case x :: xs => try x.name.toInt catch {
+ case e: Exception => x.delete()
+ throw e
+ }
}
/** Get the port number to which a scala compile server is connected;
@@ -151,7 +160,8 @@ class CompileSocket extends CompileOutputCommon {
* create a new daemon if necessary. Returns None if the connection
* cannot be established.
*/
- def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
+ def getOrCreateSocket(vmArgs: String, create: Boolean = true, fixedPort: Int = 0): Option[Socket] = {
+ fixPort = fixedPort
val maxMillis = 10L * 1000 // try for 10 seconds
val retryDelay = 50L
val maxAttempts = (maxMillis / retryDelay).toInt
@@ -185,14 +195,17 @@ class CompileSocket extends CompileOutputCommon {
try { Some(x.toInt) }
catch { case _: NumberFormatException => None }
- def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
+ def getSocket(serverAdr: String): Option[Socket] = (
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
- def getSocket(hostName: String, port: Int): Socket =
- Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
-
+ def getSocket(hostName: String, port: Int): Option[Socket] = {
+ val sock = Socket(hostName, port).opt
+ if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port))
+ sock
+ }
+
def getPassword(port: Int): String = {
val ff = portFile(port)
val f = ff.bufferedReader()
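getSocket now returns Option[Socket] instead of aborting when no server is reachable, so callers match on the result, as CompileClient does above. A minimal sketch; the address is illustrative:

    import scala.tools.nsc.CompileSocket

    object SocketProbe extends App {
      CompileSocket.getSocket("127.0.0.1:32834") match {
        case Some(sock) => println(s"connected to compile server: $sock")
        case None       => println("no compile server reachable")   // the helper has already warned
      }
    }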
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index bab0768ca9..9b8e9fa330 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -20,9 +20,12 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
def ok = processArgumentsResult._1
def files = processArgumentsResult._2
- /** The name of the command */
+ /** The name of the command. */
def cmdName = "scalac"
+ /** A descriptive alias for version and help messages. */
+ def cmdDesc = "compiler"
+
private def explainAdvanced = "\n" + """
|-- Notes on option parsing --
|Boolean settings are always false unless set.
@@ -85,7 +88,11 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
def getInfoMessage(global: Global): String = {
import settings._
- if (help) usageMsg + global.pluginOptionsHelp
+ import Properties.{ versionString, copyrightString } //versionFor
+ def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString"
+
+ if (version) versionFor(cmdDesc)
+ else if (help) usageMsg + global.pluginOptionsHelp
else if (Xhelp) xusageMsg
else if (Yhelp) yusageMsg
else if (showPlugins) global.pluginDescriptions
@@ -96,7 +103,15 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
val components = global.phaseNames // global.phaseDescriptors // one initializes
s"Phase graph of ${components.size} components output to ${genPhaseGraph.value}*.dot."
}
- else ""
+ // would be nicer if we could ask all the options for their helpful messages
+ else {
+ val sb = new StringBuilder
+ allSettings foreach {
+ case s: MultiChoiceSetting[_] if s.isHelping => sb append s.help
+ case _ =>
+ }
+ sb.toString
+ }
}
/**
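The -version handling above builds the message from Properties. A minimal sketch of the resulting format; it uses scala.util.Properties, which exposes equivalent versionString/copyrightString accessors, and the exact strings depend on the build:

    import scala.util.Properties.{ versionString, copyrightString }

    object VersionMsgDemo extends App {
      def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString"
      println(versionFor("compiler"))   // e.g. Scala compiler version 2.11.x -- Copyright ... LAMP/EPFL
    }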
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index 3ac27a42e8..6befa76b3f 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -2,26 +2,24 @@ package scala
package tools.nsc
import scala.tools.nsc.reporters.ConsoleReporter
-import Properties.{ versionString, copyrightString, residentPromptString }
+import Properties.{ versionMsg, residentPromptString }
import scala.reflect.internal.util.FakePos
abstract class Driver {
val prompt = residentPromptString
- val versionMsg = "Scala compiler " +
- versionString + " -- " +
- copyrightString
-
var reporter: ConsoleReporter = _
protected var command: CompilerCommand = _
protected var settings: Settings = _
- protected def scalacError(msg: String) {
+ protected def scalacError(msg: String): Unit = {
reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information")
}
- protected def processSettingsHook(): Boolean = true
+ protected def processSettingsHook(): Boolean = {
+ if (settings.version) { reporter echo versionMsg ; false } else true
+ }
protected def newCompiler(): Global
@@ -37,14 +35,12 @@ abstract class Driver {
}
def process(args: Array[String]) {
- val ss = new Settings(scalacError)
- reporter = new ConsoleReporter(ss)
+ val ss = new Settings(scalacError)
+ reporter = new ConsoleReporter(ss)
command = new CompilerCommand(args.toList, ss)
settings = command.settings
- if (settings.version) {
- reporter.echo(versionMsg)
- } else if (processSettingsHook()) {
+ if (processSettingsHook()) {
val compiler = newCompiler()
try {
if (reporter.hasErrors)
@@ -68,5 +64,4 @@ abstract class Driver {
process(args)
sys.exit(if (reporter.hasErrors) 1 else 0)
}
-
}
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index 15a296c836..73f4b9a119 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
import scala.annotation.tailrec
+import scala.io.StdIn
import java.io.EOFException
trait EvalLoop {
@@ -14,7 +15,7 @@ trait EvalLoop {
def loop(action: (String) => Unit) {
@tailrec def inner() {
Console.print(prompt)
- val line = try Console.readLine() catch { case _: EOFException => null }
+ val line = try StdIn.readLine() catch { case _: EOFException => null }
if (line != null && line != "") {
action(line)
inner()
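Console.readLine is deprecated in 2.11 in favour of scala.io.StdIn, which is the substitution made above. A minimal sketch of the replacement call:

    import scala.io.StdIn

    object PromptDemo extends App {
      // readLine prints the prompt and returns the line, or null when input is exhausted.
      val line = StdIn.readLine("prompt> ")
      println(if (line == null) "<no input>" else s"read: $line")
    }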
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index e710222285..2584054686 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
import GenericRunnerCommand._
+import scala.reflect.internal.util.ScalaClassLoader
/** A command for ScriptRunner */
class GenericRunnerCommand(
@@ -19,9 +20,10 @@ extends CompilerCommand(args, settings) {
def this(args: List[String]) =
this(args, str => Console.println("Error: " + str))
- /** name of the associated compiler command */
override def cmdName = "scala"
- def compCmdName = "scalac"
+ override def cmdDesc = "code runner"
+
+ def compCmdName = "scalac" // super.cmdName
// change CompilerCommand behavior
override def shouldProcessArguments: Boolean = false
@@ -31,7 +33,7 @@ extends CompilerCommand(args, settings) {
private def guessHowToRun(target: String): GenericRunnerCommand.HowToRun = {
if (!ok) Error
else if (io.Jar.isJarOrZip(target)) AsJar
- else if (util.ScalaClassLoader.classExists(settings.classpathURLs, target)) AsObject
+ else if (ScalaClassLoader.classExists(settings.classpathURLs, target)) AsObject
else {
val f = io.File(target)
if (!f.hasExtension("class", "jar", "zip") && f.canRead) AsScript
@@ -50,17 +52,16 @@ extends CompilerCommand(args, settings) {
case Nil => AsRepl
case hd :: _ => waysToRun find (_.name == settings.howtorun.value) getOrElse guessHowToRun(hd)
}
- private def interpolate(s: String) = s.trim.replaceAll("@cmd@", cmdName).replaceAll("@compileCmd@", compCmdName) + "\n"
-
- def shortUsageMsg = interpolate("""
-Usage: @cmd@ <options> [<script|class|object|jar> <arguments>]
- or @cmd@ -help
-All options to @compileCmd@ (see @compileCmd@ -help) are also allowed.
-""")
+ def shortUsageMsg =
+s"""|Usage: $cmdName <options> [<script|class|object|jar> <arguments>]
+ | or $cmdName -help
+ |
+ |All options to $compCmdName (see $compCmdName -help) are also allowed.
+""".stripMargin
- override def usageMsg = shortUsageMsg + interpolate("""
-The first given argument other than options to @cmd@ designates
+ override def usageMsg = f"""$shortUsageMsg
+The first given argument other than options to $cmdName designates
what to run. Runnable targets are:
- a file containing scala source
@@ -68,7 +69,7 @@ what to run. Runnable targets are:
- a runnable jar file with a valid Main-Class attribute
- or if no argument is given, the repl (interactive shell) is started
-Options to @cmd@ which reach the java runtime:
+Options to $cmdName which reach the java runtime:
-Dname=prop passed directly to java to set system properties
-J<arg> -J is stripped and <arg> passed to java as-is
@@ -86,8 +87,7 @@ A file argument will be run as a scala script unless it contains only
self-contained compilation units (classes and objects) and exactly one
runnable main method. In that case the file will be compiled and the
main method invoked. This provides a bridge between scripts and standard
-scala source.
- """) + "\n"
+scala source.%n"""
}
object GenericRunnerCommand {
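The usage text above now uses an interpolated, stripMargin'ed literal instead of the @cmd@ placeholder substitution. A minimal sketch contrasting the two styles; the strings are illustrative:

    object UsageTextDemo extends App {
      val cmdName = "scala"
      // Old style: substitute placeholders in a template.
      val templated = "Usage: @cmd@ <options>".replaceAll("@cmd@", cmdName)
      // New style: interpolate directly, as shortUsageMsg above does.
      val interpolated =
        s"""|Usage: $cmdName <options>
            |   or  $cmdName -help""".stripMargin
      println(templated)
      println(interpolated)
    }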
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index ad75d02bff..1289d55c37 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -5,10 +5,11 @@
package scala.tools.nsc
-import scala.tools.util.PathResolver
+import java.net.URL
+import scala.tools.util.PathResolverFactory
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- def classpathURLs = new PathResolver(this).asURLs
+ def classpathURLs: Seq[URL] = PathResolverFactory.create(this).resultAsURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 35eab94333..b233acf271 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,18 +8,17 @@ package tools
package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.net.URL
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import java.util.UUID._
import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
+import util.{ ClassFileLookup, ClassPath, MergedClassPath, StatisticsInfo, returning }
import scala.reflect.ClassTag
-import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
-import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import scala.reflect.io.VirtualFile
-import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.pickling.PickleBuffer
+import symtab.{ Flags, SymbolTable, SymbolTrackers }
import symtab.classfile.Pickler
import plugins.Plugins
import ast._
@@ -28,13 +27,15 @@ import typechecker._
import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.{ ScalaPrimitives, JavaPlatform }
import backend.jvm.GenBCode
import backend.jvm.GenASM
import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
import scala.language.postfixOps
import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -44,7 +45,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
with Trees
with Printers
with DocComments
- with Positions { self =>
+ with Positions
+ with Reporting
+ with Parsing { self =>
// the mirror --------------------------------------------------
@@ -56,7 +59,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+ def rootLoader: LazyType = {
+ settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
+ case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(recursiveClassPath)
+ }
+ }
override def toString = "compiler mirror"
}
implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
@@ -102,7 +110,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type PlatformClassPath = ClassPath[AbstractFile]
type OptClassPath = Option[PlatformClassPath]
- def classPath: PlatformClassPath = platform.classPath
+ def classPath: ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => flatClassPath
+ case ClassPathRepresentationType.Recursive => recursiveClassPath
+ }
+
+ private def recursiveClassPath: ClassPath[AbstractFile] = platform.classPath
+
+ private def flatClassPath: FlatClassPath = platform.flatClassPath
// sub-components --------------------------------------------------
@@ -217,6 +232,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Called from parser, which signals hereby that a method definition has been parsed. */
def signalParseProgress(pos: Position) {}
+ /** Called by ScalaDocAnalyzer when a doc comment has been parsed. */
+ def signalParsedDocComment(comment: String, pos: Position) = {
+ // TODO: this is all very broken (only works for scaladoc comments, not regular ones)
+ // --> add hooks to parser and refactor Interactive global to handle comments directly
+ // in any case don't use reporter for parser hooks
+ reporter.comment(pos, comment)
+ }
+
/** Register new context; called for every created context
*/
def registerContext(c: analyzer.Context) {
@@ -227,20 +250,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
*/
def registerTopLevelSym(sym: Symbol) {}
-// ------------------ Reporting -------------------------------------
-
- // not deprecated yet, but a method called "error" imported into
- // nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
-
- override def inform(msg: String) = inform(NoPosition, msg)
- override def globalError(msg: String) = globalError(NoPosition, msg)
- override def warning(msg: String) = warning(NoPosition, msg)
- override def deprecationWarning(pos: Position, msg: String) = currentUnit.deprecationWarning(pos, msg)
-
- def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
- def warning(pos: Position, msg: String) = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)
- def inform(pos: Position, msg: String) = reporter.echo(pos, msg)
+// ------------------ Debugging -------------------------------------
// Getting in front of Predef's asserts to supplement with more info.
// This has the happy side effect of masking the one argument forms
@@ -263,12 +273,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
require(requirement, "")
}
- // Needs to call error to make sure the compile fails.
- override def abort(msg: String): Nothing = {
- error(msg)
- super.abort(msg)
- }
-
@inline final def ifDebug(body: => Unit) {
if (settings.debug)
body
@@ -291,8 +295,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
log(s"!!!$pos_s $msg") // such warnings always at least logged
}
- def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
-
def logError(msg: String, t: Throwable): Unit = ()
override def shouldLogAtThisPhase = settings.log.isSetByUser && (
@@ -330,7 +332,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
None
}
- val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
+ val charset = settings.encoding.valueSetByUser flatMap loadCharset getOrElse {
settings.encoding.value = defaultEncoding // A mandatory charset
Charset.forName(defaultEncoding)
}
@@ -345,16 +347,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
+ settings.sourceReader.valueSetByUser flatMap loadReader getOrElse {
new SourceReader(charset.newDecoder(), reporter)
}
}
- if (settings.verbose || settings.Ylogcp) {
- // Uses the "do not truncate" inform
- informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
- informComplete("[search path for class files: " + classPath.asClasspathString + "]")
- }
+ if (settings.verbose || settings.Ylogcp)
+ reporter.echo(
+ s"[search path for source files: ${classPath.asSourcePathString}]\n" +
+ s"[search path for class files: ${classPath.asClassPathString}]"
+ )
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
// clunky. It is often difficult to have a setting influence something without having
@@ -415,7 +417,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
reporter.cancelled || unit.isJava && this.id > maxJavaPhase
}
- final def applyPhase(unit: CompilationUnit) {
+ final def withCurrentUnit(unit: CompilationUnit)(task: => Unit) {
if ((unit ne null) && unit.exists)
lastSeenSourceFile = unit.source
@@ -427,7 +429,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
currentRun.currentUnit = unit
if (!cancelled(unit)) {
currentRun.informUnitStarting(this, unit)
- apply(unit)
+ task
}
currentRun.advanceUnit()
} finally {
@@ -435,6 +437,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
currentRun.currentUnit = unit0
}
}
+
+ final def applyPhase(unit: CompilationUnit) = withCurrentUnit(unit)(apply(unit))
}
// phaseName = "parser"
@@ -851,52 +855,63 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} reverse
}
+ // ------------ REPL utilities ---------------------------------
+
+ /** Extend classpath of `platform` and rescan updated packages. */
+ def extendCompilerClassPath(urls: URL*): Unit = {
+ if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
+ throw new UnsupportedOperationException("Flat classpath doesn't support extending the compiler classpath")
+
+ val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
+ platform.currentClassPath = Some(newClassPath)
+ // Reload all specified jars into this compiler instance
+ invalidateClassPathEntries(urls.map(_.getPath): _*)
+ }
+
// ------------ Invalidations ---------------------------------
/** Is given package class a system package class that cannot be invalidated?
*/
private def isSystemPackageClass(pkg: Symbol) =
- pkg == RootClass ||
- pkg == definitions.ScalaPackageClass || {
- val pkgname = pkg.fullName
- (pkgname startsWith "scala.") && !(pkgname startsWith "scala.tools")
- }
+ pkg == RootClass || (pkg.hasTransOwner(definitions.ScalaPackageClass) && !pkg.hasTransOwner(this.rootMirror.staticPackage("scala.tools").moduleClass.asClass))
/** Invalidates packages that contain classes defined in a classpath entry, and
* rescans that entry.
- * @param paths Fully qualified names that refer to directories or jar files that are
- * a entries on the classpath.
- * First, causes the classpath entry referred to by `path` to be rescanned, so that
- * any new files or deleted files or changes in subpackages are picked up.
- * Second, invalidates any packages for which one of the following considitions is met:
-
- * - the classpath entry contained during the last compilation run classfiles
- * that represent a member in the package
- * - the classpath entry now contains classfiles
- * that represent a member in the package
+ *
+ * First, the classpath entry referred to by one of the `paths` is rescanned,
+ * so that any new files or changes in subpackages are picked up.
+ * Second, any package for which one of the following conditions is met is invalidated:
+ * - the classpath entry contained, during the last compilation run, classfiles
+ * that represent a member in the package;
+ * - the classpath entry now contains classfiles that represent a member in the package;
* - the set of subpackages has changed.
*
* The invalidated packages are reset in their entirety; all member classes and member packages
* are re-accessed using the new classpath.
- * Not invalidated are system packages that the compiler needs to access as parts
- * of standard definitions. The criterion what is a system package is currently:
- * any package rooted in "scala", with the exception of packages rooted in "scala.tools".
- * This can be refined later.
- * @return A pair consisting of
- * - a list of invalidated packages
- * - a list of of packages that should have been invalidated but were not because
- * they are system packages.
+ *
+ * System packages that the compiler needs to access as part of standard definitions
+ * are not invalidated. A system package is:
+ * Any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ *
+ * @param paths Fully-qualified names that refer to directories or jar files that are
+ * entries on the classpath.
*/
- def invalidateClassPathEntries(paths: String*): (List[ClassSymbol], List[ClassSymbol]) = {
+ def invalidateClassPathEntries(paths: String*): Unit = {
+ if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
+ throw new UnsupportedOperationException("Flat classpath doesn't support the classpath invalidation")
+
+ implicit object ClassPathOrdering extends Ordering[PlatformClassPath] {
+ def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClassPathString compare b.asClassPathString
+ }
val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
classPath match {
case cp: MergedClassPath[_] =>
def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
- val dir = AbstractFile getDirectory path
+ val dir = AbstractFile.getDirectory(path)
val canonical = dir.canonicalPath
def matchesCanonical(e: ClassPath[_]) = e.origin match {
case Some(opath) =>
- (AbstractFile getDirectory opath).canonicalPath == canonical
+ AbstractFile.getDirectory(opath).canonicalPath == canonical
case None =>
false
}
@@ -904,21 +919,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
case Some(oldEntry) =>
List(oldEntry -> cp.context.newClassPath(dir))
case None =>
- println(s"canonical = $canonical, origins = ${cp.entries map (_.origin)}")
- error(s"cannot invalidate: no entry named $path in classpath $classPath")
+ error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
List()
}
}
- val subst = Map(paths flatMap assoc: _*)
+ val subst = immutable.TreeMap(paths flatMap assoc: _*)
if (subst.nonEmpty) {
platform updateClassPath subst
informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
if (elems.size == 1) elems.head
- else new MergedClassPath(elems, classPath.context)
+ else new MergedClassPath(elems, recursiveClassPath.context)
val oldEntries = mkClassPath(subst.keys)
val newEntries = mkClassPath(subst.values)
- reSync(RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed)
+ mergeNewEntries(newEntries, RootClass, Some(recursiveClassPath), Some(oldEntries), invalidated, failed)
}
}
def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
@@ -926,36 +940,32 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
show("invalidated packages", invalidated)
show("could not invalidate system packages", failed)
- (invalidated.toList, failed.toList)
}
- /** Re-syncs symbol table with classpath
+ /** Merges new classpath entries into the symbol table
+ *
+ * @param newEntries The new classpath entries
* @param root The root symbol to be resynced (a package class)
- * @param allEntries Optionally, the corresponding package in the complete current classPath
- * @param oldEntries Optionally, the corresponding package in the old classPath entries
- * @param newEntries Optionally, the corresponding package in the new classPath entries
+ * @param allEntries Optionally, the corresponding package in the complete current classpath
+ * @param oldEntries Optionally, the corresponding package in the old classpath entries
* @param invalidated A listbuffer collecting the invalidated package classes
* @param failed A listbuffer collecting system package classes which could not be invalidated
- * The resyncing strategy is determined by the absence or presence of classes and packages.
- * If either oldEntries or newEntries contains classes, root is invalidated, provided a corresponding package
- * exists in allEntries, or otherwise is removed.
- * Otherwise, the action is determined by the following matrix, with columns:
*
- * old new all sym action
- * + + + + recurse into all child packages of old ++ new
- * + - + + invalidate root
- * + - - + remove root from its scope
- * - + + + invalidate root
- * - + + - create and enter root
- * - - * * no action
+ * The merging strategy is determined by the absence or presence of classes and packages.
+ *
+ * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
+ * exists in allEntries. Otherwise it is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
*
- * Here, old, new, all mean classpaths and sym means symboltable. + is presence of an
- * entry in its column, - is absence, * is don't care.
+ * old sym action
+ * + + recurse into all child packages of newEntries
+ * - + invalidate root
+ * - - create and enter root
*
- * Note that new <= all and old <= sym, so the matrix above covers all possibilities.
+ * Here, old means classpath, and sym means symbol table. + is presence of an entry in its column, - is absence.
*/
- private def reSync(root: ClassSymbol,
- allEntries: OptClassPath, oldEntries: OptClassPath, newEntries: OptClassPath,
+ private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath,
invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
@@ -968,11 +978,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
invalidated += root
}
- def packageNames(cp: PlatformClassPath): Set[String] = cp.packages.toSet map getName
def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
cp.packages find (cp1 => getName(cp1) == name)
- val classesFound = hasClasses(oldEntries) || hasClasses(newEntries)
+ val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
if (classesFound && !isSystemPackageClass(root)) {
invalidateOrRemove(root)
} else {
@@ -980,69 +989,27 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
else failed += root
}
- (oldEntries, newEntries) match {
- case (Some(oldcp) , Some(newcp)) =>
- for (pstr <- packageNames(oldcp) ++ packageNames(newcp)) {
- val pname = newTermName(pstr)
- val pkg = (root.info decl pname) orElse {
- // package was created by external agent, create symbol to track it
- assert(!subPackage(oldcp, pstr).isDefined)
- loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
- }
- reSync(
- pkg.moduleClass.asInstanceOf[ClassSymbol],
- subPackage(allEntries.get, pstr), subPackage(oldcp, pstr), subPackage(newcp, pstr),
- invalidated, failed)
+ if (!oldEntries.isDefined) invalidateOrRemove(root)
+ else
+ for (pstr <- newEntries.packages.map(getName)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package does not exist in symbol table, create symbol to track it
+ assert(!subPackage(oldEntries.get, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
}
- case (Some(oldcp), None) =>
- invalidateOrRemove(root)
- case (None, Some(newcp)) =>
- invalidateOrRemove(root)
- case (None, None) =>
- }
+ mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
+ subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
+ invalidated, failed)
+ }
}
}
- /** Invalidate contents of setting -Yinvalidate */
- def doInvalidation() = settings.Yinvalidate.value match {
- case "" =>
- case entry => invalidateClassPathEntries(entry)
- }
-
// ----------- Runs ---------------------------------------
private var curRun: Run = null
private var curRunId = 0
- /** A hook that lets subclasses of `Global` define whether a package or class should be kept loaded for the
- * next compiler run. If the parameter `sym` is a class or object, and `clearOnNextRun(sym)` returns `true`,
- * then the symbol is unloaded and reset to its state before the last compiler run. If the parameter `sym` is
- * a package, and clearOnNextRun(sym)` returns `true`, the package is recursively searched for
- * classes to drop.
- *
- * Example: Let's say I want a compiler that drops all classes corresponding to the current project
- * between runs. Then `keepForNextRun` of a toplevel class or object should return `true` if the
- * class or object does not form part of the current project, `false` otherwise. For a package,
- * clearOnNextRun should return `true` if no class in that package forms part of the current project,
- * `false` otherwise.
- *
- * @param sym A class symbol, object symbol, package, or package class.
- */
- @deprecated("use invalidateClassPathEntries instead", "2.10.0")
- def clearOnNextRun(sym: Symbol) = false
- /* To try out clearOnNext run on the scala.tools.nsc project itself
- * replace `false` above with the following code
-
- settings.Xexperimental.value && { sym.isRoot || {
- sym.fullName match {
- case "scala" | "scala.tools" | "scala.tools.nsc" => true
- case _ => sym.owner.fullName.startsWith("scala.tools.nsc")
- }
- }}
-
- * Then, fsc -Xexperimental clears the nsc project between successive runs of `fsc`.
- */
-
object typeDeconstruct extends {
val global: Global.this.type = Global.this
} with typechecker.StructuredTypeStrings
@@ -1112,45 +1079,41 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
- override def supplementErrorMessage(errorMessage: String): String = {
- if (currentRun.supplementedError) errorMessage
- else try {
- currentRun.supplementedError = true
- val tree = analyzer.lastTreeToTyper
- val sym = tree.symbol
- val tpe = tree.tpe
- val site = lastSeenContext.enclClassOrMethod.owner
- val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "<unknown>"
- val context_s = try {
- // Taking 3 before, 3 after the fingered line.
- val start = 0 max (tree.pos.line - 3)
- val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7
- val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" }
- strs.mkString("== Source file context for tree position ==\n\n", "\n", "")
- }
- catch { case t: Exception => devWarning("" + t) ; "<Cannot read source file>" }
-
- val info1 = formatExplain(
- "while compiling" -> currentSource.path,
- "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ),
- "library version" -> scala.util.Properties.versionString,
- "compiler version" -> Properties.versionString,
- "reconstructed args" -> settings.recreateArgs.mkString(" ")
- )
- val info2 = formatExplain(
- "last tree to typer" -> tree.summaryString,
- "tree position" -> pos_s,
- "tree tpe" -> tpe,
- "symbol" -> Option(sym).fold("null")(_.debugLocationString),
- "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"),
- "symbol package" -> sym.enclosingPackage.fullName,
- "symbol owners" -> ownerChainString(sym),
- "call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
- )
- ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
+ override def supplementTyperState(errorMessage: String): String = try {
+ val tree = analyzer.lastTreeToTyper
+ val sym = tree.symbol
+ val tpe = tree.tpe
+ val site = lastSeenContext.enclClassOrMethod.owner
+ val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "<unknown>"
+ val context_s = try {
+ // Taking 3 before, 3 after the fingered line.
+ val start = 0 max (tree.pos.line - 3)
+ val xs = scala.reflect.io.File(tree.pos.source.file.file).lines drop start take 7
+ val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" }
+ strs.mkString("== Source file context for tree position ==\n\n", "\n", "")
}
- catch { case _: Exception | _: TypeError => errorMessage }
- }
+ catch { case t: Exception => devWarning("" + t) ; "<Cannot read source file>" }
+
+ val info1 = formatExplain(
+ "while compiling" -> currentSource.path,
+ "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ),
+ "library version" -> scala.util.Properties.versionString,
+ "compiler version" -> Properties.versionString,
+ "reconstructed args" -> settings.recreateArgs.mkString(" ")
+ )
+ val info2 = formatExplain(
+ "last tree to typer" -> tree.summaryString,
+ "tree position" -> pos_s,
+ "tree tpe" -> tpe,
+ "symbol" -> Option(sym).fold("null")(_.debugLocationString),
+ "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"),
+ "symbol package" -> sym.enclosingPackage.fullName,
+ "symbol owners" -> ownerChainString(sym),
+ "call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
+ )
+ ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
+ } catch { case _: Exception | _: TypeError => errorMessage }
+
/** The id of the currently active run
*/
@@ -1162,17 +1125,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]")
}
- /** Collects for certain classes of warnings during this run. */
- class ConditionalWarning(what: String, option: Settings#BooleanSetting) {
- val warnings = mutable.LinkedHashMap[Position, String]()
- def warn(pos: Position, msg: String) =
- if (option) reporter.warning(pos, msg)
- else if (!(warnings contains pos)) warnings += ((pos, msg))
- def summarize() =
- if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings))
- warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
- }
-
def newSourceFile(code: String, filename: String = "<console>") =
new BatchSourceFile(filename, code)
@@ -1190,7 +1142,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** A Run is a single execution of the compiler on a set of units.
*/
- class Run extends RunContextApi {
+ class Run extends RunContextApi with RunReporting with RunParsing {
/** Have been running into too many init order issues with Run
* during erroneous conditions. Moved all these vals up to the
* top of the file so at least they're not trivially null.
@@ -1199,24 +1151,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** The currently compiled unit; set from GlobalPhase */
var currentUnit: CompilationUnit = NoCompilationUnit
- // This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so
- // as to recover uncheckedWarnings for its ever-fragile compiler interface.
- val deprecationWarnings0 = new ConditionalWarning("deprecation", settings.deprecation)
- val uncheckedWarnings0 = new ConditionalWarning("unchecked", settings.unchecked)
- val featureWarnings = new ConditionalWarning("feature", settings.feature)
- val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
- val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
-
- def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt
- def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt
-
- var reportedFeature = Set[Symbol]()
-
- /** Has any macro expansion used a fallback during this run? */
- var seenMacroExpansionsFallingBack = false
-
- /** Have we already supplemented the error message of a compiler crash? */
- private[nsc] final var supplementedError = false
+ // used in sbt
+ def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings
+ // used in sbt
+ def deprecationWarnings: List[(Position, String)] = reporting.deprecationWarnings
private class SyncedCompilationBuffer { self =>
private val underlying = new mutable.ArrayBuffer[CompilationUnit]
@@ -1341,47 +1279,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
first
}
- /** Reset all classes contained in current project, as determined by
- * the clearOnNextRun hook
- */
- @deprecated("use invalidateClassPathEntries instead", "2.10.0")
- def resetProjectClasses(root: Symbol): Unit = try {
- def unlink(sym: Symbol) =
- if (sym != NoSymbol) root.info.decls.unlink(sym)
- if (settings.verbose) inform("[reset] recursing in "+root)
- val toReload = mutable.Set[String]()
- for (sym <- root.info.decls) {
- if (sym.isInitialized && clearOnNextRun(sym))
- if (sym.hasPackageFlag) {
- resetProjectClasses(sym.moduleClass)
- openPackageModule(sym.moduleClass)
- } else {
- unlink(sym)
- unlink(root.info.decls.lookup(
- if (sym.isTerm) sym.name.toTypeName else sym.name.toTermName))
- toReload += sym.fullName
- // note: toReload could be set twice with the same name
- // but reinit must happen only once per name. That's why
- // the following classPath.findClass { ... } code cannot be moved here.
- }
- }
- for (fullname <- toReload)
- classPath.findClass(fullname) match {
- case Some(classRep) =>
- if (settings.verbose) inform("[reset] reinit "+fullname)
- loaders.initializeFromClassPath(root, classRep)
- case _ =>
- }
- } catch {
- case ex: Throwable =>
- // this handler should not be nessasary, but it seems that `fsc`
- // eats exceptions if they appear here. Need to find out the cause for
- // this and fix it.
- inform("[reset] exception happened: "+ex)
- ex.printStackTrace()
- throw ex
- }
-
// --------------- Miscellania -------------------------------
/** Progress tracking. Measured in "progress units" which are 1 per
@@ -1414,6 +1311,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
refreshProgress()
}
+ // for sbt
def cancel() { reporter.cancelled = true }
private def currentProgress = (phasec * size) + unitc
@@ -1445,7 +1343,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val erasurePhase = phaseNamed("erasure")
val posterasurePhase = phaseNamed("posterasure")
// val lazyvalsPhase = phaseNamed("lazyvals")
- // val lambdaliftPhase = phaseNamed("lambdalift")
+ val lambdaliftPhase = phaseNamed("lambdalift")
// val constructorsPhase = phaseNamed("constructors")
val flattenPhase = phaseNamed("flatten")
val mixinPhase = phaseNamed("mixin")
@@ -1479,10 +1377,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
private def checkDeprecatedSettings(unit: CompilationUnit) {
// issue warnings for any usage of deprecated settings
settings.userSetSettings filter (_.isDeprecated) foreach { s =>
- unit.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get)
+ currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get)
}
if (settings.target.value.contains("jvm-1.5"))
- unit.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.")
+ currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.")
}
/* An iterator returning all the units being compiled in this run */
@@ -1497,13 +1395,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** does this run compile given class, module, or case factory? */
// NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
- // Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in
+ // Here we work around that wrinkle by claiming that an early-initialized member is compiled in
// *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (sym.isTopLevel && sym.isEarlyInitialized) true
- else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
+ else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClassOrDummy)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1563,28 +1460,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- def reportCompileErrors() {
- if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings)
- globalError("No warnings can be incurred under -Xfatal-warnings.")
- if (reporter.hasErrors) {
- for ((sym, file) <- symSource.iterator) {
- sym.reset(new loaders.SourcefileLoader(file))
- if (sym.isTerm)
- sym.moduleClass reset loaders.moduleClassLoader
- }
- }
- else {
- allConditionalWarnings foreach (_.summarize())
-
- if (seenMacroExpansionsFallingBack)
- warning("some macros could not be expanded and code fell back to overridden methods;"+
- "\nrecompiling with generated classfiles on the classpath might help.")
- // todo: migrationWarnings
- }
- }
-
- /** Caching member symbols that are def-s in Defintions because they might change from Run to Run. */
+ /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. */
val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions
/** Compile list of source files,
@@ -1595,7 +1472,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def checkDeprecations() = {
checkDeprecatedSettings(newCompilationUnit(""))
- reportCompileErrors()
+ reporting.summarizeErrors()
}
val units = sources map scripted map (new CompilationUnit(_))
@@ -1610,8 +1487,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
compileUnitsInternal(units, fromPhase)
private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
- doInvalidation()
-
units foreach addUnit
val startTime = currentTime
@@ -1619,7 +1494,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
checkDeprecatedSettings(unitbuf.head)
globalPhase = fromPhase
- while (globalPhase.hasNext && !reporter.hasErrors) {
+ while (globalPhase.hasNext && !reporter.hasErrors) {
val startTime = currentTime
phase = globalPhase
globalPhase.run()
@@ -1659,12 +1534,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
runCheckers()
// output collected statistics
- if (settings.Ystatistics)
+ if (settings.YstatisticsEnabled)
statistics.print(phase)
advancePhase()
}
+ reporting.summarizeErrors()
+
if (traceSymbolActivity)
units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
@@ -1672,19 +1549,20 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.Yshow.isDefault)
showMembers()
- reportCompileErrors()
+ if (reporter.hasErrors) {
+ for ((sym, file) <- symSource.iterator) {
+ if (file != null)
+ sym.reset(new loaders.SourcefileLoader(file))
+ if (sym.isTerm)
+ sym.moduleClass reset loaders.moduleClassLoader
+ }
+ }
symSource.keys foreach (x => resetPackageClass(x.owner))
+
informTime("total", startTime)
// Clear any sets or maps created via perRunCaches.
perRunCaches.clearAll()
-
- // Reset project
- if (!stopPhase("namer")) {
- enteringPhase(namerPhase) {
- resetProjectClasses(RootClass)
- }
- }
}
/** Compile list of abstract files. */
@@ -1732,10 +1610,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Reset package class to state at typer (not sure what this
- * is needed for?)
+ /** Reset package class to state at typer (not sure what this is needed for?)
*/
- private def resetPackageClass(pclazz: Symbol) {
+ private def resetPackageClass(pclazz: Symbol): Unit = if (typerPhase != NoPhase) {
enteringPhase(firstPhase) {
pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala
index 03190a63f3..f01de0cbe1 100644
--- a/src/compiler/scala/tools/nsc/MainBench.scala
+++ b/src/compiler/scala/tools/nsc/MainBench.scala
@@ -24,7 +24,7 @@ object MainBench extends Driver with EvalLoop {
var start = System.nanoTime()
for (i <- 0 until NIter) {
if (i == NIter-1) {
- theCompiler.settings.Ystatistics.value = true
+ theCompiler.settings.Ystatistics.default.get foreach theCompiler.settings.Ystatistics.add
Statistics.enabled = true
}
process(args)
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index 95264aeda6..8e01418e8b 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -7,8 +7,8 @@
package scala.tools.nsc
import java.net.URL
-import util.ScalaClassLoader
import util.Exceptional.unwrap
+import scala.reflect.internal.util.ScalaClassLoader
trait CommonRunner {
/** Run a given object, specified by name, using a
@@ -18,14 +18,14 @@ trait CommonRunner {
* @throws NoSuchMethodException
* @throws InvocationTargetException
*/
- def run(urls: List[URL], objectName: String, arguments: Seq[String]) {
+ def run(urls: Seq[URL], objectName: String, arguments: Seq[String]) {
(ScalaClassLoader fromURLs urls).run(objectName, arguments)
}
/** Catches exceptions enumerated by run (in the case of InvocationTargetException,
* unwrapping it) and returns it any thrown in Left(x).
*/
- def runAndCatch(urls: List[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
+ def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
try { run(urls, objectName, arguments) ; Right(true) }
catch { case e: Throwable => Left(unwrap(e)) }
}
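
A minimal sketch of calling the widened CommonRunner entry points above; the classpath URL, the "demo.Main" object name and the DemoRunner wrapper are hypothetical, used only to illustrate that any Seq[URL] is now accepted.

// Hedged sketch: exercising the widened Seq[URL] signatures of CommonRunner.
// The classpath URL and the "demo.Main" object name are placeholders.
import java.net.URL
import scala.tools.nsc.CommonRunner

object DemoRunner extends CommonRunner {
  def main(args: Array[String]): Unit = {
    val classpath: Seq[URL] = Vector(new URL("file:///tmp/demo-classes/"))
    runAndCatch(classpath, "demo.Main", args.toSeq) match {
      case Right(_)  => println("ran to completion")
      case Left(err) => println("failed with: " + err.getMessage)
    }
  }
}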
diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala
new file mode 100644
index 0000000000..9e5999ce4f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/Parsing.scala
@@ -0,0 +1,35 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc.
+ * @author Adriaan Moors
+ */
+
+package scala
+package tools.nsc
+
+import scala.reflect.internal.Positions
+
+/** Similar to Reporting: gather global functionality specific to parsing.
+ */
+trait Parsing { self : Positions with Reporting =>
+ def currentRun: RunParsing
+
+ trait RunParsing {
+ val parsing: PerRunParsing = new PerRunParsing
+ }
+
+ class PerRunParsing {
+ // for repl
+ private[this] var incompleteHandler: (Position, String) => Unit = null
+ def withIncompleteHandler[T](handler: (Position, String) => Unit)(thunk: => T) = {
+ val saved = incompleteHandler
+ incompleteHandler = handler
+ try thunk
+ finally incompleteHandler = saved
+ }
+
+ def incompleteHandled = incompleteHandler != null
+ def incompleteInputError(pos: Position, msg: String): Unit =
+ if (incompleteHandled) incompleteHandler(pos, msg)
+ else reporter.error(pos, msg)
+ }
+} \ No newline at end of file
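
A sketch, assuming a configured Global instance, of how a REPL-style client might install an incomplete-input handler through the new per-run parsing state; the settings, the parsed snippet and the handler body are illustrative only, not part of this change set.

// Hedged sketch: routing incomplete-input reports through PerRunParsing
// instead of the reporter. Settings, classpath flag and the parsed snippet
// are placeholders for illustration.
import scala.tools.nsc.{ Global, Settings }
import scala.tools.nsc.reporters.ConsoleReporter

object IncompleteDemo {
  def main(args: Array[String]): Unit = {
    val settings = new Settings()
    settings.usejavacp.value = true
    val global   = new Global(settings, new ConsoleReporter(settings))
    val run      = new global.Run()

    var sawIncomplete = false
    run.parsing.withIncompleteHandler((_, _) => sawIncomplete = true) {
      global.newUnitParser("class C {").parse()   // input a REPL might treat as unfinished
    }
    println(if (sawIncomplete) "incomplete input" else "parsed (or reported an error)")
  }
}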
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cfb4cd23a1..4b32aab5ee 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -18,7 +18,7 @@ trait PhaseAssembly {
/**
* Aux datastructure for solving the constraint system
- * The depency graph container with helper methods for node and edge creation
+ * The dependency graph container with helper methods for node and edge creation
*/
private class DependencyGraph {
@@ -127,7 +127,7 @@ trait PhaseAssembly {
}
/* Find all edges in the given graph that are hard links. For each hard link we
- * need to check that its the only dependency. If not, then we will promote the
+ * need to check that it's the only dependency. If not, then we will promote the
* other dependencies down
*/
def validateAndEnforceHardlinks() {
@@ -199,7 +199,7 @@ trait PhaseAssembly {
// Add all phases in the set to the graph
val graph = phasesSetToDepGraph(phasesSet)
- val dot = if (settings.genPhaseGraph.isSetByUser) Some(settings.genPhaseGraph.value) else None
+ val dot = settings.genPhaseGraph.valueSetByUser
// Output the phase dependency graph at this stage
def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot"))
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index ed5fda9c3f..9f160e2485 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -11,9 +11,12 @@ object Properties extends scala.util.PropertiesTrait {
protected def propCategory = "compiler"
protected def pickJarBasedOn = classOf[Global]
- // settings based on jar properties
+ // settings based on jar properties, falling back to System prefixed by "scala."
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
+ // message to display at EOF (which by default ends with
+ // a newline so as not to break the user's terminal)
+ def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator")
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala
new file mode 100644
index 0000000000..4e7a527a5a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/Reporting.scala
@@ -0,0 +1,107 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc.
+ * @author Adriaan Moors
+ */
+
+package scala
+package tools
+package nsc
+
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.StringOps.countElementsAsString
+
+/** Provides delegates to the reporter doing the actual work.
+ * PerRunReporting implements per-Run stateful info tracking and reporting
+ *
+ * TODO: make reporting configurable
+ */
+trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions with CompilationUnits with scala.reflect.internal.Symbols =>
+ def settings: Settings
+
+ // not deprecated yet, but a method called "error" imported into
+ // nearly every trait really must go. For now using globalError.
+ def error(msg: String) = globalError(msg)
+
+ // a new instance of this class is created for every Run (access the current instance via `currentRun.reporting`)
+ protected def PerRunReporting = new PerRunReporting
+ class PerRunReporting extends PerRunReportingBase {
+ /** Collects for certain classes of warnings during this run. */
+ private class ConditionalWarning(what: String, option: Settings#BooleanSetting)(reRunFlag: String = option.name) {
+ val warnings = mutable.LinkedHashMap[Position, String]()
+ def warn(pos: Position, msg: String) =
+ if (option) reporter.warning(pos, msg)
+ else if (!(warnings contains pos)) warnings += ((pos, msg))
+ def summarize() =
+ if (warnings.nonEmpty && (option.isDefault || option)) {
+ val numWarnings = warnings.size
+ val warningVerb = if (numWarnings == 1) "was" else "were"
+ val warningCount = countElementsAsString(numWarnings, s"$what warning")
+
+ reporter.warning(NoPosition, s"there $warningVerb $warningCount; re-run with $reRunFlag for details")
+ }
+ }
+
+ // This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so
+ // as to recover uncheckedWarnings for its ever-fragile compiler interface.
+ private val _deprecationWarnings = new ConditionalWarning("deprecation", settings.deprecation)()
+ private val _uncheckedWarnings = new ConditionalWarning("unchecked", settings.unchecked)()
+ private val _featureWarnings = new ConditionalWarning("feature", settings.feature)()
+ private val _inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)(if (settings.isBCodeActive) settings.YoptWarnings.name else settings.YinlinerWarnings.name)
+ private val _allConditionalWarnings = List(_deprecationWarnings, _uncheckedWarnings, _featureWarnings, _inlinerWarnings)
+
+ // TODO: remove in favor of the overload that takes a Symbol, give that argument a default (NoSymbol)
+ def deprecationWarning(pos: Position, msg: String): Unit = _deprecationWarnings.warn(pos, msg)
+ def uncheckedWarning(pos: Position, msg: String): Unit = _uncheckedWarnings.warn(pos, msg)
+ def featureWarning(pos: Position, msg: String): Unit = _featureWarnings.warn(pos, msg)
+ def inlinerWarning(pos: Position, msg: String): Unit = _inlinerWarnings.warn(pos, msg)
+
+ def deprecationWarnings = _deprecationWarnings.warnings.toList
+ def uncheckedWarnings = _uncheckedWarnings.warnings.toList
+ def featureWarnings = _featureWarnings.warnings.toList
+ def inlinerWarnings = _inlinerWarnings.warnings.toList
+
+ def allConditionalWarnings = _allConditionalWarnings flatMap (_.warnings)
+
+ // behold! the symbol that caused the deprecation warning (may not be deprecated itself)
+ def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = _deprecationWarnings.warn(pos, msg)
+ def deprecationWarning(pos: Position, sym: Symbol): Unit = {
+ val suffix = sym.deprecationMessage match { case Some(msg) => ": "+ msg case _ => "" }
+ deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$suffix")
+ }
+
+ private[this] var reportedFeature = Set[Symbol]()
+ def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean): Unit = {
+ val req = if (required) "needs to" else "should"
+ val fqname = "scala.language." + featureName
+ val explain = (
+ if (reportedFeature contains featureTrait) "" else
+ s"""|
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$featureName.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled.""".stripMargin
+ )
+ reportedFeature += featureTrait
+
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
+ if (required) reporter.error(pos, msg)
+ else featureWarning(pos, msg)
+ }
+
+ /** Has any macro expansion used a fallback during this run? */
+ var seenMacroExpansionsFallingBack = false
+
+ def summarizeErrors(): Unit = if (!reporter.hasErrors) {
+ _allConditionalWarnings foreach (_.summarize())
+
+ if (seenMacroExpansionsFallingBack)
+ reporter.warning(NoPosition, "some macros could not be expanded and code fell back to overridden methods;"+
+ "\nrecompiling with generated classfiles on the classpath might help.")
+
+ // todo: migrationWarnings
+
+ if (settings.fatalWarnings && reporter.hasWarnings)
+ reporter.error(NoPosition, "No warnings can be incurred under -Xfatal-warnings.")
+ }
+ }
+}
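
The ConditionalWarning/summarize pattern above can be modelled in isolation. The following stand-alone sketch uses invented names (WarningBucket is not compiler API) to show the emit-now-or-summarize-later behaviour and the per-position deduplication.

// Hedged, self-contained model of the ConditionalWarning idea: warnings are
// emitted immediately when the corresponding flag is on, otherwise deduplicated
// by position and rolled up into a single summary line at the end of the run.
import scala.collection.mutable

object WarningSummaryDemo {
  final class WarningBucket(what: String, enabled: Boolean, reRunFlag: String) {
    private val pending = mutable.LinkedHashMap[Int, String]()   // position -> message
    def warn(pos: Int, msg: String): Unit =
      if (enabled) println(s"warning: $msg")
      else if (!pending.contains(pos)) pending += pos -> msg
    def summarize(): Unit =
      if (pending.nonEmpty) {
        val verb = if (pending.size == 1) "was" else "were"
        println(s"warning: there $verb ${pending.size} $what warning(s); re-run with $reRunFlag for details")
      }
  }

  def main(args: Array[String]): Unit = {
    val deprecations = new WarningBucket("deprecation", enabled = false, "-deprecation")
    deprecations.warn(10, "method foo is deprecated")
    deprecations.warn(10, "method foo is deprecated")   // same position: deduplicated
    deprecations.warn(42, "method bar is deprecated")
    deprecations.summarize()   // one summary line, as summarizeErrors() does per category
  }
}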
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index c2d62db558..6d24b31531 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -6,9 +6,12 @@
package scala
package tools.nsc
-import io.{ Directory, File, Path }
+import io.{ AbstractFile, Directory, File, Path }
import java.io.IOException
+import scala.tools.nsc.classpath.DirectoryFlatClassPath
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import util.Exceptional.unwrap
/** An object that runs Scala code in script files.
@@ -111,6 +114,14 @@ class ScriptRunner extends HasCompileSocket {
else None
}
+ def hasClassToRun(d: Directory): Boolean = {
+ val cp = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Recursive => DefaultJavaContext.newClassPath(AbstractFile.getDirectory(d))
+ case ClassPathRepresentationType.Flat => DirectoryFlatClassPath(d.jfile)
+ }
+ cp.findClass(mainClass).isDefined
+ }
+
/* The script runner calls sys.exit to communicate a return value, but this must
* not take place until there are no non-daemon threads running. Tickets #1955, #2006.
*/
@@ -124,15 +135,21 @@ class ScriptRunner extends HasCompileSocket {
compile match {
case Some(compiledPath) =>
- try io.Jar.create(jarFile, compiledPath, mainClass)
- catch { case _: Exception => jarFile.delete() }
-
- if (jarOK) {
- compiledPath.deleteRecursively()
- handler(jarFile.toAbsolute.path)
+ if (!hasClassToRun(compiledPath)) {
+ // it compiled ok, but there is nothing to run;
+ // running an empty script should succeed
+ true
+ } else {
+ try io.Jar.create(jarFile, compiledPath, mainClass)
+ catch { case _: Exception => jarFile.delete() }
+
+ if (jarOK) {
+ compiledPath.deleteRecursively()
+ handler(jarFile.toAbsolute.path)
+ }
+ // jar failed; run directly from the class files
+ else handler(compiledPath.path)
}
- // jar failed; run directly from the class files
- else handler(compiledPath.path)
case _ => false
}
}
@@ -140,8 +157,8 @@ class ScriptRunner extends HasCompileSocket {
if (jarOK) handler(jarFile.toAbsolute.path) // pre-compiled jar is current
else recompile() // jar old - recompile the script.
}
- // don't use a cache jar at all--just use the class files
- else compile exists (cp => handler(cp.path))
+ // don't use a cache jar at all--just use the class files, if they exist
+ else compile exists (cp => !hasClassToRun(cp) || handler(cp.path))
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6d9b41ec45..02a199f7ac 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -59,14 +59,21 @@ trait DocComments { self: Global =>
comment.defineVariables(sym)
}
+
+ def replaceInheritDocToInheritdoc(docStr: String): String = {
+ docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")
+ }
+
/** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
* missing sections of an inherited doc comment.
* If a symbol does not have a doc comment but some overridden version of it does,
* the doc comment of the overridden version is copied instead.
*/
def cookedDocComment(sym: Symbol, docStr: String = ""): String = cookedDocComments.getOrElseUpdate(sym, {
- val ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse ""
+ var ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse ""
else DocComment(docStr).template
+ ownComment = replaceInheritDocToInheritdoc(ownComment)
+
superComment(sym) match {
case None =>
if (ownComment.indexOf("@inheritdoc") != -1)
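
A small sketch of the rewrite the new helper performs on raw doc comments before inherited sections are merged in; the doc string below is an invented example.

// Hedged illustration of the substitution applied by replaceInheritDocToInheritdoc:
// Java-style {@inheritDoc} markers (with optional trailing spaces) are rewritten
// to Scaladoc's @inheritdoc.
object InheritdocDemo {
  def rewrite(docStr: String): String =
    docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")

  def main(args: Array[String]): Unit = {
    println(rewrite("/** {@inheritDoc} plus subclass-specific notes. */"))
    // prints: /** @inheritdoc plus subclass-specific notes. */
  }
}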
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 9c8e13a1a9..6fe85cde7a 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -9,6 +9,7 @@ package ast
import scala.compat.Platform.EOL
import symtab.Flags._
import scala.language.postfixOps
+import scala.reflect.internal.util.ListOfNil
/** The object `nodePrinter` converts the internal tree
* representation to a string.
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 0731d78a9b..689e6405d0 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -74,6 +74,11 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
}
}
+ // TODO these overrides, and the slow trickle of bugs that they solve (e.g. SI-8479),
+ // suggest that we should pursue an alternative design in which the DocDef nodes
+ // are eliminated from the tree before typer, and instead are modelled as tree
+ // attachments.
+
/** Is tree legal as a member definition of an interface?
*/
override def isInterfaceMember(tree: Tree): Boolean = tree match {
@@ -81,6 +86,11 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case _ => super.isInterfaceMember(tree)
}
+ override def isConstructorWithDefault(t: Tree) = t match {
+ case DocDef(_, definition) => isConstructorWithDefault(definition)
+ case _ => super.isConstructorWithDefault(t)
+ }
+
/** Is tree a pure (i.e. non-side-effecting) definition?
*/
override def isPureDef(tree: Tree): Boolean = tree match {
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 3652f51153..934257092f 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -178,7 +178,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
}
}
- // Finally, noone resetAllAttrs it anymore, so I'm removing it from the compiler.
+ // Finally, no one uses resetAllAttrs anymore, so I'm removing it from the compiler.
// Even though it's with great pleasure I'm doing that, I'll leave its body here to warn future generations about what happened in the past.
//
// So what actually happened in the past is that we used to have two flavors of resetAttrs: resetAllAttrs and resetLocalAttrs.
@@ -308,7 +308,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// Erasing locally-defined symbols is useful to prevent tree corruption, but erasing external bindings is not,
// therefore we want to retain those bindings, especially given that restoring them can be impossible
// if we move these trees into lexical contexts different from their original locations.
- if (dupl.hasSymbol) {
+ if (dupl.hasSymbolField) {
val sym = dupl.symbol
val vetoScope = !brutally && !(locals contains sym) && !(locals contains sym.deSkolemize)
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index d3f495f280..52b8a51a79 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
package ast.parser
+import scala.annotation.tailrec
import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
@@ -172,20 +173,19 @@ trait MarkupParsers {
}
def appendText(pos: Position, ts: Buffer[Tree], txt: String): Unit = {
- def append(t: String) = ts append handle.text(pos, t)
-
- if (preserveWS) append(txt)
- else {
+ def append(text: String): Unit = {
+ val tree = handle.text(pos, text)
+ ts append tree
+ }
+ val clean = if (preserveWS) txt else {
val sb = new StringBuilder()
-
txt foreach { c =>
if (!isSpace(c)) sb append c
else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
}
-
- val trimmed = sb.toString.trim
- if (!trimmed.isEmpty) append(trimmed)
+ sb.toString.trim
}
+ if (!clean.isEmpty) append(clean)
}
/** adds entity/character to ts as side-effect
@@ -216,44 +216,75 @@ trait MarkupParsers {
if (xCheckEmbeddedBlock) ts append xEmbeddedExpr
else appendText(p, ts, xText)
- /** Returns true if it encounters an end tag (without consuming it),
- * appends trees to ts as side-effect.
+ /** At an open angle-bracket, detects an end tag
+ * or consumes CDATA, comment, PI or element.
+ * Trees are appended to `ts` as a side-effect.
+ * @return true if an end tag (without consuming it)
*/
- private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
- if (ch == '/')
- return true // end tag
-
- val toAppend = ch match {
- case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
- case '?' => nextch() ; xProcInstr // PI
- case _ => element // child node
+ private def content_LT(ts: ArrayBuffer[Tree]): Boolean =
+ (ch == '/') || {
+ val toAppend = ch match {
+ case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch() ; xProcInstr // PI
+ case _ => element // child node
+ }
+ ts append toAppend
+ false
}
- ts append toAppend
- false
- }
-
def content: Buffer[Tree] = {
val ts = new ArrayBuffer[Tree]
- while (true) {
- if (xEmbeddedBlock)
+ val coalescing = settings.XxmlSettings.isCoalescing
+ @tailrec def loopContent(): Unit =
+ if (xEmbeddedBlock) {
ts append xEmbeddedExpr
- else {
+ loopContent()
+ } else {
tmppos = o2p(curOffset)
ch match {
- // end tag, cdata, comment, pi or child node
- case '<' => nextch() ; if (content_LT(ts)) return ts
- // either the character '{' or an embedded scala block }
- case '{' => content_BRACE(tmppos, ts) // }
- // EntityRef or CharRef
- case '&' => content_AMP(ts)
- case SU => return ts
- // text content - here xEmbeddedBlock might be true
- case _ => appendText(tmppos, ts, xText)
+ case '<' => // end tag, cdata, comment, pi or child node
+ nextch()
+ if (!content_LT(ts)) loopContent()
+ case '{' => // } literal brace or embedded Scala block
+ content_BRACE(tmppos, ts)
+ loopContent()
+ case '&' => // EntityRef or CharRef
+ content_AMP(ts)
+ loopContent()
+ case SU => ()
+ case _ => // text content - here xEmbeddedBlock might be true
+ appendText(tmppos, ts, xText)
+ loopContent()
}
}
+ // merge text sections and strip attachments
+ def coalesce(): ArrayBuffer[Tree] = {
+ def copy() = {
+ val buf = new ArrayBuffer[Tree]
+ var acc = new StringBuilder
+ var pos: Position = NoPosition
+ def emit() = if (acc.nonEmpty) {
+ appendText(pos, buf, acc.toString)
+ acc.clear()
+ }
+ for (t <- ts)
+ t.attachments.get[handle.TextAttache] match {
+ case Some(ta) =>
+ if (acc.isEmpty) pos = ta.pos
+ acc append ta.text
+ case _ =>
+ emit()
+ buf += t
+ }
+ emit()
+ buf
+ }
+ val res = if (ts.count(_.hasAttachment[handle.TextAttache]) > 1) copy() else ts
+ for (t <- res) t.removeAttachment[handle.TextAttache]
+ res
}
- unreachable
+ loopContent()
+ if (coalescing) coalesce() else ts
}
/** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
@@ -289,20 +320,16 @@ trait MarkupParsers {
private def xText: String = {
assert(!xEmbeddedBlock, "internal error: encountered embedded block")
val buf = new StringBuilder
- def done = buf.toString
-
- while (ch != SU) {
- if (ch == '}') {
- if (charComingAfter(nextch()) == '}') nextch()
- else errorBraces()
- }
-
- buf append ch
- nextch()
- if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
- return done
- }
- done
+ if (ch != SU)
+ do {
+ if (ch == '}') {
+ if (charComingAfter(nextch()) == '}') nextch()
+ else errorBraces()
+ }
+ buf append ch
+ nextch()
+ } while (!(ch == SU || xCheckEmbeddedBlock || ch == '<' || ch == '&'))
+ buf.toString
}
/** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */
@@ -344,14 +371,13 @@ trait MarkupParsers {
tmppos = o2p(curOffset) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees
content_LT(ts)
- // parse more XML ?
+ // parse more XML?
if (charComingAfter(xSpaceOpt()) == '<') {
- xSpaceOpt()
- while (ch == '<') {
- nextch()
- ts append element
+ do {
xSpaceOpt()
- }
+ nextch()
+ content_LT(ts)
+ } while (charComingAfter(xSpaceOpt()) == '<')
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
else {
@@ -426,11 +452,10 @@ trait MarkupParsers {
if (ch != '/') ts append xPattern // child
else return false // terminate
- case '{' => // embedded Scala patterns
- while (ch == '{') {
- nextch()
+ case '{' if xCheckEmbeddedBlock => // embedded Scala patterns, if not double brace
+ do {
ts ++= xScalaPatterns
- }
+ } while (xCheckEmbeddedBlock)
assert(!xEmbeddedBlock, "problem with embedded block")
case SU =>
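
The coalesce() support above merges adjacent text attachments when -Xxml:coalescing is in effect. A sketch of the source-level difference, assuming scala-xml is on the classpath; the stated node shapes are indicative, not a normative spec of the output.

// Hedged illustration of what the coalescing support above is about: with
// -Xxml:coalescing, adjacent character data and CDATA sections in an XML literal
// are merged into one Text node; without it, the CDATA part stays a separate
// PCData node. Requires scala-xml on the classpath; node shapes are indicative.
object XmlCoalescingDemo {
  val msg = <msg>hello <![CDATA[world]]></msg>
  // with    -Xxml:coalescing : msg.child == Seq(Text("hello world"))
  // without -Xxml:coalescing : msg.child == Seq(Text("hello "), PCData("world"))
}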
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 9e631febee..4f195c2985 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -13,7 +13,7 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, StringBuilder }
import scala.reflect.internal.{ Precedence, ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator, ListOfNil }
import Tokens._
/** Historical note: JavaParsers started life as a direct copy of Parsers
@@ -154,8 +154,8 @@ self =>
def unit = global.currentUnit
// suppress warnings; silent abort on errors
- def warning(offset: Offset, msg: String) {}
- def deprecationWarning(offset: Offset, msg: String) {}
+ def warning(offset: Offset, msg: String): Unit = ()
+ def deprecationWarning(offset: Offset, msg: String): Unit = ()
def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
@@ -204,13 +204,11 @@ self =>
override def newScanner() = new UnitScanner(unit, patches)
- override def warning(offset: Offset, msg: String) {
- unit.warning(o2p(offset), msg)
- }
+ override def warning(offset: Offset, msg: String): Unit =
+ reporter.warning(o2p(offset), msg)
- override def deprecationWarning(offset: Offset, msg: String) {
- unit.deprecationWarning(o2p(offset), msg)
- }
+ override def deprecationWarning(offset: Offset, msg: String): Unit =
+ currentRun.reporting.deprecationWarning(o2p(offset), msg)
private var smartParsing = false
@inline private def withSmartParsing[T](body: => T): T = {
@@ -224,17 +222,17 @@ self =>
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
for ((offset, msg) <- syntaxErrors)
- unit.error(o2p(offset), msg)
+ reporter.error(o2p(offset), msg)
- override def syntaxError(offset: Offset, msg: String) {
+ override def syntaxError(offset: Offset, msg: String): Unit = {
if (smartParsing) syntaxErrors += ((offset, msg))
- else unit.error(o2p(offset), msg)
+ else reporter.error(o2p(offset), msg)
}
- override def incompleteInputError(msg: String) {
+ override def incompleteInputError(msg: String): Unit = {
val offset = source.content.length - 1
if (smartParsing) syntaxErrors += ((offset, msg))
- else unit.incompleteInputError(o2p(offset), msg)
+ else currentRun.parsing.incompleteInputError(o2p(offset), msg)
}
/** parse unit. If there are unbalanced braces,
@@ -335,7 +333,7 @@ self =>
*/
private var inScalaPackage = false
private var currentPackage = ""
- def resetPackage() {
+ def resetPackage(): Unit = {
inScalaPackage = false
currentPackage = ""
}
@@ -514,7 +512,7 @@ self =>
finally inFunReturnType = saved
}
- protected def skip(targetToken: Token) {
+ protected def skip(targetToken: Token): Unit = {
var nparens = 0
var nbraces = 0
while (true) {
@@ -544,27 +542,25 @@ self =>
}
def warning(offset: Offset, msg: String): Unit
def incompleteInputError(msg: String): Unit
- private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
- syntaxError(pos pointOrElse in.offset, msg, skipIt)
- }
def syntaxError(offset: Offset, msg: String): Unit
- def syntaxError(msg: String, skipIt: Boolean) {
+
+ private def syntaxError(pos: Position, msg: String, skipIt: Boolean): Unit =
+ syntaxError(pos pointOrElse in.offset, msg, skipIt)
+ def syntaxError(msg: String, skipIt: Boolean): Unit =
syntaxError(in.offset, msg, skipIt)
- }
- def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
+ def syntaxError(offset: Offset, msg: String, skipIt: Boolean): Unit = {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
- // no more errors on this token.
- lastErrorOffset = in.offset
+ lastErrorOffset = in.offset // no more errors on this token.
}
if (skipIt)
skip(UNDEF)
}
- def warning(msg: String) { warning(in.offset, msg) }
+ def warning(msg: String): Unit = warning(in.offset, msg)
- def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean) {
+ def syntaxErrorOrIncomplete(msg: String, skipIt: Boolean): Unit = {
if (in.token == EOF)
incompleteInputError(msg)
else
@@ -654,9 +650,10 @@ self =>
def isIdentExcept(except: Name) = isIdent && in.name != except
def isIdentOf(name: Name) = isIdent && in.name == name
- def isUnaryOp = isIdent && raw.isUnary(in.name)
- def isRawStar = isIdent && in.name == raw.STAR
- def isRawBar = isIdent && in.name == raw.BAR
+ def isUnaryOp = isIdent && raw.isUnary(in.name)
+ def isRawStar = isRawIdent && in.name == raw.STAR
+ def isRawBar = isRawIdent && in.name == raw.BAR
+ def isRawIdent = in.token == IDENTIFIER
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw
@@ -718,7 +715,7 @@ self =>
/** Convert tree to formal parameter. */
def convertToParam(tree: Tree): ValDef = atPos(tree.pos) {
- def removeAsPlaceholder(name: Name) {
+ def removeAsPlaceholder(name: Name): Unit = {
placeholderParams = placeholderParams filter (_.name != name)
}
def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end))
@@ -1001,19 +998,30 @@ self =>
}
def infixTypeRest(t: Tree, mode: InfixMode.Value): Tree = {
- if (isIdent && in.name != nme.STAR) {
- val opOffset = in.offset
+ // Detect postfix star for repeated args.
+ // Only RPAREN can follow, but accept COMMA and EQUALS for error's sake.
+ // Take RBRACE as a paren typo.
+ def checkRepeatedParam = if (isRawStar) {
+ lookingAhead (in.token match {
+ case RPAREN | COMMA | EQUALS | RBRACE => t
+ case _ => EmptyTree
+ })
+ } else EmptyTree
+ def asInfix = {
+ val opOffset = in.offset
val leftAssoc = treeInfo.isLeftAssoc(in.name)
- if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp)
- val op = identForType()
- val tycon = atPos(opOffset) { Ident(op) }
+ if (mode != InfixMode.FirstOp)
+ checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp)
+ val tycon = atPos(opOffset) { Ident(identForType()) }
newLineOptWhenFollowing(isTypeIntroToken)
def mkOp(t1: Tree) = atPos(t.pos.start, opOffset) { AppliedTypeTree(tycon, List(t, t1)) }
if (leftAssoc)
infixTypeRest(mkOp(compoundType()), InfixMode.LeftOp)
else
mkOp(infixType(InfixMode.RightOp))
- } else t
+ }
+ if (isIdent) checkRepeatedParam orElse asInfix
+ else t
}
/** {{{
@@ -1051,7 +1059,7 @@ self =>
def identOrMacro(): Name = if (isMacro) rawIdent() else ident()
def selector(t: Tree): Tree = {
- val point = in.offset
+ val point = if (isIdent) in.offset else in.lastOffset // SI-8459
//assert(t.pos.isDefined, t)
if (t != EmptyTree)
Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset)
@@ -1221,15 +1229,15 @@ self =>
skipIt = true)(EmptyTree)
// Like Swiss cheese, with holes
def stringCheese: Tree = atPos(in.offset) {
- val start = in.offset
+ val start = in.offset
val interpolator = in.name.encoded // ident() for INTERPOLATIONID
val partsBuf = new ListBuffer[Tree]
- val exprBuf = new ListBuffer[Tree]
+ val exprsBuf = new ListBuffer[Tree]
in.nextToken()
while (in.token == STRINGPART) {
partsBuf += literal()
- exprBuf += (
+ exprsBuf += (
if (inPattern) dropAnyBraces(pattern())
else in.token match {
case IDENTIFIER => atPos(in.offset)(Ident(ident()))
@@ -1242,33 +1250,35 @@ self =>
}
if (in.token == STRINGLIT) partsBuf += literal()
+ // Note that it is intentional that the ident is not rooted, for purposes of virtualization
+ //val t1 = atPos(o2p(start)) { Select(Select (Ident(nme.ROOTPKG), nme.scala_), nme.StringContext) }
val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
t2 setPos t2.pos.makeTransparent
val t3 = Select(t2, interpolator) setPos t2.pos
- atPos(start) { Apply(t3, exprBuf.toList) }
+ atPos(start) { Apply(t3, exprsBuf.toList) }
}
if (inPattern) stringCheese
- else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
+ else withPlaceholders(stringCheese, isAny = true) // string interpolator params are Any* by definition
}
/* ------------- NEW LINES ------------------------------------------------- */
- def newLineOpt() {
+ def newLineOpt(): Unit = {
if (in.token == NEWLINE) in.nextToken()
}
- def newLinesOpt() {
+ def newLinesOpt(): Unit = {
if (in.token == NEWLINE || in.token == NEWLINES)
in.nextToken()
}
- def newLineOptWhenFollowedBy(token: Offset) {
+ def newLineOptWhenFollowedBy(token: Offset): Unit = {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: Token => Boolean) {
+ def newLineOptWhenFollowing(p: Token => Boolean): Unit = {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
@@ -1549,7 +1559,7 @@ self =>
}
/** {{{
- * PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
+ * PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
* }}}
*/
def prefixExpr(): Tree = {
@@ -2778,7 +2788,7 @@ self =>
*/
def packageObjectDef(start: Offset): PackageDef = {
val defn = objectDef(in.offset, NoMods)
- val pidPos = o2p(defn.pos.startOrPoint)
+ val pidPos = o2p(defn.pos.start)
val pkgPos = r2p(start, pidPos.point)
gen.mkPackageObject(defn, pidPos, pkgPos)
}
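
The checkRepeatedParam change above separates a trailing * that closes a repeated parameter from * used as an infix type operator. A sketch of the two forms the parser must distinguish; the * type alias is an invented example.

// Hedged sketch of the two readings infixTypeRest distinguishes: a trailing *
// followed by ')' closes a repeated parameter, while * followed by another
// type is an infix type constructor.
object RepeatedVsInfixStar {
  type *[A, B] = (A, B)

  def repeated(xs: Int*): Int    = xs.sum   // star followed by ')': repeated parameter
  def pair(p: Int * String): Int = p._1     // star followed by an identifier: infix type *[Int, String]
}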
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index e8d46704c3..92833d647b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -113,7 +113,7 @@ trait Scanners extends ScannersCommon {
case SU | CR | LF =>
case _ => nextChar() ; skipLineComment()
}
- private def maybeOpen() {
+ private def maybeOpen(): Unit = {
putCommentChar()
if (ch == '*') {
putCommentChar()
@@ -137,7 +137,7 @@ trait Scanners extends ScannersCommon {
def skipDocComment(): Unit = skipNestedComments()
def skipBlockComment(): Unit = skipNestedComments()
- private def skipToCommentEnd(isLineComment: Boolean) {
+ private def skipToCommentEnd(isLineComment: Boolean): Unit = {
nextChar()
if (isLineComment) skipLineComment()
else {
@@ -185,7 +185,7 @@ trait Scanners extends ScannersCommon {
/** append Unicode character to "cbuf" buffer
*/
- protected def putChar(c: Char) {
+ protected def putChar(c: Char): Unit = {
// assert(cbuf.size < 10000, cbuf)
cbuf.append(c)
}
@@ -196,7 +196,7 @@ trait Scanners extends ScannersCommon {
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token */
- private def finishNamed(idtoken: Token = IDENTIFIER) {
+ private def finishNamed(idtoken: Token = IDENTIFIER): Unit = {
name = newTermName(cbuf.toString)
cbuf.clear()
token = idtoken
@@ -215,7 +215,7 @@ trait Scanners extends ScannersCommon {
}
/** Clear buffer and set string */
- private def setStrVal() {
+ private def setStrVal(): Unit = {
strVal = cbuf.toString
cbuf.clear()
}
@@ -270,7 +270,7 @@ trait Scanners extends ScannersCommon {
/** Produce next token, filling TokenData fields of Scanner.
*/
- def nextToken() {
+ def nextToken(): Unit = {
val lastToken = token
// Adapt sepRegions according to last token
(lastToken: @switch) match {
@@ -341,7 +341,7 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
- def resetOffset() {
+ def resetOffset(): Unit = {
offset = prev.offset
lastOffset = prev.lastOffset
}
@@ -399,7 +399,7 @@ trait Scanners extends ScannersCommon {
/** read next token, filling TokenData fields of Scanner.
*/
- protected final def fetchToken() {
+ protected final def fetchToken(): Unit = {
offset = charOffset - 1
(ch: @switch) match {
@@ -453,18 +453,15 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
case '0' =>
- def fetchZero() = {
- putChar(ch)
+ def fetchLeadingZero(): Unit = {
nextChar()
- if (ch == 'x' || ch == 'X') {
- nextChar()
- base = 16
- } else {
- base = 8
+ ch match {
+ case 'x' | 'X' => base = 16 ; nextChar()
+ case _ => base = 8 // single decimal zero, perhaps
}
- getNumber()
}
- fetchZero()
+ fetchLeadingZero()
+ getNumber()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
@@ -604,7 +601,7 @@ trait Scanners extends ScannersCommon {
// Identifiers ---------------------------------------------------------------
- private def getBackquotedIdent() {
+ private def getBackquotedIdent(): Unit = {
nextChar()
getLitChars('`')
if (ch == '`') {
@@ -664,7 +661,7 @@ trait Scanners extends ScannersCommon {
else finishNamed()
}
- private def getIdentOrOperatorRest() {
+ private def getIdentOrOperatorRest(): Unit = {
if (isIdentifierPart(ch))
getIdentRest()
else ch match {
@@ -688,9 +685,11 @@ trait Scanners extends ScannersCommon {
setStrVal()
nextChar()
token = STRINGLIT
- } else syntaxError("unclosed string literal")
+ } else unclosedStringLit()
}
+ private def unclosedStringLit(): Unit = syntaxError("unclosed string literal")
+
private def getRawStringLit(): Unit = {
if (ch == '\"') {
nextRawChar()
@@ -764,7 +763,7 @@ trait Scanners extends ScannersCommon {
if (multiLine)
incompleteInputError("unclosed multi-line string literal")
else
- syntaxError("unclosed string literal")
+ unclosedStringLit()
}
else {
putChar(ch)
@@ -857,7 +856,7 @@ trait Scanners extends ScannersCommon {
/** read fractional part and exponent of floating point number
* if one is present.
*/
- protected def getFraction() {
+ protected def getFraction(): Unit = {
token = DOUBLELIT
while ('0' <= ch && ch <= '9') {
putChar(ch)
@@ -900,62 +899,61 @@ trait Scanners extends ScannersCommon {
*/
def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0
- /** Convert current strVal, base to long value
+ /** Convert current strVal, base to long value.
* This is tricky because of max negative value.
+ *
+ * Conversions in base 10 and 16 are supported. As a permanent migration
+ * path, attempts to write base 8 literals except `0` emit a verbose error.
*/
def intVal(negated: Boolean): Long = {
- if (token == CHARLIT && !negated) {
- charVal.toLong
- } else {
- var value: Long = 0
- val divider = if (base == 10) 1 else 2
- val limit: Long =
- if (token == LONGLIT) Long.MaxValue else Int.MaxValue
- var i = 0
+ def malformed: Long = {
+ if (base == 8) syntaxError("Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)")
+ else syntaxError("malformed integer number")
+ 0
+ }
+ def tooBig: Long = {
+ syntaxError("integer number too large")
+ 0
+ }
+ def intConvert: Long = {
val len = strVal.length
- while (i < len) {
- val d = digit2int(strVal charAt i, base)
- if (d < 0) {
- syntaxError("malformed integer number")
- return 0
- }
- if (value < 0 ||
- limit / (base / divider) < value ||
- limit - (d / divider) < value * (base / divider) &&
- !(negated && limit == value * base - 1 + d)) {
- syntaxError("integer number too large")
- return 0
- }
- value = value * base + d
- i += 1
+ if (len == 0) {
+ if (base != 8) syntaxError("missing integer number") // e.g., 0x;
+ 0
+ } else {
+ val divider = if (base == 10) 1 else 2
+ val limit: Long = if (token == LONGLIT) Long.MaxValue else Int.MaxValue
+ @tailrec def convert(value: Long, i: Int): Long =
+ if (i >= len) value
+ else {
+ val d = digit2int(strVal charAt i, base)
+ if (d < 0)
+ malformed
+ else if (value < 0 ||
+ limit / (base / divider) < value ||
+ limit - (d / divider) < value * (base / divider) &&
+ !(negated && limit == value * base - 1 + d))
+ tooBig
+ else
+ convert(value * base + d, i + 1)
+ }
+ val result = convert(0, 0)
+ if (base == 8) malformed else if (negated) -result else result
}
- if (negated) -value else value
}
+ if (token == CHARLIT && !negated) charVal.toLong else intConvert
}
def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
- */
+ */
def floatVal(negated: Boolean): Double = {
-
- val limit: Double =
- if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
+ val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
- def isDeprecatedForm = {
- val idx = strVal indexOf '.'
- (idx == strVal.length - 1) || (
- (idx >= 0)
- && (idx + 1 < strVal.length)
- && (!Character.isDigit(strVal charAt (idx + 1)))
- )
- }
if (value > limit)
syntaxError("floating point number too large")
- if (isDeprecatedForm)
- syntaxError("floating point number is missing digit after dot")
-
if (negated) -value else value
} catch {
case _: NumberFormatException =>
@@ -966,93 +964,50 @@ trait Scanners extends ScannersCommon {
def floatVal: Double = floatVal(negated = false)
- def checkNoLetter() {
+ def checkNoLetter(): Unit = {
if (isIdentifierPart(ch) && ch >= ' ')
syntaxError("Invalid literal number")
}
- /** Read a number into strVal and set base
- */
- protected def getNumber() {
- val base1 = if (base < 10) 10 else base
- // Read 8,9's even if format is octal, produce a malformed number error afterwards.
- // At this point, we have already read the first digit, so to tell an innocent 0 apart
- // from an octal literal 0123... (which we want to disallow), we check whether there
- // are any additional digits coming after the first one we have already read.
- var notSingleZero = false
- while (digit2int(ch, base1) >= 0) {
- putChar(ch)
- nextChar()
- notSingleZero = true
- }
- token = INTLIT
-
- /* When we know for certain it's a number after using a touch of lookahead */
- def restOfNumber() = {
- putChar(ch)
- nextChar()
+ /** Read a number into strVal.
+ *
+ * The `base` can be 8, 10 or 16, where base 8 flags a leading zero.
+ * For ints, base 8 is legal only for the case of exactly one zero.
+ */
+ protected def getNumber(): Unit = {
+ // consume digits of a radix
+ def consumeDigits(radix: Int): Unit =
+ while (digit2int(ch, radix) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ // adding decimal point is always OK because `Double valueOf "0."` is OK
+ def restOfNonIntegralNumber(): Unit = {
+ putChar('.')
+ if (ch == '.') nextChar()
getFraction()
}
- def restOfUncertainToken() = {
- def isEfd = ch match { case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => true ; case _ => false }
- def isL = ch match { case 'l' | 'L' => true ; case _ => false }
-
- if (base <= 10 && isEfd)
- getFraction()
- else {
- // Checking for base == 8 is not enough, because base = 8 is set
- // as soon as a 0 is read in `case '0'` of method fetchToken.
- if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.")
- setStrVal()
- if (isL) {
- nextChar()
- token = LONGLIT
- }
- else checkNoLetter()
+ // after int: 5e7f, 42L, 42.toDouble but not 42b. Repair 0d.
+ def restOfNumber(): Unit = {
+ ch match {
+ case 'e' | 'E' | 'f' | 'F' |
+ 'd' | 'D' => if (cbuf.isEmpty) putChar('0'); restOfNonIntegralNumber()
+ case 'l' | 'L' => token = LONGLIT ; setStrVal() ; nextChar()
+ case _ => token = INTLIT ; setStrVal() ; checkNoLetter()
}
}
- if (base > 10 || ch != '.')
- restOfUncertainToken()
- else {
- val lookahead = lookaheadReader
- val c = lookahead.getc()
-
- /* Prohibit 1. */
- if (!isDigit(c))
- return setStrVal()
-
- val isDefinitelyNumber = (c: @switch) match {
- /** Another digit is a giveaway. */
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- true
+ // consume leading digits, provisionally an Int
+ consumeDigits(if (base == 16) 16 else 10)
- /* Backquoted idents like 22.`foo`. */
- case '`' =>
- return setStrVal() /** Note the early return */
-
- /* These letters may be part of a literal, or a method invocation on an Int.
- */
- case 'd' | 'D' | 'f' | 'F' =>
- !isIdentifierPart(lookahead.getc())
-
- /* A little more special handling for e.g. 5e7 */
- case 'e' | 'E' =>
- val ch = lookahead.getc()
- !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
-
- case x =>
- !isIdentifierStart(x)
- }
- if (isDefinitelyNumber) restOfNumber()
- else restOfUncertainToken()
- }
+ val detectedFloat: Boolean = base != 16 && ch == '.' && isDigit(lookaheadReader.getc)
+ if (detectedFloat) restOfNonIntegralNumber() else restOfNumber()
}
/** Parse character literal if current character is followed by \',
* or follow with given op and return a symbol literal token
*/
- def charLitOr(op: () => Unit) {
+ def charLitOr(op: () => Unit): Unit = {
putChar(ch)
nextChar()
if (ch == '\'') {
@@ -1068,21 +1023,19 @@ trait Scanners extends ScannersCommon {
// Errors -----------------------------------------------------------------
- /** generate an error at the given offset
- */
- def syntaxError(off: Offset, msg: String) {
+ /** generate an error at the given offset */
+ def syntaxError(off: Offset, msg: String): Unit = {
error(off, msg)
token = ERROR
}
- /** generate an error at the current token offset
- */
+ /** generate an error at the current token offset */
def syntaxError(msg: String): Unit = syntaxError(offset, msg)
def deprecationWarning(msg: String): Unit = deprecationWarning(offset, msg)
/** signal an error where the input ended in the middle of a token */
- def incompleteInputError(msg: String) {
+ def incompleteInputError(msg: String): Unit = {
incompleteInputError(offset, msg)
token = EOF
}
@@ -1134,7 +1087,7 @@ trait Scanners extends ScannersCommon {
/** Initialization method: read first char, then first token
*/
- def init() {
+ def init(): Unit = {
nextChar()
nextToken()
}
@@ -1259,9 +1212,9 @@ trait Scanners extends ScannersCommon {
class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
- override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
- override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
+ override def deprecationWarning(off: Offset, msg: String) = currentRun.reporting.deprecationWarning(unit.position(off), msg)
+ override def error (off: Offset, msg: String) = reporter.error(unit.position(off), msg)
+ override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg)
private var bracePatches: List[BracePatch] = patches
@@ -1490,6 +1443,6 @@ trait Scanners extends ScannersCommon {
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
- override def error(offset: Offset, msg: String) {}
+ override def error(offset: Offset, msg: String): Unit = ()
}
}
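
The literal-scanning rewrite above drops octal syntax. A quick sketch of what is still accepted and what now errors; the rejected line is kept commented out so the snippet itself compiles.

// Hedged examples of integer literals under the scanner changes above:
// a lone 0, hex and Long suffixes still work; a leading zero on a non-zero
// decimal literal is now rejected.
object LiteralDemo {
  val zero = 0        // fine: single decimal zero
  val hex  = 0x1F     // fine: hexadecimal
  val big  = 42L      // fine: Long suffix
  // val oct = 0123   // error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
}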
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 1abc0c860c..67241ef639 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -8,6 +8,7 @@ package ast.parser
import scala.collection.{ mutable, immutable }
import symtab.Flags.MUTABLE
+import scala.reflect.internal.util.ListOfNil
import scala.reflect.internal.util.StringOps.splitWhere
/** This class builds instance of `Tree` that represent XML.
@@ -35,6 +36,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _MetaData: NameType = "MetaData"
val _NamespaceBinding: NameType = "NamespaceBinding"
val _NodeBuffer: NameType = "NodeBuffer"
+ val _PCData: NameType = "PCData"
val _PrefixedAttribute: NameType = "PrefixedAttribute"
val _ProcInstr: NameType = "ProcInstr"
val _Text: NameType = "Text"
@@ -45,6 +47,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
private object xmlterms extends TermNames {
val _Null: NameType = "Null"
val __Elem: NameType = "Elem"
+ val _PCData: NameType = "PCData"
val __Text: NameType = "Text"
val _buf: NameType = "$buf"
val _md: NameType = "$md"
@@ -54,10 +57,15 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _xml: NameType = "xml"
}
- import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
- _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute}
+ import xmltypes.{
+ _Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
+ _PCData, _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute
+ }
+
+ import xmlterms.{ _Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml }
- import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
+ /** Attachment for trees deriving from text nodes (Text, CData, entities). Used for coalescing. */
+ case class TextAttache(pos: Position, text: String)
// convenience methods
private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
@@ -107,16 +115,21 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
final def entityRef(pos: Position, n: String) =
atPos(pos)( New(_scala_xml_EntityRef, LL(const(n))) )
+ private def coalescing = settings.XxmlSettings.isCoalescing
+
// create scala.xml.Text here <: scala.xml.Node
final def text(pos: Position, txt: String): Tree = atPos(pos) {
- if (isPattern) makeTextPat(const(txt))
- else makeText1(const(txt))
+ val t = if (isPattern) makeTextPat(const(txt)) else makeText1(const(txt))
+ if (coalescing) t updateAttachment TextAttache(pos, txt) else t
}
def makeTextPat(txt: Tree) = Apply(_scala_xml__Text, List(txt))
def makeText1(txt: Tree) = New(_scala_xml_Text, LL(txt))
def comment(pos: Position, text: String) = atPos(pos)( Comment(const(text)) )
- def charData(pos: Position, txt: String) = atPos(pos)( makeText1(const(txt)) )
+ def charData(pos: Position, txt: String) = if (coalescing) text(pos, txt) else atPos(pos) {
+ if (isPattern) Apply(_scala_xml(xmlterms._PCData), List(const(txt)))
+ else New(_scala_xml(_PCData), LL(const(txt)))
+ }
def procInstr(pos: Position, target: String, txt: String) =
atPos(pos)( ProcInstr(const(target), const(txt)) )
@@ -184,7 +197,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
)
val uri1 = attrMap(z) match {
- case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Apply(Select(New(Select(Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.xml), tpnme.Text)), nme.CONSTRUCTOR), List(uri @ Literal(Constant(_)))) =>
+ mkAssign(uri)
case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
case x => mkAssign(x)
}
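
The TextAttache attachment introduced above records the position and raw text of literal text nodes (Text, CData, entity references) so that, when coalescing is enabled (settings.XxmlSettings.isCoalescing), adjacent chunks can later be merged into a single Text node. A standalone, hedged sketch of that merging step, using illustrative types rather than the parser's own:

    sealed trait Node
    final case class Text(s: String) extends Node      // stands in for Text / PCData / EntityRef
    final case class Other(label: String) extends Node // any non-text node

    // Merge runs of adjacent Text nodes into one, which is what coalescing amounts to.
    def coalesce(nodes: List[Node]): List[Node] =
      nodes.foldRight(List.empty[Node]) {
        case (Text(a), Text(b) :: rest) => Text(a + b) :: rest
        case (n, acc)                   => n :: acc
      }

    // coalesce(List(Text("a"), Text("b"), Other("br"), Text("c")))
    //   == List(Text("ab"), Other("br"), Text("c"))
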
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 3a695c6f59..df2073785b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -83,7 +83,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
private def initialUnitBody(unit: CompilationUnit): Tree = {
if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (global.reporter.incompleteHandled) newUnitParser(unit).parse()
+ else if (currentRun.parsing.incompleteHandled) newUnitParser(unit).parse()
else newUnitParser(unit).smartParse()
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 32b5a98b98..6bd123c51f 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,7 +7,10 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.{ClassPath,MergedClassPath,DeltaClassPath}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.{ ClassPath, DeltaClassPath, MergedClassPath }
+import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
@@ -16,13 +19,23 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
+ private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
def classPath: ClassPath[AbstractFile] = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
+ "To use recursive classpath representation you must enable it with -YclasspathImpl:recursive compiler option.")
+
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
+ private[nsc] lazy val flatClassPath: FlatClassPath = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
+ "To use flat classpath representation you must enable it with -YclasspathImpl:flat compiler option.")
+
+ new FlatClassPathResolver(settings).result
+ }
+
/** Update classpath with a substituted subentry */
def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
@@ -55,8 +68,6 @@ trait JavaPlatform extends Platform {
(sym isNonBottomSubClass BoxedBooleanClass)
}
- def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
-
def needCompile(bin: AbstractFile, src: AbstractFile) =
src.lastModified >= bin.lastModified
}
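
Both classpath accessors above assert that the matching -YclasspathImpl mode is selected, so callers are expected to branch on that setting before touching either representation. A hedged sketch of such a dispatch (names taken from the asserts above, not a verbatim call site):

    import scala.tools.nsc.Settings
    import scala.tools.nsc.settings.ClassPathRepresentationType

    // Illustrative only: report which classpath implementation the settings select.
    def classPathKind(settings: Settings): String =
      settings.YclasspathImpl.value match {
        case ClassPathRepresentationType.Flat      => "flat (FlatClassPathResolver)"
        case ClassPathRepresentationType.Recursive => "recursive (PathResolver)"
      }
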
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index 499f8a9290..c3bc213be1 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -8,6 +8,7 @@ package backend
import util.ClassPath
import io.AbstractFile
+import scala.tools.nsc.classpath.FlatClassPath
/** The platform dependent pieces of Global.
*/
@@ -15,9 +16,12 @@ trait Platform {
val symbolTable: symtab.SymbolTable
import symbolTable._
- /** The compiler classpath. */
+ /** The old, recursive implementation of compiler classpath. */
def classPath: ClassPath[AbstractFile]
+ /** The new implementation of compiler classpath. */
+ private[nsc] def flatClassPath: FlatClassPath
+
/** Update classpath with a substitution that maps entries to entries */
def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
@@ -31,14 +35,6 @@ trait Platform {
def isMaybeBoxed(sym: Symbol): Boolean
/**
- * Tells whether a class should be loaded and entered into the package
- * scope. On .NET, this method returns `false` for all synthetic classes
- * (anonymous classes, implementation classes, module classes), their
- * symtab is encoded in the pickle of another class.
- */
- def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean
-
- /**
* Tells whether a class with both a binary and a source representation
* (found in classpath and in sourcepath) should be re-compiled. Behaves
* on the JVM similar to javac, i.e. if the source file is newer than the classfile,
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index f9551697d2..ad1975ef23 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -300,14 +300,16 @@ trait BasicBlocks {
if (!closed)
instructionList = instructionList map (x => map.getOrElse(x, x))
else
- instrs.zipWithIndex collect {
- case (oldInstr, i) if map contains oldInstr =>
- // SI-6288 clone important here because `replaceInstruction` assigns
- // a position to `newInstr`. Without this, a single instruction can
- // be added twice, and the position last position assigned clobbers
- // all previous positions in other usages.
- val newInstr = map(oldInstr).clone()
- code.touched |= replaceInstruction(i, newInstr)
+ instrs.iterator.zipWithIndex foreach {
+ case (oldInstr, i) =>
+ if (map contains oldInstr) {
+ // SI-6288 clone important here because `replaceInstruction` assigns
+ // a position to `newInstr`. Without this, a single instruction can
+ // be added twice, and the position last position assigned clobbers
+ // all previous positions in other usages.
+ val newInstr = map(oldInstr).clone()
+ code.touched |= replaceInstruction(i, newInstr)
+ }
}
////////////////////// Emit //////////////////////
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 1cea4bedda..137954b52d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -13,13 +13,12 @@ import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
-import PartialFunction._
/**
* @author Iulian Dragos
* @version 1.0
*/
-abstract class GenICode extends SubComponent {
+abstract class GenICode extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
@@ -30,6 +29,9 @@ abstract class GenICode extends SubComponent {
}
import platform.isMaybeBoxed
+ private val bCodeICodeCommon: jvm.BCodeICodeCommon[global.type] = new jvm.BCodeICodeCommon(global)
+ import bCodeICodeCommon._
+
val phaseName = "icode"
override def newPhase(prev: Phase) = new ICodePhase(prev)
@@ -678,7 +680,7 @@ abstract class GenICode extends SubComponent {
val dims = arr.dimensions
var elemKind = arr.elementKind
if (args.length > dims)
- unit.error(tree.pos, "too many arguments for array constructor: found " + args.length +
+ reporter.error(tree.pos, "too many arguments for array constructor: found " + args.length +
" but array has only " + dims + " dimension(s)")
if (args.length != dims)
for (i <- args.length until dims) elemKind = ARRAY(elemKind)
@@ -874,13 +876,20 @@ abstract class GenICode extends SubComponent {
genLoadModule(ctx, tree)
generatedType = toTypeKind(sym.info)
} else {
- try {
- val Some(l) = ctx.method.lookupLocal(sym)
- ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
- generatedType = l.kind
- } catch {
- case ex: MatchError =>
- abort("symbol " + sym + " does not exist in " + ctx.method)
+ ctx.method.lookupLocal(sym) match {
+ case Some(l) =>
+ ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
+ generatedType = l.kind
+ case None =>
+ val saved = settings.uniqid
+ settings.uniqid.value = true
+ try {
+ val methodCode = unit.body.collect { case dd: DefDef
+ if dd.symbol == ctx.method.symbol => showCode(dd);
+ }.headOption.getOrElse("<unknown>")
+ abort(s"symbol $sym does not exist in ${ctx.method}, which contains locals ${ctx.method.locals.mkString(",")}. \nMethod code: $methodCode")
+ }
+ finally settings.uniqid.value = saved
}
}
}
@@ -1075,7 +1084,7 @@ abstract class GenICode extends SubComponent {
()
case (_, UNIT) =>
ctx.bb.emit(DROP(from), pos)
- // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem
+ // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
coerce(from, to)
case _ =>
@@ -1326,15 +1335,6 @@ abstract class GenICode extends SubComponent {
List(tree)
}
- /** Some useful equality helpers.
- */
- def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true }
- def isLiteral(t: Tree) = cond(t) { case Literal(_) => true }
- def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
-
- /* If l or r is constant null, returns the other ; otherwise null */
- def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
-
/**
* Find the label denoted by `lsym` and enter it in context `ctx`.
*
@@ -1431,11 +1431,18 @@ abstract class GenICode extends SubComponent {
def genZandOrZor(and: Boolean): Boolean = {
val ctxInterm = ctx.newBlock()
- val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
+ val lhsBranchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
else genCond(lhs, ctx, thenCtx, ctxInterm)
- ctxInterm.bb killUnless branchesReachable
+ // If lhs is known to throw, we can kill the just created ctxInterm.
+ ctxInterm.bb killUnless lhsBranchesReachable
+
+ val rhsBranchesReachable = genCond(rhs, ctxInterm, thenCtx, elseCtx)
- genCond(rhs, ctxInterm, thenCtx, elseCtx)
+ // Reachable means "it does not always throw", i.e. "it might not throw".
+ // In an expression (a && b) or (a || b), the b branch might not be evaluated.
+ // Such an expression is therefore known to throw only if both expressions throw. Or,
+ // successors are reachable if either of the two is reachable (SI-8625).
+ lhsBranchesReachable || rhsBranchesReachable
}
def genRefEq(isEq: Boolean) = {
val f = genEqEqPrimitive(lhs, rhs, ctx) _
@@ -1495,7 +1502,7 @@ abstract class GenICode extends SubComponent {
if (!settings.optimise) {
if (l.tpe <:< BoxedNumberClass.tpe) {
if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030
else platform.externalEqualsNumObject
} else platform.externalEquals
} else {
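
The reachability rule in the genZandOrZor hunk above (the successors of an `a && b` / `a || b` condition are reachable if either operand's branches are) matters precisely when one operand always throws. A small illustrative example, not the SI-8625 test case:

    def alwaysThrows: Boolean = throw new IllegalStateException("boom")

    // The rhs always throws, but the lhs may short-circuit to false without
    // evaluating it, so both branches of the `if` stay reachable and must not
    // be removed as dead code.
    def f(flag: Boolean): Int =
      if (flag && alwaysThrows) 1 else 2
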
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index 0cdf629ce1..843648282b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -109,7 +109,7 @@ abstract class ICodeCheckers {
/** Only called when m1 < m2, so already known that (m1 ne m2).
*/
- private def isConfict(m1: IMember, m2: IMember, canOverload: Boolean) = (
+ private def isConflict(m1: IMember, m2: IMember, canOverload: Boolean) = (
(m1.symbol.name == m2.symbol.name) &&
(!canOverload || (m1.symbol.tpe =:= m2.symbol.tpe))
)
@@ -119,11 +119,11 @@ abstract class ICodeCheckers {
clasz = cls
for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
- if (isConfict(f1, f2, canOverload = false))
+ if (isConflict(f1, f2, canOverload = false))
icodeError("Repetitive field name: " + f1.symbol.fullName)
for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
- if (isConfict(m1, m2, canOverload = true))
+ if (isConflict(m1, m2, canOverload = true))
icodeError("Repetitive method: " + m1.symbol.fullName)
clasz.methods foreach check
@@ -471,7 +471,7 @@ abstract class ICodeCheckers {
pushStack(local.kind)
case LOAD_FIELD(field, isStatic) =>
- // the symbol's owner should contain it's field, but
+ // the symbol's owner should contain its field, but
// this is already checked by the type checker, no need
// to redo that here
if (isStatic) ()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index bc35a9e7de..10f0c6ee00 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -113,7 +113,8 @@ abstract class ICodes extends AnyRef
global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
lazy val symbolTable: global.type = global
lazy val loaders: global.loaders.type = global.loaders
- def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath
+
+ def classFileLookup: util.ClassFileLookup[AbstractFile] = global.classPath
}
/** A phase which works on icode. */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index f81c42d836..27bf836484 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -60,7 +60,7 @@ trait Primitives { self: ICodes =>
// type : (buf,el) => buf
// range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
- // jvm : It should call the appropiate 'append' method on StringBuffer
+ // jvm : It should call the appropriate 'append' method on StringBuffer
case class StringConcat(el: TypeKind) extends Primitive
/** Signals the beginning of a series of concatenations.
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 676ee12683..058b6a161d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -332,13 +332,13 @@ abstract class TypeFlowAnalysis {
`remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time.
Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow isn't needed anyway (as explained next).
- A basic block lacking a callsite in `remainingCALLs`, when visisted by the standard algorithm, won't cause any inlining.
+ A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining.
But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks.
In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite.
Those basic blocks not in that subgraph can be skipped altogether. That's why:
- `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs`
- same check is performed before adding a block to the worklist, and as part of choosing successors.
- The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overridding most methods of the dataflow-analysis.
+ The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis.
The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`.
@@ -572,7 +572,7 @@ abstract class TypeFlowAnalysis {
- `inlined` : These blocks were spliced into the method's CFG as part of inlining. Being new blocks, they haven't been visited yet by the typeflow analysis.
- - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appearead
+ - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appeared
after a callsite in a `staleOut` block.
Based on the above information, we have to bring up-to-date the caches that `forwardAnalysis` and `blockTransfer` use to skip blocks and instructions.
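
The pruning described above boils down to a forward worklist traversal that never enqueues blocks outside the relevant sub-graph. A generic, hedged sketch of that shape (illustrative names, not the MTFAGrowable API):

    import scala.collection.mutable

    // Visit only blocks from which a remaining candidate callsite may be reached.
    def visitRelevant[B](start: B, successors: B => List[B], relevant: Set[B]): Set[B] = {
      val seen = mutable.Set.empty[B]
      val worklist = mutable.Queue.empty[B]
      if (relevant(start)) worklist += start
      while (worklist.nonEmpty) {
        val b = worklist.dequeue()
        if (seen.add(b))
          worklist ++= successors(b).filter(s => relevant(s) && !seen(s))
      }
      seen.toSet
    }
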
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
new file mode 100644
index 0000000000..0df1b2029d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
@@ -0,0 +1,123 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc.backend.jvm
+
+import scala.tools.asm.tree.{InsnList, AbstractInsnNode, ClassNode, MethodNode}
+import java.io.{StringWriter, PrintWriter}
+import scala.tools.asm.util.{CheckClassAdapter, TraceClassVisitor, TraceMethodVisitor, Textifier}
+import scala.tools.asm.{ClassWriter, Attribute, ClassReader}
+import scala.collection.convert.decorateAsScala._
+import scala.tools.nsc.backend.jvm.opt.InlineInfoAttributePrototype
+
+object AsmUtils {
+
+ /**
+ * Print the bytecode of methods generated by GenBCode to the standard output. Only methods
+ * whose name contains `traceMethodPattern` are traced.
+ */
+ final val traceMethodEnabled = false
+ final val traceMethodPattern = ""
+
+ /**
+ * Print the bytecode of classes generated by GenBCode to the standard output.
+ */
+ final val traceClassEnabled = false
+ final val traceClassPattern = ""
+
+ /**
+ * Print the bytecode of classes as they are serialized by the ASM library. The serialization
+ * performed by `asm.ClassWriter` can change the code generated by GenBCode. For example, it
+ * introduces stack map frames, it computes the maximal stack sizes, and it replaces dead
+ * code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780).
+ */
+ final val traceSerializedClassEnabled = false
+ final val traceSerializedClassPattern = ""
+
+ def traceMethod(mnode: MethodNode): Unit = {
+ println(s"Bytecode for method ${mnode.name}")
+ println(textify(mnode))
+ }
+
+ def traceClass(cnode: ClassNode): Unit = {
+ println(s"Bytecode for class ${cnode.name}")
+ println(textify(cnode))
+ }
+
+ def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes))
+
+ def readClass(bytes: Array[Byte]): ClassNode = {
+ val node = new ClassNode()
+ new ClassReader(bytes).accept(node, Array[Attribute](InlineInfoAttributePrototype), 0)
+ node
+ }
+
+ /**
+ * Returns a human-readable representation of the cnode ClassNode.
+ */
+ def textify(cnode: ClassNode): String = {
+ val trace = new TraceClassVisitor(new PrintWriter(new StringWriter))
+ cnode.accept(trace)
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString
+ }
+
+ /**
+ * Returns a human-readable representation of the code in the mnode MethodNode.
+ */
+ def textify(mnode: MethodNode): String = {
+ val trace = new TraceClassVisitor(new PrintWriter(new StringWriter))
+ mnode.accept(trace)
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString
+ }
+
+ /**
+ * Returns a human-readable representation of the given instruction.
+ */
+ def textify(insn: AbstractInsnNode): String = {
+ val trace = new TraceMethodVisitor(new Textifier)
+ insn.accept(trace)
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString.trim
+ }
+
+ /**
+ * Returns a human-readable representation of the given instruction sequence.
+ */
+ def textify(insns: Iterator[AbstractInsnNode]): String = {
+ val trace = new TraceMethodVisitor(new Textifier)
+ insns.foreach(_.accept(trace))
+ val sw: StringWriter = new StringWriter
+ val pw: PrintWriter = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString.trim
+ }
+
+ /**
+ * Returns a human-readable representation of the given instruction sequence.
+ */
+ def textify(insns: InsnList): String = textify(insns.iterator().asScala)
+
+ /**
+ * Run ASM's CheckClassAdapter over a class. Returns None if no problem is found, otherwise
+ * Some(msg) with the verifier's error message.
+ */
+ def checkClass(classNode: ClassNode): Option[String] = {
+ val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS)
+ classNode.accept(cw)
+ val sw = new StringWriter()
+ val pw = new PrintWriter(sw)
+ CheckClassAdapter.verify(new ClassReader(cw.toByteArray), false, pw)
+ val res = sw.toString
+ if (res.isEmpty) None else Some(res)
+ }
+}
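
A hedged usage sketch for the helpers above, e.g. from a debugging session or a bytecode test; the `dump` helper is hypothetical, while `textify` and `checkClass` are the methods defined in this file:

    import scala.tools.asm.ClassReader
    import scala.tools.asm.tree.ClassNode
    import scala.tools.nsc.backend.jvm.AsmUtils

    // Parse raw classfile bytes (without the InlineInfo attribute prototype)
    // and print a readable disassembly plus any verifier complaints.
    def dump(classfileBytes: Array[Byte]): Unit = {
      val node = new ClassNode()
      new ClassReader(classfileBytes).accept(node, 0)
      println(AsmUtils.textify(node))
      AsmUtils.checkClass(node).foreach(msg => println(s"verifier: $msg"))
    }
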
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
new file mode 100644
index 0000000000..eadc404bee
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
@@ -0,0 +1,401 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.backend.jvm.BTypes.{InternalName, MethodInlineInfo, InlineInfo}
+import BackendReporting.ClassSymbolInfoFailureSI9111
+
+/**
+ * This trait contains code shared between GenBCode and GenASM that depends on types defined in
+ * the compiler cake (Global).
+ */
+final class BCodeAsmCommon[G <: Global](val global: G) {
+ import global._
+ import definitions._
+
+ val ExcludedForwarderFlags = {
+ import scala.tools.nsc.symtab.Flags._
+ // Should include DEFERRED but this breaks findMember.
+ SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO
+ }
+
+ /**
+ * True for classes generated by the Scala compiler that are considered top-level in terms of
+ * the InnerClass / EnclosingMethod classfile attributes. See comment in BTypes.
+ */
+ def considerAsTopLevelImplementationArtifact(classSym: Symbol) = {
+ classSym.isImplClass || classSym.isSpecialized
+ }
+
+ /**
+ * Cache the value of delambdafy == "inline" for each run. We need to query this value many
+ * times, so caching makes sense.
+ */
+ object delambdafyInline {
+ private var runId = -1
+ private var value = false
+
+ def apply(): Boolean = {
+ if (runId != global.currentRunId) {
+ runId = global.currentRunId
+ value = settings.Ydelambdafy.value == "inline"
+ }
+ value
+ }
+ }
+
+ /**
+ * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a
+ * member class. This method is used to decide if we should emit an EnclosingMethod attribute.
+ * It is also used to decide whether the "owner" field in the InnerClass attribute should be
+ * null.
+ */
+ def isAnonymousOrLocalClass(classSym: Symbol): Boolean = {
+ assert(classSym.isClass, s"not a class: $classSym")
+ val r = exitingPickler(classSym.isAnonymousClass) || !classSym.originalOwner.isClass
+ if (r && settings.Ybackend.value == "GenBCode") {
+ // this assertion only holds in GenBCode. lambda lift renames symbols and may accidentally
+ // introduce `$lambda` into a class name, making `isDelambdafyFunction` true. under GenBCode
+ // we prevent this, see `nonAnon` in LambdaLift.
+ // phase travel necessary: after flatten, the name includes the name of outer classes.
+ // if some outer name contains $lambda, a non-lambda class is considered lambda.
+ assert(exitingPickler(!classSym.isDelambdafyFunction), classSym.name)
+ }
+ r
+ }
+
+ /**
+ * The next enclosing definition in the source structure. Includes anonymous function classes
+ * under delambdafy:inline, even though they are only generated during UnCurry.
+ */
+ def nextEnclosing(sym: Symbol): Symbol = {
+ val origOwner = sym.originalOwner
+ // phase travel necessary: after flatten, the name includes the name of outer classes.
+ // if some outer name contains $anon, a non-anon class is considered anon.
+ if (delambdafyInline() && sym.rawowner.isAnonymousFunction) {
+ // SI-9105: special handling for anonymous functions under delambdafy:inline.
+ //
+ // class C { def t = () => { def f { class Z } } }
+ //
+ // class C { def t = byNameMethod { def f { class Z } } }
+ //
+ // In both examples, the method f is lambda-lifted into the anonfun class.
+ //
+ // In both examples, the enclosing method of Z is f, the enclosing class is the anonfun.
+ // So nextEnclosing needs to return the following chain: Z - f - anonFunClassSym - ...
+ //
+ // In the first example, the initial owner of f is a TermSymbol named "$anonfun" (note: not the anonFunClassSym!)
+ // In the second, the initial owner of f is t (no anon fun term symbol for by-name args!).
+ //
+ // In both cases, the rawowner of class Z is the anonFunClassSym. So the check in the `if`
+ // above makes sure we don't jump over the anonymous function in the by-name argument case.
+ //
+ // However, we cannot directly return the rawowner: if `sym` is Z, we need to include method f
+ // in the result. This is done by comparing the rawowners (read: lambdalift-targets) of `sym`
+ // and `sym.originalOwner`: if they are the same, then the originalOwner is "in between", and
+ // we need to return it.
+ // If the rawowners are different, the symbol was not in between. In the first example, the
+ // originalOwner of `f` is the anonfun-term-symbol, whose rawowner is C. So the nextEnclosing
+ // of `f` is its rawowner, the anonFunClassSym.
+ //
+ // In delambdafy:method we don't have that problem. The f method is lambda-lifted into C,
+ // not into the anonymous function class. The originalOwner chain is Z - f - C.
+ if (sym.originalOwner.rawowner == sym.rawowner) sym.originalOwner
+ else sym.rawowner
+ } else {
+ origOwner
+ }
+ }
+
+ def nextEnclosingClass(sym: Symbol): Symbol = {
+ if (sym.isClass) sym
+ else nextEnclosingClass(nextEnclosing(sym))
+ }
+
+ def classOriginallyNestedInClass(nestedClass: Symbol, enclosingClass: Symbol) = {
+ nextEnclosingClass(nextEnclosing(nestedClass)) == enclosingClass
+ }
+
+ /**
+ * Returns the enclosing method for non-member classes. In the following example
+ *
+ * class A {
+ * def f = {
+ * class B {
+ * class C
+ * }
+ * }
+ * }
+ *
+ * the method returns Some(f) for B, but None for C, because C is a member class. For non-member
+ * classes that are not enclosed by a method, it returns None:
+ *
+ * class A {
+ * { class B }
+ * }
+ *
+ * In this case, for B, we return None.
+ *
+ * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes).
+ * This is a source-level property, so we need to use the originalOwner chain to reconstruct it.
+ */
+ private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = {
+ assert(classSym.isClass, classSym)
+
+ def doesNotExist(method: Symbol) = {
+ // (1) SI-9124, some trait methods don't exist in the generated interface. see comment in BTypes.
+ // (2) Value classes. Member methods of value classes exist in the generated box class. However,
+ // nested methods lifted into a value class are moved to the companion object and don't exist
+ // in the value class itself. We can identify such nested methods: the initial enclosing class
+ // is a value class, but the current owner is some other class (the module class).
+ method.owner.isTrait && method.isImplOnly || { // (1)
+ val enclCls = nextEnclosingClass(method)
+ exitingPickler(enclCls.isDerivedValueClass) && method.owner != enclCls // (2)
+ }
+ }
+
+ def enclosingMethod(sym: Symbol): Option[Symbol] = {
+ if (sym.isClass || sym == NoSymbol) None
+ else if (sym.isMethod) {
+ if (doesNotExist(sym)) None else Some(sym)
+ }
+ else enclosingMethod(nextEnclosing(sym))
+ }
+ enclosingMethod(nextEnclosing(classSym))
+ }
+
+ /**
+ * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level
+ * property, this method looks at the originalOwner chain. See doc in BTypes.
+ */
+ private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = {
+ assert(classSym.isClass, classSym)
+ val r = nextEnclosingClass(nextEnclosing(classSym))
+ // this should be an assertion, but we are more cautious for now as it was introduced before the 2.11.6 minor release
+ if (considerAsTopLevelImplementationArtifact(r)) devWarning(s"enclosing class of $classSym should not be an implementation artifact class: $r")
+ r
+ }
+
+ final case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String)
+
+ /**
+ * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not
+ * an anonymous or local class). See doc in BTypes.
+ *
+ * The class is parametrized by two functions to obtain a bytecode class descriptor for a class
+ * symbol, and to obtain a method signature descriptor for a method symbol. These functions depend
+ * on the implementation of GenASM / GenBCode, so they need to be passed in.
+ */
+ def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = {
+ // trait impl classes are always top-level, see comment in BTypes
+ if (isAnonymousOrLocalClass(classSym) && !considerAsTopLevelImplementationArtifact(classSym)) {
+ val enclosingClass = enclosingClassForEnclosingMethodAttribute(classSym)
+ val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) match {
+ case some @ Some(m) =>
+ if (m.owner != enclosingClass) {
+ // This should never happen. In case it does, it prevents emitting an invalid
+ // EnclosingMethod attribute: if the attribute specifies an enclosing method,
+ // it needs to exist in the specified enclosing class.
+ devWarning(s"the owner of the enclosing method ${m.locationString} should be the same as the enclosing class $enclosingClass")
+ None
+ } else some
+ case none => none
+ }
+ Some(EnclosingMethodEntry(
+ classDesc(enclosingClass),
+ methodOpt.map(_.javaSimpleName.toString).orNull,
+ methodOpt.map(methodDesc).orNull))
+ } else {
+ None
+ }
+ }
+
+ /**
+ * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain.
+ *
+ * The problem is that we are interested in a source-level property. Various phases changed the
+ * symbol's properties in the meantime; most notably, lambdalift (destructively) modified the owner.
+ * Therefore, `sym.isStatic` is not what we want. For example, in
+ * object T { def f { object U } }
+ * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here.
+ */
+ def isOriginallyStaticOwner(sym: Symbol): Boolean = {
+ sym.isPackageClass || sym.isModuleClass && isOriginallyStaticOwner(sym.originalOwner)
+ }
+
+ /**
+ * The member classes of a class symbol. Note that the result of this method depends on the
+ * current phase, for example, after lambdalift, all local classes become member of the enclosing
+ * class.
+ *
+ * Impl classes are always considered top-level, see comment in BTypes.
+ */
+ def memberClassesForInnerClassTable(classSymbol: Symbol): List[Symbol] = classSymbol.info.decls.collect({
+ case sym if sym.isClass && !considerAsTopLevelImplementationArtifact(sym) =>
+ sym
+ case sym if sym.isModule && !considerAsTopLevelImplementationArtifact(sym) => // impl classes get the lateMODULE flag in mixin
+ val r = exitingPickler(sym.moduleClass)
+ assert(r != NoSymbol, sym.fullLocationString)
+ r
+ })(collection.breakOut)
+
+ lazy val AnnotationRetentionPolicyModule = AnnotationRetentionPolicyAttr.companionModule
+ lazy val AnnotationRetentionPolicySourceValue = AnnotationRetentionPolicyModule.tpe.member(TermName("SOURCE"))
+ lazy val AnnotationRetentionPolicyClassValue = AnnotationRetentionPolicyModule.tpe.member(TermName("CLASS"))
+ lazy val AnnotationRetentionPolicyRuntimeValue = AnnotationRetentionPolicyModule.tpe.member(TermName("RUNTIME"))
+
+ /** Whether an annotation should be emitted as a Java annotation
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
+ */
+ def shouldEmitAnnotation(annot: AnnotationInfo) = {
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(ClassfileAnnotationClass) &&
+ retentionPolicyOf(annot) != AnnotationRetentionPolicySourceValue &&
+ annot.args.isEmpty
+ }
+
+ def isRuntimeVisible(annot: AnnotationInfo): Boolean = {
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match {
+ case Some(retentionAnnot) =>
+ retentionAnnot.assocs.contains(nme.value -> LiteralAnnotArg(Constant(AnnotationRetentionPolicyRuntimeValue)))
+ case _ =>
+ // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
+ // annotation is emitted with visibility `RUNTIME`
+ true
+ }
+ }
+
+ private def retentionPolicyOf(annot: AnnotationInfo): Symbol =
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).map(_.assocs).map(assoc =>
+ assoc.collectFirst {
+ case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value
+ }).flatten.getOrElse(AnnotationRetentionPolicyClassValue)
+
+ def implementedInterfaces(classSym: Symbol): List[Symbol] = {
+ // Additional interface parents based on annotations and other cues
+ def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match {
+ case RemoteAttr => Some(RemoteInterfaceClass.tpe)
+ case _ => None
+ }
+
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ val allParents = classSym.info.parents ++ classSym.annotations.flatMap(newParentForAnnotation)
+
+ // We keep the superClass when computing minimizeParents to eliminate more interfaces.
+ // Example: T can be eliminated from D
+ // trait T
+ // class C extends T
+ // class D extends C with T
+ val interfaces = erasure.minimizeParents(allParents) match {
+ case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) =>
+ ifs
+ case ifs =>
+ // minimizeParents removes the superclass if it's redundant, for example:
+ // trait A
+ // class C extends Object with A // minimizeParents removes Object
+ ifs
+ }
+ interfaces.map(_.typeSymbol)
+ }
+
+ /**
+ * This is a hack to work around SI-9111. The completer of `methodSym` may report type errors. We
+ * cannot change the typer context of the completer at this point and make it silent: the context
+ * captured when creating the completer in the namer. However, we can temporarily replace
+ * global.reporter (it's a var) to store errors.
+ */
+ def completeSilentlyAndCheckErroneous(sym: Symbol): Boolean = {
+ if (sym.hasCompleteInfo) false
+ else {
+ val originalReporter = global.reporter
+ val storeReporter = new reporters.StoreReporter()
+ global.reporter = storeReporter
+ try {
+ sym.info
+ } finally {
+ global.reporter = originalReporter
+ }
+ sym.isErroneous
+ }
+ }
+
+ /**
+ * Build the [[InlineInfo]] for a class symbol.
+ */
+ def buildInlineInfoFromClassSymbol(classSym: Symbol, classSymToInternalName: Symbol => InternalName, methodSymToDescriptor: Symbol => String): InlineInfo = {
+ val traitSelfType = if (classSym.isTrait && !classSym.isImplClass) {
+ // The mixin phase uses typeOfThis for the self parameter in implementation class methods.
+ val selfSym = classSym.typeOfThis.typeSymbol
+ if (selfSym != classSym) Some(classSymToInternalName(selfSym)) else None
+ } else {
+ None
+ }
+
+ val isEffectivelyFinal = classSym.isEffectivelyFinal
+
+ var warning = Option.empty[ClassSymbolInfoFailureSI9111]
+
+ // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some
+ // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]].
+ val methodInlineInfos = classSym.info.decls.iterator.filter(m => m.isMethod && !scalaPrimitives.isPrimitive(m)).flatMap({
+ case methodSym =>
+ if (completeSilentlyAndCheckErroneous(methodSym)) {
+ // Happens due to SI-9111. Just don't provide any MethodInlineInfo for that method, we don't need to fail the compiler.
+ if (!classSym.isJavaDefined) devWarning("SI-9111 should only be possible for Java classes")
+ warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName))
+ None
+ } else {
+ val name = methodSym.javaSimpleName.toString // same as in genDefDef
+ val signature = name + methodSymToDescriptor(methodSym)
+
+ // Some detours are required here because of changing flags (lateDEFERRED, lateMODULE):
+ // 1. Why the phase travel? Concrete trait methods obtain the lateDEFERRED flag in Mixin.
+ // This makes isEffectivelyFinalOrNotOverridden false, which would prevent non-final
+ // but non-overridden methods of sealed traits from being inlined.
+ // 2. Why the special case for `classSym.isImplClass`? Impl class symbols obtain the
+ // lateMODULE flag during Mixin. During the phase travel to exitingPickler, the late
+ // flag is ignored. The members are therefore not isEffectivelyFinal (their owner
+ // is not a module). Since we know that all impl class members are static, we can
+ // just take the shortcut.
+ val effectivelyFinal = classSym.isImplClass || exitingPickler(methodSym.isEffectivelyFinalOrNotOverridden)
+
+ // Identify trait interface methods that have a static implementation in the implementation
+ // class. Invocations of these methods can be re-wired directly to the static implementation
+ // if they are final or the receiver is known.
+ //
+ // Using `erasure.needsImplMethod` is not enough: it keeps field accessors, module getters
+ // and super accessors. When AddInterfaces creates the impl class, these methods are
+ // initially added to it.
+ //
+ // The mixin phase later on filters out most of these members from the impl class (see
+ // Mixin.isImplementedStatically). However, accessors for concrete lazy vals remain in the
+ // impl class after mixin. So the filter in mixin is not exactly what we need here (we
+ // want to identify concrete trait methods, not any accessors). So we check some symbol
+ // properties manually.
+ val traitMethodWithStaticImplementation = {
+ import symtab.Flags._
+ classSym.isTrait && !classSym.isImplClass &&
+ erasure.needsImplMethod(methodSym) &&
+ !methodSym.isModule &&
+ !(methodSym hasFlag (ACCESSOR | SUPERACCESSOR))
+ }
+
+ val info = MethodInlineInfo(
+ effectivelyFinal = effectivelyFinal,
+ traitMethodWithStaticImplementation = traitMethodWithStaticImplementation,
+ annotatedInline = methodSym.hasAnnotation(ScalaInlineClass),
+ annotatedNoInline = methodSym.hasAnnotation(ScalaNoInlineClass)
+ )
+ Some((signature, info))
+ }
+ }).toMap
+
+ InlineInfo(traitSelfType, isEffectivelyFinal, methodInlineInfos, warning)
+ }
+}
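
The `delambdafyInline` object above uses a small per-run memoization pattern: recompute a settings-derived value at most once per currentRunId. A generalized, hedged sketch of the same pattern (PerRunValue is illustrative, not a compiler class):

    import scala.tools.nsc.Global

    // Cache a value for the duration of one compiler run, recomputing it
    // whenever a new run starts.
    class PerRunValue[T](global: Global)(compute: => T) {
      private var runId = -1
      private var cached: T = _
      def apply(): T = {
        if (runId != global.currentRunId) {
          runId = global.currentRunId
          cached = compute
        }
        cached
      }
    }

    // e.g.: val delambdafyInline = new PerRunValue(global)(global.settings.Ydelambdafy.value == "inline")
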
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index 53142fbd87..8ebe27e61b 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -9,10 +9,12 @@ package tools.nsc
package backend
package jvm
-import scala.collection.{ mutable, immutable }
import scala.annotation.switch
+import scala.reflect.internal.Flags
import scala.tools.asm
+import GenBCode._
+import BackendReporting._
/*
*
@@ -23,6 +25,9 @@ import scala.tools.asm
abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
import global._
import definitions._
+ import bTypes._
+ import bCodeICodeCommon._
+ import coreBTypes._
/*
* Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
@@ -45,16 +50,16 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def emit(opc: Int) { mnode.visitInsn(opc) }
def emitZeroOf(tk: BType) {
- (tk.sort: @switch) match {
- case asm.Type.BOOLEAN => bc.boolconst(false)
- case asm.Type.BYTE |
- asm.Type.SHORT |
- asm.Type.CHAR |
- asm.Type.INT => bc.iconst(0)
- case asm.Type.LONG => bc.lconst(0)
- case asm.Type.FLOAT => bc.fconst(0)
- case asm.Type.DOUBLE => bc.dconst(0)
- case asm.Type.VOID => ()
+ tk match {
+ case BOOL => bc.boolconst(false)
+ case BYTE |
+ SHORT |
+ CHAR |
+ INT => bc.iconst(0)
+ case LONG => bc.lconst(0)
+ case FLOAT => bc.fconst(0)
+ case DOUBLE => bc.dconst(0)
+ case UNIT => ()
case _ => emit(asm.Opcodes.ACONST_NULL)
}
}
@@ -90,7 +95,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val thrownKind = tpeTK(expr)
// `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable.
// Similarly for scala.Nothing (again, as defined in src/libray-aux).
- assert(thrownKind.isNullType || thrownKind.isNothingType || exemplars.get(thrownKind).isSubtypeOf(ThrowableReference))
+ assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(ThrowableReference).get)
genLoad(expr, thrownKind)
lineNumber(expr)
emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
@@ -121,7 +126,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
// binary operation
case rarg :: Nil =>
- resKind = maxType(tpeTK(larg), tpeTK(rarg))
+ resKind = tpeTK(larg).maxType(tpeTK(rarg))
if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) {
assert(resKind.isIntegralType || (resKind == BOOL),
s"$resKind incompatible with arithmetic modulo operation.")
@@ -165,7 +170,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
// load argument on stack
assert(args.length == 1, s"Too many arguments for array get operation: $tree");
genLoad(args.head, INT)
- generatedType = k.getComponentType
+ generatedType = k.asArrayBType.componentType
bc.aload(elementType)
}
else if (scalaPrimitives.isArraySet(code)) {
@@ -227,7 +232,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
if (isArithmeticOp(code)) genArithmeticOp(tree, code)
else if (code == scalaPrimitives.CONCAT) genStringConcat(tree)
- else if (code == scalaPrimitives.HASH) genScalaHash(receiver)
+ else if (code == scalaPrimitives.HASH) genScalaHash(receiver, tree.pos)
else if (isArrayOp(code)) genArrayOp(tree, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
val success, failure, after = new asm.Label
@@ -280,9 +285,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val Local(tk, _, idx, isSynth) = locals.getOrMakeLocal(sym)
if (rhs == EmptyTree) { emitZeroOf(tk) }
else { genLoad(rhs, tk) }
+ val localVarStart = currProgramPoint()
bc.store(idx, tk)
if (!isSynth) { // there are case <synthetic> ValDef's emitted by patmat
- varsInScope ::= (sym -> currProgramPoint())
+ varsInScope ::= (sym -> localVarStart)
}
generatedType = UNIT
@@ -319,7 +325,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
generatedType =
if (tree.symbol == ArrayClass) ObjectReference
- else brefType(thisName) // inner class (if any) for claszSymbol already tracked.
+ else classBTypeFromSymbol(claszSymbol)
}
case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
@@ -417,7 +423,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
if (hostClass == null) internalName(field.owner)
else internalName(hostClass)
val fieldJName = field.javaSimpleName.toString
- val fieldDescr = symInfoTK(field).getDescriptor
+ val fieldDescr = symInfoTK(field).descriptor
val isStatic = field.isStaticMember
val opc =
if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD }
@@ -457,9 +463,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case ClazzTag =>
val toPush: BType = {
- val kind = toTypeKind(const.typeValue)
- if (kind.isValueType) classLiteral(kind)
- else kind
+ toTypeKind(const.typeValue) match {
+ case kind: PrimitiveBType => boxedClassOfPrimitive(kind)
+ case kind => kind
+ }
}
mnode.visitLdcInsn(toPush.toASMType)
@@ -467,7 +474,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val sym = const.symbolValue
val ownerName = internalName(sym.owner)
val fieldName = sym.javaSimpleName.toString
- val fieldDesc = toTypeKind(sym.tpe.underlying).getDescriptor
+ val fieldDesc = toTypeKind(sym.tpe.underlying).descriptor
mnode.visitFieldInsn(
asm.Opcodes.GETSTATIC,
ownerName,
@@ -502,7 +509,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case nextCleanup :: rest =>
if (saveReturnValue) {
if (insideCleanupBlock) {
- cunit.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
+ reporter.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
bc drop returnType
} else {
// regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
@@ -539,26 +546,28 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genTypeApply(): BType = {
genLoadQualifier(fun)
- if (l.isValueType && r.isValueType)
+ // TODO @lry make pattern match
+ if (l.isPrimitive && r.isPrimitive)
genConversion(l, r, cast)
- else if (l.isValueType) {
+ else if (l.isPrimitive) {
bc drop l
if (cast) {
- mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.getInternalName)
+ mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.internalName)
bc dup ObjectReference
emit(asm.Opcodes.ATHROW)
} else {
bc boolconst false
}
}
- else if (r.isValueType && cast) {
+ else if (r.isPrimitive && cast) {
abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $app")
}
- else if (r.isValueType) {
- bc isInstance classLiteral(r)
+ else if (r.isPrimitive) {
+ bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType)
}
else {
- genCast(r, cast)
+ assert(r.isRef, r) // ensure that it's not a method
+ genCast(r.asRefBType, cast)
}
if (cast) r else BOOL
@@ -577,8 +586,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
genLoadArguments(args, paramTKs(app))
- genCallMethod(fun.symbol, invokeStyle, pos = app.pos)
- generatedType = asmMethodType(fun.symbol).getReturnType
+ genCallMethod(fun.symbol, invokeStyle, app.pos)
+ generatedType = asmMethodType(fun.symbol).returnType
// 'new' constructor call: Note: since constructors are
// thought to return an instance of what they construct,
@@ -589,55 +598,59 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}")
generatedType = tpeTK(tpt)
- assert(generatedType.isRefOrArrayType, s"Non reference type cannot be instantiated: $generatedType")
+ assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType")
generatedType match {
- case arr if generatedType.isArray =>
+ case arr @ ArrayBType(componentType) =>
genLoadArguments(args, paramTKs(app))
- val dims = arr.getDimensions
- var elemKind = arr.getElementType
+ val dims = arr.dimension
+ var elemKind = arr.elementType
val argsSize = args.length
if (argsSize > dims) {
- cunit.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)")
+ reporter.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)")
}
if (argsSize < dims) {
/* In one step:
* elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize)
* however the above does not enter a TypeName for each nested arrays in chrs.
*/
- for (i <- args.length until dims) elemKind = arrayOf(elemKind)
+ for (i <- args.length until dims) elemKind = ArrayBType(elemKind)
}
- (argsSize : @switch) match {
+ argsSize match {
case 1 => bc newarray elemKind
case _ =>
- val descr = ('[' * argsSize) + elemKind.getDescriptor // denotes the same as: arrayN(elemKind, argsSize).getDescriptor
+ val descr = ('[' * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor
mnode.visitMultiANewArrayInsn(descr, argsSize)
}
- case rt if generatedType.hasObjectSort =>
- assert(exemplar(ctor.owner).c == rt, s"Symbol ${ctor.owner.fullName} is different from $rt")
- mnode.visitTypeInsn(asm.Opcodes.NEW, rt.getInternalName)
+ case rt: ClassBType =>
+ assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.fullName} is different from $rt")
+ mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName)
bc dup generatedType
genLoadArguments(args, paramTKs(app))
- genCallMethod(ctor, icodes.opcodes.Static(onInstance = true))
+ genCallMethod(ctor, icodes.opcodes.Static(onInstance = true), app.pos)
case _ =>
abort(s"Cannot instantiate $tpt of kind: $generatedType")
}
+ case Apply(_, args) if app.hasAttachment[delambdafy.LambdaMetaFactoryCapable] =>
+ val attachment = app.attachments.get[delambdafy.LambdaMetaFactoryCapable].get
+ genLoadArguments(args, paramTKs(app))
+ genInvokeDynamicLambda(attachment.target, attachment.arity, attachment.functionalInterface)
case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) =>
val nativeKind = tpeTK(expr)
genLoad(expr, nativeKind)
- val MethodNameAndType(mname, mdesc) = asmBoxTo(nativeKind)
- bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+ val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind)
+ bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor, app.pos)
generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType)
case Apply(fun @ _, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) =>
genLoad(expr)
val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
generatedType = boxType
- val MethodNameAndType(mname, mdesc) = asmUnboxTo(boxType)
- bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+ val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType)
+ bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor, app.pos)
case app @ Apply(fun, args) =>
val sym = fun.symbol
@@ -682,18 +695,23 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case _ =>
}
if ((targetTypeKind != null) && (sym == definitions.Array_clone) && invokeStyle.isDynamic) {
- val target: String = targetTypeKind.getInternalName
- bc.invokevirtual(target, "clone", "()Ljava/lang/Object;")
+ // An invokevirtual points to a CONSTANT_Methodref_info which in turn points to a
+ // CONSTANT_Class_info of the receiver type.
+ // The JVMS is not explicit about this, but that receiver type may be an array type
+ // descriptor (instead of a class internal name):
+ // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object
+ val target: String = targetTypeKind.asRefBType.classOrArrayType
+ bc.invokevirtual(target, "clone", "()Ljava/lang/Object;", app.pos)
}
else {
- genCallMethod(sym, invokeStyle, hostClass, app.pos)
+ genCallMethod(sym, invokeStyle, app.pos, hostClass)
}
} // end of genNormalMethodCall()
genNormalMethodCall()
- generatedType = asmMethodType(sym).getReturnType
+ generatedType = asmMethodType(sym).returnType
}
}
@@ -705,7 +723,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val ArrayValue(tpt @ TypeTree(), elems) = av
val elmKind = tpeTK(tpt)
- val generatedType = arrayOf(elmKind)
+ val generatedType = ArrayBType(elmKind)
lineNumber(av)
bc iconst elems.length
@@ -798,16 +816,60 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
}
def adapt(from: BType, to: BType) {
- if (!conforms(from, to)) {
+ if (!from.conformsTo(to).get) {
to match {
case UNIT => bc drop from
case _ => bc.emitT2T(from, to)
}
} else if (from.isNothingType) {
- emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
+ /* There are two possibilities for from.isNothingType: emitting a "throw e" expression and
+ * loading a (phantom) value of type Nothing.
+ *
+ * The Nothing type in Scala's type system does not exist in the JVM. In bytecode, Nothing
+ * is mapped to scala.runtime.Nothing$. To the JVM, a call to Predef.??? looks like it would
+ * return an object of type Nothing$. We need to do something with that phantom object on
+ * the stack. "Phantom" because it never exists: such methods always throw, but the JVM does
+ * not know that.
+ *
+ * Note: The two verifiers (old: type inference, new: type checking) have different
+ * requirements. Very briefly:
+ *
+ * Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at
+ * each program point, no matter what branches were taken to get there
+ * - Stack is same size and has same typed values
+ * - Local and stack values need to have consistent types
+ * - In practice, the old verifier seems to ignore unreachable code and accept any
+ * instructions after an ATHROW. For example, there can be another ATHROW (without
+ * loading another throwable first).
+ *
+ * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1)
+ * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6
+ * or higher.
+ * - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting
+ * correct frames after an ATHROW is probably complex, so ASM uses the following strategy:
+ * - Every time when generating an ATHROW, a new basic block is started.
+ * - During classfile writing, such basic blocks are found to be dead: no branches go there
+ * - Eliminating dead code would probably require complex shifts in the output byte buffer
+ * - But there's an easy solution: replace all code in the dead block with
+ * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same
+ * - The corresponding stack frame can be easily generated: on entering the dead block,
+ * the frame requires a single Throwable on the stack.
+ * - Since there are no branches to the dead block, the frame requirements are never violated.
+ *
+ * To summarize the above: it does matter what we emit after an ATHROW.
+ *
+ * NOW: if we end up here because we emitted a load of a (phantom) value of type Nothing$,
+ * there was no ATHROW emitted. So, we have to make the verifier happy and do something
+ * with that value. Since Nothing$ extends Throwable, the easiest is to just emit an ATHROW.
+ *
+ * If we ended up here because we generated a "throw e" expression, we know the last
+ * emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW,
+ * the verifiers will be happy.
+ */
+ emit(asm.Opcodes.ATHROW)
} else if (from.isNullType) {
bc drop from
- mnode.visitInsn(asm.Opcodes.ACONST_NULL)
+ emit(asm.Opcodes.ACONST_NULL)
}
else (from, to) match {
case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG)
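
The long comment above distinguishes two source shapes that leave a Nothing-typed value on the abstract stack; both show up in ordinary code (illustrative only):

    // 1) An explicit throw: the last emitted instruction is already an ATHROW,
    //    and emitting a second one after it is accepted by both verifiers.
    def boom: Int = throw new RuntimeException("always throws")

    // 2) A phantom Nothing-typed value, e.g. Predef.???: no ATHROW was emitted
    //    for the load, so adapt emits one, relying on scala.runtime.Nothing$ <: Throwable.
    def todo: Int = ???
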
@@ -875,12 +937,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) {
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
} else {
- val mbt = symInfoTK(module)
+ val mbt = symInfoTK(module).asClassBType
mnode.visitFieldInsn(
asm.Opcodes.GETSTATIC,
- mbt.getInternalName /* + "$" */ ,
+ mbt.internalName /* + "$" */ ,
strMODULE_INSTANCE_FIELD,
- mbt.getDescriptor // for nostalgics: toTypeKind(module.tpe).getDescriptor
+ mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor
)
}
}
@@ -893,7 +955,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
}
}
- def genCast(to: BType, cast: Boolean) {
+ def genCast(to: RefBType, cast: Boolean) {
if (cast) { bc checkCast to }
else { bc isInstance to }
}
@@ -920,26 +982,26 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
// Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
case List(Literal(Constant("")), arg) =>
genLoad(arg, ObjectReference)
- genCallMethod(String_valueOf, icodes.opcodes.Static(onInstance = false))
+ genCallMethod(String_valueOf, icodes.opcodes.Static(onInstance = false), arg.pos)
case concatenations =>
- bc.genStartConcat
+ bc.genStartConcat(tree.pos)
for (elem <- concatenations) {
val kind = tpeTK(elem)
genLoad(elem, kind)
- bc.genStringConcat(kind)
+ bc.genStringConcat(kind, elem.pos)
}
- bc.genEndConcat
+ bc.genEndConcat(tree.pos)
}
StringReference
}
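A source-level sketch, not part of the patch, of the two shapes the concatenation code above distinguishes; the lowering is the one named by the calls, not verified compiler output:

    val n = 42
    val a = "" + n           // special case: a single String.valueOf call, no StringBuilder
    val b = "n=" + n + "!"   // general case: genStartConcat, one genStringConcat per element,
                             // genEndConcat, i.e. a StringBuilder append chain ending in toString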
- def genCallMethod(method: Symbol, style: InvokeStyle, hostClass0: Symbol = null, pos: Position = NoPosition) {
+ def genCallMethod(method: Symbol, style: InvokeStyle, pos: Position, hostClass0: Symbol = null) {
val siteSymbol = claszSymbol
- val hostSymbol = if (hostClass0 == null) method.owner else hostClass0;
+ val hostSymbol = if (hostClass0 == null) method.owner else hostClass0
val methodOwner = method.owner
// info calls so that types are up to date; erasure may add lateINTERFACE to traits
hostSymbol.info ; methodOwner.info
@@ -957,18 +1019,17 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
|| methodOwner == definitions.ObjectClass
)
val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val bmOwner = asmClassType(receiver)
- val jowner = bmOwner.getInternalName
+ val jowner = internalName(receiver)
val jname = method.javaSimpleName.toString
val bmType = asmMethodType(method)
- val mdescr = bmType.getDescriptor
+ val mdescr = bmType.descriptor
def initModule() {
// we initialize the MODULE$ field immediately after the super ctor
if (!isModuleInitialized &&
jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
jname == INSTANCE_CONSTRUCTOR_NAME &&
- isStaticModule(siteSymbol)) {
+ isStaticModuleClass(siteSymbol)) {
isModuleInitialized = true
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
mnode.visitFieldInsn(
@@ -981,26 +1042,26 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
}
if (style.isStatic) {
- if (style.hasInstance) { bc.invokespecial (jowner, jname, mdescr) }
- else { bc.invokestatic (jowner, jname, mdescr) }
+ if (style.hasInstance) { bc.invokespecial (jowner, jname, mdescr, pos) }
+ else { bc.invokestatic (jowner, jname, mdescr, pos) }
}
else if (style.isDynamic) {
- if (needsInterfaceCall(receiver)) { bc.invokeinterface(jowner, jname, mdescr) }
- else { bc.invokevirtual (jowner, jname, mdescr) }
+ if (needsInterfaceCall(receiver)) { bc.invokeinterface(jowner, jname, mdescr, pos) }
+ else { bc.invokevirtual (jowner, jname, mdescr, pos) }
}
else {
assert(style.isSuper, s"An unknown InvokeStyle: $style")
- bc.invokespecial(jowner, jname, mdescr)
+ bc.invokespecial(jowner, jname, mdescr, pos)
initModule()
}
} // end of genCallMethod()
/* Generate the scala ## method. */
- def genScalaHash(tree: Tree): BType = {
+ def genScalaHash(tree: Tree, applyPos: Position): BType = {
genLoadModule(ScalaRunTimeModule) // TODO why load ScalaRunTimeModule if ## has InvokeStyle of Static(false) ?
genLoad(tree, ObjectReference)
- genCallMethod(hashMethodSym, icodes.opcodes.Static(onInstance = false))
+ genCallMethod(hashMethodSym, icodes.opcodes.Static(onInstance = false), applyPos)
INT
}
@@ -1020,22 +1081,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
tree :: Nil
}
- /* Some useful equality helpers. */
- def isNull(t: Tree) = {
- t match {
- case Literal(Constant(null)) => true
- case _ => false
- }
- }
-
- /* If l or r is constant null, returns the other ; otherwise null */
- def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
-
/* Emit code to compare the two top-most stack values using the 'op' operator. */
private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
bc.emitIF_ICMP(op, success)
- } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
bc.emitIF_ACMP(op, success)
} else {
(tk: @unchecked) match {
@@ -1056,7 +1106,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
bc.emitIF(op, success)
- } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
// @unchecked because references aren't compared with GT, GE, LT, LE.
(op : @unchecked) match {
case icodes.EQ => bc emitIFNULL success
@@ -1102,7 +1152,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
genCZJUMP(success, failure, op, ObjectReference)
}
else {
- val tk = maxType(tpeTK(l), tpeTK(r))
+ val tk = tpeTK(l).maxType(tpeTK(r))
genLoad(l, tk)
genLoad(r, tk)
genCJUMP(success, failure, op, tk)
@@ -1141,10 +1191,10 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
case ZOR => genZandOrZor(and = false)
case code =>
// TODO !!!!!!!!!! isReferenceType, in the sense of TypeKind? (ie non-array, non-boxed, non-nothing, may be null)
- if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).hasObjectSort) {
+ if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) {
// `lhs` has reference type
- if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure)
- else genEqEqPrimitive(lhs, rhs, failure, success)
+ if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, tree.pos)
+ else genEqEqPrimitive(lhs, rhs, failure, success, tree.pos)
}
else if (scalaPrimitives.isComparisonOp(code))
genComparisonOp(lhs, rhs, code)
@@ -1164,7 +1214,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
* @param l left-hand-side of the '=='
* @param r right-hand-side of the '=='
*/
- def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label) {
+ def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, pos: Position) {
/* True if the equality comparison is between values that require the use of the rich equality
* comparator (scala.runtime.Comparator.equals). This is the case when either side of the
@@ -1182,13 +1232,13 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val equalsMethod: Symbol = {
if (l.tpe <:< BoxedNumberClass.tpe) {
if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030
else platform.externalEqualsNumObject
} else platform.externalEquals
}
genLoad(l, ObjectReference)
genLoad(r, ObjectReference)
- genCallMethod(equalsMethod, icodes.opcodes.Static(onInstance = false))
+ genCallMethod(equalsMethod, icodes.opcodes.Static(onInstance = false), pos)
genCZJUMP(success, failure, icodes.NE, BOOL)
}
else {
@@ -1200,9 +1250,15 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
// expr == null -> expr eq null
genLoad(l, ObjectReference)
genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ } else if (isNonNullExpr(l)) {
+ // SI-7852 Avoid null check if L is statically non-null.
+ genLoad(l, ObjectReference)
+ genLoad(r, ObjectReference)
+ genCallMethod(Object_equals, icodes.opcodes.Dynamic, pos)
+ genCZJUMP(success, failure, icodes.NE, BOOL)
} else {
// l == r -> if (l eq null) r eq null else l.equals(r)
- val eqEqTempLocal = locals.makeLocal(AnyRefReference, nme.EQEQ_LOCAL_VAR.toString)
+ val eqEqTempLocal = locals.makeLocal(ObjectReference, nme.EQEQ_LOCAL_VAR.toString)
val lNull = new asm.Label
val lNonNull = new asm.Label
@@ -1219,7 +1275,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
markProgramPoint(lNonNull)
locals.load(eqEqTempLocal)
- genCallMethod(Object_equals, icodes.opcodes.Dynamic)
+ genCallMethod(Object_equals, icodes.opcodes.Dynamic, pos)
genCZJUMP(success, failure, icodes.NE, BOOL)
}
}
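The lowering genEqEqPrimitive performs for the plain reference case (boxed numerics take the BoxesRunTime path above), written out as ordinary Scala; the helpers are illustrative only, not compiler code:

    def refEq(l: AnyRef, r: AnyRef): Boolean =
      if (l eq null) r eq null   // general case, which is why the temp local for l is needed
      else l.equals(r)

    def litEq(r: AnyRef): Boolean =
      "lit".equals(r)            // SI-7852: the receiver is statically non-null, so the null
                                 // check and the temp local are skipped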
@@ -1229,6 +1285,40 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
def genSynchronized(tree: Apply, expectedType: BType): BType
def genLoadTry(tree: Try): BType
+ def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol) {
+ val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC)
+
+ val targetHandle =
+ new asm.Handle(if (isStaticMethod) asm.Opcodes.H_INVOKESTATIC else asm.Opcodes.H_INVOKEVIRTUAL,
+ classBTypeFromSymbol(lambdaTarget.owner).internalName,
+ lambdaTarget.name.toString,
+ asmMethodType(lambdaTarget).descriptor)
+ val receiver = if (isStaticMethod) None else Some(lambdaTarget.owner)
+ val (capturedParams, lambdaParams) = lambdaTarget.paramss.head.splitAt(lambdaTarget.paramss.head.length - arity)
+ // Requires https://github.com/scala/scala-java8-compat on the runtime classpath
+ val returnUnit = lambdaTarget.info.resultType.typeSymbol == UnitClass
+ val functionalInterfaceDesc: String = classBTypeFromSymbol(functionalInterface).descriptor
+ val desc = (receiver.toList ::: capturedParams).map(sym => toTypeKind(sym.info)).mkString("(", "", ")") + functionalInterfaceDesc
+
+ // TODO specialization
+ val constrainedType = new MethodBType(lambdaParams.map(p => toTypeKind(p.tpe)), toTypeKind(lambdaTarget.tpe.resultType)).toASMType
+ val abstractMethod = functionalInterface.info.decls.find(_.isDeferred).getOrElse(functionalInterface.info.member(nme.apply))
+ val methodName = abstractMethod.name.toString
+ val applyN = {
+ val mt = asmMethodType(abstractMethod)
+ mt.toASMType
+ }
+
+ bc.jmethod.visitInvokeDynamicInsn(methodName, desc, lambdaMetaFactoryBootstrapHandle,
+ // bootstrap args
+ applyN, targetHandle, constrainedType
+ )
+ }
}
+ val lambdaMetaFactoryBootstrapHandle =
+ new asm.Handle(asm.Opcodes.H_INVOKESTATIC,
+ "java/lang/invoke/LambdaMetafactory", "metafactory",
+ "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;")
+
}
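A rough sketch, not part of the patch, of what genInvokeDynamicLambda emits; the class, the lifted-method name and the call-site shape below are illustrative assumptions, only the overall mechanism follows the code above:

    class Greeter(prefix: String) {
      // assuming the lambda body has been lifted to a method, e.g.
      //   def $anonfun(name: String): String = prefix + name
      // the closure below becomes an invokedynamic call site that consumes the captures
      // (here: `this`) and produces the functional interface, bootstrapped by
      // java.lang.invoke.LambdaMetafactory.metafactory with the SAM type, the handle to
      // the lifted method and the instantiated method type as its static arguments
      val greet: String => String = name => prefix + name
    }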
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
deleted file mode 100644
index cc3265c5f9..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
+++ /dev/null
@@ -1,716 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend.jvm
-
-import scala.tools.asm
-import scala.annotation.switch
-import scala.collection.{ immutable, mutable }
-
-/*
- * Immutable representations of bytecode-level types.
- *
- * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
- * @version 1.0
- *
- */
-abstract class BCodeGlue extends SubComponent {
-
- import global._
-
- object BType {
-
- import global.chrs
-
- // ------------- sorts -------------
-
- val VOID : Int = 0
- val BOOLEAN: Int = 1
- val CHAR : Int = 2
- val BYTE : Int = 3
- val SHORT : Int = 4
- val INT : Int = 5
- val FLOAT : Int = 6
- val LONG : Int = 7
- val DOUBLE : Int = 8
- val ARRAY : Int = 9
- val OBJECT : Int = 10
- val METHOD : Int = 11
-
- // ------------- primitive types -------------
-
- val VOID_TYPE = new BType(VOID, ('V' << 24) | (5 << 16) | (0 << 8) | 0, 1)
- val BOOLEAN_TYPE = new BType(BOOLEAN, ('Z' << 24) | (0 << 16) | (5 << 8) | 1, 1)
- val CHAR_TYPE = new BType(CHAR, ('C' << 24) | (0 << 16) | (6 << 8) | 1, 1)
- val BYTE_TYPE = new BType(BYTE, ('B' << 24) | (0 << 16) | (5 << 8) | 1, 1)
- val SHORT_TYPE = new BType(SHORT, ('S' << 24) | (0 << 16) | (7 << 8) | 1, 1)
- val INT_TYPE = new BType(INT, ('I' << 24) | (0 << 16) | (0 << 8) | 1, 1)
- val FLOAT_TYPE = new BType(FLOAT, ('F' << 24) | (2 << 16) | (2 << 8) | 1, 1)
- val LONG_TYPE = new BType(LONG, ('J' << 24) | (1 << 16) | (1 << 8) | 2, 1)
- val DOUBLE_TYPE = new BType(DOUBLE, ('D' << 24) | (3 << 16) | (3 << 8) | 2, 1)
-
- /*
- * Returns the Java type corresponding to the given type descriptor.
- *
- * @param off the offset of this descriptor in the chrs buffer.
- * @return the Java type corresponding to the given type descriptor.
- *
- * can-multi-thread
- */
- def getType(off: Int): BType = {
- var len = 0
- chrs(off) match {
- case 'V' => VOID_TYPE
- case 'Z' => BOOLEAN_TYPE
- case 'C' => CHAR_TYPE
- case 'B' => BYTE_TYPE
- case 'S' => SHORT_TYPE
- case 'I' => INT_TYPE
- case 'F' => FLOAT_TYPE
- case 'J' => LONG_TYPE
- case 'D' => DOUBLE_TYPE
- case '[' =>
- len = 1
- while (chrs(off + len) == '[') {
- len += 1
- }
- if (chrs(off + len) == 'L') {
- len += 1
- while (chrs(off + len) != ';') {
- len += 1
- }
- }
- new BType(ARRAY, off, len + 1)
- case 'L' =>
- len = 1
- while (chrs(off + len) != ';') {
- len += 1
- }
- new BType(OBJECT, off + 1, len - 1)
- // case '(':
- case _ =>
- assert(chrs(off) == '(')
- var resPos = off + 1
- while (chrs(resPos) != ')') { resPos += 1 }
- val resType = getType(resPos + 1)
- val len = resPos - off + 1 + resType.len;
- new BType(
- METHOD,
- off,
- if (resType.hasObjectSort) {
- len + 2 // "+ 2" accounts for the "L ... ;" in a descriptor for a non-array reference.
- } else {
- len
- }
- )
- }
- }
-
- /* Params denote an internal name.
- * can-multi-thread
- */
- def getObjectType(index: Int, length: Int): BType = {
- val sort = if (chrs(index) == '[') ARRAY else OBJECT;
- new BType(sort, index, length)
- }
-
- /*
- * @param methodDescriptor a method descriptor.
- *
- * must-single-thread
- */
- def getMethodType(methodDescriptor: String): BType = {
- val n = global.newTypeName(methodDescriptor)
- new BType(BType.METHOD, n.start, n.length) // TODO assert isValidMethodDescriptor
- }
-
- /*
- * Returns the Java method type corresponding to the given argument and return types.
- *
- * @param returnType the return type of the method.
- * @param argumentTypes the argument types of the method.
- * @return the Java type corresponding to the given argument and return types.
- *
- * must-single-thread
- */
- def getMethodType(returnType: BType, argumentTypes: Array[BType]): BType = {
- val n = global.newTypeName(getMethodDescriptor(returnType, argumentTypes))
- new BType(BType.METHOD, n.start, n.length)
- }
-
- /*
- * Returns the Java types corresponding to the argument types of method descriptor whose first argument starts at idx0.
- *
- * @param idx0 index into chrs of the first argument.
- * @return the Java types corresponding to the argument types of the given method descriptor.
- *
- * can-multi-thread
- */
- private def getArgumentTypes(idx0: Int): Array[BType] = {
- assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
- val args = new Array[BType](getArgumentCount(idx0))
- var off = idx0
- var size = 0
- while (chrs(off) != ')') {
- args(size) = getType(off)
- off += args(size).len
- if (args(size).sort == OBJECT) { off += 2 }
- // debug: assert("LVZBSCIJFD[)".contains(chrs(off)))
- size += 1
- }
- // debug: var check = 0; while (check < args.length) { assert(args(check) != null); check += 1 }
- args
- }
-
- /*
- * Returns the number of argument types of this method type, whose first argument starts at idx0.
- *
- * @param idx0 index into chrs of the first argument.
- * @return the number of argument types of this method type.
- *
- * can-multi-thread
- */
- private def getArgumentCount(idx0: Int): Int = {
- assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
- var off = idx0
- var size = 0
- var keepGoing = true
- while (keepGoing) {
- val car = chrs(off)
- off += 1
- if (car == ')') {
- keepGoing = false
- } else if (car == 'L') {
- while (chrs(off) != ';') { off += 1 }
- off += 1
- size += 1
- } else if (car != '[') {
- size += 1
- }
- }
-
- size
- }
-
- /*
- * Returns the Java type corresponding to the return type of the given
- * method descriptor.
- *
- * @param methodDescriptor a method descriptor.
- * @return the Java type corresponding to the return type of the given method descriptor.
- *
- * must-single-thread
- */
- def getReturnType(methodDescriptor: String): BType = {
- val n = global.newTypeName(methodDescriptor)
- val delta = n.pos(')') // `delta` is relative to the Name's zero-based start position, not a valid index into chrs.
- assert(delta < n.length, s"not a valid method descriptor: $methodDescriptor")
- getType(n.start + delta + 1)
- }
-
- /*
- * Returns the descriptor corresponding to the given argument and return types.
- * Note: no BType is created here for the resulting method descriptor,
- * if that's desired the invoker is responsible for that.
- *
- * @param returnType the return type of the method.
- * @param argumentTypes the argument types of the method.
- * @return the descriptor corresponding to the given argument and return types.
- *
- * can-multi-thread
- */
- def getMethodDescriptor(
- returnType: BType,
- argumentTypes: Array[BType]): String =
- {
- val buf = new StringBuffer()
- buf.append('(')
- var i = 0
- while (i < argumentTypes.length) {
- argumentTypes(i).getDescriptor(buf)
- i += 1
- }
- buf.append(')')
- returnType.getDescriptor(buf)
- buf.toString()
- }
-
- } // end of object BType
-
- /*
- * Based on ASM's Type class. Namer's chrs is used in this class for the same purposes as the `buf` char array in asm.Type.
- *
- * All methods of this class can-multi-thread
- */
- final class BType(val sort: Int, val off: Int, val len: Int) {
-
- import global.chrs
-
- /*
- * can-multi-thread
- */
- def toASMType: scala.tools.asm.Type = {
- import scala.tools.asm
- // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
- (sort: @switch) match {
- case asm.Type.VOID => asm.Type.VOID_TYPE
- case asm.Type.BOOLEAN => asm.Type.BOOLEAN_TYPE
- case asm.Type.CHAR => asm.Type.CHAR_TYPE
- case asm.Type.BYTE => asm.Type.BYTE_TYPE
- case asm.Type.SHORT => asm.Type.SHORT_TYPE
- case asm.Type.INT => asm.Type.INT_TYPE
- case asm.Type.FLOAT => asm.Type.FLOAT_TYPE
- case asm.Type.LONG => asm.Type.LONG_TYPE
- case asm.Type.DOUBLE => asm.Type.DOUBLE_TYPE
- case asm.Type.ARRAY |
- asm.Type.OBJECT => asm.Type.getObjectType(getInternalName)
- case asm.Type.METHOD => asm.Type.getMethodType(getDescriptor)
- }
- }
-
- /*
- * Unlike for ICode's REFERENCE, isBoxedType(t) implies isReferenceType(t)
- * Also, `isReferenceType(RT_NOTHING) == true` , similarly for RT_NULL.
- * Use isNullType() , isNothingType() to detect Nothing and Null.
- *
- * can-multi-thread
- */
- def hasObjectSort = (sort == BType.OBJECT)
-
- /*
- * Returns the number of dimensions of this array type. This method should
- * only be used for an array type.
- *
- * @return the number of dimensions of this array type.
- *
- * can-multi-thread
- */
- def getDimensions: Int = {
- var i = 1
- while (chrs(off + i) == '[') {
- i += 1
- }
- i
- }
-
- /*
- * Returns the (ultimate) element type of this array type.
- * This method should only be used for an array type.
- *
- * @return Returns the type of the elements of this array type.
- *
- * can-multi-thread
- */
- def getElementType: BType = {
- assert(isArray, s"Asked for the element type of a non-array type: $this")
- BType.getType(off + getDimensions)
- }
-
- /*
- * Returns the internal name of the class corresponding to this object or
- * array type. The internal name of a class is its fully qualified name (as
- * returned by Class.getName(), where '.' is replaced by '/'). This method
- * should only be used for an object or array type.
- *
- * @return the internal name of the class corresponding to this object type.
- *
- * can-multi-thread
- */
- def getInternalName: String = {
- new String(chrs, off, len)
- }
-
- /*
- * @return the suffix of the internal name until the last '/' (if '/' present), internal name otherwise.
- *
- * can-multi-thread
- */
- def getSimpleName: String = {
- assert(hasObjectSort, s"not of object sort: $toString")
- val iname = getInternalName
- val idx = iname.lastIndexOf('/')
- if (idx == -1) iname
- else iname.substring(idx + 1)
- }
-
- /*
- * Returns the argument types of methods of this type.
- * This method should only be used for method types.
- *
- * @return the argument types of methods of this type.
- *
- * can-multi-thread
- */
- def getArgumentTypes: Array[BType] = {
- BType.getArgumentTypes(off + 1)
- }
-
- /*
- * Returns the return type of methods of this type.
- * This method should only be used for method types.
- *
- * @return the return type of methods of this type.
- *
- * can-multi-thread
- */
- def getReturnType: BType = {
- assert(chrs(off) == '(', s"doesn't look like a method descriptor: $toString")
- var resPos = off + 1
- while (chrs(resPos) != ')') { resPos += 1 }
- BType.getType(resPos + 1)
- }
-
- // ------------------------------------------------------------------------
- // Inspector methods
- // ------------------------------------------------------------------------
-
- def isPrimitiveOrVoid = (sort < BType.ARRAY) // can-multi-thread
- def isValueType = (sort < BType.ARRAY) // can-multi-thread
- def isArray = (sort == BType.ARRAY) // can-multi-thread
- def isUnitType = (sort == BType.VOID) // can-multi-thread
-
- def isRefOrArrayType = { hasObjectSort || isArray } // can-multi-thread
- def isNonUnitValueType = { isValueType && !isUnitType } // can-multi-thread
-
- def isNonSpecial = { !isValueType && !isArray && !isPhantomType } // can-multi-thread
- def isNothingType = { (this == RT_NOTHING) || (this == CT_NOTHING) } // can-multi-thread
- def isNullType = { (this == RT_NULL) || (this == CT_NULL) } // can-multi-thread
- def isPhantomType = { isNothingType || isNullType } // can-multi-thread
-
- /*
- * can-multi-thread
- */
- def isBoxed = {
- this match {
- case BOXED_UNIT | BOXED_BOOLEAN | BOXED_CHAR |
- BOXED_BYTE | BOXED_SHORT | BOXED_INT |
- BOXED_FLOAT | BOXED_LONG | BOXED_DOUBLE
- => true
- case _
- => false
- }
- }
-
- /* On the JVM,
- * BOOL, BYTE, CHAR, SHORT, and INT
- * are like Ints for the purpose of lub calculation.
- *
- * can-multi-thread
- */
- def isIntSizedType = {
- (sort : @switch) match {
- case BType.BOOLEAN | BType.CHAR |
- BType.BYTE | BType.SHORT | BType.INT
- => true
- case _
- => false
- }
- }
-
- /* On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is.
- *
- * can-multi-thread
- */
- def isIntegralType = {
- (sort : @switch) match {
- case BType.CHAR |
- BType.BYTE | BType.SHORT | BType.INT |
- BType.LONG
- => true
- case _
- => false
- }
- }
-
- /* On the JVM, FLOAT and DOUBLE.
- *
- * can-multi-thread
- */
- def isRealType = { (sort == BType.FLOAT ) || (sort == BType.DOUBLE) }
-
- def isNumericType = (isIntegralType || isRealType) // can-multi-thread
-
- /* Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?)
- *
- * can-multi-thread
- */
- def isWideType = (getSize == 2)
-
- /*
- * Element vs. Component type of an array:
- * Quoting from the JVMS, Sec. 2.4 "Reference Types and Values"
- *
- * An array type consists of a component type with a single dimension (whose
- * length is not given by the type). The component type of an array type may itself be
- * an array type. If, starting from any array type, one considers its component type,
- * and then (if that is also an array type) the component type of that type, and so on,
- * eventually one must reach a component type that is not an array type; this is called
- * the element type of the array type. The element type of an array type is necessarily
- * either a primitive type, or a class type, or an interface type.
- *
- */
-
- /* The type of items this array holds.
- *
- * can-multi-thread
- */
- def getComponentType: BType = {
- assert(isArray, s"Asked for the component type of a non-array type: $this")
- BType.getType(off + 1)
- }
-
- // ------------------------------------------------------------------------
- // Conversion to type descriptors
- // ------------------------------------------------------------------------
-
- /*
- * @return the descriptor corresponding to this Java type.
- *
- * can-multi-thread
- */
- def getDescriptor: String = {
- val buf = new StringBuffer()
- getDescriptor(buf)
- buf.toString()
- }
-
- /*
- * Appends the descriptor corresponding to this Java type to the given string buffer.
- *
- * @param buf the string buffer to which the descriptor must be appended.
- *
- * can-multi-thread
- */
- private def getDescriptor(buf: StringBuffer) {
- if (isPrimitiveOrVoid) {
- // descriptor is in byte 3 of 'off' for primitive types (buf == null)
- buf.append(((off & 0xFF000000) >>> 24).asInstanceOf[Char])
- } else if (sort == BType.OBJECT) {
- buf.append('L')
- buf.append(chrs, off, len)
- buf.append(';')
- } else { // sort == ARRAY || sort == METHOD
- buf.append(chrs, off, len)
- }
- }
-
- // ------------------------------------------------------------------------
- // Corresponding size and opcodes
- // ------------------------------------------------------------------------
-
- /*
- * Returns the size of values of this type.
- * This method must not be used for method types.
- *
- * @return the size of values of this type, i.e., 2 for <tt>long</tt> and
- * <tt>double</tt>, 0 for <tt>void</tt> and 1 otherwise.
- *
- * can-multi-thread
- */
- def getSize: Int = {
- // the size is in byte 0 of 'off' for primitive types (buf == null)
- if (isPrimitiveOrVoid) (off & 0xFF) else 1
- }
-
- /*
- * Returns a JVM instruction opcode adapted to this Java type. This method
- * must not be used for method types.
- *
- * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
- * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
- * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
- * @return an opcode that is similar to the given opcode, but adapted to
- * this Java type. For example, if this type is <tt>float</tt> and
- * <tt>opcode</tt> is IRETURN, this method returns FRETURN.
- *
- * can-multi-thread
- */
- def getOpcode(opcode: Int): Int = {
- import scala.tools.asm.Opcodes
- if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) {
- // the offset for IALOAD or IASTORE is in byte 1 of 'off' for
- // primitive types (buf == null)
- opcode + (if (isPrimitiveOrVoid) (off & 0xFF00) >> 8 else 4)
- } else {
- // the offset for other instructions is in byte 2 of 'off' for
- // primitive types (buf == null)
- opcode + (if (isPrimitiveOrVoid) (off & 0xFF0000) >> 16 else 4)
- }
- }
-
- // ------------------------------------------------------------------------
- // Equals, hashCode and toString
- // ------------------------------------------------------------------------
-
- /*
- * Tests if the given object is equal to this type.
- *
- * @param o the object to be compared to this type.
- * @return <tt>true</tt> if the given object is equal to this type.
- *
- * can-multi-thread
- */
- override def equals(o: Any): Boolean = {
- if (!(o.isInstanceOf[BType])) {
- return false
- }
- val t = o.asInstanceOf[BType]
- if (this eq t) {
- return true
- }
- if (sort != t.sort) {
- return false
- }
- if (sort >= BType.ARRAY) {
- if (len != t.len) {
- return false
- }
- // sort checked already
- if (off == t.off) {
- return true
- }
- var i = 0
- while (i < len) {
- if (chrs(off + i) != chrs(t.off + i)) {
- return false
- }
- i += 1
- }
- // If we reach here, we could update the largest of (this.off, t.off) to match the other, so as to simplify future == comparisons.
- // But that would require a var rather than val.
- }
- true
- }
-
- /*
- * @return a hash code value for this type.
- *
- * can-multi-thread
- */
- override def hashCode(): Int = {
- var hc = 13 * sort;
- if (sort >= BType.ARRAY) {
- var i = off
- val end = i + len
- while (i < end) {
- hc = 17 * (hc + chrs(i))
- i += 1
- }
- }
- hc
- }
-
- /*
- * @return the descriptor of this type.
- *
- * can-multi-thread
- */
- override def toString: String = { getDescriptor }
-
- }
-
- /*
- * Creates a TypeName and the BType token for it.
- * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
- *
- * must-single-thread
- */
- def brefType(iname: String): BType = { brefType(newTypeName(iname.toCharArray(), 0, iname.length())) }
-
- /*
- * Creates a BType token for the TypeName received as argument.
- * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
- *
- * can-multi-thread
- */
- def brefType(iname: TypeName): BType = { BType.getObjectType(iname.start, iname.length) }
-
- // due to keyboard economy only
- val UNIT = BType.VOID_TYPE
- val BOOL = BType.BOOLEAN_TYPE
- val CHAR = BType.CHAR_TYPE
- val BYTE = BType.BYTE_TYPE
- val SHORT = BType.SHORT_TYPE
- val INT = BType.INT_TYPE
- val LONG = BType.LONG_TYPE
- val FLOAT = BType.FLOAT_TYPE
- val DOUBLE = BType.DOUBLE_TYPE
-
- val BOXED_UNIT = brefType("java/lang/Void")
- val BOXED_BOOLEAN = brefType("java/lang/Boolean")
- val BOXED_BYTE = brefType("java/lang/Byte")
- val BOXED_SHORT = brefType("java/lang/Short")
- val BOXED_CHAR = brefType("java/lang/Character")
- val BOXED_INT = brefType("java/lang/Integer")
- val BOXED_LONG = brefType("java/lang/Long")
- val BOXED_FLOAT = brefType("java/lang/Float")
- val BOXED_DOUBLE = brefType("java/lang/Double")
-
- /*
- * RT_NOTHING and RT_NULL exist at run-time only.
- * They are the bytecode-level manifestation (in method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs.
- * Therefore, when RT_NOTHING or RT_NULL are to be emitted,
- * a mapping is needed: the internal names of NothingClass and NullClass can't be emitted as-is.
- */
- val RT_NOTHING = brefType("scala/runtime/Nothing$")
- val RT_NULL = brefType("scala/runtime/Null$")
- val CT_NOTHING = brefType("scala/Nothing") // TODO needed?
- val CT_NULL = brefType("scala/Null") // TODO needed?
-
- val srBooleanRef = brefType("scala/runtime/BooleanRef")
- val srByteRef = brefType("scala/runtime/ByteRef")
- val srCharRef = brefType("scala/runtime/CharRef")
- val srIntRef = brefType("scala/runtime/IntRef")
- val srLongRef = brefType("scala/runtime/LongRef")
- val srFloatRef = brefType("scala/runtime/FloatRef")
- val srDoubleRef = brefType("scala/runtime/DoubleRef")
-
- /* Map from type kinds to the Java reference types.
- * Useful when pushing class literals onto the operand stack (ldc instruction taking a class literal).
- * @see Predef.classOf
- * @see genConstant()
- */
- val classLiteral = immutable.Map[BType, BType](
- UNIT -> BOXED_UNIT,
- BOOL -> BOXED_BOOLEAN,
- BYTE -> BOXED_BYTE,
- SHORT -> BOXED_SHORT,
- CHAR -> BOXED_CHAR,
- INT -> BOXED_INT,
- LONG -> BOXED_LONG,
- FLOAT -> BOXED_FLOAT,
- DOUBLE -> BOXED_DOUBLE
- )
-
- case class MethodNameAndType(mname: String, mdesc: String)
-
- val asmBoxTo: Map[BType, MethodNameAndType] = {
- Map(
- BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) ,
- BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) ,
- CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
- SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) ,
- INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) ,
- LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) ,
- FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) ,
- DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" )
- )
- }
-
- val asmUnboxTo: Map[BType, MethodNameAndType] = {
- Map(
- BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
- BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") ,
- CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") ,
- SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") ,
- INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") ,
- LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") ,
- FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") ,
- DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D")
- )
- }
-}
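For reference, a worked example (not part of the patch) of the packed encoding the removed BType used for primitives: byte 3 of `off` is the descriptor character, byte 2 the opcode offset used by getOpcode for most instructions, byte 1 the offset used for IALOAD/IASTORE, and byte 0 the operand-stack size returned by getSize. Unpacking FLOAT_TYPE's constant:

    val off = ('F' << 24) | (2 << 16) | (2 << 8) | 1            // FLOAT_TYPE's `off`
    val descriptorChar = ((off & 0xFF000000) >>> 24).toChar     // 'F'
    val size           =   off & 0xFF                           // 1 stack slot
    val arrayLoadOff   =  (off & 0xFF00)   >> 8                 // 2: IALOAD (46) + 2 = FALOAD (48)
    val otherOff       =  (off & 0xFF0000) >> 16                // 2: IRETURN (172) + 2 = FRETURN (174)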
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
index 359e5d6c29..18468f5ae3 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -8,9 +8,10 @@ package tools.nsc
package backend.jvm
import scala.tools.asm
-import scala.annotation.switch
-import scala.collection.{ immutable, mutable }
+import scala.collection.mutable
import scala.tools.nsc.io.AbstractFile
+import GenBCode._
+import BackendReporting._
/*
* Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes.
@@ -19,9 +20,10 @@ import scala.tools.nsc.io.AbstractFile
* @version 1.0
*
*/
-abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
-
+abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
import global._
+ import bTypes._
+ import coreBTypes._
/*
* must-single-thread
@@ -38,7 +40,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
outputDirectory(csym)
} catch {
case ex: Throwable =>
- cunit.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}")
+ reporter.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}")
null
}
}
@@ -53,80 +55,24 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
// https://issues.scala-lang.org/browse/SI-3872
// -----------------------------------------------------------------------------------------
- /*
- * can-multi-thread
- */
- def firstCommonSuffix(as: List[Tracked], bs: List[Tracked]): BType = {
- var chainA = as
- var chainB = bs
- var fcs: Tracked = null
- do {
- if (chainB contains chainA.head) fcs = chainA.head
- else if (chainA contains chainB.head) fcs = chainB.head
- else {
- chainA = chainA.tail
- chainB = chainB.tail
- }
- } while (fcs == null)
- fcs.c
- }
-
/* An `asm.ClassWriter` that uses `jvmWiseLUB()`
* The internal name of the least common ancestor of the types given by inameA and inameB.
* It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow
*/
final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
- /*
- * This method is thread re-entrant because chrs never grows during its operation (that's because all TypeNames being looked up have already been entered).
- * To stress this point, rather than using `newTypeName()` we use `lookupTypeName()`
- *
- * can-multi-thread
+ /**
+ * This method is thread-safe: it depends only on the BTypes component, which does not depend
+ * on global. TODO @lry move to a different place where no global is in scope, on bTypes.
*/
override def getCommonSuperClass(inameA: String, inameB: String): String = {
- val a = brefType(lookupTypeName(inameA.toCharArray))
- val b = brefType(lookupTypeName(inameB.toCharArray))
- val lca = jvmWiseLUB(a, b)
- val lcaName = lca.getInternalName // don't call javaName because that side-effects innerClassBuffer.
- assert(lcaName != "scala/Any")
-
- lcaName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things.
+ val a = classBTypeFromInternalName(inameA)
+ val b = classBTypeFromInternalName(inameB)
+ val lub = a.jvmWiseLUB(b).get
+ val lubName = lub.internalName
+ assert(lubName != "scala/Any")
+ lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things.
}
-
- }
-
- /*
- * Finding the least upper bound in agreement with the bytecode verifier (given two internal names handed out by ASM)
- * Background:
- * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
- * http://comments.gmane.org/gmane.comp.java.vm.languages/2293
- * https://issues.scala-lang.org/browse/SI-3872
- *
- * can-multi-thread
- */
- def jvmWiseLUB(a: BType, b: BType): BType = {
-
- assert(a.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $a")
- assert(b.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $b")
-
- val ta = exemplars.get(a)
- val tb = exemplars.get(b)
-
- val res = (ta.isInterface, tb.isInterface) match {
- case (true, true) =>
- // exercised by test/files/run/t4761.scala
- if (tb.isSubtypeOf(ta.c)) ta.c
- else if (ta.isSubtypeOf(tb.c)) tb.c
- else ObjectReference
- case (true, false) =>
- if (tb.isSubtypeOf(a)) a else ObjectReference
- case (false, true) =>
- if (ta.isSubtypeOf(b)) b else ObjectReference
- case _ =>
- firstCommonSuffix(ta :: ta.superClasses, tb :: tb.superClasses)
- }
- assert(res.isNonSpecial, "jvmWiseLUB() returned a non-plain-class.")
- res
}
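An illustration, not part of the patch, of the answers getCommonSuperClass has to produce for ASM's COMPUTE_FRAMES; the snippet uses ASM's own default resolution, which the CClassWriter override above must agree with for plain classes:

    import scala.tools.asm
    object LubDemo extends asm.ClassWriter(asm.ClassWriter.COMPUTE_FRAMES) {
      def demo(): Unit = {
        // Integer and Long both extend java.lang.Number
        println(getCommonSuperClass("java/lang/Integer", "java/lang/Long"))   // java/lang/Number
        // unrelated classes meet at Object
        println(getCommonSuperClass("java/lang/String",  "java/lang/Thread")) // java/lang/Object
      }
    }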
/*
@@ -139,7 +85,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
*/
def apply(sym: Symbol, csymCompUnit: CompilationUnit): Boolean = {
def fail(msg: String, pos: Position = sym.pos) = {
- csymCompUnit.warning(sym.pos,
+ reporter.warning(sym.pos,
sym.name +
s" has a main method with parameter type Array[String], but ${sym.fullName('.')} will not be a runnable program.\n Reason: $msg"
// TODO: make this next claim true, if possible
@@ -228,7 +174,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
def fieldSymbols(cls: Symbol): List[Symbol] = {
for (f <- cls.info.decls.toList ;
if !f.isMethod && f.isTerm && !f.isModule
- ) yield f;
+ ) yield f
}
/*
@@ -239,6 +185,13 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
}
/*
+ * must-single-thread
+ */
+ def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect {
+ case AnnotationInfo(_, _, (_, LiteralAnnotArg(const)) :: Nil) => const.longValue
+ }
+
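A small illustration, not part of the patch, of what the collect above extracts; note that this version reads the constant from the annotation's assocs, whereas the copy removed from BCClassGen further down read it from args:

    @SerialVersionUID(13L)
    class Box(val value: Int) extends Serializable
    // for Box's class symbol, serialVUID returns Some(13L); without the annotation it returns None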
+ /*
* Populates the InnerClasses JVM attribute with `refedInnerClasses`.
* In addition to inner classes mentioned somewhere in `jclass` (where `jclass` is a class file being emitted)
* `refedInnerClasses` should contain those inner classes defined as direct member classes of `jclass`
@@ -252,38 +205,16 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
*
* can-multi-thread
*/
- final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: Iterable[BType]) {
- // used to detect duplicates.
- val seen = mutable.Map.empty[String, String]
- // result without duplicates, not yet sorted.
- val result = mutable.Set.empty[InnerClassEntry]
-
- for(s: BType <- refedInnerClasses;
- e: InnerClassEntry <- exemplars.get(s).innersChain) {
-
- assert(e.name != null, "saveInnerClassesFor() is broken.") // documentation
- val doAdd = seen.get(e.name) match {
- // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
- case Some(prevOName) =>
- // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
- // i.e. for them it must be the case that oname == java/lang/Thread
- assert(prevOName == e.outerName, "duplicate")
- false
- case None => true
- }
-
- if (doAdd) {
- seen += (e.name -> e.outerName)
- result += e
- }
+ final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: List[ClassBType]) {
+ val allNestedClasses = refedInnerClasses.flatMap(_.enclosingNestedClassesChain.get).distinct
+ // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler
+ for (nestedClass <- allNestedClasses.sortBy(_.internalName.toString)) {
+ // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes.
+ val Some(e) = nestedClass.innerClassAttributeEntry.get
+ jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags)
}
- // sorting ensures inner classes are listed after their enclosing class thus satisfying the Eclipse Java compiler
- for(e <- result.toList sortBy (_.name.toString)) {
- jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.access)
- }
-
- } // end of method addInnerClassesASM()
+ }
/*
* Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only
@@ -314,8 +245,8 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
* can-multi-thread
*/
def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
- val dest = new Array[Byte](len);
- System.arraycopy(b, offset, dest, 0, len);
+ val dest = new Array[Byte](len)
+ System.arraycopy(b, offset, dest, 0, len)
new asm.CustomAttr(name, dest)
}
@@ -376,9 +307,9 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
def debugLevel = settings.debuginfo.indexOfChoice
- val emitSource = debugLevel >= 1
- val emitLines = debugLevel >= 2
- val emitVars = debugLevel >= 3
+ final val emitSource = debugLevel >= 1
+ final val emitLines = debugLevel >= 2
+ final val emitVars = debugLevel >= 3
/*
* Contains class-symbols that:
@@ -387,203 +318,77 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
*
* In other words, the lifetime of `innerClassBufferASM` is associated to "the class being generated".
*/
- val innerClassBufferASM = mutable.Set.empty[BType]
-
- /*
- * Tracks (if needed) the inner class given by `sym`.
- *
- * must-single-thread
- */
- final def internalName(sym: Symbol): String = { asmClassType(sym).getInternalName }
+ final val innerClassBufferASM = mutable.Set.empty[ClassBType]
- /*
- * Tracks (if needed) the inner class given by `sym`.
- *
- * must-single-thread
+ /**
+ * The class internal name for a given class symbol. If the symbol describes a nested class, the
+ * ClassBType is added to the innerClassBufferASM.
*/
- final def asmClassType(sym: Symbol): BType = {
- assert(
- hasInternalName(sym),
- {
- val msg0 = if (sym.isAbstractType) "An AbstractTypeSymbol (SI-7122) " else "A symbol ";
- msg0 + s"has reached the bytecode emitter, for which no JVM-level internal name can be found: ${sym.fullName}"
- }
- )
- val phantOpt = phantomTypeMap.get(sym)
- if (phantOpt.isDefined) {
- return phantOpt.get
- }
- val tracked = exemplar(sym)
- val tk = tracked.c
- if (tracked.isInnerClass) {
- innerClassBufferASM += tk
- }
-
- tk
+ final def internalName(sym: Symbol): String = {
+ // For each java class, the scala compiler creates a class and a module (thus a module class).
+ // If the `sym` is a java module class, we use the java class instead. This ensures that we
+ // register the class (instead of the module class) in innerClassBufferASM.
+ // The two symbols have the same name, so the resulting internalName is the same.
+ val classSym = if (sym.isJavaDefined && sym.isModuleClass) sym.linkedClassOfClass else sym
+ getClassBTypeAndRegisterInnerClass(classSym).internalName
}
- /*
- * Returns the BType for the given type.
- * Tracks (if needed) the inner class given by `t`.
+ /**
+ * The ClassBType for a class symbol. If the class is nested, the ClassBType is added to the
+ * innerClassBufferASM.
*
- * must-single-thread
+ * TODO: clean up the way we track referenced inner classes.
+ * doing it during code generation is not correct when the optimizer changes the code.
*/
- final def toTypeKind(t: Type): BType = {
-
- /* Interfaces have to be handled delicately to avoid introducing spurious errors,
- * but if we treat them all as AnyRef we lose too much information.
- */
- def newReference(sym0: Symbol): BType = {
- assert(!primitiveTypeMap.contains(sym0), "Use primitiveTypeMap instead.")
- assert(sym0 != definitions.ArrayClass, "Use arrayOf() instead.")
-
- if (sym0 == definitions.NullClass) return RT_NULL;
- if (sym0 == definitions.NothingClass) return RT_NOTHING;
-
- val sym = (
- if (!sym0.isPackageClass) sym0
- else sym0.info.member(nme.PACKAGE) match {
- case NoSymbol => abort(s"SI-5604: Cannot use package as value: ${sym0.fullName}")
- case s => abort(s"SI-5604: found package class where package object expected: $s")
- }
- )
-
- // Can't call .toInterface (at this phase) or we trip an assertion.
- // See PackratParser#grow for a method which fails with an apparent mismatch
- // between "object PackratParsers$class" and "trait PackratParsers"
- if (sym.isImplClass) {
- // pos/spec-List.scala is the sole failure if we don't check for NoSymbol
- val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
- if (traitSym != NoSymbol) {
- // this tracks the inner class in innerClassBufferASM, if needed.
- return asmClassType(traitSym)
- }
- }
-
- assert(hasInternalName(sym), s"Invoked for a symbol lacking JVM internal name: ${sym.fullName}")
- assert(!phantomTypeMap.contains(sym), "phantom types not supposed to reach here.")
-
- val tracked = exemplar(sym)
- val tk = tracked.c
- if (tracked.isInnerClass) {
- innerClassBufferASM += tk
- }
-
- tk
- }
-
- def primitiveOrRefType(sym: Symbol): BType = {
- assert(sym != definitions.ArrayClass, "Use primitiveOrArrayOrRefType() instead.")
-
- primitiveTypeMap.getOrElse(sym, newReference(sym))
- }
-
- def primitiveOrRefType2(sym: Symbol): BType = {
- primitiveTypeMap.get(sym) match {
- case Some(pt) => pt
- case None =>
- sym match {
- case definitions.NullClass => RT_NULL
- case definitions.NothingClass => RT_NOTHING
- case _ if sym.isClass => newReference(sym)
- case _ =>
- assert(sym.isType, sym) // it must be compiling Array[a]
- ObjectReference
- }
- }
- }
-
- import definitions.ArrayClass
-
- // Call to .normalize fixes #3003 (follow type aliases). Otherwise, primitiveOrArrayOrRefType() would return ObjectReference.
- t.normalize match {
-
- case ThisType(sym) =>
- if (sym == ArrayClass) ObjectReference
- else phantomTypeMap.getOrElse(sym, exemplar(sym).c)
-
- case SingleType(_, sym) => primitiveOrRefType(sym)
-
- case _: ConstantType => toTypeKind(t.underlying)
-
- case TypeRef(_, sym, args) =>
- if (sym == ArrayClass) arrayOf(toTypeKind(args.head))
- else primitiveOrRefType2(sym)
-
- case ClassInfoType(_, _, sym) =>
- assert(sym != ArrayClass, "ClassInfoType to ArrayClass!")
- primitiveOrRefType(sym)
-
- // !!! Iulian says types which make no sense after erasure should not reach here, which includes the ExistentialType, AnnotatedType, RefinedType.
- case ExistentialType(_, t) => toTypeKind(t) // TODO shouldn't get here but the following does: akka-actor/src/main/scala/akka/util/WildcardTree.scala
- case AnnotatedType(_, w) => toTypeKind(w) // TODO test/files/jvm/annotations.scala causes an AnnotatedType to reach here.
- case RefinedType(parents, _) => parents map toTypeKind reduceLeft jvmWiseLUB
-
- // For sure WildcardTypes shouldn't reach here either, but when debugging such situations this may come in handy.
- // case WildcardType => REFERENCE(ObjectClass)
- case norm => abort(
- s"Unknown type: $t, $norm [${t.getClass}, ${norm.getClass}] TypeRef? ${t.isInstanceOf[TypeRef]}"
- )
- }
-
- } // end of method toTypeKind()
+ final def getClassBTypeAndRegisterInnerClass(sym: Symbol): ClassBType = {
+ val r = classBTypeFromSymbol(sym)
+ if (r.isNestedClass.get) innerClassBufferASM += r
+ r
+ }
- /*
- * must-single-thread
+ /**
+ * The BType for a type reference. If the result is a ClassBType for a nested class, it is added
+ * to the innerClassBufferASM.
+ * TODO: clean up the way we track referenced inner classes.
*/
- def asmMethodType(msym: Symbol): BType = {
- assert(msym.isMethod, s"not a method-symbol: $msym")
- val resT: BType =
- if (msym.isClassConstructor || msym.isConstructor) BType.VOID_TYPE
- else toTypeKind(msym.tpe.resultType);
- BType.getMethodType( resT, mkArray(msym.tpe.paramTypes map toTypeKind) )
+ final def toTypeKind(t: Type): BType = typeToBType(t) match {
+ case c: ClassBType if c.isNestedClass.get =>
+ innerClassBufferASM += c
+ c
+ case r => r
}
- /*
- * Returns all direct member inner classes of `csym`,
- * thus making sure they get entries in the InnerClasses JVM attribute
- * even if otherwise not mentioned in the class being built.
- *
- * must-single-thread
+ /**
+ * Class components that are nested classes are added to the innerClassBufferASM.
+ * TODO: clean up the way we track referenced inner classes.
*/
- final def trackMemberClasses(csym: Symbol, lateClosuresBTs: List[BType]): List[BType] = {
- val lateInnerClasses = exitingErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); memberc <- sym.info.decls.map(innerClassSymbolFor) if memberc.isClass)
- yield memberc
- }
- // as a precaution, do the following outside the above `exitingErasure` otherwise funny internal names might be computed.
- val result = for(memberc <- lateInnerClasses) yield {
- val tracked = exemplar(memberc)
- val memberCTK = tracked.c
- assert(tracked.isInnerClass, s"saveInnerClassesFor() says this was no inner-class after all: ${memberc.fullName}")
-
- memberCTK
+ final def asmMethodType(msym: Symbol): MethodBType = {
+ val r = methodBTypeFromSymbol(msym)
+ (r.returnType :: r.argumentTypes) foreach {
+ case c: ClassBType if c.isNestedClass.get => innerClassBufferASM += c
+ case _ =>
}
-
- exemplar(csym).directMemberClasses = result
-
- result
+ r
}
- /*
- * Tracks (if needed) the inner class given by `t`.
- *
- * must-single-thread
+ /**
+ * The jvm descriptor of a type. If `t` references a nested class, its ClassBType is added to
+ * the innerClassBufferASM.
*/
- final def descriptor(t: Type): String = { toTypeKind(t).getDescriptor }
+ final def descriptor(t: Type): String = { toTypeKind(t).descriptor }
- /*
- * Tracks (if needed) the inner class given by `sym`.
- *
- * must-single-thread
+ /**
+ * The jvm descriptor for a symbol. If `sym` represents a nested class, its ClassBType is added
+ * to the innerClassBufferASM.
*/
- final def descriptor(sym: Symbol): String = { asmClassType(sym).getDescriptor }
+ final def descriptor(sym: Symbol): String = { getClassBTypeAndRegisterInnerClass(sym).descriptor }
} // end of trait BCInnerClassGen
trait BCAnnotGen extends BCInnerClassGen {
import genASM.{ubytesToCharArray, arrEncode}
+ import bCodeAsmCommon.{shouldEmitAnnotation, isRuntimeVisible}
/*
* can-multi-thread
@@ -648,17 +453,6 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
}
}
- /* Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- *
- * must-single-thread
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(definitions.ClassfileAnnotationClass) &&
- annot.args.isEmpty &&
- !annot.matches(definitions.DeprecatedAttr)
-
/*
* In general,
* must-single-thread
@@ -678,7 +472,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = cw.visitAnnotation(descriptor(typ), true)
+ val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -690,7 +484,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = mw.visitAnnotation(descriptor(typ), true)
+ val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -702,7 +496,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = fw.visitAnnotation(descriptor(typ), true)
+ val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -717,7 +511,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
annot <- annots) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot))
emitAssocs(pannVisitor, assocs)
}
}
@@ -740,13 +534,6 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen {
- // -----------------------------------------------------------------------------------------
- // Static forwarders (related to mirror classes but also present in
- // a plain class lacking companion module, for details see `isCandidateForForwarders`).
- // -----------------------------------------------------------------------------------------
-
- val ExcludedForwarderFlags = genASM.ExcludedForwarderFlags
-
/* Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -782,7 +569,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
*/
// TODO: evaluate the other flags we might be dropping on the floor here.
// TODO: ACC_SYNTHETIC ?
- val flags = PublicStatic | (
+ val flags = GenBCode.PublicStatic | (
if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
)
@@ -793,7 +580,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
val thrownExceptions: List[String] = getExceptions(throws)
val jReturnType = toTypeKind(methodInfo.resultType)
- val mdesc = BType.getMethodType(jReturnType, mkArray(paramJavaTypes)).getDescriptor
+ val mdesc = MethodBType(paramJavaTypes, jReturnType).descriptor
val mirrorMethodName = m.javaSimpleName.toString
val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
flags,
@@ -812,13 +599,13 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
var index = 0
for(jparamType <- paramJavaTypes) {
- mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
- assert(jparamType.sort != BType.METHOD, jparamType)
- index += jparamType.getSize
+ mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index)
+ assert(!jparamType.isInstanceOf[MethodBType], jparamType)
+ index += jparamType.size
}
- mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).getDescriptor)
- mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false)
+ mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN))
mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
mirrorMethod.visitEnd()
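A sketch, not part of the patch, of what the forwarder emitted above amounts to for a simple top-level object; the Java rendering is approximate and the names are illustrative:

    // Scala source
    object Echo { def say(s: String): String = s }
    // the mirror class `Echo` then gets, roughly, the Java equivalent of
    //   public static String say(String s) { return Echo$.MODULE$.say(s); }
    // i.e. each parameter is loaded with its typed xLOAD opcode, the call is an INVOKEVIRTUAL
    // on MODULE$, and the result is returned with the typed xRETURN opcode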
@@ -842,7 +629,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
}
debuglog(s"Potentially conflicting names for forwarders: $conflictingNames")
- for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, symtab.Flags.METHOD)) {
+ for (m <- moduleClass.info.membersBasedOnFlags(bCodeAsmCommon.ExcludedForwarderFlags, symtab.Flags.METHOD)) {
if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor)
debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
else if (conflictingNames(m.name))
@@ -881,13 +668,6 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
val MIN_SWITCH_DENSITY = 0.7
/*
- * must-single-thread
- */
- def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect {
- case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
- }
-
- /*
* Add public static final field serialVersionUID with value `id`
*
* can-multi-thread
@@ -895,72 +675,18 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
def addSerialVUID(id: Long, jclass: asm.ClassVisitor) {
// add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)`
jclass.visitField(
- PublicStaticFinal,
+ GenBCode.PublicStaticFinal,
"serialVersionUID",
"J",
null, // no java-generic-signature
new java.lang.Long(id)
).visitEnd()
}
-
- /*
- * @param owner internal name of the enclosing class of the class.
- *
- * @param name the name of the method that contains the class.
-
- * @param methodType the method that contains the class.
- */
- case class EnclMethodEntry(owner: String, name: String, methodType: BType)
-
- /*
- * @return null if the current class is not internal to a method
- *
- * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
- * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
- * A class may have no more than one EnclosingMethod attribute.
- *
- * must-single-thread
- */
- def getEnclosingMethodAttribute(clazz: Symbol): EnclMethodEntry = { // JVMS 4.7.7
-
- def newEEE(eClass: Symbol, m: Symbol) = {
- EnclMethodEntry(
- internalName(eClass),
- m.javaSimpleName.toString,
- asmMethodType(m)
- )
- }
-
- var res: EnclMethodEntry = null
- val sym = clazz.originalEnclosingMethod
- if (sym.isMethod) {
- debuglog(s"enclosing method for $clazz is $sym (in ${sym.enclClass})")
- res = newEEE(sym.enclClass, sym)
- } else if (clazz.isAnonymousClass) {
- val enclClass = clazz.rawowner
- assert(enclClass.isClass, enclClass)
- val sym = enclClass.primaryConstructor
- if (sym == NoSymbol) {
- log(s"Ran out of room looking for an enclosing method for $clazz: no constructor here: $enclClass.")
- } else {
- debuglog(s"enclosing method for $clazz is $sym (in $enclClass)")
- res = newEEE(enclClass, sym)
- }
- }
-
- res
- }
-
} // end of trait BCClassGen
- /* basic functionality for class file building of plain, mirror, and beaninfo classes. */
- abstract class JBuilder extends BCInnerClassGen {
-
- } // end of class JBuilder
-
/* functionality for building plain and mirror classes */
abstract class JCommonBuilder
- extends JBuilder
+ extends BCInnerClassGen
with BCAnnotGen
with BCForwardersGen
with BCPickles { }
@@ -979,41 +705,38 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
*
* must-single-thread
*/
- def genMirrorClass(modsym: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = {
- assert(modsym.companionClass == NoSymbol, modsym)
+ def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = {
+ assert(moduleClass.isModuleClass)
+ assert(moduleClass.companionClass == NoSymbol, moduleClass)
innerClassBufferASM.clear()
this.cunit = cunit
- val moduleName = internalName(modsym) // + "$"
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+ val bType = mirrorClassClassBType(moduleClass)
val mirrorClass = new asm.tree.ClassNode
mirrorClass.visit(
classfileVersion,
- flags,
- mirrorName,
+ bType.info.get.flags,
+ bType.internalName,
null /* no java-generic-signature */,
- JAVA_LANG_OBJECT.getInternalName,
+ ObjectReference.internalName,
EMPTY_STRING_ARRAY
)
- if (emitSource) {
- mirrorClass.visitSource("" + cunit.source,
- null /* SourceDebugExtension */)
- }
+ if (emitSource)
+ mirrorClass.visitSource("" + cunit.source, null /* SourceDebugExtension */)
- val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
+ val ssa = getAnnotPickle(bType.internalName, moduleClass.companionSymbol)
mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
- emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
+ emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa)
- addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
+ addForwarders(isRemote(moduleClass), mirrorClass, bType.internalName, moduleClass)
- innerClassBufferASM ++= trackMemberClasses(modsym, Nil /* TODO what about Late-Closure-Classes */ )
+ innerClassBufferASM ++= bType.info.get.nestedClasses
addInnerClassesASM(mirrorClass, innerClassBufferASM.toList)
mirrorClass.visitEnd()
- ("" + modsym.name) // this side-effect is necessary, really.
+ ("" + moduleClass.name) // this side-effect is necessary, really.
mirrorClass
}
@@ -1021,7 +744,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
} // end of class JMirrorBuilder
/* builder of bean info classes */
- class JBeanInfoBuilder extends JBuilder {
+ class JBeanInfoBuilder extends BCInnerClassGen {
/*
* Generate a bean info class that describes the given class.
@@ -1036,10 +759,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
innerClassBufferASM.clear()
- val flags = mkFlags(
- javaFlags(cls),
- if (isDeprecated(cls)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
+ val flags = javaFlags(cls)
val beanInfoName = (internalName(cls) + "BeanInfo")
val beanInfoClass = new asm.tree.ClassNode
@@ -1060,8 +780,8 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
var fieldList = List[String]()
for (f <- fieldSymbols if f.hasGetter;
- g = f.getter(cls);
- s = f.setter(cls);
+ g = f.getterIn(cls);
+ s = f.setterIn(cls);
if g.isPublic && !(f.name startsWith "$")
) {
// inserting $outer breaks the bean
@@ -1085,12 +805,11 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
EMPTY_STRING_ARRAY // no throwable exceptions
)
- val stringArrayJType: BType = arrayOf(JAVA_LANG_STRING)
- val conJType: BType =
- BType.getMethodType(
- BType.VOID_TYPE,
- Array(exemplar(definitions.ClassClass).c, stringArrayJType, stringArrayJType)
- )
+ val stringArrayJType: BType = ArrayBType(StringReference)
+ val conJType: BType = MethodBType(
+ classBTypeFromSymbol(definitions.ClassClass) :: stringArrayJType :: stringArrayJType :: Nil,
+ UNIT
+ )
def push(lst: List[String]) {
var fi = 0
@@ -1099,7 +818,7 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
constructor.visitLdcInsn(new java.lang.Integer(fi))
if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
else { constructor.visitLdcInsn(f) }
- constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
+ constructor.visitInsn(StringReference.typedOpcode(asm.Opcodes.IASTORE))
fi += 1
}
}
@@ -1108,27 +827,27 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
// push the class
- constructor.visitLdcInsn(exemplar(cls).c)
+ constructor.visitLdcInsn(classBTypeFromSymbol(cls).toASMType)
// push the string array of field information
constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
- constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringReference.internalName)
push(fieldList)
// push the string array of method information
constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
- constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, StringReference.internalName)
push(methodList)
// invoke the superclass constructor, which will do the
// necessary java reflection and create Method objects.
- constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+ constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.descriptor, false)
constructor.visitInsn(asm.Opcodes.RETURN)
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- innerClassBufferASM ++= trackMemberClasses(cls, Nil /* TODO what about Late-Closure-Classes */ )
+ innerClassBufferASM ++= classBTypeFromSymbol(cls).info.get.nestedClasses
addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList)
beanInfoClass.visitEnd()
@@ -1160,11 +879,11 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
*/
def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) {
// this tracks the inner class in innerClassBufferASM, if needed.
- val androidCreatorType = asmClassType(AndroidCreatorClass)
- val tdesc_creator = androidCreatorType.getDescriptor
+ val androidCreatorType = getClassBTypeAndRegisterInnerClass(AndroidCreatorClass)
+ val tdesc_creator = androidCreatorType.descriptor
cnode.visitField(
- PublicStaticFinal,
+ GenBCode.PublicStaticFinal,
"CREATOR",
tdesc_creator,
null, // no java-generic-signature
@@ -1182,12 +901,13 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
)
// INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
- val bt = BType.getMethodType(androidCreatorType, Array.empty[BType])
+ val bt = MethodBType(Nil, androidCreatorType)
clinit.visitMethodInsn(
asm.Opcodes.INVOKEVIRTUAL,
moduleName,
"CREATOR",
- bt.getDescriptor
+ bt.descriptor,
+ false
)
// PUTSTATIC `thisName`.CREATOR;
@@ -1200,5 +920,4 @@ abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
}
} // end of trait JAndroidBuilder
-
}
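The BCodeHelpers hunks above migrate from the old ASM-sort-based accessors (getOpcode, getSize, getDescriptor, BType.getMethodType) to the new BType hierarchy (typedOpcode, size, descriptor, MethodBType, ArrayBType) and pass the explicit interface flag that ASM 5's visitMethodInsn expects. A minimal sketch of the accessor shape, using an invented SketchBType rather than the compiler's real BTypes, for orientation only:

  // Minimal sketch (SketchBType is invented for illustration; the real API lives in
  // scala.tools.nsc.backend.jvm.BTypes): descriptor, size and typedOpcode replace the
  // getDescriptor/getSize/getOpcode calls seen on the removed lines above.
  import scala.tools.asm

  sealed abstract class SketchBType(val descriptor: String) {
    // JVM slot size: long and double occupy two slots, void none, everything else one.
    def size: Int = descriptor match {
      case "J" | "D" => 2
      case "V"       => 0
      case _         => 1
    }
    // Map a generic opcode (ILOAD, IRETURN, ...) to the variant for this type by
    // delegating to ASM's own opcode table.
    def typedOpcode(genericOpcode: Int): Int =
      asm.Type.getType(descriptor).getOpcode(genericOpcode)
  }
  case object LongB extends SketchBType("J")
  case class RefB(internalName: String) extends SketchBType("L" + internalName + ";")

  // LongB.typedOpcode(asm.Opcodes.ILOAD) == asm.Opcodes.LLOAD; RefB("java/lang/String").size == 1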
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala
new file mode 100644
index 0000000000..50d20921d5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeICodeCommon.scala
@@ -0,0 +1,25 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc.backend.jvm
+
+import scala.tools.nsc.Global
+import PartialFunction._
+
+/**
+ * This trait contains code shared between GenBCode and GenICode that depends on types defined in
+ * the compiler cake (Global).
+ */
+final class BCodeICodeCommon[G <: Global](val global: G) {
+ import global._
+
+ /** Some useful equality helpers. */
+ def isNull(t: Tree) = cond(t) { case Literal(Constant(null)) => true }
+ def isLiteral(t: Tree) = cond(t) { case Literal(_) => true }
+ def isNonNullExpr(t: Tree) = isLiteral(t) || ((t.symbol ne null) && t.symbol.isModule)
+
+ /** If l or r is constant null, returns the other ; otherwise null */
+ def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
+}
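The helpers in the new BCodeICodeCommon file above lean on PartialFunction.cond, which applies the supplied partial function when it is defined at the value and yields false otherwise. A standalone usage sketch, not compiler code:

  // Standalone illustration of PartialFunction.cond as used by isNull/isLiteral above.
  import scala.PartialFunction.cond

  val values: List[Any] = List(null, "a", 42)
  val nullCount   = values.count(v => cond(v) { case null      => true })  // 1
  val stringCount = values.count(v => cond(v) { case _: String => true })  // 1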
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
index c3492b79a9..8f2a17a2bf 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -9,8 +9,9 @@ package backend.jvm
import scala.tools.asm
import scala.annotation.switch
-import scala.collection.{ immutable, mutable }
-import collection.convert.Wrappers.JListWrapper
+import scala.collection.mutable
+import GenBCode._
+import scala.tools.asm.tree.MethodInsnNode
/*
* A high-level facade to the ASM API for bytecode generation.
@@ -19,40 +20,30 @@ import collection.convert.Wrappers.JListWrapper
* @version 1.0
*
*/
-abstract class BCodeIdiomatic extends BCodeGlue {
+abstract class BCodeIdiomatic extends SubComponent {
+ val bTypes = new BTypesFromSymbols[global.type](global)
import global._
+ import bTypes._
+ import coreBTypes._
val classfileVersion: Int = settings.target.value match {
case "jvm-1.5" => asm.Opcodes.V1_5
case "jvm-1.6" => asm.Opcodes.V1_6
case "jvm-1.7" => asm.Opcodes.V1_7
+ case "jvm-1.8" => asm.Opcodes.V1_8
}
val majorVersion: Int = (classfileVersion & 0xFF)
val emitStackMapFrame = (majorVersion >= 50)
- def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
-
- val extraProc: Int = mkFlags(
+ val extraProc: Int = GenBCode.mkFlags(
asm.ClassWriter.COMPUTE_MAXS,
if (emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
)
val StringBuilderClassName = "scala/collection/mutable/StringBuilder"
- val CLASS_CONSTRUCTOR_NAME = "<clinit>"
- val INSTANCE_CONSTRUCTOR_NAME = "<init>"
-
- val ObjectReference = brefType("java/lang/Object")
- val AnyRefReference = ObjectReference
- val objArrayReference = arrayOf(ObjectReference)
-
- val JAVA_LANG_OBJECT = ObjectReference
- val JAVA_LANG_STRING = brefType("java/lang/String")
-
- var StringBuilderReference: BType = null
-
val EMPTY_STRING_ARRAY = Array.empty[String]
val EMPTY_INT_ARRAY = Array.empty[Int]
val EMPTY_LABEL_ARRAY = Array.empty[asm.Label]
@@ -108,17 +99,6 @@ abstract class BCodeIdiomatic extends BCodeGlue {
a
}
- /*
- * The type of 1-dimensional arrays of `elem` type.
- * The invoker is responsible for tracking (if needed) the inner class given by the elem BType.
- *
- * must-single-thread
- */
- final def arrayOf(elem: BType): BType = {
- assert(!(elem.isUnitType), s"The element type of an array can't be: $elem")
- brefType("[" + elem.getDescriptor)
- }
-
/* Just a namespace for utilities that encapsulate MethodVisitor idioms.
* In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
* but the methods here allow choosing when to transition from ICode to ASM types
@@ -126,7 +106,7 @@ abstract class BCodeIdiomatic extends BCodeGlue {
*/
abstract class JCodeMethodN {
- def jmethod: asm.MethodVisitor
+ def jmethod: asm.tree.MethodNode
import asm.Opcodes;
import icodes.opcodes.{ Static, Dynamic, SuperCall }
@@ -226,35 +206,36 @@ abstract class BCodeIdiomatic extends BCodeGlue {
/*
* can-multi-thread
*/
- final def genStartConcat {
+ final def genStartConcat(pos: Position): Unit = {
jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
jmethod.visitInsn(Opcodes.DUP)
invokespecial(
StringBuilderClassName,
INSTANCE_CONSTRUCTOR_NAME,
- "()V"
+ "()V",
+ pos
)
}
/*
* can-multi-thread
*/
- final def genStringConcat(el: BType) {
+ final def genStringConcat(el: BType, pos: Position): Unit = {
val jtype =
- if (el.isArray || el.hasObjectSort) JAVA_LANG_OBJECT
- else el;
+ if (el.isArray || el.isClass) ObjectReference
+ else el
- val bt = BType.getMethodType(StringBuilderReference, Array(jtype))
+ val bt = MethodBType(List(jtype), StringBuilderReference)
- invokevirtual(StringBuilderClassName, "append", bt.getDescriptor)
+ invokevirtual(StringBuilderClassName, "append", bt.descriptor, pos)
}
/*
* can-multi-thread
*/
- final def genEndConcat {
- invokevirtual(StringBuilderClassName, "toString", "()Ljava/lang/String;")
+ final def genEndConcat(pos: Position): Unit = {
+ invokevirtual(StringBuilderClassName, "toString", "()Ljava/lang/String;", pos)
}
/*
@@ -268,8 +249,8 @@ abstract class BCodeIdiomatic extends BCodeGlue {
final def emitT2T(from: BType, to: BType) {
assert(
- from.isNonUnitValueType && to.isNonUnitValueType,
- s"Cannot emit primitive conversion from $from to $to"
+ from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType,
+ s"Cannot emit primitive conversion from $from to $to - ${global.currentUnit}"
)
def pickOne(opcs: Array[Int]) { // TODO index on to.sort
@@ -290,37 +271,37 @@ abstract class BCodeIdiomatic extends BCodeGlue {
assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
// We're done with BOOL already
- (from.sort: @switch) match {
+ (from: @unchecked) match {
// using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
- case asm.Type.BYTE => pickOne(JCodeMethodN.fromByteT2T)
- case asm.Type.SHORT => pickOne(JCodeMethodN.fromShortT2T)
- case asm.Type.CHAR => pickOne(JCodeMethodN.fromCharT2T)
- case asm.Type.INT => pickOne(JCodeMethodN.fromIntT2T)
+ case BYTE => pickOne(JCodeMethodN.fromByteT2T)
+ case SHORT => pickOne(JCodeMethodN.fromShortT2T)
+ case CHAR => pickOne(JCodeMethodN.fromCharT2T)
+ case INT => pickOne(JCodeMethodN.fromIntT2T)
- case asm.Type.FLOAT =>
+ case FLOAT =>
import asm.Opcodes.{ F2L, F2D, F2I }
- (to.sort: @switch) match {
- case asm.Type.LONG => emit(F2L)
- case asm.Type.DOUBLE => emit(F2D)
- case _ => emit(F2I); emitT2T(INT, to)
+ to match {
+ case LONG => emit(F2L)
+ case DOUBLE => emit(F2D)
+ case _ => emit(F2I); emitT2T(INT, to)
}
- case asm.Type.LONG =>
+ case LONG =>
import asm.Opcodes.{ L2F, L2D, L2I }
- (to.sort: @switch) match {
- case asm.Type.FLOAT => emit(L2F)
- case asm.Type.DOUBLE => emit(L2D)
- case _ => emit(L2I); emitT2T(INT, to)
+ to match {
+ case FLOAT => emit(L2F)
+ case DOUBLE => emit(L2D)
+ case _ => emit(L2I); emitT2T(INT, to)
}
- case asm.Type.DOUBLE =>
+ case DOUBLE =>
import asm.Opcodes.{ D2L, D2F, D2I }
- (to.sort: @switch) match {
- case asm.Type.FLOAT => emit(D2F)
- case asm.Type.LONG => emit(D2L)
- case _ => emit(D2I); emitT2T(INT, to)
+ to match {
+ case FLOAT => emit(D2F)
+ case LONG => emit(D2L)
+ case _ => emit(D2I); emitT2T(INT, to)
}
}
} // end of emitT2T()
@@ -372,24 +353,26 @@ abstract class BCodeIdiomatic extends BCodeGlue {
// can-multi-thread
final def newarray(elem: BType) {
- if (elem.isRefOrArrayType || elem.isPhantomType ) {
- /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which hasObjectSort. */
- jmethod.visitTypeInsn(Opcodes.ANEWARRAY, elem.getInternalName)
- } else {
- val rand = {
- // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
- (elem.sort: @switch) match {
- case asm.Type.BOOLEAN => Opcodes.T_BOOLEAN
- case asm.Type.BYTE => Opcodes.T_BYTE
- case asm.Type.SHORT => Opcodes.T_SHORT
- case asm.Type.CHAR => Opcodes.T_CHAR
- case asm.Type.INT => Opcodes.T_INT
- case asm.Type.LONG => Opcodes.T_LONG
- case asm.Type.FLOAT => Opcodes.T_FLOAT
- case asm.Type.DOUBLE => Opcodes.T_DOUBLE
+ elem match {
+ case c: RefBType =>
+ /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. */
+ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType)
+ case _ =>
+ assert(elem.isNonVoidPrimitiveType)
+ val rand = {
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (elem: @unchecked) match {
+ case BOOL => Opcodes.T_BOOLEAN
+ case BYTE => Opcodes.T_BYTE
+ case SHORT => Opcodes.T_SHORT
+ case CHAR => Opcodes.T_CHAR
+ case INT => Opcodes.T_INT
+ case LONG => Opcodes.T_LONG
+ case FLOAT => Opcodes.T_FLOAT
+ case DOUBLE => Opcodes.T_DOUBLE
+ }
}
- }
- jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
+ jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
}
}
@@ -408,20 +391,29 @@ abstract class BCodeIdiomatic extends BCodeGlue {
final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread
// can-multi-thread
- final def invokespecial(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+ final def invokespecial(owner: String, name: String, desc: String, pos: Position) {
+ addInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, false, pos)
}
// can-multi-thread
- final def invokestatic(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+ final def invokestatic(owner: String, name: String, desc: String, pos: Position) {
+ addInvoke(Opcodes.INVOKESTATIC, owner, name, desc, false, pos)
}
// can-multi-thread
- final def invokeinterface(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+ final def invokeinterface(owner: String, name: String, desc: String, pos: Position) {
+ addInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, true, pos)
}
// can-multi-thread
- final def invokevirtual(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+ final def invokevirtual(owner: String, name: String, desc: String, pos: Position) {
+ addInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, false, pos)
+ }
+
+ private def addInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean, pos: Position) = {
+ val node = new MethodInsnNode(opcode, owner, name, desc, itf)
+ jmethod.instructions.add(node)
+ if (settings.YoptInlinerEnabled) callsitePositions(node) = pos
+ }
+ final def invokedynamic(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEDYNAMIC, owner, name, desc)
}
// can-multi-thread
@@ -529,7 +521,7 @@ abstract class BCodeIdiomatic extends BCodeGlue {
// can-multi-thread
final def emitVarInsn(opc: Int, idx: Int, tk: BType) {
assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
- jmethod.visitVarInsn(tk.getOpcode(opc), idx)
+ jmethod.visitVarInsn(tk.typedOpcode(opc), idx)
}
// ---------------- array load and store ----------------
@@ -538,7 +530,7 @@ abstract class BCodeIdiomatic extends BCodeGlue {
final def emitTypeBased(opcs: Array[Int], tk: BType) {
assert(tk != UNIT, tk)
val opc = {
- if (tk.isRefOrArrayType) { opcs(0) }
+ if (tk.isRef) { opcs(0) }
else if (tk.isIntSizedType) {
(tk: @unchecked) match {
case BOOL | BYTE => opcs(1)
@@ -563,11 +555,11 @@ abstract class BCodeIdiomatic extends BCodeGlue {
final def emitPrimitive(opcs: Array[Int], tk: BType) {
val opc = {
// using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
- (tk.sort: @switch) match {
- case asm.Type.LONG => opcs(1)
- case asm.Type.FLOAT => opcs(2)
- case asm.Type.DOUBLE => opcs(3)
- case _ => opcs(0)
+ tk match {
+ case LONG => opcs(1)
+ case FLOAT => opcs(2)
+ case DOUBLE => opcs(3)
+ case _ => opcs(0)
}
}
emit(opc)
@@ -582,15 +574,14 @@ abstract class BCodeIdiomatic extends BCodeGlue {
// ---------------- type checks and casts ----------------
// can-multi-thread
- final def isInstance(tk: BType) {
- jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.getInternalName)
+ final def isInstance(tk: RefBType): Unit = {
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.classOrArrayType)
}
// can-multi-thread
- final def checkCast(tk: BType) {
- assert(tk.isRefOrArrayType, s"checkcast on primitive type: $tk")
+ final def checkCast(tk: RefBType): Unit = {
// TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk)
- jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.getInternalName)
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.classOrArrayType)
}
} // end of class JCodeMethodN
@@ -650,7 +641,7 @@ abstract class BCodeIdiomatic extends BCodeGlue {
*/
final def coercionTo(code: Int): BType = {
import scalaPrimitives._
- (code: @scala.annotation.switch) match {
+ (code: @switch) match {
case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE
case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR
case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT
@@ -661,21 +652,6 @@ abstract class BCodeIdiomatic extends BCodeGlue {
}
}
- final val typeOfArrayOp: Map[Int, BType] = {
- import scalaPrimitives._
- Map(
- (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
- (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
- (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
- (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
- (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
- (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
- (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
- (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
- (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _*
- )
- }
-
/*
* Collects (in `result`) all LabelDef nodes enclosed (directly or not) by each node it visits.
*
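Two details of the BCodeIdiomatic hunks above are easier to see at the source level: genStartConcat, genStringConcat and genEndConcat lower string concatenation onto scala.collection.mutable.StringBuilder (now threading a Position through so the new addInvoke can record callsite positions when the -Yopt inliner is enabled), and the emitted call sequence corresponds to the hand-written equivalent below. Illustrative only; the compiler emits the bytecode directly:

  // Source-level equivalent of the bytecode produced by the concat helpers above.
  def concatLoweredByHand(a: AnyRef, n: Long): String = {
    val sb = new scala.collection.mutable.StringBuilder() // genStartConcat: NEW, DUP, <init>()V
    sb.append(a)                                          // genStringConcat(ObjectReference)
    sb.append(n)                                          // genStringConcat(LONG)
    sb.toString                                           // genEndConcat: toString()Ljava/lang/String;
  }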
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
index 360ce58ecc..2a06c62e37 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -4,16 +4,17 @@
*/
-package scala
-package tools.nsc
+package scala.tools.nsc
package backend
package jvm
import scala.collection.{ mutable, immutable }
+import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository
import scala.tools.nsc.symtab._
-import scala.annotation.switch
import scala.tools.asm
+import GenBCode._
+import BackendReporting._
/*
*
@@ -23,6 +24,9 @@ import scala.tools.asm
*/
abstract class BCodeSkelBuilder extends BCodeHelpers {
import global._
+ import bTypes._
+ import coreBTypes._
+ import bCodeAsmCommon._
/*
* There's a dedicated PlainClassBuilder for each CompilationUnit,
@@ -92,10 +96,12 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
claszSymbol = cd.symbol
isCZParcelable = isAndroidParcelableClass(claszSymbol)
- isCZStaticModule = isStaticModule(claszSymbol)
+ isCZStaticModule = isStaticModuleClass(claszSymbol)
isCZRemote = isRemote(claszSymbol)
thisName = internalName(claszSymbol)
+ val classBType = classBTypeFromSymbol(claszSymbol)
+
cnode = new asm.tree.ClassNode()
initJClass(cnode)
@@ -113,12 +119,21 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
addClassFields()
- innerClassBufferASM ++= trackMemberClasses(claszSymbol, Nil)
-
+ innerClassBufferASM ++= classBType.info.get.nestedClasses
gen(cd.impl)
+ addInnerClassesASM(cnode, innerClassBufferASM.toList)
- assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().")
+ cnode.visitAttribute(classBType.inlineInfoAttribute.get)
+
+ if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern))
+ AsmUtils.traceClass(cnode)
+ if (settings.YoptInlinerEnabled) {
+ // The inliner needs to find all classes in the code repo, also those being compiled
+ byteCodeRepository.add(cnode, ByteCodeRepository.CompilationUnit)
+ }
+
+ assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().")
} // end of method genPlainClass()
/*
@@ -127,40 +142,28 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
private def initJClass(jclass: asm.ClassVisitor) {
val ps = claszSymbol.info.parents
- val superClass: String = if (ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else internalName(ps.head.typeSymbol);
- val ifaces: Array[String] = {
- val arrIfacesTr: Array[Tracked] = exemplar(claszSymbol).ifaces
- val arrIfaces = new Array[String](arrIfacesTr.length)
- var i = 0
- while (i < arrIfacesTr.length) {
- val ifaceTr = arrIfacesTr(i)
- val bt = ifaceTr.c
- if (ifaceTr.isInnerClass) { innerClassBufferASM += bt }
- arrIfaces(i) = bt.getInternalName
- i += 1
- }
- arrIfaces
+ val superClass: String = if (ps.isEmpty) ObjectReference.internalName else internalName(ps.head.typeSymbol)
+ val interfaceNames = classBTypeFromSymbol(claszSymbol).info.get.interfaces map {
+ case classBType =>
+ if (classBType.isNestedClass.get) { innerClassBufferASM += classBType }
+ classBType.internalName
}
- // `internalName()` tracks inner classes.
- val flags = mkFlags(
- javaFlags(claszSymbol),
- if (isDeprecated(claszSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
+ val flags = javaFlags(claszSymbol)
val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner)
cnode.visit(classfileVersion, flags,
thisName, thisSignature,
- superClass, ifaces)
+ superClass, interfaceNames.toArray)
if (emitSource) {
cnode.visitSource(cunit.source.toString, null /* SourceDebugExtension */)
}
- val enclM = getEnclosingMethodAttribute(claszSymbol)
- if (enclM != null) {
- val EnclMethodEntry(className, methodName, methodType) = enclM
- cnode.visitOuterClass(className, methodName, methodType.getDescriptor)
+ enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match {
+ case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) =>
+ cnode.visitOuterClass(className, methodName, methodDescriptor)
+ case _ => ()
}
val ssa = getAnnotPickle(thisName, claszSymbol)
@@ -200,7 +203,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
*/
private def addModuleInstanceField() {
val fv =
- cnode.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ cnode.visitField(GenBCode.PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
strMODULE_INSTANCE_FIELD,
"L" + thisName + ";",
null, // no java-generic-signature
@@ -216,7 +219,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
private def fabricateStaticInit() {
val clinit: asm.MethodVisitor = cnode.visitMethod(
- PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ GenBCode.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
CLASS_CONSTRUCTOR_NAME,
"()V",
null, // no java-generic-signature
@@ -228,7 +231,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
if (isCZStaticModule) {
clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
- thisName, INSTANCE_CONSTRUCTOR_NAME, "()V")
+ thisName, INSTANCE_CONSTRUCTOR_NAME, "()V", false)
}
if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisName) }
clinit.visitInsn(asm.Opcodes.RETURN)
@@ -247,15 +250,12 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
*/
for (f <- fieldSymbols(claszSymbol)) {
val javagensig = getGenericSignature(f, claszSymbol)
- val flags = mkFlags(
- javaFieldFlags(f),
- if (isDeprecated(f)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
+ val flags = javaFieldFlags(f)
val jfield = new asm.tree.FieldNode(
flags,
f.javaSimpleName.toString,
- symInfoTK(f).getDescriptor,
+ symInfoTK(f).descriptor,
javagensig,
null // no initial value
)
@@ -352,6 +352,13 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
/*
* Bookkeeping for method-local vars and method-params.
+ *
+ * TODO: use fewer slots. local variable slots are never re-used in separate blocks.
+ * In the following example, x and y could use the same slot.
+ * def foo() = {
+ * { val x = 1 }
+ * { val y = "a" }
+ * }
*/
object locals {
@@ -391,8 +398,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
assert(nxtIdx != -1, "not a valid start index")
val loc = Local(tk, sym.javaSimpleName.toString, nxtIdx, sym.isSynthetic)
slots += (sym -> loc)
- assert(tk.getSize > 0, "makeLocal called for a symbol whose type is Unit.")
- nxtIdx += tk.getSize
+ assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.")
+ nxtIdx += tk.size
loc
}
@@ -525,7 +532,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME
else jMethodName
- val mdesc = asmMethodType(methSymbol).getDescriptor
+ val mdesc = asmMethodType(methSymbol).descriptor
mnode = cnode.visitMethod(
flags,
bytecodeName,
@@ -549,7 +556,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
methSymbol = dd.symbol
jMethodName = methSymbol.javaSimpleName.toString
- returnType = asmMethodType(dd.symbol).getReturnType
+ returnType = asmMethodType(dd.symbol).returnType
isMethSymStaticCtor = methSymbol.isStaticConstructor
resetMethodBookkeeping(dd)
@@ -563,18 +570,17 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
if (params.size > MaximumJvmParameters) {
// SI-7324
- cunit.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ reporter.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
return
}
val isNative = methSymbol.hasAnnotation(definitions.NativeAttr)
val isAbstractMethod = (methSymbol.isDeferred || methSymbol.owner.isInterface)
- val flags = mkFlags(
+ val flags = GenBCode.mkFlags(
javaFlags(methSymbol),
if (claszSymbol.isInterface) asm.Opcodes.ACC_ABSTRACT else 0,
if (methSymbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
- if (isNative) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes
- if (isDeprecated(methSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ if (isNative) asm.Opcodes.ACC_NATIVE else 0 // native methods of objects are generated in mirror classes
)
// TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
@@ -639,6 +645,10 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
// Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions.
// The only non-instruction nodes to be found are LabelNode and LineNumberNode.
}
+
+ if (AsmUtils.traceMethodEnabled && mnode.name.contains(AsmUtils.traceMethodPattern))
+ AsmUtils.traceMethod(mnode)
+
mnode = null
} // end of method genDefDef()
@@ -675,8 +685,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
val callee = methSymbol.enclClass.primaryConstructor
val jname = callee.javaSimpleName.toString
val jowner = internalName(callee.owner)
- val jtype = asmMethodType(callee).getDescriptor
- insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype)
+ val jtype = asmMethodType(callee).descriptor
+ insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype, false)
}
var insnParcA: asm.tree.AbstractInsnNode = null
@@ -684,7 +694,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
// android creator code
if (isCZParcelable) {
// add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator
- val andrFieldDescr = asmClassType(AndroidCreatorClass).getDescriptor
+ val andrFieldDescr = getClassBTypeAndRegisterInnerClass(AndroidCreatorClass).descriptor
cnode.visitField(
asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL,
"CREATOR",
@@ -696,8 +706,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
val callee = definitions.getMember(claszSymbol.companionModule, androidFieldName)
val jowner = internalName(callee.owner)
val jname = callee.javaSimpleName.toString
- val jtype = asmMethodType(callee).getDescriptor
- insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype)
+ val jtype = asmMethodType(callee).descriptor
+ insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false)
// PUTSTATIC `thisName`.CREATOR;
insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr)
}
@@ -713,7 +723,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false) {
val Local(tk, name, idx, isSynth) = locals(sym)
if (force || !isSynth) {
- mnode.visitLocalVariable(name, tk.getDescriptor, null, start, end, idx)
+ mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx)
}
}
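In the BCodeSkelBuilder hunks above, makeLocal keeps handing out JVM local-variable slots with nxtIdx += tk.size, so a long or double consumes two indices, and the added TODO notes that slots are never reused across blocks. A small sketch of that arithmetic, using a hypothetical assignSlots helper invented here purely for illustration:

  // Hypothetical helper, only to illustrate the nxtIdx += tk.size bookkeeping above.
  // For an instance method `def f(i: Int, d: Double, s: String)` the slots come out as
  // 0 -> this, 1 -> i, 2..3 -> d (size 2), 4 -> s.
  def assignSlots(sizes: List[Int], start: Int = 1): List[Int] =
    sizes.scanLeft(start)(_ + _).init

  assert(assignSlots(List(1, 2, 1)) == List(1, 2, 4))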
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
index 9ddb7a3ce8..b94208c1a5 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -9,9 +9,7 @@ package tools.nsc
package backend
package jvm
-import scala.collection.{ mutable, immutable }
-import scala.annotation.switch
-
+import scala.collection.immutable
import scala.tools.asm
/*
@@ -22,7 +20,8 @@ import scala.tools.asm
*/
abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
import global._
-
+ import bTypes._
+ import coreBTypes._
/*
* Functionality to lower `synchronized` and `try` expressions.
@@ -184,7 +183,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
val caseHandlers: List[EHClause] =
for (CaseDef(pat, _, caseBody) <- catches) yield {
pat match {
- case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt), caseBody)
+ case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody)
case Ident(nme.WILDCARD) => NamelessEH(ThrowableReference, caseBody)
case Bind(_, _) => BoundEH (pat.symbol, caseBody)
}
@@ -250,7 +249,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
// (2.a) emit case clause proper
val startHandler = currProgramPoint()
var endHandler: asm.Label = null
- var excType: BType = null
+ var excType: ClassBType = null
registerCleanup(finCleanup)
ch match {
case NamelessEH(typeToDrop, caseBody) =>
@@ -269,7 +268,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
nopIfNeeded(startHandler)
endHandler = currProgramPoint()
emitLocalVarScope(patSymbol, startHandler, endHandler)
- excType = patTK
+ excType = patTK.asClassBType
}
unregisterCleanup(finCleanup)
// (2.b) mark the try-body as protected by this case clause.
@@ -285,7 +284,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
* ------
*/
- // a note on terminology: this is not "postHandlers", despite appearences.
+ // a note on terminology: this is not "postHandlers", despite appearances.
// "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts.
if (hasFinally) {
nopIfNeeded(startTryBody)
@@ -357,10 +356,10 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
}
}
- def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: BType) {
+ def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType) {
val excInternalName: String =
if (excType == null) null
- else excType.getInternalName
+ else excType.internalName
assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.")
mnode.visitTryCatchBlock(start, end, handler, excInternalName)
}
@@ -387,7 +386,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] }
trait EHClause
- case class NamelessEH(typeToDrop: BType, caseBody: Tree) extends EHClause
+ case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause
case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause
}
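The BCodeSyncAndTry hunks above narrow exception-handler bookkeeping from BType to ClassBType; the match over CaseDef patterns decides between NamelessEH and BoundEH. A hedged, source-level reading of that mapping (illustration, not compiler code; the comments name the tree shapes from the match above):

  // Which catch clauses hit which EHClause case.
  def classify(): Unit =
    try throw new RuntimeException("boom") catch {
      case _: java.io.IOException => () // Typed(Ident(WILDCARD), tpt) -> NamelessEH(tpeTK(tpt).asClassBType, body)
      case e: RuntimeException    => () // Bind(_, _)                  -> BoundEH(pattern symbol e, body)
      case _                      => () // Ident(WILDCARD)             -> NamelessEH(ThrowableReference, body)
    }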
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
deleted file mode 100644
index 1eca69936a..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
+++ /dev/null
@@ -1,880 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2012 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala
-package tools.nsc
-package backend.jvm
-
-import scala.tools.asm
-import scala.collection.{ immutable, mutable }
-
-/*
- * Utilities to mediate between types as represented in Scala ASTs and ASM trees.
- *
- * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
- * @version 1.0
- *
- */
-abstract class BCodeTypes extends BCodeIdiomatic {
-
- import global._
-
- // when compiling the Scala library, some assertions don't hold (e.g., scala.Boolean has null superClass although it's not an interface)
- val isCompilingStdLib = !(settings.sourcepath.isDefault)
-
- val srBoxedUnit = brefType("scala/runtime/BoxedUnit")
-
- // special names
- var StringReference : BType = null
- var ThrowableReference : BType = null
- var jlCloneableReference : BType = null // java/lang/Cloneable
- var jlNPEReference : BType = null // java/lang/NullPointerException
- var jioSerializableReference : BType = null // java/io/Serializable
- var scalaSerializableReference : BType = null // scala/Serializable
- var classCastExceptionReference : BType = null // java/lang/ClassCastException
-
- /* A map from scala primitive type-symbols to BTypes */
- var primitiveTypeMap: Map[Symbol, BType] = null
- /* A map from scala type-symbols for Nothing and Null to (runtime version) BTypes */
- var phantomTypeMap: Map[Symbol, BType] = null
- /* Maps the method symbol for a box method to the boxed type of the result.
- * For example, the method symbol for `Byte.box()`) is mapped to the BType `Ljava/lang/Integer;`. */
- var boxResultType: Map[Symbol, BType] = null
- /* Maps the method symbol for an unbox method to the primitive type of the result.
- * For example, the method symbol for `Byte.unbox()`) is mapped to the BType BYTE. */
- var unboxResultType: Map[Symbol, BType] = null
-
- var hashMethodSym: Symbol = null // scala.runtime.ScalaRunTime.hash
-
- var AndroidParcelableInterface: Symbol = null
- var AndroidCreatorClass : Symbol = null // this is an inner class, use asmType() to get hold of its BType while tracking in innerClassBufferASM
-
- var BeanInfoAttr: Symbol = null
-
- /* The Object => String overload. */
- var String_valueOf: Symbol = null
-
- var ArrayInterfaces: Set[Tracked] = null
-
- // scala.FunctionX and scala.runtim.AbstractFunctionX
- val FunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1)
- val AbstractFunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1)
- val abstractFunctionArityMap = mutable.Map.empty[BType, Int]
-
- var PartialFunctionReference: BType = null // scala.PartialFunction
- var AbstractPartialFunctionReference: BType = null // scala.runtime.AbstractPartialFunction
-
- var BoxesRunTime: BType = null
-
- /*
- * must-single-thread
- */
- def initBCodeTypes() {
- import definitions._
-
- primitiveTypeMap =
- Map(
- UnitClass -> UNIT,
- BooleanClass -> BOOL,
- CharClass -> CHAR,
- ByteClass -> BYTE,
- ShortClass -> SHORT,
- IntClass -> INT,
- LongClass -> LONG,
- FloatClass -> FLOAT,
- DoubleClass -> DOUBLE
- )
-
- phantomTypeMap =
- Map(
- NothingClass -> RT_NOTHING,
- NullClass -> RT_NULL,
- NothingClass -> RT_NOTHING, // we map on purpose to RT_NOTHING, getting rid of the distinction compile-time vs. runtime for NullClass.
- NullClass -> RT_NULL // ditto.
- )
-
- boxResultType =
- for((csym, msym) <- currentRun.runDefinitions.boxMethod)
- yield (msym -> classLiteral(primitiveTypeMap(csym)))
-
- unboxResultType =
- for((csym, msym) <- currentRun.runDefinitions.unboxMethod)
- yield (msym -> primitiveTypeMap(csym))
-
- // boxed classes are looked up in the `exemplars` map by jvmWiseLUB().
- // Other than that, they aren't needed there (e.g., `isSubtypeOf()` special-cases boxed classes, similarly for others).
- val boxedClasses = List(BoxedBooleanClass, BoxedCharacterClass, BoxedByteClass, BoxedShortClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass)
- for(csym <- boxedClasses) {
- val key = brefType(csym.javaBinaryName.toTypeName)
- val tr = buildExemplar(key, csym)
- symExemplars.put(csym, tr)
- exemplars.put(tr.c, tr)
- }
-
- // reversePrimitiveMap = (primitiveTypeMap map { case (s, pt) => (s.tpe, pt) } map (_.swap)).toMap
-
- hashMethodSym = getMember(ScalaRunTimeModule, nme.hash_)
-
- // TODO avoiding going through through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
- AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
- // the following couldn't be an eager vals in Phase constructors:
- // that might cause cycles before Global has finished initialization.
- BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
-
- String_valueOf = {
- getMember(StringModule, nme.valueOf) filter (sym =>
- sym.info.paramTypes match {
- case List(pt) => pt.typeSymbol == ObjectClass
- case _ => false
- }
- )
- }
-
- exemplar(JavaCloneableClass)
- exemplar(JavaSerializableClass)
- exemplar(SerializableClass)
-
- StringReference = exemplar(StringClass).c
- StringBuilderReference = exemplar(StringBuilderClass).c
- ThrowableReference = exemplar(ThrowableClass).c
- jlCloneableReference = exemplar(JavaCloneableClass).c
- jlNPEReference = exemplar(NullPointerExceptionClass).c
- jioSerializableReference = exemplar(JavaSerializableClass).c
- scalaSerializableReference = exemplar(SerializableClass).c
- classCastExceptionReference = exemplar(ClassCastExceptionClass).c
-
- /*
- * The bytecode emitter special-cases String concatenation, in that three methods of `JCodeMethodN`
- * ( `genStartConcat()` , `genStringConcat()` , and `genEndConcat()` )
- * don't obtain the method descriptor of the callee via `asmMethodType()` (as normally done)
- * but directly emit callsites on StringBuilder using literal constant for method descriptors.
- * In order to make sure those method descriptors are available as BTypes, they are initialized here.
- */
- BType.getMethodType("()V") // necessary for JCodeMethodN.genStartConcat
- BType.getMethodType("()Ljava/lang/String;") // necessary for JCodeMethodN.genEndConcat
-
- PartialFunctionReference = exemplar(PartialFunctionClass).c
- for(idx <- 0 to definitions.MaxFunctionArity) {
- FunctionReference(idx) = exemplar(FunctionClass(idx))
- AbstractFunctionReference(idx) = exemplar(AbstractFunctionClass(idx))
- abstractFunctionArityMap += (AbstractFunctionReference(idx).c -> idx)
- AbstractPartialFunctionReference = exemplar(AbstractPartialFunctionClass).c
- }
-
- // later a few analyses (e.g. refreshInnerClasses) will look up BTypes based on descriptors in instructions
- // we make sure those BTypes can be found via lookup as opposed to creating them on the fly.
- BoxesRunTime = brefType("scala/runtime/BoxesRunTime")
- asmBoxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
- asmUnboxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
-
- }
-
- /*
- * must-single-thread
- */
- def clearBCodeTypes() {
- symExemplars.clear()
- exemplars.clear()
- }
-
- val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
- val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
-
- val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
-
- // ------------------------------------------------
- // accessory maps tracking the isInterface, innerClasses, superClass, and supportedInterfaces relations,
- // allowing answering `conforms()` without resorting to typer.
- // ------------------------------------------------
-
- val exemplars = new java.util.concurrent.ConcurrentHashMap[BType, Tracked]
- val symExemplars = new java.util.concurrent.ConcurrentHashMap[Symbol, Tracked]
-
- /*
- * Typically, a question about a BType can be answered only by using the BType as lookup key in one or more maps.
- * A `Tracked` object saves time by holding together information required to answer those questions:
- *
- * - `sc` denotes the bytecode-level superclass if any, null otherwise
- *
- * - `ifaces` denotes the interfaces explicitly declared.
- * Not included are those transitively supported, but the utility method `allLeafIfaces()` can be used for that.
- *
- * - `innersChain` denotes the containing classes for a non-package-level class `c`, null otherwise.
- * Note: the optimizer may inline anonymous closures, thus eliding those inner classes
- * (no physical class file is emitted for elided classes).
- * Before committing `innersChain` to bytecode, cross-check with the list of elided classes (SI-6546).
- *
- * All methods of this class can-multi-thread
- */
- case class Tracked(c: BType, flags: Int, sc: Tracked, ifaces: Array[Tracked], innersChain: Array[InnerClassEntry]) {
-
- // not a case-field because we initialize it only for JVM classes we emit.
- private var _directMemberClasses: List[BType] = null
-
- def directMemberClasses: List[BType] = {
- assert(_directMemberClasses != null, s"getter directMemberClasses() invoked too early for $c")
- _directMemberClasses
- }
-
- def directMemberClasses_=(bs: List[BType]) {
- if (_directMemberClasses != null) {
- // TODO we enter here when both mirror class and plain class are emitted for the same ModuleClassSymbol.
- assert(_directMemberClasses == bs.sortBy(_.off))
- }
- _directMemberClasses = bs.sortBy(_.off)
- }
-
- /* `isCompilingStdLib` saves the day when compiling:
- * (1) scala.Nothing (the test `c.isNonSpecial` fails for it)
- * (2) scala.Boolean (it has null superClass and is not an interface)
- */
- assert(c.isNonSpecial || isCompilingStdLib /*(1)*/, s"non well-formed plain-type: $this")
- assert(
- if (sc == null) { (c == ObjectReference) || isInterface || isCompilingStdLib /*(2)*/ }
- else { (c != ObjectReference) && !sc.isInterface }
- , "non well-formed plain-type: " + this
- )
- assert(ifaces.forall(i => i.c.isNonSpecial && i.isInterface), s"non well-formed plain-type: $this")
-
- import asm.Opcodes._
- def hasFlags(mask: Int) = (flags & mask) != 0
- def isInterface = hasFlags(ACC_INTERFACE)
- def isFinal = hasFlags(ACC_FINAL)
- def isInnerClass = { innersChain != null }
- def isLambda = {
- // ie isLCC || isTraditionalClosureClass
- isFinal && (c.getSimpleName.contains(tpnme.ANON_FUN_NAME.toString)) && isFunctionType(c)
- }
-
- /* can-multi-thread */
- def superClasses: List[Tracked] = {
- if (sc == null) Nil else sc :: sc.superClasses
- }
-
- /* can-multi-thread */
- def isSubtypeOf(other: BType): Boolean = {
- assert(other.isNonSpecial, "so called special cases have to be handled in BCodeTypes.conforms()")
-
- if (c == other) return true;
-
- val otherIsIface = exemplars.get(other).isInterface
-
- if (this.isInterface) {
- if (other == ObjectReference) return true;
- if (!otherIsIface) return false;
- }
- else {
- if (sc != null && sc.isSubtypeOf(other)) return true;
- if (!otherIsIface) return false;
- }
-
- var idx = 0
- while (idx < ifaces.length) {
- if (ifaces(idx).isSubtypeOf(other)) return true;
- idx += 1
- }
-
- false
- }
-
- /*
- * The `ifaces` field lists only those interfaces declared by `c`
- * From the set of all supported interfaces, this method discards those which are supertypes of others in the set.
- */
- def allLeafIfaces: Set[Tracked] = {
- if (sc == null) { ifaces.toSet }
- else { minimizeInterfaces(ifaces.toSet ++ sc.allLeafIfaces) }
- }
-
- /*
- * This type may not support in its entirety the interface given by the argument, however it may support some of its super-interfaces.
- * We visualize each such supported subset of the argument's functionality as a "branch". This method returns all such branches.
- *
- * In other words, let Ri be a branch supported by `ib`,
- * this method returns all Ri such that this <:< Ri, where each Ri is maximally deep.
- */
- def supportedBranches(ib: Tracked): Set[Tracked] = {
- assert(ib.isInterface, s"Non-interface argument: $ib")
-
- val result: Set[Tracked] =
- if (this.isSubtypeOf(ib.c)) { Set(ib) }
- else { ib.ifaces.toSet[Tracked].flatMap( bi => supportedBranches(bi) ) }
-
- checkAllInterfaces(result)
-
- result
- }
-
- override def toString = { c.toString }
-
- }
-
- /* must-single-thread */
- final def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
-
- /* must-single-thread */
- final def hasInternalName(sym: Symbol) = { sym.isClass || (sym.isModule && !sym.isMethod) }
-
- /* must-single-thread */
- def getSuperInterfaces(csym: Symbol): List[Symbol] = {
-
- // Additional interface parents based on annotations and other cues
- def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
- case definitions.RemoteAttr => definitions.RemoteInterfaceClass
- case _ => NoSymbol
- }
-
- /* Drop redundant interfaces (which are implemented by some other parent) from the immediate parents.
- * In other words, no two interfaces in the result are related by subtyping.
- * This method works on Symbols, a similar one (not duplicate) works on Tracked instances.
- */
- def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
- var rest = lstIfaces
- var leaves = List.empty[Symbol]
- while (!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if (!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
- val superInterfaces0: List[Symbol] = csym.mixinClasses
- val superInterfaces = existingSymbols(superInterfaces0 ++ csym.annotations.map(newParentForAttr)).distinct
-
- assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString}")
- assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString}")
-
- minimizeInterfaces(superInterfaces)
- }
-
- /*
- * Records the superClass and supportedInterfaces relations,
- * so that afterwards queries can be answered without resorting to typer.
- * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
- * On the other hand, this method does record the inner-class status of the argument, via `buildExemplar()`.
- *
- * must-single-thread
- */
- final def exemplar(csym0: Symbol): Tracked = {
- assert(csym0 != NoSymbol, "NoSymbol can't be tracked")
-
- val csym = {
- if (csym0.isJavaDefined && csym0.isModuleClass) csym0.linkedClassOfClass
- else if (csym0.isModule) csym0.moduleClass
- else csym0 // we track only module-classes and plain-classes
- }
-
- assert(!primitiveTypeMap.contains(csym) || isCompilingStdLib, s"primitive types not tracked here: ${csym.fullName}")
- assert(!phantomTypeMap.contains(csym), s"phantom types not tracked here: ${csym.fullName}")
-
- val opt = symExemplars.get(csym)
- if (opt != null) {
- return opt
- }
-
- val key = brefType(csym.javaBinaryName.toTypeName)
- assert(key.isNonSpecial || isCompilingStdLib, s"Not a class to track: ${csym.fullName}")
-
- // TODO accomodate the fix for SI-5031 of https://github.com/scala/scala/commit/0527b2549bcada2fda2201daa630369b377d0877
- // TODO Weaken this assertion? buildExemplar() needs to be updated, too. In the meantime, pos/t5031_3 has been moved to test/disabled/pos.
- val whatWasInExemplars = exemplars.get(key)
- assert(whatWasInExemplars == null, "Maps `symExemplars` and `exemplars` got out of synch.")
- val tr = buildExemplar(key, csym)
- symExemplars.put(csym, tr)
- if (csym != csym0) { symExemplars.put(csym0, tr) }
- exemplars.put(tr.c, tr) // tr.c is the hash-consed, internalized, canonical representative for csym's key.
- tr
- }
-
- val EMPTY_TRACKED_ARRAY = Array.empty[Tracked]
-
- /*
- * must-single-thread
- */
- private def buildExemplar(key: BType, csym: Symbol): Tracked = {
- val sc =
- if (csym.isImplClass) definitions.ObjectClass
- else csym.superClass
- assert(
- if (csym == definitions.ObjectClass)
- sc == NoSymbol
- else if (csym.isInterface)
- sc == definitions.ObjectClass
- else
- ((sc != NoSymbol) && !sc.isInterface) || isCompilingStdLib,
- "superClass out of order"
- )
- val ifaces = getSuperInterfaces(csym) map exemplar;
- val ifacesArr =
- if (ifaces.isEmpty) EMPTY_TRACKED_ARRAY
- else {
- val arr = new Array[Tracked](ifaces.size)
- ifaces.copyToArray(arr)
- arr
- }
-
- val flags = mkFlags(
- javaFlags(csym),
- if (isDeprecated(csym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
- )
-
- val tsc = if (sc == NoSymbol) null else exemplar(sc)
-
- val innersChain = saveInnerClassesFor(csym, key)
-
- Tracked(key, flags, tsc, ifacesArr, innersChain)
- }
-
- // ---------------- utilities around interfaces represented by Tracked instances. ----------------
-
- /* Drop redundant interfaces (those which are implemented by some other).
- * In other words, no two interfaces in the result are related by subtyping.
- * This method works on Tracked elements, a similar one (not duplicate) works on Symbols.
- */
- def minimizeInterfaces(lstIfaces: Set[Tracked]): Set[Tracked] = {
- checkAllInterfaces(lstIfaces)
- var rest = lstIfaces.toList
- var leaves = List.empty[Tracked]
- while (!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { leaf => leaf.isSubtypeOf(candidate.c) }
- if (!nonLeaf) {
- leaves = candidate :: (leaves filterNot { leaf => candidate.isSubtypeOf(leaf.c) })
- }
- rest = rest.tail
- }
-
- leaves.toSet
- }
-
- def allInterfaces(is: Iterable[Tracked]): Boolean = { is forall { i => i.isInterface } }
- def nonInterfaces(is: Iterable[Tracked]): Iterable[Tracked] = { is filterNot { i => i.isInterface } }
-
- def checkAllInterfaces(ifaces: Iterable[Tracked]) {
- assert(allInterfaces(ifaces), s"Non-interfaces: ${nonInterfaces(ifaces).mkString}")
- }
-
- /*
- * Subtype check `a <:< b` on BTypes that takes into account the JVM built-in numeric promotions (e.g. BYTE to INT).
- * Its operation can be visualized more easily in terms of the Java bytecode type hierarchy.
- * This method used to be called, in the ICode world, TypeKind.<:<()
- *
- * can-multi-thread
- */
- final def conforms(a: BType, b: BType): Boolean = {
- if (a.isArray) { // may be null
- /* Array subtyping is covariant here, as in Java bytecode. Also necessary for Java interop. */
- if ((b == jlCloneableReference) ||
- (b == jioSerializableReference) ||
- (b == AnyRefReference)) { true }
- else if (b.isArray) { conforms(a.getComponentType, b.getComponentType) }
- else { false }
- }
- else if (a.isBoxed) { // may be null
- if (b.isBoxed) { a == b }
- else if (b == AnyRefReference) { true }
- else if (!(b.hasObjectSort)) { false }
- else { exemplars.get(a).isSubtypeOf(b) } // e.g., java/lang/Double conforms to java/lang/Number
- }
- else if (a.isNullType) { // known to be null
- if (b.isNothingType) { false }
- else if (b.isValueType) { false }
- else { true }
- }
- else if (a.isNothingType) { // known to be Nothing
- true
- }
- else if (a.isUnitType) {
- b.isUnitType
- }
- else if (a.hasObjectSort) { // may be null
- if (a.isNothingType) { true }
- else if (b.hasObjectSort) { exemplars.get(a).isSubtypeOf(b) }
- else if (b.isArray) { a.isNullType } // documentation only, because `if(a.isNullType)` (above) covers this case already.
- else { false }
- }
- else {
-
- def msg = s"(a: $a, b: $b)"
-
- assert(a.isNonUnitValueType, s"a isn't a non-Unit value type. $msg")
- assert(b.isValueType, s"b isn't a value type. $msg")
-
- (a eq b) || (a match {
- case BOOL | BYTE | SHORT | CHAR => b == INT || b == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt().
- case _ => a == b
- })
- }
- }
-
- /* The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative values of Byte and Short. See ticket #2087.
- *
- * can-multi-thread
- */
- def maxValueType(a: BType, other: BType): BType = {
- assert(a.isValueType, "maxValueType() is defined only for 1st arg valuetypes (2nd arg doesn't matter).")
-
- def uncomparable: Nothing = {
- abort(s"Uncomparable BTypes: $a with $other")
- }
-
- if (a.isNothingType) return other;
- if (other.isNothingType) return a;
- if (a == other) return a;
-
- a match {
-
- case UNIT => uncomparable
- case BOOL => uncomparable
-
- case BYTE =>
- if (other == CHAR) INT
- else if (other.isNumericType) other
- else uncomparable
-
- case SHORT =>
- other match {
- case BYTE => SHORT
- case CHAR => INT
- case INT | LONG | FLOAT | DOUBLE => other
- case _ => uncomparable
- }
-
- case CHAR =>
- other match {
- case BYTE | SHORT => INT
- case INT | LONG | FLOAT | DOUBLE => other
- case _ => uncomparable
- }
-
- case INT =>
- other match {
- case BYTE | SHORT | CHAR => INT
- case LONG | FLOAT | DOUBLE => other
- case _ => uncomparable
- }
-
- case LONG =>
- if (other.isIntegralType) LONG
- else if (other.isRealType) DOUBLE
- else uncomparable
-
- case FLOAT =>
- if (other == DOUBLE) DOUBLE
- else if (other.isNumericType) FLOAT
- else uncomparable
-
- case DOUBLE =>
- if (other.isNumericType) DOUBLE
- else uncomparable
-
- case _ => uncomparable
- }
- }
-
- /* Takes promotions of numeric primitives into account.
- *
- * can-multi-thread
- */
- final def maxType(a: BType, other: BType): BType = {
- if (a.isValueType) { maxValueType(a, other) }
- else {
- if (a.isNothingType) return other;
- if (other.isNothingType) return a;
- if (a == other) return a;
- // Approximate `lub`. The common type of two references is always AnyRef.
- // For 'real' least upper bound wrt to subclassing use method 'lub'.
- assert(a.isArray || a.isBoxed || a.hasObjectSort, s"This is not a valuetype and it's not something else, what is it? $a")
- // TODO For some reason, ICode thinks `REFERENCE(...).maxType(BOXED(whatever))` is `uncomparable`. Here, that has maxType AnyRefReference.
- // BTW, when swapping arguments, ICode says BOXED(whatever).maxType(REFERENCE(...)) == AnyRefReference, so I guess the above was an oversight in REFERENCE.maxType()
- if (other.isRefOrArrayType) { AnyRefReference }
- else { abort(s"Uncomparable BTypes: $a with $other") }
- }
- }
-
- /*
- * Whether the argument is a subtype of
- * scala.PartialFunction[-A, +B] extends (A => B)
- * N.B.: this method returns true for a scala.runtime.AbstractPartialFunction
- *
- * can-multi-thread
- */
- def isPartialFunctionType(t: BType): Boolean = {
- (t.hasObjectSort) && exemplars.get(t).isSubtypeOf(PartialFunctionReference)
- }
-
- /*
- * Whether the argument is a subtype of scala.FunctionX where 0 <= X <= definitions.MaxFunctionArity
- *
- * can-multi-thread
- */
- def isFunctionType(t: BType): Boolean = {
- if (!t.hasObjectSort) return false
- var idx = 0
- val et: Tracked = exemplars.get(t)
- while (idx <= definitions.MaxFunctionArity) {
- if (et.isSubtypeOf(FunctionReference(idx).c)) {
- return true
- }
- idx += 1
- }
- false
- }
-
- /*
- * must-single-thread
- */
- def isTopLevelModule(sym: Symbol): Boolean = {
- exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
- }
-
- /*
- * must-single-thread
- */
- def isStaticModule(sym: Symbol): Boolean = {
- sym.isModuleClass && !sym.isImplClass && !sym.isLifted
- }
-
- // ---------------------------------------------------------------------
- // ---------------- InnerClasses attribute (JVMS 4.7.6) ----------------
- // ---------------------------------------------------------------------
-
- val INNER_CLASSES_FLAGS =
- (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
- asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
-
- /*
- * @param name the internal name of an inner class.
- * @param outerName the internal name of the class to which the inner class belongs.
- * May be `null` for non-member inner classes (ie for a Java local class or a Java anonymous class).
- * @param innerName the (simple) name of the inner class inside its enclosing class. It's `null` for anonymous inner classes.
- * @param access the access flags of the inner class as originally declared in the enclosing class.
- */
- case class InnerClassEntry(name: String, outerName: String, innerName: String, access: Int) {
- assert(name != null, "Null isn't good as class name in an InnerClassEntry.")
- }
-
- /* For given symbol return a symbol corresponding to a class that should be declared as inner class.
- *
- * For example:
- * class A {
- * class B
- * object C
- * }
- *
- * then method will return:
- * NoSymbol for A,
- * the same symbol for A.B (corresponding to A$B class), and
- * A$C$ symbol for A.C.
- *
- * must-single-thread
- */
- def innerClassSymbolFor(s: Symbol): Symbol =
- if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
-
- /*
- * Computes the chain of inner-class (over the is-member-of relation) for the given argument.
- * The resulting chain will be cached in `exemplars`.
- *
- * The chain thus cached is valid during this compiler run, see in contrast
- * `innerClassBufferASM` for a cache that is valid only for the class being emitted.
- *
- * The argument can be any symbol, but given that this method is invoked only from `buildExemplar()`,
- * in practice it has been vetted to be a class-symbol.
- *
- * Returns:
- *
- * - a non-empty array of entries for an inner-class argument.
- * The array's first element is the outermost top-level class,
- * the array's last element corresponds to csym.
- *
- * - null otherwise.
- *
- * This method does not add to `innerClassBufferASM`, use instead `exemplar()` for that.
- *
- * must-single-thread
- */
- final def saveInnerClassesFor(csym: Symbol, csymTK: BType): Array[InnerClassEntry] = {
-
- val ics = innerClassSymbolFor(csym)
- if (ics == NoSymbol) {
- return null
- }
- assert(ics == csym, s"Disagreement between innerClassSymbolFor() and exemplar()'s tracked symbol for the same input: ${csym.fullName}")
-
- var chain: List[Symbol] = Nil
- var x = ics
- while (x ne NoSymbol) {
- assert(x.isClass, s"not a class symbol: ${x.fullName}")
- val isInner = !x.rawowner.isPackageClass
- if (isInner) {
- chain ::= x
- x = innerClassSymbolFor(x.rawowner)
- } else {
- x = NoSymbol
- }
- }
-
- // now that we have all of `ics` , `csym` , and soon the inner-classes-chain, it's too tempting not to cache.
- if (chain.isEmpty) { null }
- else {
- val arr = new Array[InnerClassEntry](chain.size)
- (chain map toInnerClassEntry).copyToArray(arr)
-
- arr
- }
- }
-
- /*
- * must-single-thread
- */
- private def toInnerClassEntry(innerSym: Symbol): InnerClassEntry = {
-
- /* The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): Name = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = innerSym.rawowner.javaBinaryName
- if (isTopLevelModule(innerSym.rawowner)) nme.stripModuleSuffix(outerName)
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String = {
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
- }
-
- val flagsWithFinal: Int = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
- javaFlags(innerSym),
- if (isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
- ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
- val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
-
- val jname = innerSym.javaBinaryName.toString // never null
- val oname = { // null when method-enclosed
- val on = outerName(innerSym)
- if (on == null) null else on.toString
- }
- val iname = { // null for anonymous inner class
- val in = innerName(innerSym)
- if (in == null) null else in.toString
- }
-
- InnerClassEntry(jname, oname, iname, flags)
- }
-
- // --------------------------------------------
- // ---------------- Java flags ----------------
- // --------------------------------------------
-
- /*
- * can-multi-thread
- */
- final def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0)
-
- /*
- * must-single-thread
- */
- final def isRemote(s: Symbol) = (s hasAnnotation definitions.RemoteAttr)
-
- /*
- * Return the Java modifiers for the given symbol.
- * Java modifiers for classes:
- * - public, abstract, final, strictfp (not used)
- * for interfaces:
- * - the same as for classes, without 'final'
- * for fields:
- * - public, private (*)
- * - static, final
- * for methods:
- * - the same as for fields, plus:
- * - abstract, synchronized (not used), strictfp (not used), native (not used)
- *
- * (*) protected cannot be used, since inner classes 'see' protected members,
- * and they would fail verification after lifted.
- *
- * must-single-thread
- */
- def javaFlags(sym: Symbol): Int = {
- // constructors of module classes should be private
- // PP: why are they only being marked private at this stage and not earlier?
- val privateFlag =
- sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
-
- // Final: the only fields which can receive ACC_FINAL are eager vals.
- // Neither vars nor lazy vals can, because:
- //
- // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
- // "Another problem is that the specification allows aggressive
- // optimization of final fields. Within a thread, it is permissible to
- // reorder reads of a final field with those modifications of a final
- // field that do not take place in the constructor."
- //
- // A var or lazy val which is marked final still has meaning to the
- // scala compiler. The word final is heavily overloaded unfortunately;
- // for us it means "not overridable". At present you can't override
- // vars regardless; this may change.
- //
- // The logic does not check .isFinal (which checks flags for the FINAL flag,
- // and includes symbols marked lateFINAL) instead inspecting rawflags so
- // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
- // avoid breaking proxy software which depends on subclassing, we do not
- // emit ACC_FINAL.
- // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
-
- val finalFlag = (
- (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModule(sym))
- && !sym.enclClass.isInterface
- && !sym.isClassConstructor
- && !sym.isMutable // lazy vals and vars both
- )
-
- // Primitives are "abstract final" to prohibit instantiation
- // without having to provide any implementations, but that is an
- // illegal combination of modifiers at the bytecode level so
- // suppress final if abstract if present.
- import asm.Opcodes._
- mkFlags(
- if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
- if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (sym.isInterface) ACC_INTERFACE else 0,
- if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
- if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isArtifact) ACC_SYNTHETIC else 0,
- if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.hasEnumFlag) ACC_ENUM else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0,
- if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
- )
- }
-
- /*
- * must-single-thread
- */
- def javaFieldFlags(sym: Symbol) = {
- javaFlags(sym) | mkFlags(
- if (sym hasAnnotation definitions.TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
- if (sym hasAnnotation definitions.VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
- if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
- )
- }
-
-} // end of class BCodeTypes
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
new file mode 100644
index 0000000000..e61190bf3a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
@@ -0,0 +1,1167 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import scala.annotation.switch
+import scala.collection.concurrent.TrieMap
+import scala.reflect.internal.util.Position
+import scala.tools.asm
+import asm.Opcodes
+import scala.tools.asm.tree.{MethodNode, MethodInsnNode, InnerClassNode, ClassNode}
+import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo}
+import scala.tools.nsc.backend.jvm.BackendReporting._
+import scala.tools.nsc.backend.jvm.opt._
+import scala.collection.convert.decorateAsScala._
+import scala.tools.nsc.settings.ScalaSettings
+
+/**
+ * The BTypes component defines the BType class hierarchy. A BType stores all type information
+ * that is required after building the ASM nodes. This includes optimizations, generation of
+ * InnerClass attributes and generation of stack map frames.
+ *
+ * The representation is immutable and independent of the compiler data structures, hence it can
+ * be queried by concurrent threads.
+ */
+abstract class BTypes {
+ import BTypes.InternalName
+
+ // Some core BTypes are required here, in class BType, where no Global instance is available.
+ // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual
+ // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol.
+ val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type]
+ import coreBTypes._
+
+ /**
+ * Tools for parsing classfiles, used by the inliner.
+ */
+ val byteCodeRepository: ByteCodeRepository
+
+ val localOpt: LocalOpt[this.type]
+
+ val inliner: Inliner[this.type]
+
+ val callGraph: CallGraph[this.type]
+
+ val backendReporting: BackendReporting
+
+ // Allows defining per-run caches here and in the CallGraph component, which don't have a global
+ def recordPerRunCache[T <: collection.generic.Clearable](cache: T): T
+
+ // Allows access to the compiler settings for backend components that don't have a global in scope
+ def compilerSettings: ScalaSettings
+
+
+ /**
+ * A map from internal names to ClassBTypes. Every ClassBType is added to this map on its
+ * construction.
+ *
+ * This map is used when computing stack map frames. The asm.ClassWriter invokes the method
+ * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal
+ * name. The method assumes that every class type that appears in the bytecode exists in the map.
+ *
+ * Concurrent because stack map frames are computed when in the class writer, which might run
+ * on multiple classes concurrently.
+ */
+ val classBTypeFromInternalName: collection.concurrent.Map[InternalName, ClassBType] = recordPerRunCache(TrieMap.empty)
+
+ /**
+ * Store the position of every MethodInsnNode during code generation. This allows each callsite
+ * in the call graph to remember its source position, which is required for inliner warnings.
+ */
+ val callsitePositions: collection.concurrent.Map[MethodInsnNode, Position] = recordPerRunCache(TrieMap.empty)
+
+ /**
+ * Contains the internal names of all classes that are defined in Java source files of the current
+ * compilation run (mixed compilation). Used for more detailed error reporting.
+ */
+ val javaDefinedClasses: collection.mutable.Set[InternalName] = recordPerRunCache(collection.mutable.Set.empty)
+
+ /**
+ * Cache, contains methods whose unreachable instructions are eliminated.
+ *
+ * The ASM Analyzer class does not compute any frame information for unreachable instructions.
+ * Transformations that use an analyzer (including inlining) therefore require unreachable code
+ * to be eliminated.
+ *
+ * This cache allows running dead code elimination whenever an analyzer is used. If the method
+ * is already optimized, DCE can return early.
+ */
+ val unreachableCodeEliminated: collection.mutable.Set[MethodNode] = recordPerRunCache(collection.mutable.Set.empty)
+
+ /**
+ * Obtain the BType for a type descriptor or internal name. For class descriptors, the ClassBType
+ * is constructed by parsing the corresponding classfile.
+ *
+ * Some JVM operations use either a full descriptor or only an internal name. Example:
+ * ANEWARRAY java/lang/String // a new array of strings (internal name for the String class)
+ * ANEWARRAY [Ljava/lang/String; // a new array of arrays of strings (full descriptor of the String[] class)
+ *
+ * This method supports both descriptors and internal names.
+ */
+ def bTypeForDescriptorOrInternalNameFromClassfile(desc: String): BType = (desc(0): @switch) match {
+ case 'V' => UNIT
+ case 'Z' => BOOL
+ case 'C' => CHAR
+ case 'B' => BYTE
+ case 'S' => SHORT
+ case 'I' => INT
+ case 'F' => FLOAT
+ case 'J' => LONG
+ case 'D' => DOUBLE
+ case '[' => ArrayBType(bTypeForDescriptorOrInternalNameFromClassfile(desc.substring(1)))
+ case 'L' if desc.last == ';' => classBTypeFromParsedClassfile(desc.substring(1, desc.length - 1))
+ case _ => classBTypeFromParsedClassfile(desc)
+ }
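As an illustrative aside (not part of the patch): a minimal standalone sketch of the dispatch on the first descriptor character performed by bTypeForDescriptorOrInternalNameFromClassfile above. It returns plain strings instead of BTypes so it compiles without any compiler infrastructure; the object name and the string labels are invented for this example.

    object DescriptorDemo {
      // Same shape as the method above: primitives by letter, '[' recurses on the
      // component, 'L...;' strips the wrapper, anything else is a bare internal name.
      def parse(desc: String): String = desc.charAt(0) match {
        case 'V' => "UNIT"
        case 'Z' => "BOOL"
        case 'C' => "CHAR"
        case 'B' => "BYTE"
        case 'S' => "SHORT"
        case 'I' => "INT"
        case 'F' => "FLOAT"
        case 'J' => "LONG"
        case 'D' => "DOUBLE"
        case '[' => "Array(" + parse(desc.substring(1)) + ")"
        case 'L' if desc.last == ';' => "Class(" + desc.substring(1, desc.length - 1) + ")"
        case _   => "Class(" + desc + ")" // bare internal name, e.g. "java/lang/String"
      }

      def main(args: Array[String]): Unit = {
        println(parse("[Ljava/lang/String;")) // Array(Class(java/lang/String))
        println(parse("java/lang/String"))    // Class(java/lang/String)
      }
    }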
+
+ /**
+ * Parse the classfile for `internalName` and construct the [[ClassBType]]. If the classfile cannot
+ * be found in the `byteCodeRepository`, the `info` of the resulting ClassBType is undefined.
+ */
+ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = {
+ classBTypeFromInternalName.getOrElse(internalName, {
+ val res = ClassBType(internalName)
+ byteCodeRepository.classNode(internalName) match {
+ case Left(msg) => res.info = Left(NoClassBTypeInfoMissingBytecode(msg)); res
+ case Right(c) => setClassInfoFromParsedClassfile(c, res)
+ }
+ })
+ }
+
+ /**
+ * Construct the [[ClassBType]] for a parsed classfile.
+ */
+ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = {
+ classBTypeFromInternalName.getOrElse(classNode.name, {
+ setClassInfoFromParsedClassfile(classNode, ClassBType(classNode.name))
+ })
+ }
+
+ private def setClassInfoFromParsedClassfile(classNode: ClassNode, classBType: ClassBType): ClassBType = {
+ val superClass = classNode.superName match {
+ case null =>
+ assert(classNode.name == ObjectReference.internalName, s"class with missing super type: ${classNode.name}")
+ None
+ case superName =>
+ Some(classBTypeFromParsedClassfile(superName))
+ }
+
+ val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut)
+
+ val flags = classNode.access
+
+ /**
+ * Find all nested classes of classNode. The innerClasses attribute contains all nested classes
+ * that are declared inside classNode or used in the bytecode of classNode. So some of them are
+ * nested in a class other than classNode, and we need to filter those out.
+ *
+ * For member classes, innerClassNode.outerName is defined, so we compare that to classNode.name.
+ *
+ * For local and anonymous classes, innerClassNode.outerName is null. Such classes are required
+ * to have an EnclosingMethod attribute declaring the outer class. So we keep those local and
+ * anonymous classes whose outerClass is classNode.name.
+ */
+ def nestedInCurrentClass(innerClassNode: InnerClassNode): Boolean = {
+ (innerClassNode.outerName != null && innerClassNode.outerName == classNode.name) ||
+ (innerClassNode.outerName == null && {
+ val classNodeForInnerClass = byteCodeRepository.classNode(innerClassNode.name).get // TODO: don't get here, but set the info to Left at the end
+ classNodeForInnerClass.outerClass == classNode.name
+ })
+ }
+
+ val nestedClasses: List[ClassBType] = classNode.innerClasses.asScala.collect({
+ case i if nestedInCurrentClass(i) => classBTypeFromParsedClassfile(i.name)
+ })(collection.breakOut)
+
+ // if classNode is a nested class, it has an innerClass attribute for itself. in this
+ // case we build the NestedInfo.
+ val nestedInfo = classNode.innerClasses.asScala.find(_.name == classNode.name) map {
+ case innerEntry =>
+ val enclosingClass =
+ if (innerEntry.outerName != null) {
+ // if classNode is a member class, the outerName is non-null
+ classBTypeFromParsedClassfile(innerEntry.outerName)
+ } else {
+ // for anonymous or local classes, the outerName is null, but the enclosing class is
+ // stored in the EnclosingMethod attribute (which ASM encodes in classNode.outerClass).
+ classBTypeFromParsedClassfile(classNode.outerClass)
+ }
+ val staticFlag = (innerEntry.access & Opcodes.ACC_STATIC) != 0
+ NestedInfo(enclosingClass, Option(innerEntry.outerName), Option(innerEntry.innerName), staticFlag)
+ }
+
+ val inlineInfo = inlineInfoFromClassfile(classNode)
+
+ classBType.info = Right(ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo, inlineInfo))
+ classBType
+ }
+
+ /**
+ * Build the InlineInfo for a class. For Scala classes, the information is stored in the
+ * ScalaInlineInfo attribute. If the attribute is missing, the InlineInfo is built using the
+ * metadata available in the classfile (ACC_FINAL flags, etc).
+ */
+ def inlineInfoFromClassfile(classNode: ClassNode): InlineInfo = {
+ def fromClassfileAttribute: Option[InlineInfo] = {
+ if (classNode.attrs == null) None
+ else classNode.attrs.asScala.collect({ case a: InlineInfoAttribute => a}).headOption.map(_.inlineInfo)
+ }
+
+ def fromClassfileWithoutAttribute = {
+ val warning = {
+ val isScala = classNode.attrs != null && classNode.attrs.asScala.exists(a => a.`type` == BTypes.ScalaAttributeName || a.`type` == BTypes.ScalaSigAttributeName)
+ if (isScala) Some(NoInlineInfoAttribute(classNode.name))
+ else None
+ }
+ // when building MethodInlineInfos for the members of a ClassSymbol, we exclude those methods
+ // in scalaPrimitives. This is necessary because some of them have non-erased types, which would
+ // require special handling. Excluding them is OK because they are never inlined.
+ // Here we are parsing from a classfile and we don't need to do anything special. Many of these
+ // primitives don't even exist, for example Any.isInstanceOf.
+ val methodInfos = classNode.methods.asScala.map(methodNode => {
+ val info = MethodInlineInfo(
+ effectivelyFinal = BytecodeUtils.isFinalMethod(methodNode),
+ traitMethodWithStaticImplementation = false,
+ annotatedInline = false,
+ annotatedNoInline = false)
+ (methodNode.name + methodNode.desc, info)
+ }).toMap
+ InlineInfo(
+ traitImplClassSelfType = None,
+ isEffectivelyFinal = BytecodeUtils.isFinalClass(classNode),
+ methodInfos = methodInfos,
+ warning)
+ }
+
+ // The InlineInfo is built from the classfile (not from the symbol) for all classes that are NOT
+ // being compiled. For those classes, the info is only needed if the inliner is enabled; otherwise
+ // we can save memory.
+ if (!compilerSettings.YoptInlinerEnabled) BTypes.EmptyInlineInfo
+ else fromClassfileAttribute getOrElse fromClassfileWithoutAttribute
+ }
+
+ /**
+ * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodBType
+ * referring to BTypes.
+ */
+ sealed trait BType {
+ final override def toString: String = this match {
+ case UNIT => "V"
+ case BOOL => "Z"
+ case CHAR => "C"
+ case BYTE => "B"
+ case SHORT => "S"
+ case INT => "I"
+ case FLOAT => "F"
+ case LONG => "J"
+ case DOUBLE => "D"
+ case ClassBType(internalName) => "L" + internalName + ";"
+ case ArrayBType(component) => "[" + component
+ case MethodBType(args, res) => "(" + args.mkString + ")" + res
+ }
+
+ /**
+ * @return The Java descriptor of this type. Examples:
+ * - int: I
+ * - java.lang.String: Ljava/lang/String;
+ * - int[]: [I
+ * - Object m(String s, double d): (Ljava/lang/String;D)Ljava/lang/Object;
+ */
+ final def descriptor = toString
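As an illustrative aside (not part of the patch), the method example from the doc comment above can be assembled by hand exactly the way MethodBType's toString builds descriptors; the object name below is invented for the example.

    object DescriptorFormatDemo extends App {
      // "Object m(String s, double d)" from the comment above
      val argDescriptors   = List("Ljava/lang/String;", "D")
      val returnDescriptor = "Ljava/lang/Object;"
      println("(" + argDescriptors.mkString + ")" + returnDescriptor)
      // prints: (Ljava/lang/String;D)Ljava/lang/Object;
    }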
+
+ /**
+ * @return 0 for void, 2 for long and double, 1 otherwise
+ */
+ final def size: Int = this match {
+ case UNIT => 0
+ case LONG | DOUBLE => 2
+ case _ => 1
+ }
+
+ final def isPrimitive: Boolean = this.isInstanceOf[PrimitiveBType]
+ final def isRef: Boolean = this.isInstanceOf[RefBType]
+ final def isArray: Boolean = this.isInstanceOf[ArrayBType]
+ final def isClass: Boolean = this.isInstanceOf[ClassBType]
+ final def isMethod: Boolean = this.isInstanceOf[MethodBType]
+
+ final def isNonVoidPrimitiveType = isPrimitive && this != UNIT
+
+ final def isNullType = this == RT_NULL
+ final def isNothingType = this == RT_NOTHING
+
+ final def isBoxed = this.isClass && boxedClasses(this.asClassBType)
+
+ final def isIntSizedType = this == BOOL || this == CHAR || this == BYTE ||
+ this == SHORT || this == INT
+ final def isIntegralType = this == INT || this == BYTE || this == LONG ||
+ this == CHAR || this == SHORT
+ final def isRealType = this == FLOAT || this == DOUBLE
+ final def isNumericType = isIntegralType || isRealType
+ final def isWideType = size == 2
+
+ /*
+ * Subtype check `this <:< other` on BTypes that takes into account the JVM built-in numeric
+ * promotions (e.g. BYTE to INT). Its operation can be visualized more easily in terms of the
+ * Java bytecode type hierarchy.
+ */
+ final def conformsTo(other: BType): Either[NoClassBTypeInfo, Boolean] = tryEither(Right({
+ assert(isRef || isPrimitive, s"conformsTo cannot handle $this")
+ assert(other.isRef || other.isPrimitive, s"conformsTo cannot handle $other")
+
+ this match {
+ case ArrayBType(component) =>
+ if (other == ObjectReference || other == jlCloneableReference || other == jioSerializableReference) true
+ else other match {
+ case ArrayBType(otherComponent) => component.conformsTo(otherComponent).orThrow
+ case _ => false
+ }
+
+ case classType: ClassBType =>
+ if (isBoxed) {
+ if (other.isBoxed) this == other
+ else if (other == ObjectReference) true
+ else other match {
+ case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType).orThrow // e.g., java/lang/Double conforms to java/lang/Number
+ case _ => false
+ }
+ } else if (isNullType) {
+ if (other.isNothingType) false
+ else if (other.isPrimitive) false
+ else true // Null conforms to all classes (except Nothing) and arrays.
+ } else if (isNothingType) {
+ true
+ } else other match {
+ case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType).orThrow
+ // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case
+ case _ =>
+ // isNothingType || // documentation only, because `if (isNothingType)` above covers this case
+ false
+ }
+
+ case UNIT =>
+ other == UNIT
+ case BOOL | BYTE | SHORT | CHAR =>
+ this == other || other == INT || other == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt().
+ case _ =>
+ assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other")
+ this == other
+ }
+ }))
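As an illustrative aside (not part of the patch), the JVM-level facts behind the array branch above can be checked with plain runtime reflection, independently of the backend's BTypes; the object name is invented for the example.

    object ArrayConformanceDemo extends App {
      val stringArray = classOf[Array[String]]
      // arrays conform to Object, Cloneable and Serializable ...
      println(classOf[Object].isAssignableFrom(stringArray))               // true
      println(classOf[java.lang.Cloneable].isAssignableFrom(stringArray))  // true
      println(classOf[java.io.Serializable].isAssignableFrom(stringArray)) // true
      // ... and array subtyping is covariant at the JVM level
      println(classOf[Array[Object]].isAssignableFrom(stringArray))        // true
    }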
+
+ /**
+ * Compute the upper bound of two types.
+ * Takes promotions of numeric primitives into account.
+ */
+ final def maxType(other: BType): BType = this match {
+ case pt: PrimitiveBType => pt.maxValueType(other)
+
+ case _: ArrayBType | _: ClassBType =>
+ if (isNothingType) return other
+ if (other.isNothingType) return this
+ if (this == other) return this
+
+ assert(other.isRef, s"Cannot compute maxType: $this, $other")
+ // Approximate `lub`. The common type of two references is always ObjectReference.
+ ObjectReference
+
+ case _: MethodBType =>
+ assertionError(s"unexpected method type when computing maxType: $this")
+ }
+
+ /**
+ * See documentation of [[typedOpcode]].
+ * The numbers are taken from asm.Type.VOID_TYPE ff.; in those constants the values appear shifted left by 8 bits.
+ */
+ private def loadStoreOpcodeOffset: Int = this match {
+ case UNIT | INT => 0
+ case BOOL | BYTE => 5
+ case CHAR => 6
+ case SHORT => 7
+ case FLOAT => 2
+ case LONG => 1
+ case DOUBLE => 3
+ case _ => 4
+ }
+
+ /**
+ * See documentation of [[typedOpcode]].
+ * The numbers are taken from asm.Type.VOID_TYPE ff.; in those constants the values appear shifted left by 16 bits.
+ */
+ private def typedOpcodeOffset: Int = this match {
+ case UNIT => 5
+ case BOOL | CHAR | BYTE | SHORT | INT => 0
+ case FLOAT => 2
+ case LONG => 1
+ case DOUBLE => 3
+ case _ => 4
+ }
+
+ /**
+ * Some JVM opcodes have typed variants. This method returns the correct opcode according to
+ * the type.
+ *
+ * @param opcode A JVM instruction opcode. This opcode must be one of ILOAD, ISTORE, IALOAD,
+ * IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, ISHR, IUSHR, IAND, IOR
+ * IXOR and IRETURN.
+ * @return The opcode adapted to this java type. For example, if this type is `float` and
+ * `opcode` is `IRETURN`, this method returns `FRETURN`.
+ */
+ final def typedOpcode(opcode: Int): Int = {
+ if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE)
+ opcode + loadStoreOpcodeOffset
+ else
+ opcode + typedOpcodeOffset
+ }
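As an illustrative aside (not part of the patch), the two offset tables can be sanity-checked against the actual JVM opcode numbering. The sketch assumes the repackaged scala.tools.asm used by the compiler build is on the classpath; with vanilla ASM, import org.objectweb.asm.Opcodes instead. The object name is invented for the example.

    import scala.tools.asm.Opcodes._

    object TypedOpcodeDemo extends App {
      assert(IRETURN + 2 == FRETURN) // FLOAT: typedOpcodeOffset 2, IRETURN (172) -> FRETURN (174)
      assert(IADD    + 1 == LADD)    // LONG: typedOpcodeOffset 1, IADD (96) -> LADD (97)
      assert(IALOAD  + 3 == DALOAD)  // DOUBLE: loadStoreOpcodeOffset 3, IALOAD (46) -> DALOAD (49)
      assert(ILOAD   + 4 == ALOAD)   // references: offset 4, ILOAD (21) -> ALOAD (25)
      println("opcode offsets match the JVM numbering")
    }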
+
+ /**
+ * The asm.Type corresponding to this BType.
+ *
+ * Note about asm.Type.getObjectType (*): For class types, the method expects the internal
+ * name, i.e. without the surrounding 'L' and ';'. For array types on the other hand, the
+ * method expects a full descriptor, for example "[Ljava/lang/String;".
+ *
+ * See method asm.Type.getType that creates an asm.Type from a type descriptor
+ * - for an OBJECT type, the 'L' and ';' are not part of the range of the created Type
+ * - for an ARRAY type, the full descriptor is part of the range
+ */
+ def toASMType: asm.Type = this match {
+ case UNIT => asm.Type.VOID_TYPE
+ case BOOL => asm.Type.BOOLEAN_TYPE
+ case CHAR => asm.Type.CHAR_TYPE
+ case BYTE => asm.Type.BYTE_TYPE
+ case SHORT => asm.Type.SHORT_TYPE
+ case INT => asm.Type.INT_TYPE
+ case FLOAT => asm.Type.FLOAT_TYPE
+ case LONG => asm.Type.LONG_TYPE
+ case DOUBLE => asm.Type.DOUBLE_TYPE
+ case ClassBType(internalName) => asm.Type.getObjectType(internalName) // see (*) above
+ case a: ArrayBType => asm.Type.getObjectType(a.descriptor)
+ case m: MethodBType => asm.Type.getMethodType(m.descriptor)
+ }
+
+ def asRefBType : RefBType = this.asInstanceOf[RefBType]
+ def asArrayBType : ArrayBType = this.asInstanceOf[ArrayBType]
+ def asClassBType : ClassBType = this.asInstanceOf[ClassBType]
+ def asPrimitiveBType : PrimitiveBType = this.asInstanceOf[PrimitiveBType]
+ }
+
+ sealed trait PrimitiveBType extends BType {
+
+ /**
+ * The upper bound of two primitive types. The `other` type has to be either a primitive
+ * type or Nothing.
+ *
+ * The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative
+ * values of Byte and Short. See ticket #2087.
+ */
+ final def maxValueType(other: BType): BType = {
+
+ def uncomparable: Nothing = assertionError(s"Cannot compute maxValueType: $this, $other")
+
+ if (!other.isPrimitive && !other.isNothingType) uncomparable
+
+ if (other.isNothingType) return this
+ if (this == other) return this
+
+ this match {
+ case BYTE =>
+ if (other == CHAR) INT
+ else if (other.isNumericType) other
+ else uncomparable
+
+ case SHORT =>
+ other match {
+ case BYTE => SHORT
+ case CHAR => INT
+ case INT | LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case CHAR =>
+ other match {
+ case BYTE | SHORT => INT
+ case INT | LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case INT =>
+ other match {
+ case BYTE | SHORT | CHAR => INT
+ case LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case LONG =>
+ if (other.isIntegralType) LONG
+ else if (other.isRealType) DOUBLE
+ else uncomparable
+
+ case FLOAT =>
+ if (other == DOUBLE) DOUBLE
+ else if (other.isNumericType) FLOAT
+ else uncomparable
+
+ case DOUBLE =>
+ if (other.isNumericType) DOUBLE
+ else uncomparable
+
+ case UNIT | BOOL => uncomparable
+ }
+ }
+ }
+
+ case object UNIT extends PrimitiveBType
+ case object BOOL extends PrimitiveBType
+ case object CHAR extends PrimitiveBType
+ case object BYTE extends PrimitiveBType
+ case object SHORT extends PrimitiveBType
+ case object INT extends PrimitiveBType
+ case object FLOAT extends PrimitiveBType
+ case object LONG extends PrimitiveBType
+ case object DOUBLE extends PrimitiveBType
+
+ sealed trait RefBType extends BType {
+ /**
+ * The class or array type of this reference type. Used for ANEWARRAY, MULTIANEWARRAY,
+ * INSTANCEOF and CHECKCAST instructions. Also used for emitting invokevirtual calls to
+ * (a: Array[T]).clone() for any T, see genApply.
+ *
+ * In contrast to the descriptor, this string does not contain the surrounding 'L' and ';' for
+ * class types, for example "java/lang/String".
+ * However, for array types, the full descriptor is used, for example "[Ljava/lang/String;".
+ *
+ * This can be verified for example using javap or ASMifier.
+ */
+ def classOrArrayType: String = this match {
+ case ClassBType(internalName) => internalName
+ case a: ArrayBType => a.descriptor
+ }
+ }
+
+ /**
+ * InnerClass and EnclosingMethod attributes (EnclosingMethod is displayed as OUTERCLASS in asm).
+ *
+ * In this summary, "class" means "class or interface".
+ *
+ * JLS: http://docs.oracle.com/javase/specs/jls/se8/html/index.html
+ * JVMS: http://docs.oracle.com/javase/specs/jvms/se8/html/index.html
+ *
+ * Terminology
+ * -----------
+ *
+ * - Nested class (JLS 8): class whose declaration occurs within the body of another class
+ *
+ * - Top-level class (JLS 8): non-nested class
+ *
+ * - Inner class (JLS 8.1.3): nested class that is not (explicitly or implicitly) static
+ *
+ * - Member class (JLS 8.5): class directly enclosed in the body of a class (and not, for
+ * example, defined in a method). Member classes cannot be anonymous. May be static.
+ *
+ * - Local class (JLS 14.3): nested, non-anonymous class that is not a member of a class
+ * - cannot be static (therefore they are "inner" classes)
+ * - can be defined in a method, a constructor or in an initializer block
+ *
+ * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class
+ * - static initializer: executed before constructor body
+ * - instance initializer: executed when class is initialized (instance creation, static
+ * field access, ...)
+ *
+ * - A static nested class can be defined as
+ * - a static member class (explicitly static), or
+ * - a member class of an interface (implicitly static)
+ * - local classes are never static, even if they are defined in a static method.
+ *
+ * Note: it is NOT the case that all inner classes (non-static) have an outer pointer. Example:
+ * class C { static void foo { class D {} } }
+ * The class D is an inner class (non-static), but javac does not add an outer pointer to it.
+ *
+ * InnerClass
+ * ----------
+ *
+ * The JVMS 4.7.6 requires an entry for every class mentioned in a CONSTANT_Class_info in the
+ * constant pool (CP) that is not a member of a package (JLS 7.1).
+ *
+ * The JLS 13.1, points 9. / 10. requires: a class must reference (in the CP)
+ * - its immediately enclosing class
+ * - all of its member classes
+ * - all local and anonymous classes that are referenced (or declared) elsewhere (method,
+ * constructor, initializer block, field initializer)
+ *
+ * In a comment, the 4.7.6 spec says: this implies an entry in the InnerClass attribute for
+ * - All enclosing classes (except the outermost, which is top-level)
+ * - My comment: not sure how this is implied; see (*) below for a Java counter-example.
+ * In any case, the Java compiler seems to add all enclosing classes, even if they are not
+ * otherwise mentioned in the CP. So we should do the same.
+ * - All nested classes (including anonymous and local, but not transitively)
+ *
+ * Fields in the InnerClass entries:
+ * - inner class: the (nested) class C we are talking about
+ * - outer class: the class of which C is a member. Has to be null for non-members, i.e. for
+ * local and anonymous classes. NOTE: this coincides with the presence of an
+ * EnclosingMethod attribute (see below)
+ * - inner name: A string with the simple name of the inner class. Null for anonymous classes.
+ * - flags: access property flags, details in JVMS, table in 4.7.6. Static flag: see
+ * discussion below.
+ *
+ *
+ * Note 1: when a nested class is present in the InnerClass attribute, all of its enclosing
+ * classes have to be present as well (by the rules above). Example:
+ *
+ * class Outer { class I1 { class I2 { } } }
+ * class User { Outer.I1.I2 foo() { } }
+ *
+ * The return type "Outer.I1.I2" puts "Outer$I1$I2" in the CP, therefore the class is added to the
+ * InnerClass attribute. For this entry, the "outer class" field will be "Outer$I1". This in turn
+ * adds "Outer$I1" to the CP, which requires adding that class to the InnerClass attribute.
+ * (For local / anonymous classes this would not be the case, since the "outer class" attribute
+ * would be empty. However, no class (other than the enclosing class) can refer to them, as they
+ * have no name.)
+ *
+ * In the current implementation of the Scala compiler, when adding a class to the InnerClass
+ * attribute, all of its enclosing classes will be added as well. Javac seems to do the same,
+ * see (*).
+ *
+ *
+ * Note 2: If a class name is mentioned only in a CONSTANT_Utf8_info, but not in a
+ * CONSTANT_Class_info, the JVMS does not require an entry in the InnerClass attribute. However,
+ * the Java compiler seems to add such classes anyway. For example, when using an annotation, the
+ * annotation class is stored as a CONSTANT_Utf8_info in the CP:
+ *
+ * @O.Ann void foo() { }
+ *
+ * adds "const #13 = Asciz LO$Ann;;" in the constant pool. The "RuntimeInvisibleAnnotations"
+ * attribute refers to that constant pool entry. Even though there is no other reference to
+ * `O.Ann`, the java compiler adds an entry for that class to the InnerClass attribute (which
+ * entails adding a CONSTANT_Class_info for the class).
+ *
+ *
+ *
+ * EnclosingMethod
+ * ---------------
+ *
+ * JVMS 4.7.7: the attribute must be present "if and only if it represents a local class
+ * or an anonymous class" (i.e. not for member classes).
+ *
+ * The attribute is misnamed; it should be called "EnclosingClass". It has to be defined for all
+ * local and anonymous classes, no matter if there is an enclosing method or not. Accordingly, the
+ * "class" field (see below) must be always defined, while the "method" field may be null.
+ *
+ * NOTE: When an EnclosingMethod attribute is required (local and anonymous classes), the "outer"
+ * field in the InnerClass table must be null.
+ *
+ * Fields:
+ * - class: the enclosing class
+ * - method: the enclosing method (or constructor). Null if the class is not enclosed by a
+ * method, i.e. for
+ * - local or anonymous classes defined in (static or non-static) initializer blocks
+ * - anonymous classes defined in initializer blocks or field initializers
+ *
+ * Note: the field is required for anonymous classes defined within local variable
+ * initializers (within a method), Java example below (**).
+ *
+ * For local and anonymous classes in initializer blocks or field initializers, and
+ * class-level anonymous classes, the scala compiler sets the "method" field to null.
+ *
+ *
+ * (*)
+ * public class Test {
+ * void foo() {
+ * class Foo1 {
+ * // constructor statement block
+ * {
+ * class Foo2 {
+ * class Foo3 { }
+ * }
+ * }
+ * }
+ * }
+ * }
+ *
+ * The class file Test$1Foo1$1Foo2$Foo3 has no reference to the class Test$1Foo1, however it
+ * still contains an InnerClass attribute for Test$1Foo1.
+ * Maybe this is just because the Java compiler follows the JVMS comment ("InnerClasses
+ * information for each enclosing class").
+ *
+ *
+ * (**)
+ * void foo() {
+ * // anonymous class defined in local variable initializer expression.
+ * Runnable x = true ? (new Runnable() {
+ * public void run() { return; }
+ * }) : null;
+ * }
+ *
+ * The EnclosingMethod attribute of the anonymous class mentions "foo" in the "method" field.
+ *
+ *
+ * Java Compatibility
+ * ------------------
+ *
+ * In the InnerClass entry for classes in top-level modules, the "outer class" is emitted as the
+ * mirror class (or the existing companion class), i.e. C1 is nested in T (not T$).
+ * For classes nested in a nested object, the "outer class" is the module class: C2 is nested in T$N$
+ * object T {
+ * class C1
+ * object N { class C2 }
+ * }
+ *
+ * Reason: Java compatibility. It's a "best effort" solution. If you want to use "C1" from Java, you
+ * can write "T.C1", and the Java compiler will translate that to the classfile T$C1.
+ *
+ * If we emitted the "outer class" of C1 as "T$", then in Java you'd need to write "T$.C1"
+ * because the java compiler looks at the InnerClass attribute to find if an inner class exists.
+ * However, the Java compiler would then translate the '.' to '$' and you'd get the class name
+ * "T$$C1". This class file obviously does not exist.
+ *
+ * Directly using the encoded class name "T$C1" in Java does not work: since the classfile
+ * describes a nested class, the Java compiler hides it from the classpath and will report
+ * "cannot find symbol T$C1". This means that the class T.N.C2 cannot be referenced from a
+ * Java source file in any way.
+ *
+ *
+ * STATIC flag
+ * -----------
+ *
+ * Java: static member classes have the static flag in the InnerClass attribute, for example B in
+ * class A { static class B { } }
+ *
+ * The spec is not very clear about when the static flag should be emitted. It says: "Marked or
+ * implicitly static in source."
+ *
+ * The presence of the static flag does NOT coincide with the absence of an "outer" field in the
+ * class. The java compiler never puts the static flag for local classes, even if they don't have
+ * an outer pointer:
+ *
+ * class A {
+ * void f() { class B {} }
+ * static void g() { class C {} }
+ * }
+ *
+ * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table.
+ *
+ * It seems sane to follow the same principle in the Scala compiler. So:
+ *
+ * package p
+ * object O1 {
+ * class C1 // static inner class
+ * object O2 { // static inner module
+ * def f = {
+ * class C2 { // non-static inner class, even though there's no outer pointer
+ * class C3 // non-static, has an outer pointer
+ * }
+ * }
+ * }
+ * }
+ *
+ *
+ * Trait Members
+ * -------------
+ *
+ * Some trait methods don't exist in the generated interface, but only in the implementation class
+ * (private methods in traits for example). Since EnclosingMethod expresses a source-level property,
+ * but the source-level enclosing method doesn't exist in the classfile, we set the enclosing method
+ * to null (the enclosing class is still emitted).
+ * See BCodeAsmCommon.considerAsTopLevelImplementationArtifact
+ *
+ *
+ * Implementation Classes, Specialized Classes, Delambdafy:method closure classes
+ * ------------------------------------------------------------------------------
+ *
+ * Trait implementation classes and specialized classes are always considered top-level. Again,
+ * the InnerClass / EnclosingMethod attributes describe source-level properties. The impl
+ * classes are compilation artifacts.
+ *
+ * The same is true for delambdafy:method closure classes. These classes are generated at
+ * top-level in the delambdafy phase, no special support is required in the backend.
+ *
+ *
+ * Mirror Classes
+ * --------------
+ *
+ * TODO: innerclass attributes on mirror class, bean info class
+ */
+
+ /**
+ * A ClassBType represents a class or interface type. The necessary information to build a
+ * ClassBType is extracted from compiler symbols and types, see BTypesFromSymbols.
+ *
+ * The `info` field contains either the class information or an error message explaining why the info could
+ * not be computed. There are two reasons for an erroneous info:
+ * 1. The ClassBType was built from a class symbol that stems from a java source file, and the
+ * symbol's type could not be completed successfully (SI-9111)
+ * 2. The ClassBType should be built from a classfile, but the class could not be found on the
+ * compilation classpath.
+ *
+ * Note that all ClassBTypes required in a non-optimized run are built during code generation from
+ * the class symbols referenced by the ASTs, so they have a valid info. Therefore the backend
+ * often invokes `info.get` (which asserts the info to exist) when reading data from the ClassBType.
+ *
+ * The inliner on the other hand uses ClassBTypes that are built from classfiles, which may have
+ * a missing info. In order not to crash the compiler unnecessarily, the inliner does not force
+ * infos using `get`, but it reports inliner warnings for missing infos that prevent inlining.
+ */
+ final case class ClassBType(internalName: InternalName) extends RefBType {
+ /**
+ * A write-once variable that allows initializing a cyclic graph of infos. This is required for
+ * nested classes. Example: for the definition `class A { class B }` we have
+ *
+ * B.info.nestedInfo.outerClass == A
+ * A.info.nestedClasses contains B
+ */
+ private var _info: Either[NoClassBTypeInfo, ClassInfo] = null
+
+ def info: Either[NoClassBTypeInfo, ClassInfo] = {
+ assert(_info != null, s"ClassBType.info not yet assigned: $this")
+ _info
+ }
+
+ def info_=(i: Either[NoClassBTypeInfo, ClassInfo]): Unit = {
+ assert(_info == null, s"Cannot set ClassBType.info multiple times: $this")
+ _info = i
+ checkInfoConsistency()
+ }
+
+ classBTypeFromInternalName(internalName) = this
+
+ private def checkInfoConsistency(): Unit = {
+ if (info.isLeft) return
+
+ // we assert some properties. however, some of the linked ClassBTypes (members, superClass,
+ // interfaces) may not yet have an `_info` (initialization of cyclic structures). so we do a
+ // best-effort verification. also we don't report an error if the info is a Left.
+ def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || c.info.isLeft || p(c)
+
+ def isJLO(t: ClassBType) = t.internalName == ObjectReference.internalName
+
+ assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this")
+
+ assert(
+ if (info.get.superClass.isEmpty) { isJLO(this) || (isCompilingPrimitive && ClassBType.hasNoSuper(internalName)) }
+ else if (isInterface.get) isJLO(info.get.superClass.get)
+ else !isJLO(this) && ifInit(info.get.superClass.get)(!_.isInterface.get),
+ s"Invalid superClass in $this: ${info.get.superClass}"
+ )
+ assert(
+ info.get.interfaces.forall(c => ifInit(c)(_.isInterface.get)),
+ s"Invalid interfaces in $this: ${info.get.interfaces}"
+ )
+
+ assert(info.get.nestedClasses.forall(c => ifInit(c)(_.isNestedClass.get)), info.get.nestedClasses)
+ }
+
+ /**
+ * @return The class name without the package prefix
+ */
+ def simpleName: String = internalName.split("/").last
+
+ def isInterface: Either[NoClassBTypeInfo, Boolean] = info.map(i => (i.flags & asm.Opcodes.ACC_INTERFACE) != 0)
+
+ def superClassesTransitive: Either[NoClassBTypeInfo, List[ClassBType]] = info.flatMap(i => i.superClass match {
+ case None => Right(Nil)
+ case Some(sc) => sc.superClassesTransitive.map(sc :: _)
+ })
+
+ /**
+ * The prefix of the internal name until the last '/', or the empty string.
+ */
+ def packageInternalName: String = {
+ val name = internalName
+ name.lastIndexOf('/') match {
+ case -1 => ""
+ case i => name.substring(0, i)
+ }
+ }
+
+ def isPublic: Either[NoClassBTypeInfo, Boolean] = info.map(i => (i.flags & asm.Opcodes.ACC_PUBLIC) != 0)
+
+ def isNestedClass: Either[NoClassBTypeInfo, Boolean] = info.map(_.nestedInfo.isDefined)
+
+ def enclosingNestedClassesChain: Either[NoClassBTypeInfo, List[ClassBType]] = {
+ isNestedClass.flatMap(isNested => {
+ // if isNested is true, we know that info.get is defined, and nestedInfo.get is also defined.
+ if (isNested) info.get.nestedInfo.get.enclosingClass.enclosingNestedClassesChain.map(this :: _)
+ else Right(Nil)
+ })
+ }
+
+ def innerClassAttributeEntry: Either[NoClassBTypeInfo, Option[InnerClassEntry]] = info.map(i => i.nestedInfo map {
+ case NestedInfo(_, outerName, innerName, isStaticNestedClass) =>
+ InnerClassEntry(
+ internalName,
+ outerName.orNull,
+ innerName.orNull,
+ GenBCode.mkFlags(
+ // the static flag in the InnerClass table has a special meaning, see InnerClass comment
+ i.flags & ~Opcodes.ACC_STATIC,
+ if (isStaticNestedClass) Opcodes.ACC_STATIC else 0
+ ) & ClassBType.INNER_CLASSES_FLAGS
+ )
+ })
+
+ def inlineInfoAttribute: Either[NoClassBTypeInfo, InlineInfoAttribute] = info.map(i => {
+ // InlineInfos are serialized for classes being compiled. For those the info was built by
+ // buildInlineInfoFromClassSymbol, which only adds a warning under SI-9111, which in turn
+ // only happens for class symbols of java source files.
+ // we could put this assertion into InlineInfoAttribute, but it is safer to put it here
+ // where it affects only GenBCode, rather than adding any assertion to GenASM in 2.11.6.
+ assert(i.inlineInfo.warning.isEmpty, i.inlineInfo.warning)
+ InlineInfoAttribute(i.inlineInfo)
+ })
+
+ def isSubtypeOf(other: ClassBType): Either[NoClassBTypeInfo, Boolean] = try {
+ if (this == other) return Right(true)
+ if (isInterface.orThrow) {
+ if (other == ObjectReference) return Right(true) // interfaces conform to Object
+ if (!other.isInterface.orThrow) return Right(false) // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false.
+ // else: this and other are both interfaces. continue to (*)
+ } else {
+ val sc = info.orThrow.superClass
+ if (sc.isDefined && sc.get.isSubtypeOf(other).orThrow) return Right(true) // the superclass of this class conforms to other
+ if (!other.isInterface.orThrow) return Right(false) // this and other are both classes, and the superclass of this does not conform
+ // else: this is a class, the other is an interface. continue to (*)
+ }
+
+ // (*) check if some interface of this class conforms to other.
+ Right(info.orThrow.interfaces.exists(_.isSubtypeOf(other).orThrow))
+ } catch {
+ case Invalid(noInfo: NoClassBTypeInfo) => Left(noInfo)
+ }
+
+ /**
+ * Finding the least upper bound in agreement with the bytecode verifier
+ * Background:
+ * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ * http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ * https://issues.scala-lang.org/browse/SI-3872
+ */
+ def jvmWiseLUB(other: ClassBType): Either[NoClassBTypeInfo, ClassBType] = {
+ def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType
+ assert(isNotNullOrNothing(this) && isNotNullOrNothing(other), s"jvmWiseLub for null or nothing: $this - $other")
+
+ tryEither {
+ val res: ClassBType = (this.isInterface.orThrow, other.isInterface.orThrow) match {
+ case (true, true) =>
+ // exercised by test/files/run/t4761.scala
+ if (other.isSubtypeOf(this).orThrow) this
+ else if (this.isSubtypeOf(other).orThrow) other
+ else ObjectReference
+
+ case (true, false) =>
+ if (other.isSubtypeOf(this).orThrow) this else ObjectReference
+
+ case (false, true) =>
+ if (this.isSubtypeOf(other).orThrow) other else ObjectReference
+
+ case _ =>
+ // TODO @lry I don't really understand the reasoning here.
+ // Both this and other are classes. The code takes (transitively) all superclasses and
+ // finds the first common one.
+ // MOST LIKELY the answer can be found here, see the comments and links by Miguel:
+ // - https://issues.scala-lang.org/browse/SI-3872
+ firstCommonSuffix(this :: this.superClassesTransitive.orThrow, other :: other.superClassesTransitive.orThrow)
+ }
+
+ assert(isNotNullOrNothing(res), s"jvmWiseLub computed: $res")
+ Right(res)
+ }
+ }
+
+ private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = {
+ var chainA = as
+ var chainB = bs
+ var fcs: ClassBType = null
+ do {
+ if (chainB contains chainA.head) fcs = chainA.head
+ else if (chainA contains chainB.head) fcs = chainB.head
+ else {
+ chainA = chainA.tail
+ chainB = chainB.tail
+ }
+ } while (fcs == null)
+ fcs
+ }
+ }
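As an illustrative aside (not part of the patch), here is the first-common-suffix walk used by jvmWiseLUB, extracted onto plain strings so its behaviour on two superclass chains can be tried in isolation; the names are invented for the example.

    object FirstCommonSuffixDemo extends App {
      def firstCommonSuffix(as: List[String], bs: List[String]): String = {
        var chainA = as
        var chainB = bs
        var fcs: String = null
        do {
          if (chainB contains chainA.head) fcs = chainA.head
          else if (chainA contains chainB.head) fcs = chainB.head
          else { chainA = chainA.tail; chainB = chainB.tail }
        } while (fcs == null)
        fcs
      }
      // class A extends B; class C extends B; both chains end in java/lang/Object
      println(firstCommonSuffix(
        List("A", "B", "java/lang/Object"),
        List("C", "B", "java/lang/Object"))) // prints: B
    }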
+
+ object ClassBType {
+ /**
+ * Valid flags for InnerClass attribute entry.
+ * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6
+ */
+ private val INNER_CLASSES_FLAGS = {
+ asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE |
+ asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION |
+ asm.Opcodes.ACC_ENUM
+ }
+
+ // Primitive classes have no super class. A ClassBType for those is only created when
+ // they are actually being compiled (e.g., when compiling scala/Boolean.scala).
+ private val hasNoSuper = Set(
+ "scala/Unit",
+ "scala/Boolean",
+ "scala/Char",
+ "scala/Byte",
+ "scala/Short",
+ "scala/Int",
+ "scala/Float",
+ "scala/Long",
+ "scala/Double"
+ )
+
+ private val isInternalPhantomType = Set(
+ "scala/Null",
+ "scala/Nothing"
+ )
+ }
+
+ /**
+ * The type info for a class. Used for symboltable-independent subtype checks in the backend.
+ *
+ * @param superClass The super class, not defined for class java/lang/Object.
+ * @param interfaces All transitively implemented interfaces, except for those inherited
+ * through the superclass.
+ * @param flags The java flags, obtained through `javaFlags`. Used also to derive
+ * the flags for InnerClass entries.
+ * @param nestedClasses Classes nested in this class. Those need to be added to the
+ * InnerClass table, see the InnerClass spec summary above.
+ * @param nestedInfo If this describes a nested class, information for the InnerClass table.
+ * @param inlineInfo Information about this class for the inliner.
+ */
+ final case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int,
+ nestedClasses: List[ClassBType], nestedInfo: Option[NestedInfo],
+ inlineInfo: InlineInfo)
+
+ /**
+ * Information required to add a class to an InnerClass table.
+ * The spec summary above explains what information is required for the InnerClass entry.
+ *
+ * @param enclosingClass The enclosing class, if it is also nested. When adding a class
+ * to the InnerClass table, enclosing nested classes are also added.
+ * @param outerName The outerName field in the InnerClass entry, may be None.
+ * @param innerName The innerName field, may be None.
+ * @param isStaticNestedClass True if this is a static nested class (not inner class) (*)
+ *
+ * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not
+ * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes
+ * a source-level property: if the class is in a static context (does not have an outer pointer).
+ * This is checked when building the NestedInfo.
+ */
+ final case class NestedInfo(enclosingClass: ClassBType,
+ outerName: Option[String],
+ innerName: Option[String],
+ isStaticNestedClass: Boolean)
+
+ /**
+ * This class holds the data for an entry in the InnerClass table. See the InnerClass summary
+ * above in this file.
+ *
+ * There's some overlap with the class NestedInfo, but it's not exactly the same, and it's cleaner
+ * to keep them separate.
+ * @param name The internal name of the class.
+ * @param outerName The internal name of the outer class, may be null.
+ * @param innerName The simple name of the inner class, may be null.
+ * @param flags The flags for this class in the InnerClass entry.
+ */
+ final case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int)
+
+ final case class ArrayBType(componentType: BType) extends RefBType {
+ def dimension: Int = componentType match {
+ case a: ArrayBType => 1 + a.dimension
+ case _ => 1
+ }
+
+ def elementType: BType = componentType match {
+ case a: ArrayBType => a.elementType
+ case t => t
+ }
+ }
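As an illustrative aside (not part of the patch), a miniature of ArrayBType shows how dimension, elementType and the descriptor relate for a two-dimensional int array; the type names are invented for the example.

    object ArrayBTypeDemo extends App {
      sealed trait T { def descriptor: String }
      case object IntT extends T { def descriptor = "I" }
      case class ArrayT(component: T) extends T {
        def descriptor = "[" + component.descriptor
        def dimension: Int = component match { case a: ArrayT => 1 + a.dimension; case _ => 1 }
        def elementType: T = component match { case a: ArrayT => a.elementType; case t => t }
      }
      val intMatrix = ArrayT(ArrayT(IntT))
      println(intMatrix.descriptor)             // [[I
      println(intMatrix.dimension)              // 2
      println(intMatrix.elementType.descriptor) // I
    }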
+
+ final case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType
+
+ /* Some definitions that are required for the implementation of BTypes. They are abstract because
+ * initializing them requires information from types / symbols, which is not accessible here in
+ * BTypes.
+ *
+ * They are defs (not vals) because they are implemented using vars (see comment on CoreBTypes).
+ */
+
+ /**
+ * Just a named pair, used in CoreBTypes.asmBoxTo/asmUnboxTo.
+ */
+ final case class MethodNameAndType(name: String, methodType: MethodBType)
+
+ /**
+ * True if the current compilation unit is of a primitive class (scala.Boolean et al).
+ * Used only in assertions. Abstract here because its implementation depends on global.
+ */
+ def isCompilingPrimitive: Boolean
+}
+
+object BTypes {
+ /**
+ * A marker for strings that represent class internal names.
+ * Ideally the type would be incompatible with String, for example by making it a value class.
+ * But that would create overhead in a Collection[InternalName].
+ */
+ type InternalName = String
+
+ /**
+ * Metadata about a ClassBType, used by the inliner.
+ *
+ * More information may be added in the future to enable more elaborate inlining heuristics.
+ *
+ * @param traitImplClassSelfType `Some(tp)` if this InlineInfo describes a trait, and the `self`
+ * parameter type of the methods in the implementation class is not
+ * the trait itself. Example:
+ * trait T { self: U => def f = 1 }
+ * Generates something like:
+ * class T$class { static def f(self: U) = 1 }
+ *
+ * In order to inline a trait method call, the INVOKEINTERFACE is
+ * rewritten to an INVOKESTATIC of the impl class, so we need the
+ * self type (U) to get the right signature.
+ *
+ * `None` if the self type is the interface type, or if this
+ * InlineInfo does not describe a trait.
+ *
+ * @param isEffectivelyFinal True if the class cannot have subclasses: final classes, module
+ * classes, trait impl classes.
+ *
+ * @param methodInfos The [[MethodInlineInfo]]s for the methods declared in this class.
+ * The map is indexed by the string s"$name$descriptor" (to
+ * disambiguate overloads).
+ *
+ * @param warning Contains a warning message if an error occurred when building this
+ * InlineInfo, for example if some classfile could not be found on
+ * the classpath. This warning can be reported later by the inliner.
+ */
+ final case class InlineInfo(traitImplClassSelfType: Option[InternalName],
+ isEffectivelyFinal: Boolean,
+ methodInfos: Map[String, MethodInlineInfo],
+ warning: Option[ClassInlineInfoWarning])
+
+ val EmptyInlineInfo = InlineInfo(None, false, Map.empty, None)
+
+ /**
+ * Metadata about a method, used by the inliner.
+ *
+ * @param effectivelyFinal True if the method cannot be overridden (in Scala)
+ * @param traitMethodWithStaticImplementation True if the method is an interface method of a
+ * trait and has a static counterpart in the
+ * implementation class.
+ * @param annotatedInline True if the method is annotated `@inline`
+ * @param annotatedNoInline True if the method is annotated `@noinline`
+ */
+ final case class MethodInlineInfo(effectivelyFinal: Boolean,
+ traitMethodWithStaticImplementation: Boolean,
+ annotatedInline: Boolean,
+ annotatedNoInline: Boolean)
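+
+ // Illustrative sketch (editorial): a methodInfos map is keyed by name + descriptor, e.g. for
+ // hypothetical methods `def f: Int` and `@inline def g(s: String): Unit` it could look like
+ //   Map(
+ //     "f()I"                   -> MethodInlineInfo(effectivelyFinal = false, traitMethodWithStaticImplementation = false, annotatedInline = false, annotatedNoInline = false),
+ //     "g(Ljava/lang/String;)V" -> MethodInlineInfo(effectivelyFinal = false, traitMethodWithStaticImplementation = false, annotatedInline = true,  annotatedNoInline = false)
+ //   )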
+
+ // no static way (without symbol table instance) to get to nme.ScalaATTR / ScalaSignatureATTR
+ val ScalaAttributeName = "Scala"
+ val ScalaSigAttributeName = "ScalaSig"
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
new file mode 100644
index 0000000000..1b9fd5e298
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
@@ -0,0 +1,571 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.tools.nsc.backend.jvm.opt.{LocalOpt, CallGraph, Inliner, ByteCodeRepository}
+import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo, InternalName}
+import BackendReporting._
+import scala.tools.nsc.settings.ScalaSettings
+
+/**
+ * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary
+ * information from a symbol and its type to create the corresponding ClassBType. It requires
+ * access to the compiler (global parameter).
+ *
+ * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes
+ * uses classBTypeFromSymbol, hence requires access to the compiler (global).
+ *
+ * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some
+ * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does
+ * not have access to the compiler instance.
+ */
+class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
+ import global._
+ import definitions._
+
+ val bCodeICodeCommon: BCodeICodeCommon[global.type] = new BCodeICodeCommon(global)
+ val bCodeAsmCommon: BCodeAsmCommon[global.type] = new BCodeAsmCommon(global)
+ import bCodeAsmCommon._
+
+ // Why the proxy, see documentation of class [[CoreBTypes]].
+ val coreBTypes = new CoreBTypesProxy[this.type](this)
+ import coreBTypes._
+
+ val byteCodeRepository = new ByteCodeRepository(global.classPath, javaDefinedClasses, recordPerRunCache(collection.concurrent.TrieMap.empty))
+
+ val localOpt: LocalOpt[this.type] = new LocalOpt(this)
+
+ val inliner: Inliner[this.type] = new Inliner(this)
+
+ val callGraph: CallGraph[this.type] = new CallGraph(this)
+
+ val backendReporting: BackendReporting = new BackendReportingImpl(global)
+
+ final def initializeCoreBTypes(): Unit = {
+ coreBTypes.setBTypes(new CoreBTypes[this.type](this))
+ }
+
+ def recordPerRunCache[T <: collection.generic.Clearable](cache: T): T = perRunCaches.recordCache(cache)
+
+ def compilerSettings: ScalaSettings = settings
+
+ // helpers that need access to global.
+ // TODO @lry create a separate component, they don't belong to BTypesFromSymbols
+
+ final val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
+
+ private val primitiveCompilationUnits = Set(
+ "Unit.scala",
+ "Boolean.scala",
+ "Char.scala",
+ "Byte.scala",
+ "Short.scala",
+ "Int.scala",
+ "Float.scala",
+ "Long.scala",
+ "Double.scala"
+ )
+
+ /**
+ * True if the current compilation unit is of a primitive class (scala.Boolean et al).
+ * Used only in assertions.
+ */
+ def isCompilingPrimitive = {
+ primitiveCompilationUnits(currentUnit.source.file.name)
+ }
+
+ def isCompilingArray = {
+ currentUnit.source.file.name == "Array.scala"
+ }
+
+ // end helpers
+
+ /**
+ * The ClassBType for a class symbol `classSym`.
+ *
+ * The class symbol scala.Nothing is mapped to the class scala.runtime.Nothing$. Similarly,
+ * scala.Null is mapped to scala.runtime.Null$. This is because no classfiles exist for
+ * Nothing / Null. If used, for example, as a parameter type, we use the runtime classes
+ * in the classfile method signature.
+ *
+ * Note that the referenced class symbol may be an implementation class. For example when
+ * compiling a mixed-in method that forwards to the static method in the implementation class,
+ * the class descriptor of the receiver (the implementation class) is obtained by creating the
+ * ClassBType.
+ */
+ final def classBTypeFromSymbol(classSym: Symbol): ClassBType = {
+ assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol")
+ assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym")
+ assertClassNotArrayNotPrimitive(classSym)
+ assert(!primitiveTypeMap.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym")
+ if (classSym == NothingClass) RT_NOTHING
+ else if (classSym == NullClass) RT_NULL
+ else {
+ val internalName = classSym.javaBinaryName.toString
+ classBTypeFromInternalName.getOrElse(internalName, {
+ // The new ClassBType is added to the map in its constructor, before we set its info. This
+ // allows initializing cyclic dependencies, see the comment on variable ClassBType._info.
+ val res = ClassBType(internalName)
+ if (completeSilentlyAndCheckErroneous(classSym)) {
+ res.info = Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName))
+ res
+ } else {
+ setClassInfo(classSym, res)
+ }
+ })
+ }
+ }
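+
+ // Illustrative example (editorial): since scala.Nothing and scala.Null have no classfiles, a
+ // method such as `def f(x: Null): Nothing` is emitted with the runtime classes in its
+ // descriptor, i.e. (Lscala/runtime/Null$;)Lscala/runtime/Nothing$;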
+
+ /**
+ * Builds a [[MethodBType]] for a method symbol.
+ */
+ final def methodBTypeFromSymbol(methodSymbol: Symbol): MethodBType = {
+ assert(methodSymbol.isMethod, s"not a method-symbol: $methodSymbol")
+ val resultType: BType =
+ if (methodSymbol.isClassConstructor || methodSymbol.isConstructor) UNIT
+ else typeToBType(methodSymbol.tpe.resultType)
+ MethodBType(methodSymbol.tpe.paramTypes map typeToBType, resultType)
+ }
+
+ /**
+ * This method returns the BType for a type reference, for example a parameter type.
+ *
+ * If `t` references a class, typeToBType ensures that the class is not an implementation class.
+ * See also comment on classBTypeFromSymbol, which is invoked for implementation classes.
+ */
+ final def typeToBType(t: Type): BType = {
+ import definitions.ArrayClass
+
+ /**
+ * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int.
+ * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType.
+ */
+ def primitiveOrClassToBType(sym: Symbol): BType = {
+ assertClassNotArray(sym)
+ assert(!sym.isImplClass, sym)
+ primitiveTypeMap.getOrElse(sym, classBTypeFromSymbol(sym))
+ }
+
+ /**
+ * When compiling Array.scala, the type parameter T is not erased and shows up in method
+ * signatures, e.g. `def apply(i: Int): T`. A TypeRef to T is replaced by ObjectReference.
+ */
+ def nonClassTypeRefToBType(sym: Symbol): ClassBType = {
+ assert(sym.isType && isCompilingArray, sym)
+ ObjectReference
+ }
+
+ t.dealiasWiden match {
+ case TypeRef(_, ArrayClass, List(arg)) => ArrayBType(typeToBType(arg)) // Array type such as Array[Int] (kept by erasure)
+ case TypeRef(_, sym, _) if !sym.isClass => nonClassTypeRefToBType(sym) // See comment on nonClassTypeRefToBType
+ case TypeRef(_, sym, _) => primitiveOrClassToBType(sym) // Common reference to a type such as scala.Int or java.lang.String
+ case ClassInfoType(_, _, sym) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes typeToBType(moduleClassSymbol.info)
+
+ /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for
+ * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning.
+ * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala.
+ */
+ case a @ AnnotatedType(_, t) =>
+ debuglog(s"typeKind of annotated type $a")
+ typeToBType(t)
+
+ /* ExistentialType should (probably) be eliminated by erasure. We know they get here for
+ * classOf constants:
+ * class C[T]
+ * class T { final val k = classOf[C[_]] }
+ */
+ case e @ ExistentialType(_, t) =>
+ debuglog(s"typeKind of existential type $e")
+ typeToBType(t)
+
+ /* The cases below should probably never occur. They are kept for now to avoid introducing
+ * new compiler crashes, but we added a warning. The compiler / library bootstrap and the
+ * test suite don't produce any warning.
+ */
+
+ case tp =>
+ currentUnit.warning(tp.typeSymbol.pos,
+ s"an unexpected type representation reached the compiler backend while compiling $currentUnit: $tp. " +
+ "If possible, please file a bug on issues.scala-lang.org.")
+
+ tp match {
+ case ThisType(ArrayClass) => ObjectReference // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test
+ case ThisType(sym) => classBTypeFromSymbol(sym)
+ case SingleType(_, sym) => primitiveOrClassToBType(sym)
+ case ConstantType(_) => typeToBType(t.underlying)
+ case RefinedType(parents, _) => parents.map(typeToBType(_).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b).get)
+ }
+ }
+ }
+
+ def assertClassNotArray(sym: Symbol): Unit = {
+ assert(sym.isClass, sym)
+ assert(sym != definitions.ArrayClass || isCompilingArray, sym)
+ }
+
+ def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = {
+ assertClassNotArray(sym)
+ assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym)
+ }
+
+ private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = {
+ val superClassSym = if (classSym.isImplClass) ObjectClass else classSym.superClass
+ assert(
+ if (classSym == ObjectClass)
+ superClassSym == NoSymbol
+ else if (classSym.isInterface)
+ superClassSym == ObjectClass
+ else
+ // A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes.
+ ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)),
+ s"Bad superClass for $classSym: $superClassSym"
+ )
+ val superClass = if (superClassSym == NoSymbol) None
+ else Some(classBTypeFromSymbol(superClassSym))
+
+ val interfaces = implementedInterfaces(classSym).map(classBTypeFromSymbol)
+
+ val flags = javaFlags(classSym)
+
+ /* The InnerClass table of a class C must contain all nested classes of C, even if they are only
+ * declared but not otherwise referenced in C (from the bytecode or a method / field signature).
+ * We collect them here.
+ *
+ * Nested classes that are also referenced in C will be added to the innerClassBufferASM during
+ * code generation, but those duplicates will be eliminated when emitting the InnerClass
+ * attribute.
+ *
+ * Why do we need to collect classes into innerClassBufferASM at all? To collect references to
+ * nested classes that are used within C but are NOT themselves nested in C.
+ */
+ val nestedClassSymbols = {
+ val linkedClass = exitingPickler(classSym.linkedClassOfClass) // linkedCoC does not work properly in late phases
+
+ // The lambdalift phase lifts all nested classes to the enclosing class, so if we collect
+ // member classes right after lambdalift, we obtain all nested classes, including local and
+ // anonymous ones.
+ val nestedClasses = {
+ val allNested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesForInnerClassTable(classSym))
+ val nested = {
+ // Classes nested in value classes are nested in the companion at this point. For InnerClass /
+ // EnclosingMethod, we use the value class as the outer class. So we remove nested classes
+ // from the companion that were originally nested in the value class.
+ if (exitingPickler(linkedClass.isDerivedValueClass)) allNested.filterNot(classOriginallyNestedInClass(_, linkedClass))
+ else allNested
+ }
+
+ if (isTopLevelModuleClass(classSym)) {
+ // For Java compatibility, member classes of top-level objects are treated as members of
+ // the top-level companion class, see comment below.
+ val members = exitingPickler(memberClassesForInnerClassTable(classSym))
+ nested diff members
+ } else {
+ nested
+ }
+ }
+
+ val companionModuleMembers = if (considerAsTopLevelImplementationArtifact(classSym)) Nil else {
+ // If this is a top-level non-impl (*) class, the member classes of the companion object are
+ // added as members of the class. For example:
+ // class C { }
+ // object C {
+ // class D
+ // def f = { class E }
+ // }
+ // The class D is added as a member of class C. The reason is: for Java compatibility, the
+ // InnerClass attribute for D has "C" (NOT the module class "C$") as the outer class of D
+ // (done by buildNestedInfo). See comment in BTypes.
+ // For consistency, the InnerClass entry for D needs to be present in C - to Java it looks
+ // like D is a member of C, not C$.
+ //
+ // (*) We exclude impl classes: if the classfile for the impl class exists on the classpath,
+ // a linkedClass symbol is found for which isTopLevelModule is true, so we end up searching
+ // members of that weird impl-class-module-class-symbol. That search probably cannot return
+ // any classes, but it's better to exclude it.
+ val javaCompatMembers = {
+ if (linkedClass != NoSymbol && isTopLevelModuleClass(linkedClass))
+ // phase travel to exitingPickler: this makes sure that memberClassesForInnerClassTable only sees member
+ // classes, not local classes of the companion module (E in the example) that were lifted by lambdalift.
+ exitingPickler(memberClassesForInnerClassTable(linkedClass))
+ else
+ Nil
+ }
+
+ // Classes nested in value classes are nested in the companion at this point. For InnerClass /
+ // EnclosingMethod we use the value class as enclosing class. Here we search nested classes
+ // in the companion that were originally nested in the value class, and we add them as nested
+ // in the value class.
+ val valueClassCompanionMembers = {
+ if (linkedClass != NoSymbol && exitingPickler(classSym.isDerivedValueClass)) {
+ val moduleMemberClasses = exitingPhase(currentRun.lambdaliftPhase)(memberClassesForInnerClassTable(linkedClass))
+ moduleMemberClasses.filter(classOriginallyNestedInClass(_, classSym))
+ } else
+ Nil
+ }
+
+ javaCompatMembers ++ valueClassCompanionMembers
+ }
+
+ nestedClasses ++ companionModuleMembers
+ }
+
+ /**
+ * For nested java classes, the scala compiler creates both a class and a module (and therefore
+ * a module class) symbol. For example, in `class A { class B {} }`, the nestedClassSymbols
+ * for A contain both the class B and the module class B.
+ * Here we get rid of the module class B, making sure that the class B is present.
+ */
+ val nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => {
+ if (s.isJavaDefined && s.isModuleClass) {
+ // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that
+ // returns NoSymbol, so it doesn't work.
+ val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner)
+ assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols")
+ false
+ } else true
+ })
+
+ val nestedClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol)
+
+ val nestedInfo = buildNestedInfo(classSym)
+
+ val inlineInfo = buildInlineInfo(classSym, classBType.internalName)
+
+ classBType.info = Right(ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo, inlineInfo))
+ classBType
+ }
+
+ private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = {
+ assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym")
+
+ val isTopLevel = innerClassSym.rawowner.isPackageClass
+ // impl classes are considered top-level, see comment in BTypes
+ if (isTopLevel || considerAsTopLevelImplementationArtifact(innerClassSym)) None
+ else {
+ // See comment in BTypes, when is a class marked static in the InnerClass table.
+ val isStaticNestedClass = isOriginallyStaticOwner(innerClassSym.originalOwner)
+
+ // After lambdalift (which is where we are), the rawowner field contains the enclosing class.
+ val enclosingClass = {
+ // (1) Example java source: class C { static class D { } }
+ // The Scala compiler creates a class and a module symbol for C. Because D is a static
+ // nested class, the symbol for D is nested in the module class C (not in the class C).
+ // For the InnerClass attribute, we use the class symbol C, which represents the situation
+ // in the source code.
+
+ // (2) Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec.
+ if ((innerClassSym.isJavaDefined && innerClassSym.rawowner.isModuleClass) || // (1)
+ (!isAnonymousOrLocalClass(innerClassSym) && isTopLevelModuleClass(innerClassSym.rawowner))) { // (2)
+ // phase travel for linkedCoC - does not always work in late phases
+ exitingPickler(innerClassSym.rawowner.linkedClassOfClass) match {
+ case NoSymbol =>
+ // For top-level modules without a companion class, see doc of mirrorClassClassBType.
+ mirrorClassClassBType(exitingPickler(innerClassSym.rawowner))
+
+ case companionClass =>
+ classBTypeFromSymbol(companionClass)
+ }
+ } else {
+ classBTypeFromSymbol(innerClassSym.rawowner)
+ }
+ }
+
+ val outerName: Option[String] = {
+ if (isAnonymousOrLocalClass(innerClassSym)) None
+ else Some(enclosingClass.internalName)
+ }
+
+ val innerName: Option[String] = {
+ // phase travel necessary: after flatten, the name includes the name of outer classes.
+ // if some outer name contains $anon, a non-anon class is considered anon.
+ if (exitingPickler(innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction)) None
+ else Some(innerClassSym.rawname + innerClassSym.moduleSuffix) // moduleSuffix for module classes
+ }
+
+ Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass))
+ }
+ }
+
+ /**
+ * Build the InlineInfo for a ClassBType from the class symbol.
+ *
+ * Note that the InlineInfo is only built from the symbolic information for classes that are being
+ * compiled. For all other classes we delegate to inlineInfoFromClassfile. The reason is that
+ * mixed-in methods are only added to class symbols being compiled, but not to other classes
+ * extending traits. Creating the InlineInfo from the symbol would prevent these mixins from being
+ * inlined.
+ *
+ * So for classes being compiled, the InlineInfo is created here and stored in the ScalaInlineInfo
+ * classfile attribute.
+ */
+ private def buildInlineInfo(classSym: Symbol, internalName: InternalName): InlineInfo = {
+ def buildFromSymbol = buildInlineInfoFromClassSymbol(classSym, classBTypeFromSymbol(_).internalName, methodBTypeFromSymbol(_).descriptor)
+
+ // phase travel required, see implementation of `compiles`. for nested classes, it checks if the
+ // enclosingTopLevelClass is being compiled. after flatten, all classes are considered top-level,
+ // so `compiles` would return `false`.
+ if (exitingPickler(currentRun.compiles(classSym))) buildFromSymbol // InlineInfo required for classes being compiled, we have to create the classfile attribute
+ else if (!compilerSettings.YoptInlinerEnabled) BTypes.EmptyInlineInfo // For other classes, we need the InlineInfo only if the inliner is enabled.
+ else {
+ // For classes not being compiled, the InlineInfo is read from the classfile attribute. This
+ // fixes an issue with mixed-in methods: the mixin phase enters mixin methods only to class
+ // symbols being compiled. For non-compiled classes, we could not build MethodInlineInfos
+ // for those mixin members, which prevents inlining.
+ byteCodeRepository.classNode(internalName) match {
+ case Right(classNode) =>
+ inlineInfoFromClassfile(classNode)
+ case Left(missingClass) =>
+ InlineInfo(None, false, Map.empty, Some(ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass)))
+ }
+ }
+ }
+
+ /**
+ * For top-level objects without a companion class, the compiler generates a mirror class with
+ * static forwarders (Java compat). There's no symbol for the mirror class, but we still need a
+ * ClassBType (its info.nestedClasses will hold the InnerClass entries, see comment in BTypes).
+ */
+ def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = {
+ assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym")
+ val internalName = moduleClassSym.javaBinaryName.dropModule.toString
+ classBTypeFromInternalName.getOrElse(internalName, {
+ val c = ClassBType(internalName)
+ // class info consistent with BCodeHelpers.genMirrorClass
+ val nested = exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol
+ c.info = Right(ClassInfo(
+ superClass = Some(ObjectReference),
+ interfaces = Nil,
+ flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL,
+ nestedClasses = nested,
+ nestedInfo = None,
+ InlineInfo(None, true, Map.empty, None))) // no InlineInfo needed, scala never invokes methods on the mirror class
+ c
+ })
+ }
+
+ /**
+ * True for module classes of package level objects. The backend will generate a mirror class for
+ * such objects.
+ */
+ final def isTopLevelModuleClass(sym: Symbol): Boolean = exitingPickler {
+ // phase travel to pickler required for isNestedClass (looks at owner)
+ val r = sym.isModuleClass && !sym.isNestedClass
+ // The mixin phase adds the `lateMODULE` flag to trait implementation classes. Since the flag
+ // is late, it should not be visible here inside the time travel. We check this.
+ if (r) assert(!sym.isImplClass, s"isModuleClass should be false for impl class $sym")
+ r
+ }
+
+ /**
+ * True for module classes of modules that are top-level or owned only by objects. Module classes
+ * for such objects will get a MODULE$ flag and a corresponding static initializer.
+ */
+ final def isStaticModuleClass(sym: Symbol): Boolean = {
+ /* (1) Phase travel to pickler is required to exclude implementation classes; they have the
+ * lateMODULE flag after mixin, so isModuleClass would be true.
+ * (2) isStaticModuleClass is a source-level property. See comment on isOriginallyStaticOwner.
+ */
+ exitingPickler { // (1)
+ sym.isModuleClass &&
+ isOriginallyStaticOwner(sym.originalOwner) // (2)
+ }
+ }
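+
+ // Illustrative examples (editorial): isStaticModuleClass is true for the module classes of a
+ // top-level `object A` and of `object A { object B }` (owned only by objects); it is false for
+ // the module class of D in `class C { object D }`, whose owner chain contains a class.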
+
+ // legacy, to be removed when the @remote annotation gets removed
+ final def isRemote(s: Symbol) = s hasAnnotation definitions.RemoteAttr
+ final def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
+
+ /**
+ * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes:
+ * - public, abstract, final, strictfp (not used)
+ * for interfaces:
+ * - the same as for classes, without 'final'
+ * for fields:
+ * - public, private (*)
+ * - static, final
+ * for methods:
+ * - the same as for fields, plus:
+ * - abstract, synchronized (not used), strictfp (not used), native (not used)
+ * for all:
+ * - deprecated
+ *
+ * (*) protected cannot be used, since inner classes 'see' protected members,
+ * and they would fail verification after being lifted.
+ */
+ final def javaFlags(sym: Symbol): Int = {
+ // constructors of module classes should be private. introduced in b06edbc, probably to prevent
+ // creating module instances from java. for nested modules, the constructor needs to be public
+ // since they are created by the outer class and stored in a field. a java client can create
+ // new instances via outerClassInstance.new InnerModuleClass$().
+ // TODO: do this early, mark the symbol private.
+ val privateFlag =
+ sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModuleClass(sym.owner))
+
+ // Symbols marked in source as `final` have the FINAL flag. (In the past, the flag was also
+ // added to modules and module classes, not anymore since 296b706).
+ // Note that the presence of the `FINAL` flag on a symbol does not correspond 1:1 to emitting
+ // ACC_FINAL in bytecode.
+ //
+ // Top-level modules are marked ACC_FINAL in bytecode (even without the FINAL flag). Nested
+ // objects don't get the flag to allow overriding (under -Yoverride-objects, SI-5676).
+ //
+ // For fields, only eager val fields can receive ACC_FINAL. vars or lazy vals can't:
+ // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+ // "Another problem is that the specification allows aggressive
+ // optimization of final fields. Within a thread, it is permissible to
+ // reorder reads of a final field with those modifications of a final
+ // field that do not take place in the constructor."
+ //
+ // A var or lazy val which is marked final still has meaning to the
+ // scala compiler. The word final is heavily overloaded unfortunately;
+ // for us it means "not overridable". At present you can't override
+ // vars regardless; this may change.
+ //
+ // The logic does not check .isFinal (which checks flags for the FINAL flag,
+ // and includes symbols marked lateFINAL), but instead inspects rawflags, so
+ // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
+ // avoid breaking proxy software which depends on subclassing, we do not
+ // emit ACC_FINAL.
+
+ val finalFlag = (
+ (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModuleClass(sym))
+ && !sym.enclClass.isInterface
+ && !sym.isClassConstructor
+ && !sym.isMutable // lazy vals and vars both
+ )
+
+ // Primitives are "abstract final" to prohibit instantiation
+ // without having to provide any implementations, but that is an
+ // illegal combination of modifiers at the bytecode level so
+ // suppress final if abstract is present.
+ import asm.Opcodes._
+ GenBCode.mkFlags(
+ if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
+ if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (sym.isInterface) ACC_INTERFACE else 0,
+ if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
+ if (sym.isStaticMember) ACC_STATIC else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
+ if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.hasEnumFlag) ACC_ENUM else 0,
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0,
+ if (sym.isDeprecated) asm.Opcodes.ACC_DEPRECATED else 0
+ )
+ }
+
+ def javaFieldFlags(sym: Symbol) = {
+ javaFlags(sym) | GenBCode.mkFlags(
+ if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
+ if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
+ if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
+ )
+ }
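+
+ // Illustrative examples (editorial): a plain `val x` field receives ACC_FINAL; a `var` (or a
+ // lazy val) does not. A `@transient` annotation adds ACC_TRANSIENT and `@volatile` adds
+ // ACC_VOLATILE, in addition to the flags computed by javaFlags.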
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
new file mode 100644
index 0000000000..d641f708d2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
@@ -0,0 +1,279 @@
+package scala.tools.nsc
+package backend.jvm
+
+import scala.tools.asm.tree.{AbstractInsnNode, MethodNode}
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.reflect.internal.util.Position
+import scala.tools.nsc.settings.ScalaSettings
+import scala.util.control.ControlThrowable
+
+/**
+ * Interface for emitting inline warnings. The interface is required because the implementation
+ * depends on Global, which is not available in BTypes (only in BTypesFromSymbols).
+ */
+sealed abstract class BackendReporting {
+ def inlinerWarning(pos: Position, message: String): Unit
+}
+
+final class BackendReportingImpl(val global: Global) extends BackendReporting {
+ import global._
+
+ def inlinerWarning(pos: Position, message: String): Unit = {
+ currentRun.reporting.inlinerWarning(pos, message)
+ }
+}
+
+/**
+ * Utilities for error reporting.
+ *
+ * Defines some tools that make error reporting with Either easier. They would be subsumed by a
+ * right-biased Either in the standard library (or by scalaz's \/). Validation is different: it
+ * accumulates multiple errors.
+ */
+object BackendReporting {
+ def methodSignature(classInternalName: InternalName, name: String, desc: String) = {
+ classInternalName + "::" + name + desc
+ }
+
+ def methodSignature(classInternalName: InternalName, method: MethodNode): String = {
+ methodSignature(classInternalName, method.name, method.desc)
+ }
+
+ def assertionError(message: String): Nothing = throw new AssertionError(message)
+
+ implicit class RightBiasedEither[A, B](val v: Either[A, B]) extends AnyVal {
+ def map[U](f: B => U) = v.right.map(f)
+ def flatMap[BB](f: B => Either[A, BB]) = v.right.flatMap(f)
+ def filter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match {
+ case Left(_) => v
+ case Right(e) => if (f(e)) v else Left(empty) // scalaz.\/ requires an implicit Monoid m to get m.empty
+ }
+ def foreach[U](f: B => U) = v.right.foreach(f)
+
+ def getOrElse[BB >: B](alt: => BB): BB = v.right.getOrElse(alt)
+
+ /**
+ * Get the value, fail with an assertion if this is an error.
+ */
+ def get: B = {
+ assert(v.isRight, v.left.get)
+ v.right.get
+ }
+
+ /**
+ * Get the right value of an `Either` by throwing a potential error message. Can simplify the
+ * implementation of methods that act on multiple `Either` instances. Instead of flat-mapping,
+ * the first error can be collected as
+ *
+ * tryEither {
+ * eitherOne.orThrow .... eitherTwo.orThrow ... eitherThree.orThrow
+ * }
+ */
+ def orThrow: B = v match {
+ case Left(m) => throw Invalid(m)
+ case Right(t) => t
+ }
+ }
+
+ case class Invalid[A](e: A) extends ControlThrowable
+
+ /**
+ * See documentation of orThrow above.
+ */
+ def tryEither[A, B](op: => Either[A, B]): Either[A, B] = try { op } catch { case Invalid(e) => Left(e.asInstanceOf[A]) }
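+
+ // Illustrative usage (editorial sketch; the method name is hypothetical): combining several
+ // Either values so that the first error short-circuits the computation.
+ //   def sumWidths(a: Either[String, Int], b: Either[String, Int]): Either[String, Int] =
+ //     tryEither { Right(a.orThrow + b.orThrow) }
+ //   sumWidths(Right(1), Right(2))        // Right(3)
+ //   sumWidths(Left("missing"), Right(2)) // Left("missing")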
+
+ sealed trait OptimizerWarning {
+ def emitWarning(settings: ScalaSettings): Boolean
+ }
+
+ // Method filter in RightBiasedEither requires an implicit empty value. Having this value in
+ // scope allows for-comprehensions that desugar into filter calls (for example when using a
+ // tuple de-constructor).
+ implicit object emptyOptimizerWarning extends OptimizerWarning {
+ def emitWarning(settings: ScalaSettings): Boolean = false
+ }
+
+ sealed trait MissingBytecodeWarning extends OptimizerWarning {
+ override def toString = this match {
+ case ClassNotFound(internalName, definedInJavaSource) =>
+ s"The classfile for $internalName could not be found on the compilation classpath." + {
+ if (definedInJavaSource) "\nThe class is defined in a Java source file that is being compiled (mixed compilation), therefore no bytecode is available."
+ else ""
+ }
+
+ case MethodNotFound(name, descriptor, ownerInternalName, missingClasses) =>
+ val (javaDef, others) = missingClasses.partition(_.definedInJavaSource)
+ s"The method $name$descriptor could not be found in the class $ownerInternalName or any of its parents." +
+ (if (others.isEmpty) "" else others.map(_.internalName).mkString("\nNote that the following parent classes could not be found on the classpath: ", ", ", "")) +
+ (if (javaDef.isEmpty) "" else javaDef.map(_.internalName).mkString("\nNote that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: ", ",", ""))
+
+ case FieldNotFound(name, descriptor, ownerInternalName, missingClass) =>
+ s"The field node $name$descriptor could not be found because the classfile $ownerInternalName cannot be found on the classpath." +
+ missingClass.map(c => s" Reason:\n$c").getOrElse("")
+ }
+
+ def emitWarning(settings: ScalaSettings): Boolean = this match {
+ case ClassNotFound(_, javaDefined) =>
+ if (javaDefined) settings.YoptWarningNoInlineMixed
+ else settings.YoptWarningNoInlineMissingBytecode
+
+ case m @ MethodNotFound(_, _, _, missing) =>
+ if (m.isArrayMethod) false
+ else settings.YoptWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings))
+
+ case FieldNotFound(_, _, _, missing) =>
+ settings.YoptWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings))
+ }
+ }
+
+ case class ClassNotFound(internalName: InternalName, definedInJavaSource: Boolean) extends MissingBytecodeWarning
+ case class MethodNotFound(name: String, descriptor: String, ownerInternalNameOrArrayDescriptor: InternalName, missingClasses: List[ClassNotFound]) extends MissingBytecodeWarning {
+ def isArrayMethod = ownerInternalNameOrArrayDescriptor.charAt(0) == '['
+ }
+ case class FieldNotFound(name: String, descriptor: String, ownerInternalName: InternalName, missingClass: Option[ClassNotFound]) extends MissingBytecodeWarning
+
+ sealed trait NoClassBTypeInfo extends OptimizerWarning {
+ override def toString = this match {
+ case NoClassBTypeInfoMissingBytecode(cause) =>
+ cause.toString
+
+ case NoClassBTypeInfoClassSymbolInfoFailedSI9111(classFullName) =>
+ s"Failed to get the type of class symbol $classFullName due to SI-9111."
+ }
+
+ def emitWarning(settings: ScalaSettings): Boolean = this match {
+ case NoClassBTypeInfoMissingBytecode(cause) => cause.emitWarning(settings)
+ case NoClassBTypeInfoClassSymbolInfoFailedSI9111(_) => settings.YoptWarningNoInlineMissingBytecode
+ }
+ }
+
+ case class NoClassBTypeInfoMissingBytecode(cause: MissingBytecodeWarning) extends NoClassBTypeInfo
+ case class NoClassBTypeInfoClassSymbolInfoFailedSI9111(classFullName: String) extends NoClassBTypeInfo
+
+ /**
+ * Used in the CallGraph for nodes where an issue occurred determining the callee information.
+ */
+ sealed trait CalleeInfoWarning extends OptimizerWarning {
+ def declarationClass: InternalName
+ def name: String
+ def descriptor: String
+
+ def warningMessageSignature = BackendReporting.methodSignature(declarationClass, name, descriptor)
+
+ override def toString = this match {
+ case MethodInlineInfoIncomplete(_, _, _, cause) =>
+ s"The inline information for $warningMessageSignature may be incomplete:\n" + cause
+
+ case MethodInlineInfoMissing(_, _, _, cause) =>
+ s"No inline information for method $warningMessageSignature could be found." +
+ cause.map(" Possible reason:\n" + _).getOrElse("")
+
+ case MethodInlineInfoError(_, _, _, cause) =>
+ s"Error while computing the inline information for method $warningMessageSignature:\n" + cause
+
+ case RewriteTraitCallToStaticImplMethodFailed(_, _, _, cause) =>
+ cause.toString
+ }
+
+ def emitWarning(settings: ScalaSettings): Boolean = this match {
+ case MethodInlineInfoIncomplete(_, _, _, cause) => cause.emitWarning(settings)
+
+ case MethodInlineInfoMissing(_, _, _, Some(cause)) => cause.emitWarning(settings)
+ case MethodInlineInfoMissing(_, _, _, None) => settings.YoptWarningNoInlineMissingBytecode
+
+ case MethodInlineInfoError(_, _, _, cause) => cause.emitWarning(settings)
+
+ case RewriteTraitCallToStaticImplMethodFailed(_, _, _, cause) => cause.emitWarning(settings)
+ }
+ }
+
+ case class MethodInlineInfoIncomplete(declarationClass: InternalName, name: String, descriptor: String, cause: ClassInlineInfoWarning) extends CalleeInfoWarning
+ case class MethodInlineInfoMissing(declarationClass: InternalName, name: String, descriptor: String, cause: Option[ClassInlineInfoWarning]) extends CalleeInfoWarning
+ case class MethodInlineInfoError(declarationClass: InternalName, name: String, descriptor: String, cause: NoClassBTypeInfo) extends CalleeInfoWarning
+ case class RewriteTraitCallToStaticImplMethodFailed(declarationClass: InternalName, name: String, descriptor: String, cause: OptimizerWarning) extends CalleeInfoWarning
+
+ sealed trait CannotInlineWarning extends OptimizerWarning {
+ def calleeDeclarationClass: InternalName
+ def name: String
+ def descriptor: String
+
+ def calleeMethodSig = BackendReporting.methodSignature(calleeDeclarationClass, name, descriptor)
+
+ override def toString = this match {
+ case IllegalAccessInstruction(_, _, _, callsiteClass, instruction) =>
+ s"The callee $calleeMethodSig contains the instruction ${AsmUtils.textify(instruction)}" +
+ s"\nthat would cause an IllegalAccessError when inlined into class $callsiteClass."
+
+ case IllegalAccessCheckFailed(_, _, _, callsiteClass, instruction, cause) =>
+ s"Failed to check if $calleeMethodSig can be safely inlined to $callsiteClass without causing an IllegalAccessError. Checking instruction ${AsmUtils.textify(instruction)} failed:\n" + cause
+
+ case MethodWithHandlerCalledOnNonEmptyStack(_, _, _, callsiteClass, callsiteName, callsiteDesc) =>
+ s"""The operand stack at the callsite in ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)} contains more values than the
+ |arguments expected by the callee $calleeMethodSig. These values would be discarded
+ |when entering an exception handler declared in the inlined method.""".stripMargin
+
+ case SynchronizedMethod(_, _, _) =>
+ s"Method $calleeMethodSig cannot be inlined because it is synchronized."
+
+ case StrictfpMismatch(_, _, _, callsiteClass, callsiteName, callsiteDesc) =>
+ s"""The callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)}
+ |does not have the same strictfp mode as the callee $calleeMethodSig.
+ """.stripMargin
+
+ case ResultingMethodTooLarge(_, _, _, callsiteClass, callsiteName, callsiteDesc) =>
+ s"""The size of the callsite method ${BackendReporting.methodSignature(callsiteClass, callsiteName, callsiteDesc)}
+ |would exceed the JVM method size limit after inlining $calleeMethodSig.
+ """.stripMargin
+ }
+
+ def emitWarning(settings: ScalaSettings): Boolean = this match {
+ case _: IllegalAccessInstruction | _: MethodWithHandlerCalledOnNonEmptyStack | _: SynchronizedMethod | _: StrictfpMismatch | _: ResultingMethodTooLarge =>
+ settings.YoptWarningEmitAtInlineFailed
+
+ case IllegalAccessCheckFailed(_, _, _, _, _, cause) =>
+ cause.emitWarning(settings)
+ }
+ }
+ case class IllegalAccessInstruction(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ callsiteClass: InternalName, instruction: AbstractInsnNode) extends CannotInlineWarning
+ case class IllegalAccessCheckFailed(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ callsiteClass: InternalName, instruction: AbstractInsnNode, cause: OptimizerWarning) extends CannotInlineWarning
+ case class MethodWithHandlerCalledOnNonEmptyStack(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning
+ case class SynchronizedMethod(calleeDeclarationClass: InternalName, name: String, descriptor: String) extends CannotInlineWarning
+ case class StrictfpMismatch(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning
+ case class ResultingMethodTooLarge(calleeDeclarationClass: InternalName, name: String, descriptor: String,
+ callsiteClass: InternalName, callsiteName: String, callsiteDesc: String) extends CannotInlineWarning
+
+ /**
+ * Used in the InlineInfo of a ClassBType, when some issue occurred obtaining the inline information.
+ */
+ sealed trait ClassInlineInfoWarning extends OptimizerWarning {
+ override def toString = this match {
+ case NoInlineInfoAttribute(internalName) =>
+ s"The Scala classfile $internalName does not have a ScalaInlineInfo attribute."
+
+ case ClassSymbolInfoFailureSI9111(classFullName) =>
+ s"Failed to get the type of a method of class symbol $classFullName due to SI-9111."
+
+ case ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass) =>
+ s"Failed to build the inline information: $missingClass."
+
+ case UnknownScalaInlineInfoVersion(internalName, version) =>
+ s"Cannot read ScalaInlineInfo version $version in classfile $internalName. Use a more recent compiler."
+ }
+
+ def emitWarning(settings: ScalaSettings): Boolean = this match {
+ case NoInlineInfoAttribute(_) => settings.YoptWarningNoInlineMissingScalaInlineInfoAttr
+ case ClassNotFoundWhenBuildingInlineInfoFromSymbol(cause) => cause.emitWarning(settings)
+ case ClassSymbolInfoFailureSI9111(_) => settings.YoptWarningNoInlineMissingBytecode
+ case UnknownScalaInlineInfoVersion(_, _) => settings.YoptWarningNoInlineMissingScalaInlineInfoAttr
+ }
+ }
+
+ case class NoInlineInfoAttribute(internalName: InternalName) extends ClassInlineInfoWarning
+ case class ClassSymbolInfoFailureSI9111(classFullName: String) extends ClassInlineInfoWarning
+ case class ClassNotFoundWhenBuildingInlineInfoFromSymbol(missingClass: ClassNotFound) extends ClassInlineInfoWarning
+ case class UnknownScalaInlineInfoVersion(internalName: InternalName, version: Int) extends ClassInlineInfoWarning
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
new file mode 100644
index 0000000000..03306f30aa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
@@ -0,0 +1,24 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import scala.reflect.internal.util.Statistics
+
+object BackendStats {
+ import Statistics.{newTimer, newSubTimer}
+ val bcodeTimer = newTimer("time in backend", "jvm")
+
+ val bcodeInitTimer = newSubTimer("bcode initialization", bcodeTimer)
+ val bcodeGenStat = newSubTimer("code generation", bcodeTimer)
+ val methodOptTimer = newSubTimer("intra-method optimizations", bcodeTimer)
+ val bcodeWriteTimer = newSubTimer("classfile writing", bcodeTimer)
+
+ def timed[T](timer: Statistics.Timer)(body: => T): T = {
+ val start = Statistics.startTimer(timer)
+ try body finally Statistics.stopTimer(timer, start)
+ }
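+
+ // Illustrative usage (editorial; `writeClassfile()` stands for a hypothetical piece of work):
+ //   val bytes = BackendStats.timed(BackendStats.bcodeWriteTimer) {
+ //     writeClassfile()
+ //   }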
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 8e6c09213f..1d29fdee10 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -1,6 +1,6 @@
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
+ * @author Martin Odersky
*/
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
new file mode 100644
index 0000000000..492fe3ae79
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
@@ -0,0 +1,292 @@
+package scala.tools.nsc
+package backend.jvm
+
+import scala.annotation.switch
+
+/**
+ * Core BTypes and some other definitions. The initialization of these definitions requires access
+ * to symbols / types (global).
+ *
+ * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To
+ * make sure the definitions are consistent with the symbols in the current run, the
+ * `initializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each
+ * compiler run.
+ *
+ * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The
+ * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. Instead, the
+ * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance.
+ *
+ * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When
+ * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the
+ * constructor will actually go through the proxy. The lazy vals make sure the instance is assigned
+ * in the proxy before the fields are initialized.
+ *
+ * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap
+ * could not be a perRunCache anymore: the classes defined here need to be in that map, they are
+ * added when the ClassBTypes are created. The per run cache removes them, so they would be missing
+ * in the second run.
+ */
+class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
+ import bTypes._
+ import global._
+ import rootMirror.{requiredClass, getClassIfDefined}
+ import definitions._
+
+ /**
+ * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above
+ * the first use of `classBTypeFromSymbol` because that method looks at the map.
+ */
+ lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map(
+ UnitClass -> UNIT,
+ BooleanClass -> BOOL,
+ CharClass -> CHAR,
+ ByteClass -> BYTE,
+ ShortClass -> SHORT,
+ IntClass -> INT,
+ LongClass -> LONG,
+ FloatClass -> FLOAT,
+ DoubleClass -> DOUBLE
+ )
+
+ lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void])
+ lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(BoxedBooleanClass)
+ lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(BoxedByteClass)
+ lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(BoxedShortClass)
+ lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(BoxedCharacterClass)
+ lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(BoxedIntClass)
+ lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(BoxedLongClass)
+ lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(BoxedFloatClass)
+ lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(BoxedDoubleClass)
+
+ /**
+ * Map from primitive types to their boxed class type. Useful when pushing class literals onto the
+ * operand stack (ldc instruction taking a class literal), see genConstant.
+ */
+ lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map(
+ UNIT -> BOXED_UNIT,
+ BOOL -> BOXED_BOOLEAN,
+ BYTE -> BOXED_BYTE,
+ SHORT -> BOXED_SHORT,
+ CHAR -> BOXED_CHAR,
+ INT -> BOXED_INT,
+ LONG -> BOXED_LONG,
+ FLOAT -> BOXED_FLOAT,
+ DOUBLE -> BOXED_DOUBLE
+ )
+
+ lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet
+
+ /**
+ * Maps the method symbol for a box method to the boxed type of the result. For example, the
+ * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`.
+ */
+ lazy val boxResultType: Map[Symbol, ClassBType] = {
+ for ((valueClassSym, boxMethodSym) <- currentRun.runDefinitions.boxMethod)
+ yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym))
+ }
+
+ /**
+ * Maps the method symbol for an unbox method to the primitive type of the result.
+ * For example, the method symbol for `Byte.unbox()` is mapped to the PrimitiveBType BYTE. */
+ lazy val unboxResultType: Map[Symbol, PrimitiveBType] = {
+ for ((valueClassSym, unboxMethodSym) <- currentRun.runDefinitions.unboxMethod)
+ yield unboxMethodSym -> primitiveTypeMap(valueClassSym)
+ }
+
+ /*
+ * RT_NOTHING and RT_NULL exist at run-time only. They are the bytecode-level manifestation (in
+ * method signatures only) of what shows up as NothingClass and NullClass, respectively, in Scala ASTs.
+ *
+ * Therefore, when RT_NOTHING or RT_NULL are to be emitted, a mapping is needed: the internal
+ * names of NothingClass and NullClass can't be emitted as-is.
+ */
+ lazy val RT_NOTHING : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.Nothing$])
+ lazy val RT_NULL : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.Null$])
+
+ lazy val ObjectReference : ClassBType = classBTypeFromSymbol(ObjectClass)
+ lazy val objArrayReference : ArrayBType = ArrayBType(ObjectReference)
+
+ lazy val StringReference : ClassBType = classBTypeFromSymbol(StringClass)
+ lazy val StringBuilderReference : ClassBType = classBTypeFromSymbol(StringBuilderClass)
+ lazy val ThrowableReference : ClassBType = classBTypeFromSymbol(ThrowableClass)
+ lazy val jlCloneableReference : ClassBType = classBTypeFromSymbol(JavaCloneableClass) // java/lang/Cloneable
+ lazy val jlNPEReference : ClassBType = classBTypeFromSymbol(NullPointerExceptionClass) // java/lang/NullPointerException
+ lazy val jioSerializableReference : ClassBType = classBTypeFromSymbol(JavaSerializableClass) // java/io/Serializable
+ lazy val scalaSerializableReference : ClassBType = classBTypeFromSymbol(SerializableClass) // scala/Serializable
+ lazy val classCastExceptionReference : ClassBType = classBTypeFromSymbol(ClassCastExceptionClass) // java/lang/ClassCastException
+
+ lazy val srBooleanRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BooleanRef])
+ lazy val srByteRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.ByteRef])
+ lazy val srCharRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.CharRef])
+ lazy val srIntRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.IntRef])
+ lazy val srLongRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LongRef])
+ lazy val srFloatRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.FloatRef])
+ lazy val srDoubleRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.DoubleRef])
+
+ lazy val hashMethodSym: Symbol = getMember(ScalaRunTimeModule, nme.hash_)
+
+ // TODO @lry avoid going through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
+ lazy val AndroidParcelableInterface : Symbol = getClassIfDefined("android.os.Parcelable")
+ lazy val AndroidCreatorClass : Symbol = getClassIfDefined("android.os.Parcelable$Creator")
+
+ lazy val BeanInfoAttr: Symbol = requiredClass[scala.beans.BeanInfo]
+
+ /* The Object => String overload. */
+ lazy val String_valueOf: Symbol = {
+ getMember(StringModule, nme.valueOf) filter (sym => sym.info.paramTypes match {
+ case List(pt) => pt.typeSymbol == ObjectClass
+ case _ => false
+ })
+ }
+
+ // scala.FunctionX and scala.runtime.AbstractFunctionX
+ lazy val FunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(FunctionClass(i)))(collection.breakOut)
+ lazy val AbstractFunctionReference : Vector[ClassBType] = (0 to MaxFunctionArity).map(i => classBTypeFromSymbol(AbstractFunctionClass(i)))(collection.breakOut)
+ lazy val AbstractFunctionArityMap : Map[ClassBType, Int] = AbstractFunctionReference.zipWithIndex.toMap
+
+ lazy val PartialFunctionReference : ClassBType = classBTypeFromSymbol(PartialFunctionClass)
+ lazy val AbstractPartialFunctionReference : ClassBType = classBTypeFromSymbol(AbstractPartialFunctionClass)
+
+ lazy val BoxesRunTime: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime])
+
+ /**
+ * Methods in scala.runtime.BoxesRunTime
+ */
+ lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map(
+ BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)),
+ BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)),
+ CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)),
+ SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)),
+ INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)),
+ LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)),
+ FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)),
+ DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE))
+ )
+
+ lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map(
+ BOOL -> MethodNameAndType("unboxToBoolean", MethodBType(List(ObjectReference), BOOL)),
+ BYTE -> MethodNameAndType("unboxToByte", MethodBType(List(ObjectReference), BYTE)),
+ CHAR -> MethodNameAndType("unboxToChar", MethodBType(List(ObjectReference), CHAR)),
+ SHORT -> MethodNameAndType("unboxToShort", MethodBType(List(ObjectReference), SHORT)),
+ INT -> MethodNameAndType("unboxToInt", MethodBType(List(ObjectReference), INT)),
+ LONG -> MethodNameAndType("unboxToLong", MethodBType(List(ObjectReference), LONG)),
+ FLOAT -> MethodNameAndType("unboxToFloat", MethodBType(List(ObjectReference), FLOAT)),
+ DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectReference), DOUBLE))
+ )
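+
+ // Illustrative example (editorial): the INT entries above correspond to the bytecode
+ //   INVOKESTATIC scala/runtime/BoxesRunTime.boxToInteger (I)Ljava/lang/Integer;
+ // for boxing, and
+ //   INVOKESTATIC scala/runtime/BoxesRunTime.unboxToInt (Ljava/lang/Object;)I
+ // for unboxing.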
+
+ lazy val typeOfArrayOp: Map[Int, BType] = {
+ import scalaPrimitives._
+ Map(
+ (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
+ (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
+ (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
+ (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
+ (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
+ (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
+ (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
+ (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _*
+ )
+ }
+}
+
+/**
+ * This trait makes some core BTypes available that don't depend on a Global instance. Some core
+ * BTypes are required to be accessible in the BTypes trait, which does not have access to Global.
+ *
+ * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example
+ * the type Symbol in
+ * def primitiveTypeMap: Map[Symbol, PrimitiveBType]
+ */
+trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] {
+ val bTypes: BTS
+ import bTypes._
+
+ def boxedClasses: Set[ClassBType]
+
+ def RT_NOTHING : ClassBType
+ def RT_NULL : ClassBType
+
+ def ObjectReference : ClassBType
+ def jlCloneableReference : ClassBType
+ def jioSerializableReference : ClassBType
+}
+
+/**
+ * See comment in class [[CoreBTypes]].
+ */
+final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] {
+ import bTypes._
+ import global._
+
+ private[this] var _coreBTypes: CoreBTypes[bTypes.type] = _
+ def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = {
+ _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]]
+ }
+
+ def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap
+
+ def BOXED_UNIT : ClassBType = _coreBTypes.BOXED_UNIT
+ def BOXED_BOOLEAN : ClassBType = _coreBTypes.BOXED_BOOLEAN
+ def BOXED_BYTE : ClassBType = _coreBTypes.BOXED_BYTE
+ def BOXED_SHORT : ClassBType = _coreBTypes.BOXED_SHORT
+ def BOXED_CHAR : ClassBType = _coreBTypes.BOXED_CHAR
+ def BOXED_INT : ClassBType = _coreBTypes.BOXED_INT
+ def BOXED_LONG : ClassBType = _coreBTypes.BOXED_LONG
+ def BOXED_FLOAT : ClassBType = _coreBTypes.BOXED_FLOAT
+ def BOXED_DOUBLE : ClassBType = _coreBTypes.BOXED_DOUBLE
+
+ def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses
+
+ def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive
+
+ def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType
+
+ def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType
+
+ def RT_NOTHING : ClassBType = _coreBTypes.RT_NOTHING
+ def RT_NULL : ClassBType = _coreBTypes.RT_NULL
+
+ def ObjectReference : ClassBType = _coreBTypes.ObjectReference
+ def objArrayReference : ArrayBType = _coreBTypes.objArrayReference
+
+ def StringReference : ClassBType = _coreBTypes.StringReference
+ def StringBuilderReference : ClassBType = _coreBTypes.StringBuilderReference
+ def ThrowableReference : ClassBType = _coreBTypes.ThrowableReference
+ def jlCloneableReference : ClassBType = _coreBTypes.jlCloneableReference
+ def jlNPEReference : ClassBType = _coreBTypes.jlNPEReference
+ def jioSerializableReference : ClassBType = _coreBTypes.jioSerializableReference
+ def scalaSerializableReference : ClassBType = _coreBTypes.scalaSerializableReference
+ def classCastExceptionReference : ClassBType = _coreBTypes.classCastExceptionReference
+
+ def srBooleanRef : ClassBType = _coreBTypes.srBooleanRef
+ def srByteRef : ClassBType = _coreBTypes.srByteRef
+ def srCharRef : ClassBType = _coreBTypes.srCharRef
+ def srIntRef : ClassBType = _coreBTypes.srIntRef
+ def srLongRef : ClassBType = _coreBTypes.srLongRef
+ def srFloatRef : ClassBType = _coreBTypes.srFloatRef
+ def srDoubleRef : ClassBType = _coreBTypes.srDoubleRef
+
+ def hashMethodSym: Symbol = _coreBTypes.hashMethodSym
+
+ def AndroidParcelableInterface : Symbol = _coreBTypes.AndroidParcelableInterface
+ def AndroidCreatorClass : Symbol = _coreBTypes.AndroidCreatorClass
+
+ def BeanInfoAttr: Symbol = _coreBTypes.BeanInfoAttr
+
+ def String_valueOf: Symbol = _coreBTypes.String_valueOf
+
+ def FunctionReference : Vector[ClassBType] = _coreBTypes.FunctionReference
+ def AbstractFunctionReference : Vector[ClassBType] = _coreBTypes.AbstractFunctionReference
+ def AbstractFunctionArityMap : Map[ClassBType, Int] = _coreBTypes.AbstractFunctionArityMap
+
+ def PartialFunctionReference : ClassBType = _coreBTypes.PartialFunctionReference
+ def AbstractPartialFunctionReference : ClassBType = _coreBTypes.AbstractPartialFunctionReference
+
+ def BoxesRunTime: ClassBType = _coreBTypes.BoxesRunTime
+
+ def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo
+ def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo
+
+ def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index a389816caf..f866c0d038 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -9,6 +9,7 @@ package backend.jvm
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
+import scala.tools.nsc.backend.jvm.opt.InlineInfoAttribute
import scala.tools.nsc.symtab._
import scala.tools.asm
import asm.Label
@@ -20,12 +21,15 @@ import scala.annotation.tailrec
*
* Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf
*/
-abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { self =>
+abstract class GenASM extends SubComponent with BytecodeWriters { self =>
import global._
import icodes._
import icodes.opcodes._
import definitions._
+ val bCodeAsmCommon: BCodeAsmCommon[global.type] = new BCodeAsmCommon(global)
+ import bCodeAsmCommon._
+
// Strangely I can't find this in the asm code
// 255, but reserving 1 for "this"
final val MaximumJvmParameters = 254
@@ -96,24 +100,83 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
+ private def isJavaEntryPoint(icls: IClass) = {
+ val sym = icls.symbol
+ def fail(msg: String, pos: Position = sym.pos) = {
+ reporter.warning(sym.pos,
+ sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
+ " Reason: " + msg
+ // TODO: make this next claim true, if possible
+ // by generating valid main methods as static in module classes
+ // not sure what the jvm allows here
+ // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+ )
+ false
+ }
+ def failNoForwarder(msg: String) = {
+ fail(msg + ", which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ enteringErasure {
+ val companion = sym.linkedClassOfClass
+
+ if (hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+ // Now either succeed, or issue some additional warnings for things which look like
+ // attempts to be java main methods.
+ else (possibles exists isJavaMainMethod) || {
+ possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail("don't know what this is: " + tp, m.pos)
+ }
+ }
+ }
+ }
+ }
+ }
+
override def run() {
if (settings.debug)
inform("[running phase " + name + " on icode]")
- if (settings.Xdce)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ if (settings.Xdce) {
+ val classes = icodes.classes.keys.toList // copy to avoid mutating the map while iterating
+ for (sym <- classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
log(s"Optimizer eliminated ${sym.fullNameString}")
deadCode.elidedClosures += sym
icodes.classes -= sym
}
+ }
// For predictably ordered error messages.
var sortedClasses = classes.values.toList sortBy (_.symbol.fullName)
// Warn when classes will overwrite one another on case-insensitive systems.
for ((_, v1 :: v2 :: _) <- sortedClasses groupBy (_.symbol.javaClassName.toString.toLowerCase)) {
- v1.cunit.warning(v1.symbol.pos,
+ reporter.warning(v1.symbol.pos,
s"Class ${v1.symbol.javaClassName} differs only in case from ${v2.symbol.javaClassName}. " +
"Such classes will overwrite one another on case-insensitive filesystems.")
}
@@ -141,7 +204,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
try emitFor(c)
catch {
case e: FileConflictException =>
- c.cunit.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}")
+ reporter.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}")
}
sortedClasses = sortedClasses.tail
classes -= c.symbol // GC opportunity
@@ -381,6 +444,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case "jvm-1.5" => asm.Opcodes.V1_5
case "jvm-1.6" => asm.Opcodes.V1_6
case "jvm-1.7" => asm.Opcodes.V1_7
+ case "jvm-1.8" => asm.Opcodes.V1_8
}
private val majorVersion: Int = (classfileVersion & 0xFF)
@@ -469,7 +533,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
bytecodeWriter.writeClass(label, jclassName, arr, outF)
} catch {
- case e: java.lang.RuntimeException if e != null && (e.getMessage contains "too large!") =>
+ case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") =>
reporter.error(sym.pos,
s"Could not write class $jclassName because it exceeds JVM code size limits. ${e.getMessage}")
}
@@ -532,7 +596,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val x = innerClassSymbolFor(s)
if(x ne NoSymbol) {
assert(x.isClass, "not an inner-class symbol")
- val isInner = !x.rawowner.isPackageClass
+ // impl classes are considered top-level, see comment in BTypes
+ val isInner = !considerAsTopLevelImplementationArtifact(s) && !x.rawowner.isPackageClass
if (isInner) {
innerClassBuffer += x
collectInnerClass(x.rawowner)
@@ -614,31 +679,70 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor, isMirror: Boolean = false) {
/* The outer name for this inner class. Note that it returns null
* when the inner class should not get an index in the constant pool.
* That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
*/
def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
+ if (isAnonymousOrLocalClass(innerSym))
null
else {
val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ if (isTopLevelModule(innerSym.rawowner)) "" + TermName(outerName).dropModule
else outerName
}
}
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
+ def innerName(innerSym: Symbol): String = {
+ // phase travel necessary: after flatten, the name includes the name of outer classes.
+ // if some outer name contains $anon, a non-anon class is considered anon.
+ if (exitingPickler(innerSym.isAnonymousClass || innerSym.isAnonymousFunction)) null
+ else innerSym.rawname + innerSym.moduleSuffix
+ }
+
+ val linkedClass = exitingPickler(csym.linkedClassOfClass) // linkedCoC does not work properly in late phases
+
+ innerClassBuffer ++= {
+ val members = exitingPickler(memberClassesForInnerClassTable(csym))
+ // lambdalift makes all classes (also local, anonymous) members of their enclosing class
+ val allNested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesForInnerClassTable(csym))
+ val nested = {
+ // Classes nested in value classes are nested in the companion at this point. For InnerClass /
+ // EnclosingMethod, we use the value class as the outer class. So we remove nested classes
+ // from the companion that were originally nested in the value class.
+ if (exitingPickler(linkedClass.isDerivedValueClass)) allNested.filterNot(classOriginallyNestedInClass(_, linkedClass))
+ else allNested
+ }
+
+ // for the mirror class, we take the members of the companion module class (Java compat, see doc in BTypes.scala).
+ // for module classes, we filter out those members.
+ if (isMirror) members
+ else if (isTopLevelModule(csym)) nested diff members
+ else nested
+ }
+
+ if (!considerAsTopLevelImplementationArtifact(csym)) {
+ // If this is a top-level non-impl class, add members of the companion object. These are the
+ // classes for which we change the InnerClass entry to allow using them from Java.
+ // We exclude impl classes: if the classfile for the impl class exists on the classpath, a
+ // linkedClass symbol is found for which isTopLevelModule is true, so we end up searching
+ // members of that weird impl-class-module-class-symbol. That search probably cannot return
+ // any classes, but it's better to exclude it.
+ if (linkedClass != NoSymbol && isTopLevelModule(linkedClass)) {
+ // phase travel to exitingPickler: this makes sure that memberClassesForInnerClassTable only
+ // sees member classes, not local classes that were lifted by lambdalift.
+ innerClassBuffer ++= exitingPickler(memberClassesForInnerClassTable(linkedClass))
+ }
- // add inner classes which might not have been referenced yet
- exitingErasure {
- for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
+ // Classes nested in value classes are nested in the companion at this point. For InnerClass /
+ // EnclosingMethod we use the value class as enclosing class. Here we search nested classes
+ // in the companion that were originally nested in the value class, and we add them as nested
+ // in the value class.
+ if (linkedClass != NoSymbol && exitingPickler(csym.isDerivedValueClass)) {
+ val moduleMemberClasses = exitingPhase(currentRun.lambdaliftPhase)(memberClassesForInnerClassTable(linkedClass))
+ innerClassBuffer ++= moduleMemberClasses.filter(classOriginallyNestedInClass(_, csym))
+ }
}
val allInners: List[Symbol] = innerClassBuffer.toList filterNot deadCode.elidedClosures
@@ -652,7 +756,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
val flagsWithFinal: Int = mkFlags(
- if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ // See the comment in BTypes on when a class is marked static in the InnerClass table.
+ if (isOriginallyStaticOwner(innerSym.originalOwner)) asm.Opcodes.ACC_STATIC else 0,
javaFlags(innerSym),
if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
@@ -794,15 +899,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (ThrownException(exc) <- excs.distinct)
yield javaName(exc)
- /** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(ClassfileAnnotationClass) &&
- annot.args.isEmpty &&
- !annot.matches(DeprecatedAttr)
-
def getCurrentCUnit(): CompilationUnit
def getGenericSignature(sym: Symbol, owner: Symbol) = self.getGenericSignature(sym, owner, getCurrentCUnit())
@@ -864,7 +960,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = cw.visitAnnotation(descriptor(typ), true)
+ val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -873,7 +969,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = mw.visitAnnotation(descriptor(typ), true)
+ val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -882,7 +978,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = fw.visitAnnotation(descriptor(typ), true)
+ val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -894,7 +990,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
annot <- annots) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot))
emitAssocs(pannVisitor, assocs)
}
}
@@ -975,7 +1071,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
index += jparamType.getSize()
}
- mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor)
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor, false)
mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
@@ -1061,7 +1157,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
asm.Opcodes.INVOKEVIRTUAL,
moduleName,
androidFieldName.toString,
- asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
+ asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*),
+ false
)
// PUTSTATIC `thisName`.CREATOR;
@@ -1142,43 +1239,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
- def serialVUID: Option[Long] = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
- case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
- }
-
- private def getSuperInterfaces(c: IClass): Array[String] = {
-
- // Additional interface parents based on annotations and other cues
- def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
- case RemoteAttr => RemoteInterfaceClass
- case _ => NoSymbol
- }
-
- /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
- */
- def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
- var rest = lstIfaces
- var leaves = List.empty[Symbol]
- while(!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if(!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
- val ps = c.symbol.info.parents
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
- val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct
-
- if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
- else mkArray(minimizeInterfaces(superInterfaces) map javaName)
- }
+ def serialVUID: Option[Long] = genBCode.serialVUID(clasz.symbol)
var clasz: IClass = _ // this var must be assigned only by genClass()
var jclass: asm.ClassWriter = _ // the classfile being emitted
@@ -1200,7 +1261,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val ps = c.symbol.info.parents
val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
- val ifaces = getSuperInterfaces(c)
+ val ifaces: Array[String] = implementedInterfaces(c.symbol).map(javaName)(collection.breakOut)
val thisSignature = getGenericSignature(c.symbol, c.symbol.owner)
val flags = mkFlags(
@@ -1219,10 +1280,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
null /* SourceDebugExtension */)
}
- val enclM = getEnclosingMethodAttribute()
- if(enclM != null) {
- val EnclMethodEntry(className, methodName, methodType) = enclM
- jclass.visitOuterClass(className, methodName, methodType.getDescriptor)
+ enclosingMethodAttribute(clasz.symbol, javaName, javaType(_).getDescriptor) match {
+ case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) =>
+ jclass.visitOuterClass(className, methodName, methodDescriptor)
+ case _ => ()
}
// typestate: entering mode with valid call sequences:
@@ -1232,6 +1293,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jclass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
emitAnnotations(jclass, c.symbol.annotations ++ ssa)
+ if (!settings.YskipInlineInfoAttribute.value)
+ jclass.visitAttribute(InlineInfoAttribute(buildInlineInfoFromClassSymbol(c.symbol, javaName, javaType(_).getDescriptor)))
+
// typestate: entering mode with valid call sequences:
// ( visitInnerClass | visitField | visitMethod )* visitEnd
@@ -1283,45 +1347,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
}
- /**
- * @param owner internal name of the enclosing class of the class.
- *
- * @param name the name of the method that contains the class.
-
- * @param methodType the method that contains the class.
- */
- case class EnclMethodEntry(owner: String, name: String, methodType: asm.Type)
-
- /**
- * @return null if the current class is not internal to a method
- *
- * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
- * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
- * A class may have no more than one EnclosingMethod attribute.
- *
- */
- private def getEnclosingMethodAttribute(): EnclMethodEntry = { // JVMS 4.7.7
- var res: EnclMethodEntry = null
- val clazz = clasz.symbol
- val sym = clazz.originalEnclosingMethod
- if (sym.isMethod) {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
- res = EnclMethodEntry(javaName(sym.enclClass), javaName(sym), javaType(sym))
- } else if (clazz.isAnonymousClass) {
- val enclClass = clazz.rawowner
- assert(enclClass.isClass, enclClass)
- val sym = enclClass.primaryConstructor
- if (sym == NoSymbol) {
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass))
- } else {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
- res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
- }
- }
-
- res
- }
-
def genField(f: IField) {
debuglog("Adding field: " + f.symbol.fullName)
@@ -1364,7 +1389,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
if (m.params.size > MaximumJvmParameters) {
- getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ reporter.error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
return
}
@@ -1402,7 +1427,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// TODO param names: (m.params map (p => javaName(p.sym)))
- // typestate: entering mode with valid call sequences:
+ // typestate: entering mode with valid call sequences: (see ASM Guide, 3.2.1)
// [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
emitAnnotations(jmethod, others)
@@ -1447,7 +1472,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
genCode(m, emitVars, hasStaticBitSet)
- jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ // visitMaxs needs to be called according to the protocol. The arguments will be ignored
+ // since maximums (and stack map frames) are computed. See ASM Guide, Section 3.2.1,
+ // section "ClassWriter options"
+ jmethod.visitMaxs(0, 0)
}
jmethod.visitEnd()
@@ -1523,7 +1551,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (isStaticModule(clasz.symbol)) {
clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
- thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid)
+ thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid, false)
}
if (isParcelableClass) { legacyAddCreatorCode(clinit) }
@@ -1667,16 +1695,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) }
def invokespecial(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+ jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc, false)
}
def invokestatic(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+ jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc, false)
}
def invokeinterface(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+ jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc, true)
}
def invokevirtual(owner: String, name: String, desc: String) {
- jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+ jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc, false)
}
def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
@@ -2026,7 +2054,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
seen ::= LocVarEntry(lv, start, end)
case _ =>
// TODO SI-6049 track down the cause for these.
- debugwarn(s"$iPos: Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6191")
+ devWarning(s"$iPos: Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6191")
}
}
@@ -2396,7 +2424,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
// If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
- debugwarn("Had a jump only block that wasn't collapsed")
+ devWarning("Had a jump only block that wasn't collapsed")
emit(asm.Opcodes.NOP)
}
@@ -2806,7 +2834,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
- addInnerClasses(modsym, mirrorClass)
+ addInnerClasses(modsym, mirrorClass, isMirror = true)
mirrorClass.visitEnd()
writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
}
@@ -2855,8 +2883,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var fieldList = List[String]()
for (f <- clasz.fields if f.symbol.hasGetter;
- g = f.symbol.getter(clasz.symbol);
- s = f.symbol.setter(clasz.symbol)
+ g = f.symbol.getterIn(clasz.symbol);
+ s = f.symbol.setterIn(clasz.symbol)
if g.isPublic && !(f.symbol.name startsWith "$")
) {
// inserting $outer breaks the bean
@@ -2926,7 +2954,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// invoke the superclass constructor, which will do the
// necessary java reflection and create Method objects.
- constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+ constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor, false)
constructor.visitInsn(asm.Opcodes.RETURN)
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
@@ -2941,7 +2969,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
} // end of class JBeanInfoBuilder
/** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for.
- * In particualr, IMethod.normalize() doesn't collapseJumpChains().
+ * In particular, IMethod.normalize() doesn't collapseJumpChains().
*
* TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
*/
@@ -3108,13 +3136,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val (remappings, cycles) = detour partition {case (source, target) => source != target}
for ((source, target) <- remappings) {
debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
- if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
+ if (m.startBlock == source) devWarning("startBlock should have been re-wired by now")
}
val sources = remappings.keySet
val targets = remappings.values.toSet
val intersection = sources intersect targets
- if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
+ if (intersection.nonEmpty) devWarning(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
for ((source, _) <- cycles) {
debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
@@ -3156,7 +3184,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- // remove the unusued exception handler references
+ // remove the unused exception handler references
if (settings.debug)
for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
m.exh = m.exh filterNot unusedExceptionHandlers
@@ -3243,7 +3271,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if(!isValidSignature) {
- unit.warning(sym.pos,
+ reporter.warning(sym.pos,
"""|compiler bug: created invalid generic signature for %s in %s
|signature: %s
|if this is reproducible, please report bug at https://issues.scala-lang.org/
@@ -3256,7 +3284,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
- unit.warning(sym.pos,
+ reporter.warning(sym.pos,
"""|compiler bug: created generic signature for %s in %s that does not conform to its erasure
|signature: %s
|original type: %s
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index 193100474c..c6ee36d7b2 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -9,10 +9,11 @@ package tools.nsc
package backend
package jvm
-import scala.collection.{ mutable, immutable }
-import scala.annotation.switch
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
import scala.tools.asm
+import scala.tools.asm.tree.ClassNode
/*
* Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
@@ -46,6 +47,9 @@ import scala.tools.asm
abstract class GenBCode extends BCodeSyncAndTry {
import global._
+ import bTypes._
+ import coreBTypes._
+
val phaseName = "jvm"
override def newPhase(prev: Phase) = new BCodePhase(prev)
@@ -130,7 +134,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
return
}
else {
- try { visit(item) }
+ try { withCurrentUnit(item.cunit)(visit(item)) }
catch {
case ex: Throwable =>
ex.printStackTrace()
@@ -156,7 +160,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
case None =>
caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
case Some(dupClassSym) =>
- item.cunit.warning(
+ reporter.warning(
claszSymbol.pos,
s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
"Such classes will overwrite one another on case-insensitive filesystems."
@@ -165,7 +169,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
// -------------- mirror class, if needed --------------
val mirrorC =
- if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) {
+ if (isTopLevelModuleClass(claszSymbol)) {
if (claszSymbol.companionClass == NoSymbol) {
mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
} else {
@@ -210,8 +214,23 @@ abstract class GenBCode extends BCodeSyncAndTry {
* - converting the plain ClassNode to byte array and placing it on queue-3
*/
class Worker2 {
+ def runGlobalOptimizations(): Unit = {
+ import scala.collection.convert.decorateAsScala._
+ q2.asScala foreach {
+ case Item2(_, _, plain, _, _) =>
+ // skip mirror / bean: we don't inline into them, and they are not used in the plain class
+ if (plain != null) callGraph.addClass(plain)
+ }
+ bTypes.inliner.runInliner()
+ }
+
+ def localOptimizations(classNode: ClassNode): Unit = {
+ BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode))
+ }
def run() {
+ if (settings.YoptInlinerEnabled) runGlobalOptimizations()
+
while (true) {
val item = q2.poll
if (item.isPoison) {
@@ -219,8 +238,10 @@ abstract class GenBCode extends BCodeSyncAndTry {
return
}
else {
- try { addToQ3(item) }
- catch {
+ try {
+ localOptimizations(item.plain)
+ addToQ3(item)
+ } catch {
case ex: Throwable =>
ex.printStackTrace()
error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}")
@@ -243,6 +264,12 @@ abstract class GenBCode extends BCodeSyncAndTry {
val plainC = SubItem3(plain.name, getByteArray(plain))
val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean))
+ if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) {
+ if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes)
+ AsmUtils.traceClass(plainC.jclassBytes)
+ if (beanC != null) AsmUtils.traceClass(beanC.jclassBytes)
+ }
+
q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder)
}
@@ -251,7 +278,12 @@ abstract class GenBCode extends BCodeSyncAndTry {
var arrivalPos = 0
- /*
+ /**
+ * The `run` method is overridden because the backend has a different data flow than the default
+ * phase: the backend does not transform compilation units one by one, but operates on all units
+ * of the same run at once. This allows cross-unit optimizations and running some stages of the backend
+ * concurrently on multiple units.
+ *
* A run of the BCodePhase phase comprises:
*
* (a) set-up steps (most notably supporting maps in `BCodeTypes`,
@@ -263,10 +295,17 @@ abstract class GenBCode extends BCodeSyncAndTry {
*
*/
override def run() {
+ val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer)
+ val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer)
arrivalPos = 0 // just in case
- scalaPrimitives.init
- initBCodeTypes()
+ scalaPrimitives.init()
+ bTypes.initializeCoreBTypes()
+ bTypes.javaDefinedClasses.clear()
+ bTypes.javaDefinedClasses ++= currentRun.symSource collect {
+ case (sym, _) if sym.isJavaDefined => sym.javaBinaryName.toString
+ }
+ Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart)
// initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated.
bytecodeWriter = initBytecodeWriter(cleanup.getEntryPoints)
@@ -278,6 +317,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
// closing output files.
bytecodeWriter.close()
+ Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart)
/* TODO Bytecode can be verified (now that all classfiles have been written to disk)
*
@@ -291,9 +331,6 @@ abstract class GenBCode extends BCodeSyncAndTry {
* (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()`
*
*/
-
- // clearing maps
- clearBCodeTypes()
}
/*
@@ -306,9 +343,15 @@ abstract class GenBCode extends BCodeSyncAndTry {
private def buildAndSendToDisk(needsOutFolder: Boolean) {
feedPipeline1()
+ val genStart = Statistics.startTimer(BackendStats.bcodeGenStat)
(new Worker1(needsOutFolder)).run()
+ Statistics.stopTimer(BackendStats.bcodeGenStat, genStart)
+
(new Worker2).run()
+
+ val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer)
drainQ3()
+ Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart)
}
@@ -379,3 +422,13 @@ abstract class GenBCode extends BCodeSyncAndTry {
} // end of class BCodePhase
} // end of class GenBCode
+
+object GenBCode {
+ def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+
+ final val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
+ final val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
+
+ val CLASS_CONSTRUCTOR_NAME = "<clinit>"
+ val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
deleted file mode 100644
index 01c4ff5a52..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Jason Zaugg
- */
-
-package scala.tools.nsc
-package backend.jvm
-import scala.tools.nsc.symtab._
-
-/** Code shared between the erstwhile legacy backend (aka GenJVM)
- * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
- * more here, but for now I'm starting with the refactorings that are either
- * straightforward to review or necessary for maintenance.
- */
-trait GenJVMASM {
- val global: Global
- import global._
- import icodes._
- import definitions._
-
- val ExcludedForwarderFlags = {
- import Flags._
- // Should include DEFERRED but this breaks findMember.
- ( SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
- }
-
- protected def isJavaEntryPoint(icls: IClass) = {
- val sym = icls.symbol
- def fail(msg: String, pos: Position = sym.pos) = {
- icls.cunit.warning(sym.pos,
- sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
- " Reason: " + msg
- // TODO: make this next claim true, if possible
- // by generating valid main methods as static in module classes
- // not sure what the jvm allows here
- // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
- )
- false
- }
- def failNoForwarder(msg: String) = {
- fail(msg + ", which means no static forwarder can be generated.\n")
- }
- val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
- val hasApproximate = possibles exists { m =>
- m.info match {
- case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
- case _ => false
- }
- }
- // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
- hasApproximate && {
- // Before erasure so we can identify generic mains.
- enteringErasure {
- val companion = sym.linkedClassOfClass
-
- if (hasJavaMainMethod(companion))
- failNoForwarder("companion contains its own main method")
- else if (companion.tpe.member(nme.main) != NoSymbol)
- // this is only because forwarders aren't smart enough yet
- failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
- else if (companion.isTrait)
- failNoForwarder("companion is a trait")
- // Now either succeeed, or issue some additional warnings for things which look like
- // attempts to be java main methods.
- else (possibles exists isJavaMainMethod) || {
- possibles exists { m =>
- m.info match {
- case PolyType(_, _) =>
- fail("main methods cannot be generic.")
- case MethodType(params, res) =>
- if (res.typeSymbol :: params exists (_.isAbstractType))
- fail("main methods cannot refer to type parameters or abstract types.", m.pos)
- else
- isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
- case tp =>
- fail("don't know what this is: " + tp, m.pos)
- }
- }
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
new file mode 100644
index 0000000000..607b7145d6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
@@ -0,0 +1,173 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.tools.asm
+import asm.tree._
+import scala.collection.convert.decorateAsScala._
+import scala.tools.asm.Attribute
+import scala.tools.nsc.backend.jvm.BackendReporting._
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassFileLookup
+import BytecodeUtils._
+import ByteCodeRepository._
+import BTypes.InternalName
+import java.util.concurrent.atomic.AtomicLong
+
+/**
+ * The ByteCodeRepository provides utilities to read the bytecode of classfiles from the compilation
+ * classpath. Parsed classes are cached in the `classes` map.
+ *
+ * @param classPath The compiler classpath where classfiles are searched and read from.
+ * @param classes Cache for parsed ClassNodes. Also stores the source of the bytecode:
+ * [[Classfile]] if read from `classPath`, [[CompilationUnit]] if the bytecode
+ * corresponds to a class being compiled.
+ * The `Long` field encodes the age of the node in the map, which allows removing
+ * old entries when the map grows too large.
+ * For Java classes in mixed compilation, the map contains an error message: the backend
+ * generates no ClassNode for them, and there is also no classfile on the classpath to parse.
+ */
+class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJavaSourceDefined: InternalName => Boolean, val classes: collection.concurrent.Map[InternalName, Either[ClassNotFound, (ClassNode, Source, Long)]]) {
+
+ private val maxCacheSize = 1500
+ private val targetSize = 500
+
+ private val idCounter = new AtomicLong(0)
+
+ /**
+ * Prevent the code repository from growing too large. Profiling reveals that the average size
+ * of a ClassNode is about 30 KB. I observed having 17k+ classes in the cache, i.e., 500 MB.
+ *
+ * We can only remove classes with `Source == Classfile`; those can be parsed again if requested.
+ */
+ private def limitCacheSize(): Unit = {
+ if (classes.count(c => c._2.isRight && c._2.right.get._2 == Classfile) > maxCacheSize) {
+ val removeId = idCounter.get - targetSize
+ val toRemove = classes.iterator.collect({
+ case (name, Right((_, Classfile, id))) if id < removeId => name
+ }).toList
+ toRemove foreach classes.remove
+ }
+ }
+
+ def add(classNode: ClassNode, source: Source) = {
+ classes(classNode.name) = Right((classNode, source, idCounter.incrementAndGet()))
+ }
+
+ /**
+ * The class node and source for an internal name. If the class node is not yet available, it is
+ * parsed from the classfile on the compile classpath.
+ */
+ def classNodeAndSource(internalName: InternalName): Either[ClassNotFound, (ClassNode, Source)] = {
+ val r = classes.getOrElseUpdate(internalName, {
+ limitCacheSize()
+ parseClass(internalName).map((_, Classfile, idCounter.incrementAndGet()))
+ })
+ r.map(v => (v._1, v._2))
+ }
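+
+ // Illustrative usage sketch (assumes a `repo: ByteCodeRepository` in scope and that scala/Option
+ // is on the compile classpath):
+ //   repo.classNodeAndSource("scala/Option") match {
+ //     case Right((node, source)) => // source is Classfile unless scala/Option is being compiled
+ //     case Left(notFound)        => // missing from both the current run and the classpath
+ //   }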
+
+ /**
+ * The class node for an internal name. If the class node is not yet available, it is parsed from
+ * the classfile on the compile classpath.
+ */
+ def classNode(internalName: InternalName): Either[ClassNotFound, ClassNode] = classNodeAndSource(internalName).map(_._1)
+
+ /**
+ * The field node for a field matching `name` and `descriptor`, accessed in class `classInternalName`.
+ * The declaration of the field may be in one of the superclasses.
+ *
+ * @return The [[FieldNode]] of the requested field and the [[InternalName]] of its declaring
+ * class, or an error message if the field could not be found
+ */
+ def fieldNode(classInternalName: InternalName, name: String, descriptor: String): Either[FieldNotFound, (FieldNode, InternalName)] = {
+ def fieldNodeImpl(parent: InternalName): Either[FieldNotFound, (FieldNode, InternalName)] = {
+ def msg = s"The field node $name$descriptor could not be found in class $classInternalName or any of its superclasses."
+ classNode(parent) match {
+ case Left(e) => Left(FieldNotFound(name, descriptor, classInternalName, Some(e)))
+ case Right(c) =>
+ c.fields.asScala.find(f => f.name == name && f.desc == descriptor) match {
+ case Some(f) => Right((f, parent))
+ case None =>
+ if (c.superName == null) Left(FieldNotFound(name, descriptor, classInternalName, None))
+ else fieldNode(c.superName, name, descriptor)
+ }
+ }
+ }
+ fieldNodeImpl(classInternalName)
+ }
+
+ /**
+ * The method node for a method matching `name` and `descriptor`, accessed in class `classInternalName`.
+ * The declaration of the method may be in one of the parents.
+ *
+ * @return The [[MethodNode]] of the requested method and the [[InternalName]] of its declaring
+ * class, or an error message if the method could not be found.
+ */
+ def methodNode(ownerInternalNameOrArrayDescriptor: String, name: String, descriptor: String): Either[MethodNotFound, (MethodNode, InternalName)] = {
+ // on failure, returns a list of class names that could not be found on the classpath
+ def methodNodeImpl(ownerInternalName: InternalName): Either[List[ClassNotFound], (MethodNode, InternalName)] = {
+ classNode(ownerInternalName) match {
+ case Left(e) => Left(List(e))
+ case Right(c) =>
+ c.methods.asScala.find(m => m.name == name && m.desc == descriptor) match {
+ case Some(m) => Right((m, ownerInternalName))
+ case None => findInParents(Option(c.superName) ++: c.interfaces.asScala.toList, Nil)
+ }
+ }
+ }
+
+ // find the MethodNode in one of the parent classes
+ def findInParents(parents: List[InternalName], failedClasses: List[ClassNotFound]): Either[List[ClassNotFound], (MethodNode, InternalName)] = parents match {
+ case x :: xs => methodNodeImpl(x).left.flatMap(failed => findInParents(xs, failed ::: failedClasses))
+ case Nil => Left(failedClasses)
+ }
+
+ // In a MethodInsnNode, the `owner` field may be an array descriptor, for example when invoking `clone`. We don't have a method node to return in this case.
+ if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[')
+ Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, Nil))
+ else
+ methodNodeImpl(ownerInternalNameOrArrayDescriptor).left.map(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, _))
+ }
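+
+ // Illustrative usage sketch (assumes a `repo: ByteCodeRepository` in scope): resolving a method
+ // declared in a superclass, e.g. Object.wait accessed through java/lang/String:
+ //   repo.methodNode("java/lang/String", "wait", "()V") match {
+ //     case Right((m, owner)) => // m is the MethodNode, owner is the declaring class ("java/lang/Object")
+ //     case Left(notFound)    => // some classfile along the parent chain could not be found
+ //   }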
+
+ private def parseClass(internalName: InternalName): Either[ClassNotFound, ClassNode] = {
+ val fullName = internalName.replace('/', '.')
+ classPath.findClassFile(fullName) map { classFile =>
+ val classNode = new asm.tree.ClassNode()
+ val classReader = new asm.ClassReader(classFile.toByteArray)
+
+ // Passing the InlineInfoAttributePrototype makes the ClassReader invoke the specific `read`
+ // method of the InlineInfoAttribute class, instead of putting the byte array into a generic
+ // Attribute.
+ // We don't need frames when inlining, but we want to keep the local variable table, so we
+ // don't use SKIP_DEBUG.
+ classReader.accept(classNode, Array[Attribute](InlineInfoAttributePrototype), asm.ClassReader.SKIP_FRAMES)
+ // SKIP_FRAMES leaves line number nodes. Remove them because they are not correct after
+ // inlining.
+ // TODO: we also need to remove them for classes that are not parsed from classfiles; why not simplify and do it once when inlining?
+ // OR: instead of skipping line numbers for inlined code, write a SourceDebugExtension
+ // attribute that contains JSR-45 data that encodes debugging info.
+ // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.11
+ // https://jcp.org/aboutJava/communityprocess/final/jsr045/index.html
+ removeLineNumberNodes(classNode)
+ classNode
+ } match {
+ case Some(node) => Right(node)
+ case None => Left(ClassNotFound(internalName, isJavaSourceDefined(internalName)))
+ }
+ }
+}
+
+object ByteCodeRepository {
+ /**
+ * The source of a ClassNode in the ByteCodeRepository. Can be either [[CompilationUnit]] if the
+ * class is being compiled or [[Classfile]] if the class was parsed from the compilation classpath.
+ */
+ sealed trait Source
+ object CompilationUnit extends Source
+ object Classfile extends Source
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
new file mode 100644
index 0000000000..201ab15177
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -0,0 +1,340 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.{tailrec, switch}
+import scala.collection.mutable
+import scala.reflect.internal.util.Collections._
+import scala.tools.asm.commons.CodeSizeEvaluator
+import scala.tools.asm.tree.analysis._
+import scala.tools.asm.{MethodWriter, ClassWriter, Label, Opcodes}
+import scala.tools.asm.tree._
+import scala.collection.convert.decorateAsScala._
+import GenBCode._
+import scala.collection.convert.decorateAsScala._
+import scala.collection.convert.decorateAsJava._
+import scala.tools.nsc.backend.jvm.BTypes._
+
+object BytecodeUtils {
+
+ // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.9.1
+ final val maxJVMMethodSize = 65535
+
+ // 5% margin, more than enough for the instructions added by the inliner (store / load args, null check for instance methods)
+ final val maxMethodSizeAfterInline = maxJVMMethodSize - (maxJVMMethodSize / 20)
+
+ object Goto {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (instruction.getOpcode == Opcodes.GOTO) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object JumpNonJsr {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (isJumpNonJsr(instruction)) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object ConditionalJump {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (isConditionalJump(instruction)) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object VarInstruction {
+ def unapply(instruction: AbstractInsnNode): Option[VarInsnNode] = {
+ if (isVarInstruction(instruction)) Some(instruction.asInstanceOf[VarInsnNode])
+ else None
+ }
+
+ }
+
+ def isJumpNonJsr(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ // JSR is deprecated in classfile version 50, disallowed in 51. Historically, it was used to implement finally.
+ op == Opcodes.GOTO || isConditionalJump(instruction)
+ }
+
+ def isConditionalJump(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ (op >= Opcodes.IFEQ && op <= Opcodes.IF_ACMPNE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL
+ }
+
+ def isReturn(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ op >= Opcodes.IRETURN && op <= Opcodes.RETURN
+ }
+
+ def isVarInstruction(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ (op >= Opcodes.ILOAD && op <= Opcodes.ALOAD) || (op >= Opcodes.ISTORE && op <= Opcodes.ASTORE)
+ }
+
+ def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0
+
+ def isConstructor(methodNode: MethodNode): Boolean = {
+ methodNode.name == INSTANCE_CONSTRUCTOR_NAME || methodNode.name == CLASS_CONSTRUCTOR_NAME
+ }
+
+ def isStaticMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_STATIC) != 0
+
+ def isAbstractMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_ABSTRACT) != 0
+
+ def isSynchronizedMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_SYNCHRONIZED) != 0
+
+ def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_NATIVE) != 0
+
+ def isFinalClass(classNode: ClassNode): Boolean = (classNode.access & Opcodes.ACC_FINAL) != 0
+
+ def isFinalMethod(methodNode: MethodNode): Boolean = (methodNode.access & (Opcodes.ACC_FINAL | Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC)) != 0
+
+ def isStrictfpMethod(methodNode: MethodNode): Boolean = (methodNode.access & Opcodes.ACC_STRICT) != 0
+
+ def nextExecutableInstruction(instruction: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = {
+ var result = instruction
+ do { result = result.getNext }
+ while (result != null && !isExecutable(result) && !alsoKeep(result))
+ Option(result)
+ }
+
+ def sameTargetExecutableInstruction(a: JumpInsnNode, b: JumpInsnNode): Boolean = {
+ // Compare the next executable instruction instead of the labels. Identifies a, b as the same target:
+ // LabelNode(a)
+ // LabelNode(b)
+ // Instr
+ nextExecutableInstruction(a.label) == nextExecutableInstruction(b.label)
+ }
+
+ def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode) {
+ val instructions = method.instructions
+ val op = jump.getOpcode
+ if ((op >= Opcodes.IFEQ && op <= Opcodes.IFGE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL) {
+ instructions.insert(jump, getPop(1))
+ } else if ((op >= Opcodes.IF_ICMPEQ && op <= Opcodes.IF_ICMPLE) || op == Opcodes.IF_ACMPEQ || op == Opcodes.IF_ACMPNE) {
+ instructions.insert(jump, getPop(1))
+ instructions.insert(jump, getPop(1))
+ } else {
+ // we can't remove JSR: its execution does not only jump, it also adds a return address to the stack
+ assert(jump.getOpcode == Opcodes.GOTO)
+ }
+ instructions.remove(jump)
+ }
+
+ def finalJumpTarget(source: JumpInsnNode): LabelNode = {
+ @tailrec def followGoto(label: LabelNode, seenLabels: Set[LabelNode]): LabelNode = nextExecutableInstruction(label) match {
+ case Some(Goto(dest)) =>
+ if (seenLabels(dest.label)) dest.label
+ else followGoto(dest.label, seenLabels + dest.label)
+
+ case _ => label
+ }
+ followGoto(source.label, Set(source.label))
+ }
+
+ def negateJumpOpcode(jumpOpcode: Int): Int = (jumpOpcode: @switch) match {
+ case Opcodes.IFEQ => Opcodes.IFNE
+ case Opcodes.IFNE => Opcodes.IFEQ
+
+ case Opcodes.IFLT => Opcodes.IFGE
+ case Opcodes.IFGE => Opcodes.IFLT
+
+ case Opcodes.IFGT => Opcodes.IFLE
+ case Opcodes.IFLE => Opcodes.IFGT
+
+ case Opcodes.IF_ICMPEQ => Opcodes.IF_ICMPNE
+ case Opcodes.IF_ICMPNE => Opcodes.IF_ICMPEQ
+
+ case Opcodes.IF_ICMPLT => Opcodes.IF_ICMPGE
+ case Opcodes.IF_ICMPGE => Opcodes.IF_ICMPLT
+
+ case Opcodes.IF_ICMPGT => Opcodes.IF_ICMPLE
+ case Opcodes.IF_ICMPLE => Opcodes.IF_ICMPGT
+
+ case Opcodes.IF_ACMPEQ => Opcodes.IF_ACMPNE
+ case Opcodes.IF_ACMPNE => Opcodes.IF_ACMPEQ
+
+ case Opcodes.IFNULL => Opcodes.IFNONNULL
+ case Opcodes.IFNONNULL => Opcodes.IFNULL
+ }
+
+ def getPop(size: Int): InsnNode = {
+ val op = if (size == 1) Opcodes.POP else Opcodes.POP2
+ new InsnNode(op)
+ }
+
+ def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
+ val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
+ def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref)
+
+ method.instructions.iterator().asScala foreach {
+ case jump: JumpInsnNode => add(jump.label, jump)
+ case line: LineNumberNode => add(line.start, line)
+ case switch: LookupSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch)
+ case switch: TableSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch)
+ case _ =>
+ }
+ if (method.localVariables != null) {
+ method.localVariables.iterator().asScala.foreach(l => { add(l.start, l); add(l.end, l) })
+ }
+ if (method.tryCatchBlocks != null) {
+ method.tryCatchBlocks.iterator().asScala.foreach(l => { add(l.start, l); add(l.handler, l); add(l.end, l) })
+ }
+
+ res.toMap
+ }
+
+ def substituteLabel(reference: AnyRef, from: LabelNode, to: LabelNode): Unit = {
+ def substList(list: java.util.List[LabelNode]) = {
+ foreachWithIndex(list.asScala.toList) { case (l, i) =>
+ if (l == from) list.set(i, to)
+ }
+ }
+ reference match {
+ case jump: JumpInsnNode => jump.label = to
+ case line: LineNumberNode => line.start = to
+ case switch: LookupSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to
+ case switch: TableSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to
+ case local: LocalVariableNode =>
+ if (local.start == from) local.start = to
+ if (local.end == from) local.end = to
+ case handler: TryCatchBlockNode =>
+ if (handler.start == from) handler.start = to
+ if (handler.handler == from) handler.handler = to
+ if (handler.end == from) handler.end = to
+ }
+ }
+
+ /**
+ * In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM
+ * framework only computes these values during bytecode generation.
+ *
+ * Since there's currently no better way, we run a bytecode generator on the method and extract
+ * the computed values. This required changes to the ASM codebase:
+ * - the [[MethodWriter]] class was made public
+ * - accessors for maxLocals / maxStack were added to the MethodWriter class
+ *
+ * We could probably make this faster (and allocate less memory) by hacking the ASM framework
+ * more: create a subclass of MethodWriter with a /dev/null byteVector. Another option would be
+ * to create a separate visitor for computing those values, duplicating the functionality from the
+ * MethodWriter.
+ */
+ def computeMaxLocalsMaxStack(method: MethodNode): Unit = {
+ val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS)
+ val excs = method.exceptions.asScala.toArray
+ val mw = cw.visitMethod(method.access, method.name, method.desc, method.signature, excs).asInstanceOf[MethodWriter]
+ method.accept(mw)
+ method.maxLocals = mw.getMaxLocals
+ method.maxStack = mw.getMaxStack
+ }
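+
+ // Note: MethodNodes assembled or rewritten in memory (for example after cloning callee
+ // instructions into a caller) have stale or zero maxLocals / maxStack, so this must be called
+ // before handing such a method to an ASM Analyzer (see the AsmAnalyzer wrapper below).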
+
+ def codeSizeOKForInlining(caller: MethodNode, callee: MethodNode): Boolean = {
+ // Looking at the implementation of CodeSizeEvaluator, all instructions except tableswitch and
+ // lookupswitch are <= 8 bytes. These should be rare enough for 8 to be an OK rough upper bound.
+ def roughUpperBound(methodNode: MethodNode): Int = methodNode.instructions.size * 8
+
+ def maxSize(methodNode: MethodNode): Int = {
+ val eval = new CodeSizeEvaluator(null)
+ methodNode.accept(eval)
+ eval.getMaxSize
+ }
+
+ (roughUpperBound(caller) + roughUpperBound(callee) > maxMethodSizeAfterInline) &&
+ (maxSize(caller) + maxSize(callee) > maxMethodSizeAfterInline)
+ }
+
+ def removeLineNumberNodes(classNode: ClassNode): Unit = {
+ for (m <- classNode.methods.asScala) removeLineNumberNodes(m.instructions)
+ }
+
+ def removeLineNumberNodes(instructions: InsnList): Unit = {
+ val iter = instructions.iterator()
+ while (iter.hasNext) iter.next() match {
+ case _: LineNumberNode => iter.remove()
+ case _ =>
+ }
+ }
+
+ def cloneLabels(methodNode: MethodNode): Map[LabelNode, LabelNode] = {
+ methodNode.instructions.iterator().asScala.collect({
+ case labelNode: LabelNode => (labelNode, newLabelNode)
+ }).toMap
+ }
+
+ /**
+ * Create a new [[LabelNode]] with a correctly associated [[Label]].
+ */
+ def newLabelNode: LabelNode = {
+ val label = new Label
+ val labelNode = new LabelNode(label)
+ label.info = labelNode
+ labelNode
+ }
+
+ /**
+ * Clone the instructions in `methodNode` into a new [[InsnList]], mapping labels according to
+ * the `labelMap`. Returns the new instruction list and a map from old to new instructions.
+ */
+ def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode]): (InsnList, Map[AbstractInsnNode, AbstractInsnNode]) = {
+ val javaLabelMap = labelMap.asJava
+ val result = new InsnList
+ var map = Map.empty[AbstractInsnNode, AbstractInsnNode]
+ for (ins <- methodNode.instructions.iterator.asScala) {
+ val cloned = ins.clone(javaLabelMap)
+ result add cloned
+ map += ((ins, cloned))
+ }
+ (result, map)
+ }
+
+ /**
+ * Clone the local variable descriptors of `methodNode` and map their `start` and `end` labels
+ * according to the `labelMap`.
+ */
+ def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], prefix: String): List[LocalVariableNode] = {
+ methodNode.localVariables.iterator().asScala.map(localVariable => new LocalVariableNode(
+ prefix + localVariable.name,
+ localVariable.desc,
+ localVariable.signature,
+ labelMap(localVariable.start),
+ labelMap(localVariable.end),
+ localVariable.index
+ )).toList
+ }
+
+ /**
+ * Clone the local try/catch blocks of `methodNode` and map their `start` and `end` and `handler`
+ * labels according to the `labelMap`.
+ */
+ def cloneTryCatchBlockNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode]): List[TryCatchBlockNode] = {
+ methodNode.tryCatchBlocks.iterator().asScala.map(tryCatch => new TryCatchBlockNode(
+ labelMap(tryCatch.start),
+ labelMap(tryCatch.end),
+ labelMap(tryCatch.handler),
+ tryCatch.`type`
+ )).toList
+ }
+
+ /**
+ * A wrapper to make ASM's Analyzer a bit easier to use.
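+ *
+ * A usage sketch (assuming `methodNode` is a MethodNode of the class `classInternalName` and
+ * `insn` is one of its instructions):
+ * {{{
+ *   val a = new AsmAnalyzer(methodNode, classInternalName)
+ *   a.frameAt(insn).getStackSize // stack height at `insn`
+ *   a.frameAt(insn).peekDown(0) // value on top of the stack at `insn`, see `frame extensions` below
+ * }}}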
+ */
+ class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, interpreter: Interpreter[V] = new BasicInterpreter) {
+ val analyzer = new Analyzer(interpreter)
+ analyzer.analyze(classInternalName, methodNode)
+ def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction))
+ }
+
+ implicit class `frame extensions`[V <: Value](val frame: Frame[V]) extends AnyVal {
+ def peekDown(n: Int): V = {
+ val topIndex = frame.getStackSize - 1
+ frame.getStack(topIndex - n)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
new file mode 100644
index 0000000000..028f0f8fa6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -0,0 +1,195 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.reflect.internal.util.{NoPosition, Position}
+import scala.tools.asm.tree._
+import scala.collection.convert.decorateAsScala._
+import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InternalName}
+import scala.tools.nsc.backend.jvm.BackendReporting._
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
+import ByteCodeRepository.{Source, CompilationUnit}
+
+class CallGraph[BT <: BTypes](val btypes: BT) {
+ import btypes._
+
+ val callsites: collection.concurrent.Map[MethodInsnNode, Callsite] = recordPerRunCache(collection.concurrent.TrieMap.empty[MethodInsnNode, Callsite])
+
+ def addClass(classNode: ClassNode): Unit = {
+ for (m <- classNode.methods.asScala; callsite <- analyzeCallsites(m, classBTypeFromClassNode(classNode)))
+ callsites(callsite.callsiteInstruction) = callsite
+ }
+
+ def analyzeCallsites(methodNode: MethodNode, definingClass: ClassBType): List[Callsite] = {
+
+ case class CallsiteInfo(safeToInline: Boolean, safeToRewrite: Boolean,
+ annotatedInline: Boolean, annotatedNoInline: Boolean,
+ warning: Option[CalleeInfoWarning])
+
+ /**
+ * Analyze a callsite and gather meta-data that can be used for inlining decisions.
+ */
+ def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, receiverTypeInternalName: InternalName, calleeSource: Source): CallsiteInfo = {
+ val methodSignature = calleeMethodNode.name + calleeMethodNode.desc
+
+ try {
+ // The inlineInfo.methodInfos of a ClassBType holds an InlineInfo for each method *declared*
+ // within a class (not for inherited methods). Since we already have the classBType of the
+ // callee, we only check its methodInfos for the MethodInlineInfo; we should find it there.
+ calleeDeclarationClassBType.info.orThrow.inlineInfo.methodInfos.get(methodSignature) match {
+ case Some(methodInlineInfo) =>
+ val canInlineFromSource = compilerSettings.YoptInlineGlobal || calleeSource == CompilationUnit
+
+ val isAbstract = BytecodeUtils.isAbstractMethod(calleeMethodNode)
+
+ // (1) A non-final method can be safe to inline if the receiver type is a final subclass. Example:
+ // class A { @inline def f = 1 }; object B extends A; B.f // can be inlined
+ //
+ // TODO: type analysis can render more calls statically resolved. Example:
+ // (new A).f // can be inlined, the receiver type is known to be exactly A.
+ val isStaticallyResolved: Boolean = {
+ methodInlineInfo.effectivelyFinal ||
+ classBTypeFromParsedClassfile(receiverTypeInternalName).info.orThrow.inlineInfo.isEffectivelyFinal // (1)
+ }
+
+ val isRewritableTraitCall = isStaticallyResolved && methodInlineInfo.traitMethodWithStaticImplementation
+
+ val warning = calleeDeclarationClassBType.info.orThrow.inlineInfo.warning.map(
+ MethodInlineInfoIncomplete(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, _))
+
+ // (1) For invocations of final trait methods, the callee isStaticallyResolved but also
+ // abstract. Such a callee is not safe to inline - it needs to be re-written to the
+ // static impl method first (safeToRewrite).
+ // (2) Final trait methods can be rewritten from the interface to the static implementation
+ // method to enable inlining.
+ CallsiteInfo(
+ safeToInline =
+ canInlineFromSource &&
+ isStaticallyResolved && // (1)
+ !isAbstract &&
+ !BytecodeUtils.isConstructor(calleeMethodNode) &&
+ !BytecodeUtils.isNativeMethod(calleeMethodNode),
+ safeToRewrite = canInlineFromSource && isRewritableTraitCall, // (2)
+ annotatedInline = methodInlineInfo.annotatedInline,
+ annotatedNoInline = methodInlineInfo.annotatedNoInline,
+ warning = warning)
+
+ case None =>
+ val warning = MethodInlineInfoMissing(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, calleeDeclarationClassBType.info.orThrow.inlineInfo.warning)
+ CallsiteInfo(false, false, false, false, Some(warning))
+ }
+ } catch {
+ case Invalid(noInfo: NoClassBTypeInfo) =>
+ val warning = MethodInlineInfoError(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, noInfo)
+ CallsiteInfo(false, false, false, false, Some(warning))
+ }
+ }
+
+ // TODO: run dataflow analyses to make the call graph more precise
+ // - producers to get forwarded parameters (ForwardedParam)
+ // - typeAnalysis for more precise argument types, more precise callee
+ // - nullAnalysis to skip emitting the receiver-null-check when inlining
+
+ // TODO: for now we run a basic analyzer to get the stack height at the call site.
+ // once we run a more elaborate analyzer (types, nullness), we can get the stack height out of there.
+ localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName)
+ val analyzer = new AsmAnalyzer(methodNode, definingClass.internalName)
+
+ methodNode.instructions.iterator.asScala.collect({
+ case call: MethodInsnNode =>
+ val callee: Either[OptimizerWarning, Callee] = for {
+ (method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)]
+ (declarationClassNode, source) <- byteCodeRepository.classNodeAndSource(declarationClass): Either[OptimizerWarning, (ClassNode, Source)]
+ declarationClassBType = classBTypeFromClassNode(declarationClassNode)
+ } yield {
+ val CallsiteInfo(safeToInline, safeToRewrite, annotatedInline, annotatedNoInline, warning) = analyzeCallsite(method, declarationClassBType, call.owner, source)
+ Callee(
+ callee = method,
+ calleeDeclarationClass = declarationClassBType,
+ safeToInline = safeToInline,
+ safeToRewrite = safeToRewrite,
+ annotatedInline = annotatedInline,
+ annotatedNoInline = annotatedNoInline,
+ calleeInfoWarning = warning)
+ }
+
+ val argInfos = if (callee.isLeft) Nil else {
+ // TODO: for now it's Nil, because we don't run any data flow analysis
+ // there's no point in using the parameter types, that doesn't add any information.
+ // NOTE: need to run the same analyses after inlining, to re-compute the argInfos for the
+ // new duplicated callsites, see Inliner.inline
+ Nil
+ }
+
+ Callsite(
+ callsiteInstruction = call,
+ callsiteMethod = methodNode,
+ callsiteClass = definingClass,
+ callee = callee,
+ argInfos = argInfos,
+ callsiteStackHeight = analyzer.frameAt(call).getStackSize,
+ callsitePosition = callsitePositions.getOrElse(call, NoPosition)
+ )
+ }).toList
+ }
+
+ /**
+ * A callsite in the call graph.
+ *
+ * @param callsiteInstruction The invocation instruction
+ * @param callsiteMethod The method containing the callsite
+ * @param callsiteClass The class containing the callsite
+ * @param callee The callee, as it appears in the invocation instruction. For virtual
+ * calls, an override of the callee might be invoked. Also, the callee
+ * can be abstract. Contains a warning message if the callee MethodNode
+ * cannot be found in the bytecode repository.
+ * @param argInfos Information about the invocation receiver and arguments
+ * @param callsiteStackHeight The stack height at the callsite, required by the inliner
+ * @param callsitePosition The source position of the callsite, used for inliner warnings.
+ */
+ final case class Callsite(callsiteInstruction: MethodInsnNode, callsiteMethod: MethodNode, callsiteClass: ClassBType,
+ callee: Either[OptimizerWarning, Callee], argInfos: List[ArgInfo],
+ callsiteStackHeight: Int, callsitePosition: Position) {
+ override def toString =
+ "Invocation of" +
+ s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" +
+ s"@${callsiteMethod.instructions.indexOf(callsiteInstruction)}" +
+ s" in ${callsiteClass.internalName}.${callsiteMethod.name}"
+ }
+
+ /**
+ * Information about invocation arguments, obtained through data flow analysis of the callsite method.
+ */
+ sealed trait ArgInfo
+ final case class ArgTypeInfo(argType: BType, isPrecise: Boolean, knownNotNull: Boolean) extends ArgInfo
+ final case class ForwardedParam(index: Int) extends ArgInfo
+ // can be extended, e.g., with constant types
+
+ /**
+ * A callee in the call graph.
+ *
+ * @param callee The callee, as it appears in the invocation instruction. For
+ * virtual calls, an override of the callee might be invoked. Also,
+ * the callee can be abstract.
+ * @param calleeDeclarationClass The class in which the callee is declared
+ * @param safeToInline True if the callee can be safely inlined: it cannot be overridden,
+ * and the inliner settings (project / global) allow inlining it.
+ * @param safeToRewrite True if the callee is the interface method of a concrete trait method
+ * that can be safely re-written to the static implementation method.
+ * @param annotatedInline True if the callee is annotated @inline
+ * @param annotatedNoInline True if the callee is annotated @noinline
+ * @param calleeInfoWarning An inliner warning if some information was not available while
+ * gathering the information about this callee.
+ */
+ final case class Callee(callee: MethodNode, calleeDeclarationClass: ClassBType,
+ safeToInline: Boolean, safeToRewrite: Boolean,
+ annotatedInline: Boolean, annotatedNoInline: Boolean,
+ calleeInfoWarning: Option[CalleeInfoWarning]) {
+ assert(!(safeToInline && safeToRewrite), s"A callee of ${callee.name} can be either safeToInline or safeToRewrite, but not both.")
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala
new file mode 100644
index 0000000000..e7dd5abc57
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala
@@ -0,0 +1,148 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.tools.asm._
+import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo}
+import scala.tools.nsc.backend.jvm.BackendReporting.UnknownScalaInlineInfoVersion
+
+/**
+ * This attribute stores the InlineInfo for a ClassBType as an independent classfile attribute.
+ * The compiler does so for every class being compiled.
+ *
+ * The reason is that a precise InlineInfo can only be obtained if the symbol for a class is available.
+ * For example, we need to know if a method is final in Scala's terms, or if it has the @inline annotation.
+ * Looking up a class symbol for a given class filename is brittle (name-mangling).
+ *
+ * The attribute is also helpful for inlining mixin methods. The mixin phase only adds mixin method
+ * symbols to classes that are being compiled. For all other class symbols, there are no mixin members.
+ * However, the inliner requires an InlineInfo for inlining mixin members. That problem is solved by
+ * reading the InlineInfo from this attribute.
+ *
+ * In principle we could encode the InlineInfo into a Java annotation (instead of a classfile attribute).
+ * However, an attribute allows us to save many bits. In particular, note that the strings in an
+ * InlineInfo are serialized as references to constants in the constant pool, and those strings
+ * (traitImplClassSelfType, method names, method signatures) would exist in there anyway. So the
+ * ScalaInlineInfo attribute remains relatively compact.
+ */
+case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineInfoAttribute.attributeName) {
+ /**
+ * Not sure what this method is good for, it is not invoked anywhere in the ASM framework. However,
+ * the example in the ASM manual also overrides it to `false` for custom attributes, so it might be
+ * a good idea.
+ */
+ override def isUnknown: Boolean = false
+
+ /**
+ * Serialize the `inlineInfo` into a byte array. Strings are added to the constant pool and serialized
+ * as references.
+ */
+ override def write(cw: ClassWriter, code: Array[Byte], len: Int, maxStack: Int, maxLocals: Int): ByteVector = {
+ val result = new ByteVector()
+
+ result.putByte(InlineInfoAttribute.VERSION)
+
+ var hasSelfIsFinal = 0
+ if (inlineInfo.isEffectivelyFinal) hasSelfIsFinal |= 1
+ if (inlineInfo.traitImplClassSelfType.isDefined) hasSelfIsFinal |= 2
+ result.putByte(hasSelfIsFinal)
+
+ for (selfInternalName <- inlineInfo.traitImplClassSelfType) {
+ result.putShort(cw.newUTF8(selfInternalName))
+ }
+
+ // The method count fits in a short (the methods_count in a classfile is also a short)
+ result.putShort(inlineInfo.methodInfos.size)
+
+ // Sort the methodInfos for stability of classfiles
+ for ((nameAndType, info) <- inlineInfo.methodInfos.toList.sortBy(_._1)) {
+ val (name, desc) = nameAndType.span(_ != '(')
+ // Name and desc are added separately because a NameAndType entry also stores them separately.
+ // This makes sure that we use the existing constant pool entries for the method.
+ result.putShort(cw.newUTF8(name))
+ result.putShort(cw.newUTF8(desc))
+
+ var inlineInfo = 0
+ if (info.effectivelyFinal) inlineInfo |= 1
+ if (info.traitMethodWithStaticImplementation) inlineInfo |= 2
+ if (info.annotatedInline) inlineInfo |= 4
+ if (info.annotatedNoInline) inlineInfo |= 8
+ result.putByte(inlineInfo)
+ }
+
+ result
+ }
+
+ /**
+ * De-serialize the attribute into an InlineInfo. The attribute starts at cr.b(off), but we don't
+ * need to access that array directly, we can use the `read` methods provided by the ClassReader.
+ *
+ * `buf` is a pre-allocated character array that is guaranteed to be long enough to hold any
+ * string of the constant pool. So we can use it to invoke `cr.readUTF8`.
+ */
+ override def read(cr: ClassReader, off: Int, len: Int, buf: Array[Char], codeOff: Int, labels: Array[Label]): InlineInfoAttribute = {
+ var next = off
+
+ def nextByte() = { val r = cr.readByte(next) ; next += 1; r }
+ def nextUTF8() = { val r = cr.readUTF8(next, buf); next += 2; r }
+ def nextShort() = { val r = cr.readShort(next) ; next += 2; r }
+
+ val version = nextByte()
+ if (version == 1) {
+ val hasSelfIsFinal = nextByte()
+ val isFinal = (hasSelfIsFinal & 1) != 0
+ val hasSelf = (hasSelfIsFinal & 2) != 0
+
+ val self = if (hasSelf) {
+ val selfName = nextUTF8()
+ Some(selfName)
+ } else {
+ None
+ }
+
+ val numEntries = nextShort()
+ val infos = (0 until numEntries).map(_ => {
+ val name = nextUTF8()
+ val desc = nextUTF8()
+
+ val inlineInfo = nextByte()
+ val isFinal = (inlineInfo & 1) != 0
+ val traitMethodWithStaticImplementation = (inlineInfo & 2) != 0
+ val isInline = (inlineInfo & 4) != 0
+ val isNoInline = (inlineInfo & 8) != 0
+ (name + desc, MethodInlineInfo(isFinal, traitMethodWithStaticImplementation, isInline, isNoInline))
+ }).toMap
+
+ InlineInfoAttribute(InlineInfo(self, isFinal, infos, None))
+ } else {
+ val msg = UnknownScalaInlineInfoVersion(cr.getClassName, version)
+ InlineInfoAttribute(BTypes.EmptyInlineInfo.copy(warning = Some(msg)))
+ }
+ }
+}
+
+object InlineInfoAttribute {
+ /**
+ * [u1] version
+ * [u1] isEffectivelyFinal (<< 0), hasTraitImplClassSelfType (<< 1)
+ * [u2]? traitImplClassSelfType (reference)
+ * [u2] numMethodEntries
+ * then numMethodEntries times:
+ * [u2] name (reference)
+ * [u2] descriptor (reference)
+ * [u1] effectivelyFinal (<< 0), traitMethodWithStaticImplementation (<< 1), hasInlineAnnotation (<< 2), hasNoInlineAnnotation (<< 3)
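+ *
+ * Illustrative example: a class that is not effectively final, has no traitImplClassSelfType and
+ * declares a single method `f()I` that is effectively final and annotated @inline serializes as
+ * 0x01 (version), 0x00 (flags), 0x0001 (one entry), name ref, descriptor ref, 0x05 (1 | 4).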
+ */
+ final val VERSION: Byte = 1
+
+ final val attributeName = "ScalaInlineInfo"
+}
+
+/**
+ * In order to instruct the ASM framework to de-serialize the ScalaInlineInfo attribute, we need
+ * to pass a prototype instance when running the class reader.
+ */
+object InlineInfoAttributePrototype extends InlineInfoAttribute(InlineInfo(null, false, null, null))
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
new file mode 100644
index 0000000000..ac5c9ce2e6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -0,0 +1,681 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.tailrec
+import scala.tools.asm
+import asm.Opcodes._
+import asm.tree._
+import scala.collection.convert.decorateAsScala._
+import scala.collection.convert.decorateAsJava._
+import AsmUtils._
+import BytecodeUtils._
+import collection.mutable
+import scala.tools.asm.tree.analysis.SourceInterpreter
+import BackendReporting._
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+
+class Inliner[BT <: BTypes](val btypes: BT) {
+ import btypes._
+ import callGraph._
+
+ def eliminateUnreachableCodeAndUpdateCallGraph(methodNode: MethodNode, definingClass: InternalName): Unit = {
+ localOpt.minimalRemoveUnreachableCode(methodNode, definingClass) foreach {
+ case invocation: MethodInsnNode => callGraph.callsites.remove(invocation)
+ case _ =>
+ }
+ }
+
+ def runInliner(): Unit = {
+ rewriteFinalTraitMethodInvocations()
+
+ for (request <- collectAndOrderInlineRequests) {
+ val Right(callee) = request.callee // collectAndOrderInlineRequests returns callsites with a known callee
+
+ // Inlining a method can create unreachable code. Example:
+ // def f = throw e
+ // def g = f; println() // println is unreachable after inlining f
+ // If we have an inline request for a call to g, and f has been already inlined into g, we
+ // need to run DCE before inlining g.
+ eliminateUnreachableCodeAndUpdateCallGraph(callee.callee, callee.calleeDeclarationClass.internalName)
+
+ // DCE above removes unreachable callsites from the call graph. If the inlining request denotes
+ // such an eliminated callsite, do nothing.
+ if (callGraph.callsites contains request.callsiteInstruction) {
+ val r = inline(request.callsiteInstruction, request.callsiteStackHeight, request.callsiteMethod, request.callsiteClass,
+ callee.callee, callee.calleeDeclarationClass,
+ receiverKnownNotNull = false, keepLineNumbers = false)
+
+ for (warning <- r) {
+ if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) {
+ val annotWarn = if (callee.annotatedInline) " is annotated @inline but" else ""
+ val msg = s"${BackendReporting.methodSignature(callee.calleeDeclarationClass.internalName, callee.callee)}$annotWarn could not be inlined:\n$warning"
+ backendReporting.inlinerWarning(request.callsitePosition, msg)
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Ordering for inline requests. Required to make the inliner deterministic:
+ * - Always remove the same request when breaking inlining cycles
+ * - Perform inlinings in a consistent order
+ */
+ object callsiteOrdering extends Ordering[Callsite] {
+ override def compare(x: Callsite, y: Callsite): Int = {
+ val cls = x.callsiteClass.internalName compareTo y.callsiteClass.internalName
+ if (cls != 0) return cls
+
+ val name = x.callsiteMethod.name compareTo y.callsiteMethod.name
+ if (name != 0) return name
+
+ val desc = x.callsiteMethod.desc compareTo y.callsiteMethod.desc
+ if (desc != 0) return desc
+
+ def pos(c: Callsite) = c.callsiteMethod.instructions.indexOf(c.callsiteInstruction)
+ pos(x) - pos(y)
+ }
+ }
+
+ /**
+ * Select callsites from the call graph that should be inlined. The resulting list of inlining
+ * requests is allowed to have cycles, and the callsites can appear in any order.
+ */
+ def selectCallsitesForInlining: List[Callsite] = {
+ callsites.valuesIterator.filter({
+ case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ val res = doInlineCallsite(callsite)
+
+ if (!res) {
+ if (annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) {
+ // if the callsite is annotated @inline, we report an inline warning even if the underlying
+ // reason is, for example, mixed compilation (which has a separate -Yopt-warning flag).
+ def initMsg = s"${BackendReporting.methodSignature(calleeDeclClass.internalName, callee)} is annotated @inline but cannot be inlined"
+ def warnMsg = warning.map(" Possible reason:\n" + _).getOrElse("")
+ if (doRewriteTraitCallsite(callsite))
+ backendReporting.inlinerWarning(pos, s"$initMsg: the trait method call could not be rewritten to the static implementation method." + warnMsg)
+ else if (!safeToInline)
+ backendReporting.inlinerWarning(pos, s"$initMsg: the method is not final and may be overridden." + warnMsg)
+ else
+ backendReporting.inlinerWarning(pos, s"$initMsg." + warnMsg)
+ } else if (warning.isDefined && warning.get.emitWarning(compilerSettings)) {
+ // when annotatedInline is false, and there is some warning, the callsite metadata is possibly incomplete.
+ backendReporting.inlinerWarning(pos, s"there was a problem determining if method ${callee.name} can be inlined: \n"+ warning.get)
+ }
+ }
+
+ res
+
+ case Callsite(ins, _, _, Left(warning), _, _, pos) =>
+ if (warning.emitWarning(compilerSettings))
+ backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning")
+ false
+ }).toList
+ }
+
+ /**
+ * The current inlining heuristics are simple: inline calls to methods annotated @inline.
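+ *
+ * For example (a sketch), with the default heuristics the call to `m` in
+ * {{{
+ *   class C { @inline final def m = 1; def t = m }
+ * }}}
+ * is selected (annotated @inline and safeToInline), while a call to a method without the @inline
+ * annotation is not.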
+ */
+ def doInlineCallsite(callsite: Callsite): Boolean = callsite match {
+ case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) =>
+ if (compilerSettings.YoptInlineHeuristics.value == "everything") safeToInline
+ else annotatedInline && safeToInline
+
+ case _ => false
+ }
+
+ def rewriteFinalTraitMethodInvocations(): Unit = {
+ // Rewriting final trait method callsites to the implementation class enables inlining.
+ // We cannot just iterate over the values of the `callsites` map because the rewrite changes the
+ // map. Therefore we first copy the values to a list.
+ callsites.values.toList.foreach(rewriteFinalTraitMethodInvocation)
+ }
+
+ /**
+ * True for statically resolved trait callsites that should be rewritten to the static implementation method.
+ */
+ def doRewriteTraitCallsite(callsite: Callsite) = callsite.callee match {
+ case Right(Callee(callee, calleeDeclarationClass, safeToInline, true, annotatedInline, annotatedNoInline, infoWarning)) => true
+ case _ => false
+ }
+
+ /**
+ * Rewrite the INVOKEINTERFACE callsite of a final trait method invocation to INVOKESTATIC of the
+ * corresponding method in the implementation class. This enables inlining final trait methods.
+ *
+ * In a final trait method callsite, the callee is safeToRewrite and the callee method is abstract
+ * (the receiver type is the interface, so the method is abstract).
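+ *
+ * For illustration (roughly, under the 2.11 trait encoding):
+ * {{{
+ *   trait T { @inline final def f = 1 }
+ *   // callsite before the rewrite: INVOKEINTERFACE T.f ()I
+ *   // callsite after the rewrite: INVOKESTATIC T$class.f (LT;)I
+ * }}}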
+ */
+ def rewriteFinalTraitMethodInvocation(callsite: Callsite): Unit = {
+ if (doRewriteTraitCallsite(callsite)) {
+ val Right(Callee(callee, calleeDeclarationClass, _, _, annotatedInline, annotatedNoInline, infoWarning)) = callsite.callee
+
+ val traitMethodArgumentTypes = asm.Type.getArgumentTypes(callee.desc)
+
+ val implClassInternalName = calleeDeclarationClass.internalName + "$class"
+
+ val selfParamTypeV: Either[OptimizerWarning, ClassBType] = calleeDeclarationClass.info.map(_.inlineInfo.traitImplClassSelfType match {
+ case Some(internalName) => classBTypeFromParsedClassfile(internalName)
+ case None => calleeDeclarationClass
+ })
+
+ def implClassMethodV(implMethodDescriptor: String): Either[OptimizerWarning, MethodNode] = {
+ byteCodeRepository.methodNode(implClassInternalName, callee.name, implMethodDescriptor).map(_._1)
+ }
+
+ // The rewrite requires reading the implementation class and the implementation method from the
+ // bytecode repository. If either of the two fails, the rewrite is not performed.
+ val res = for {
+ selfParamType <- selfParamTypeV
+ implMethodDescriptor = asm.Type.getMethodDescriptor(asm.Type.getReturnType(callee.desc), selfParamType.toASMType +: traitMethodArgumentTypes: _*)
+ implClassMethod <- implClassMethodV(implMethodDescriptor)
+ implClassBType = classBTypeFromParsedClassfile(implClassInternalName)
+ selfTypeOk <- calleeDeclarationClass.isSubtypeOf(selfParamType)
+ } yield {
+
+ // The self parameter type may be incompatible with the trait type.
+ // trait T { self: S => def foo = 1 }
+ // The $self parameter type of T$class.foo is S, which may be unrelated to T. If we re-write
+ // a call to T.foo to T$class.foo, we need to cast the receiver to S, otherwise we get a
+ // VerifyError. We run a `SourceInterpreter` to find all producer instructions of the
+ // receiver value and add a cast to the self type after each.
+ if (!selfTypeOk) {
+ // There's no need to run eliminateUnreachableCode here: building the call graph does that
+ // already, no code can become unreachable in the meantime.
+ val analyzer = new AsmAnalyzer(callsite.callsiteMethod, callsite.callsiteClass.internalName, new SourceInterpreter)
+ val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekDown(traitMethodArgumentTypes.length)
+ for (i <- receiverValue.insns.asScala) {
+ val cast = new TypeInsnNode(CHECKCAST, selfParamType.internalName)
+ callsite.callsiteMethod.instructions.insert(i, cast)
+ }
+ }
+
+ val newCallsiteInstruction = new MethodInsnNode(INVOKESTATIC, implClassInternalName, callee.name, implMethodDescriptor, false)
+ callsite.callsiteMethod.instructions.insert(callsite.callsiteInstruction, newCallsiteInstruction)
+ callsite.callsiteMethod.instructions.remove(callsite.callsiteInstruction)
+
+ callGraph.callsites.remove(callsite.callsiteInstruction)
+ val staticCallsite = Callsite(
+ callsiteInstruction = newCallsiteInstruction,
+ callsiteMethod = callsite.callsiteMethod,
+ callsiteClass = callsite.callsiteClass,
+ callee = Right(Callee(
+ callee = implClassMethod,
+ calleeDeclarationClass = implClassBType,
+ safeToInline = true,
+ safeToRewrite = false,
+ annotatedInline = annotatedInline,
+ annotatedNoInline = annotatedNoInline,
+ calleeInfoWarning = infoWarning)),
+ argInfos = Nil,
+ callsiteStackHeight = callsite.callsiteStackHeight,
+ callsitePosition = callsite.callsitePosition
+ )
+ callGraph.callsites(newCallsiteInstruction) = staticCallsite
+ }
+
+ for (warning <- res.left) {
+ val Right(callee) = callsite.callee
+ val newCallee = callee.copy(calleeInfoWarning = Some(RewriteTraitCallToStaticImplMethodFailed(calleeDeclarationClass.internalName, callee.callee.name, callee.callee.desc, warning)))
+ callGraph.callsites(callsite.callsiteInstruction) = callsite.copy(callee = Right(newCallee))
+ }
+ }
+ }
+
+ /**
+ * Returns the callsites that can be inlined. Ensures that the returned inline request graph does
+ * not contain cycles.
+ *
+ * The resulting list is sorted such that the leaves of the inline request graph are on the left.
+ * Once these leaves are inlined, the successive elements will be leaves, etc.
+ */
+ private def collectAndOrderInlineRequests: List[Callsite] = {
+ val requests = selectCallsitesForInlining
+
+ // This map is an index to look up the inlining requests for a method. The value sets are mutable
+ // to allow removing elided requests (to break inlining cycles). The map itself is mutable to
+ // allow efficient building: requests.groupBy would build values as List[Callsite] that need to
+ // be transformed to mutable sets.
+ val inlineRequestsForMethod: mutable.Map[MethodNode, mutable.Set[Callsite]] = mutable.HashMap.empty.withDefaultValue(mutable.HashSet.empty)
+ for (r <- requests) inlineRequestsForMethod.getOrElseUpdate(r.callsiteMethod, mutable.HashSet.empty) += r
+
+ /**
+ * Break cycles in the inline request graph by removing callsites.
+ *
+ * The list `requests` is traversed left-to-right, removing those callsites that are part of a
+ * cycle. Elided callsites are also removed from the `inlineRequestsForMethod` map.
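+ *
+ * For example (a sketch), in
+ * {{{
+ *   class C { @inline final def f: Int = g; @inline final def g: Int = f }
+ * }}}
+ * the requests "inline g into f" and "inline f into g" form a cycle; one of the two is elided
+ * (deterministically, using `callsiteOrdering`).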
+ */
+ def breakInlineCycles(requests: List[Callsite]): List[Callsite] = {
+ // is there a path of inline requests from start to goal?
+ def isReachable(start: MethodNode, goal: MethodNode): Boolean = {
+ @tailrec def reachableImpl(check: List[MethodNode], visited: Set[MethodNode]): Boolean = check match {
+ case x :: xs =>
+ if (x == goal) true
+ else if (visited(x)) reachableImpl(xs, visited)
+ else {
+ val callees = inlineRequestsForMethod(x).map(_.callee.get.callee)
+ reachableImpl(xs ::: callees.toList, visited + x)
+ }
+
+ case Nil =>
+ false
+ }
+ reachableImpl(List(start), Set.empty)
+ }
+
+ val result = new mutable.ListBuffer[Callsite]()
+ // sort the inline requests to ensure that removing requests is deterministic
+ for (r <- requests.sorted(callsiteOrdering)) {
+ // is there a chain of inlining requests that would inline the callsite method into the callee?
+ if (isReachable(r.callee.get.callee, r.callsiteMethod))
+ inlineRequestsForMethod(r.callsiteMethod) -= r
+ else
+ result += r
+ }
+ result.toList
+ }
+
+ // sort the remaining inline requests such that the leaves appear first, then those requests
+ // that become leaves, etc.
+ def leavesFirst(requests: List[Callsite], visited: Set[Callsite] = Set.empty): List[Callsite] = {
+ if (requests.isEmpty) Nil
+ else {
+ val (leaves, others) = requests.partition(r => {
+ val inlineRequestsForCallee = inlineRequestsForMethod(r.callee.get.callee)
+ inlineRequestsForCallee.forall(visited)
+ })
+ assert(leaves.nonEmpty, requests)
+ leaves ::: leavesFirst(others, visited ++ leaves)
+ }
+ }
+
+ leavesFirst(breakInlineCycles(requests))
+ }
+
+
+ /**
+ * Copy and adapt the instructions of a method to a callsite.
+ *
+ * Preconditions:
+ * - The maxLocals and maxStack values of the callsite method are correctly computed
+ * - The callsite method contains no unreachable basic blocks, i.e., running an [[Analyzer]]
+ * does not produce any `null` frames
+ *
+ * @param callsiteInstruction The invocation instruction
+ * @param callsiteStackHeight The stack height at the callsite
+ * @param callsiteMethod The method in which the invocation occurs
+ * @param callsiteClass The class in which the callsite method is defined
+ * @param callee The invoked method
+ * @param calleeDeclarationClass The class in which the invoked method is defined
+ * @param receiverKnownNotNull `true` if the receiver is known to be non-null
+ * @param keepLineNumbers `true` if LineNumberNodes should be copied to the call site
+ * @return `Some(message)` if inlining cannot be performed, `None` otherwise
+ */
+ def inline(callsiteInstruction: MethodInsnNode, callsiteStackHeight: Int, callsiteMethod: MethodNode, callsiteClass: ClassBType,
+ callee: MethodNode, calleeDeclarationClass: ClassBType,
+ receiverKnownNotNull: Boolean, keepLineNumbers: Boolean): Option[CannotInlineWarning] = {
+ canInline(callsiteInstruction, callsiteStackHeight, callsiteMethod, callsiteClass, callee, calleeDeclarationClass) orElse {
+ // New labels for the cloned instructions
+ val labelsMap = cloneLabels(callee)
+ val (clonedInstructions, instructionMap) = cloneInstructions(callee, labelsMap)
+ if (!keepLineNumbers) {
+ removeLineNumberNodes(clonedInstructions)
+ }
+
+ // local vars in the callee are shifted by the number of locals at the callsite
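+ // (e.g., if callsiteMethod.maxLocals is 3, local 0 of the callee becomes local 3 after inlining)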
+ val localVarShift = callsiteMethod.maxLocals
+ clonedInstructions.iterator.asScala foreach {
+ case varInstruction: VarInsnNode => varInstruction.`var` += localVarShift
+ case iinc: IincInsnNode => iinc.`var` += localVarShift
+ case _ => ()
+ }
+
+ // add a STORE instruction for each expected argument, including for THIS instance if any
+ val argStores = new InsnList
+ var nextLocalIndex = callsiteMethod.maxLocals
+ if (!isStaticMethod(callee)) {
+ if (!receiverKnownNotNull) {
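+ // The eliminated invocation would have thrown a NullPointerException for a null receiver, so we
+ // emit an equivalent explicit check: ATHROW on the null reference pushed below raises the NPE.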
+ argStores.add(new InsnNode(DUP))
+ val nonNullLabel = newLabelNode
+ argStores.add(new JumpInsnNode(IFNONNULL, nonNullLabel))
+ argStores.add(new InsnNode(ACONST_NULL))
+ argStores.add(new InsnNode(ATHROW))
+ argStores.add(nonNullLabel)
+ }
+ argStores.add(new VarInsnNode(ASTORE, nextLocalIndex))
+ nextLocalIndex += 1
+ }
+
+ // We just use an asm.Type here, no need to create the MethodBType.
+ val calleeAsmType = asm.Type.getMethodType(callee.desc)
+
+ for (argTp <- calleeAsmType.getArgumentTypes) {
+ val opc = argTp.getOpcode(ISTORE) // returns the correct xSTORE instruction for argTp
+ argStores.insert(new VarInsnNode(opc, nextLocalIndex)) // "insert" is "prepend" - the last argument is on the top of the stack
+ nextLocalIndex += argTp.getSize
+ }
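+ // For example (a sketch): for a callee `def m(i: Int, l: Long)` and callsiteMethod.maxLocals = b,
+ // argStores executes LSTORE b+2, ISTORE b+1, then the receiver null-check and ASTORE b.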
+
+ clonedInstructions.insert(argStores)
+
+ // label for the exit of the inlined function. xRETURNs are replaced by GOTOs to this label.
+ val postCallLabel = newLabelNode
+ clonedInstructions.add(postCallLabel)
+
+ // replace xRETURNs:
+ // - store the return value (if any)
+ // - clear the stack of the inlined method (insert DROPs)
+ // - load the return value
+ // - GOTO postCallLabel
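+ //
+ // For example (a sketch): an IRETURN reached with stack [x, result] (both of size 1) becomes
+ // ISTORE returnValueIndex; POP; GOTO postCallLabel, and the value is re-loaded after
+ // postCallLabel with ILOAD returnValueIndex.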
+
+ val returnType = calleeAsmType.getReturnType
+ val hasReturnValue = returnType.getSort != asm.Type.VOID
+ val returnValueIndex = callsiteMethod.maxLocals + callee.maxLocals
+ nextLocalIndex += returnType.getSize
+
+ def returnValueStore(returnInstruction: AbstractInsnNode) = {
+ val opc = returnInstruction.getOpcode match {
+ case IRETURN => ISTORE
+ case LRETURN => LSTORE
+ case FRETURN => FSTORE
+ case DRETURN => DSTORE
+ case ARETURN => ASTORE
+ }
+ new VarInsnNode(opc, returnValueIndex)
+ }
+
+ // We run an interpreter to know the stack height at each xRETURN instruction and the sizes
+ // of the values on the stack.
+ val analyzer = new AsmAnalyzer(callee, calleeDeclarationClass.internalName)
+
+ for (originalReturn <- callee.instructions.iterator().asScala if isReturn(originalReturn)) {
+ val frame = analyzer.frameAt(originalReturn)
+ var stackHeight = frame.getStackSize
+
+ val inlinedReturn = instructionMap(originalReturn)
+ val returnReplacement = new InsnList
+
+ def drop(slot: Int) = returnReplacement add getPop(frame.peekDown(slot).getSize)
+
+ // for non-void methods, store the stack top into the return local variable
+ if (hasReturnValue) {
+ returnReplacement add returnValueStore(originalReturn)
+ stackHeight -= 1
+ }
+
+ // drop the rest of the stack
+ for (i <- 0 until stackHeight) drop(i)
+
+ returnReplacement add new JumpInsnNode(GOTO, postCallLabel)
+ clonedInstructions.insert(inlinedReturn, returnReplacement)
+ clonedInstructions.remove(inlinedReturn)
+ }
+
+ // Load instruction for the return value
+ if (hasReturnValue) {
+ val retVarLoad = {
+ val opc = returnType.getOpcode(ILOAD)
+ new VarInsnNode(opc, returnValueIndex)
+ }
+ clonedInstructions.insert(postCallLabel, retVarLoad)
+ }
+
+ callsiteMethod.instructions.insert(callsiteInstruction, clonedInstructions)
+ callsiteMethod.instructions.remove(callsiteInstruction)
+
+ callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name + "_").asJava)
+ callsiteMethod.tryCatchBlocks.addAll(cloneTryCatchBlockNodes(callee, labelsMap).asJava)
+
+ // Add all invocation instructions that were inlined to the call graph
+ callee.instructions.iterator().asScala foreach {
+ case originalCallsiteIns: MethodInsnNode =>
+ callGraph.callsites.get(originalCallsiteIns) match {
+ case Some(originalCallsite) =>
+ val newCallsiteIns = instructionMap(originalCallsiteIns).asInstanceOf[MethodInsnNode]
+ callGraph.callsites(newCallsiteIns) = Callsite(
+ callsiteInstruction = newCallsiteIns,
+ callsiteMethod = callsiteMethod,
+ callsiteClass = callsiteClass,
+ callee = originalCallsite.callee,
+ argInfos = Nil, // TODO: re-compute argInfos for new destination (once we actually compute them)
+ callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight,
+ callsitePosition = originalCallsite.callsitePosition
+ )
+
+ case None =>
+ }
+
+ case _ =>
+ }
+ // Remove the elided invocation from the call graph
+ callGraph.callsites.remove(callsiteInstruction)
+
+ // Inlining a method body can render some code unreachable, see example above (in runInliner).
+ unreachableCodeEliminated -= callsiteMethod
+
+ callsiteMethod.maxLocals += returnType.getSize + callee.maxLocals
+ callsiteMethod.maxStack = math.max(callsiteMethod.maxStack, callee.maxStack + callsiteStackHeight)
+
+ None
+ }
+ }
+
+ /**
+ * Check whether an inlining can be performed. Parameters are described in method [[inline]].
+ * @return `Some(message)` if inlining cannot be performed, `None` otherwise
+ */
+ def canInline(callsiteInstruction: MethodInsnNode, callsiteStackHeight: Int, callsiteMethod: MethodNode, callsiteClass: ClassBType,
+ callee: MethodNode, calleeDeclarationClass: ClassBType): Option[CannotInlineWarning] = {
+
+ def calleeDesc = s"${callee.name} of type ${callee.desc} in ${calleeDeclarationClass.internalName}"
+ def methodMismatch = s"Wrong method node for inlining ${textify(callsiteInstruction)}: $calleeDesc"
+ assert(callsiteInstruction.name == callee.name, methodMismatch)
+ assert(callsiteInstruction.desc == callee.desc, methodMismatch)
+ assert(!isConstructor(callee), s"Constructors cannot be inlined: $calleeDesc")
+ assert(!BytecodeUtils.isAbstractMethod(callee), s"Callee is abstract: $calleeDesc")
+ assert(callsiteMethod.instructions.contains(callsiteInstruction), s"Callsite ${textify(callsiteInstruction)} is not an instruction of $calleeDesc")
+
+ // When an exception is thrown, the stack is cleared before jumping to the handler. When
+ // inlining a method that catches an exception, all values that were on the stack before the
+ // call (in addition to the arguments) would be cleared (SI-6157). So we don't inline methods
+ // with handlers in case there are values on the stack.
+ // Alternatively, we could save all stack values below the method arguments into locals, but
+ // that would be inefficient: we'd need to pop all parameters, save the values, and push the
+ // parameters back for the (inlined) invocation. Similarly for the result after the call.
+ def stackHasNonParameters: Boolean = {
+ val expectedArgs = asm.Type.getArgumentTypes(callsiteInstruction.desc).length + (callsiteInstruction.getOpcode match {
+ case INVOKEVIRTUAL | INVOKESPECIAL | INVOKEINTERFACE => 1
+ case INVOKESTATIC => 0
+ case INVOKEDYNAMIC =>
+ assertionError(s"Unexpected opcode, cannot inline ${textify(callsiteInstruction)}")
+ })
+ callsiteStackHeight > expectedArgs
+ }
+
+ if (codeSizeOKForInlining(callsiteMethod, callee)) {
+ Some(ResultingMethodTooLarge(
+ calleeDeclarationClass.internalName, callee.name, callee.desc,
+ callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
+ } else if (isSynchronizedMethod(callee)) {
+ // Could be done by locking on the receiver, wrapping the inlined code in a try and unlocking
+ // in finally. But it's probably not worth the effort, scala never emits synchronized methods.
+ Some(SynchronizedMethod(calleeDeclarationClass.internalName, callee.name, callee.desc))
+ } else if (isStrictfpMethod(callsiteMethod) != isStrictfpMethod(callee)) {
+ Some(StrictfpMismatch(
+ calleeDeclarationClass.internalName, callee.name, callee.desc,
+ callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
+ } else if (!callee.tryCatchBlocks.isEmpty && stackHasNonParameters) {
+ Some(MethodWithHandlerCalledOnNonEmptyStack(
+ calleeDeclarationClass.internalName, callee.name, callee.desc,
+ callsiteClass.internalName, callsiteMethod.name, callsiteMethod.desc))
+ } else findIllegalAccess(callee.instructions, calleeDeclarationClass, callsiteClass) map {
+ case (illegalAccessIns, None) =>
+ IllegalAccessInstruction(
+ calleeDeclarationClass.internalName, callee.name, callee.desc,
+ callsiteClass.internalName, illegalAccessIns)
+
+ case (illegalAccessIns, Some(warning)) =>
+ IllegalAccessCheckFailed(
+ calleeDeclarationClass.internalName, callee.name, callee.desc,
+ callsiteClass.internalName, illegalAccessIns, warning)
+ }
+ }
+
+ /**
+ * Returns the first instruction in the `instructions` list that would cause a
+ * [[java.lang.IllegalAccessError]] when inlined into the `destinationClass`.
+ *
+ * If validity of some instruction could not be checked because an error occurred, the instruction
+ * is returned together with a warning message that describes the problem.
+ */
+ def findIllegalAccess(instructions: InsnList, calleeDeclarationClass: ClassBType, destinationClass: ClassBType): Option[(AbstractInsnNode, Option[OptimizerWarning])] = {
+
+ /**
+ * Check if a type (C) is accessible to some class (D), as defined in JVMS 5.4.4:
+ * (A1) C is public
+ * (A2) C and D are members of the same run-time package
+ */
+ def classIsAccessible(accessed: BType, from: ClassBType = destinationClass): Either[OptimizerWarning, Boolean] = (accessed: @unchecked) match {
+ // TODO: A2 requires "same run-time package", which seems to be package + classloader (JVMS 5.3). Is the below OK?
+ case c: ClassBType => c.isPublic.map(_ || c.packageInternalName == from.packageInternalName)
+ case a: ArrayBType => classIsAccessible(a.elementType, from)
+ case _: PrimitiveBType => Right(true)
+ }
+
+ /**
+ * Check if a member reference is accessible from the [[destinationClass]], as defined in the
+ * JVMS 5.4.4. Note that the class name in a field / method reference is not necessarily the
+ * class in which the member is declared:
+ *
+ * class A { def f = 0 }; class B extends A { f }
+ *
+ * The INVOKEVIRTUAL instruction uses a method reference "B.f ()I". Therefore this method has
+ * two parameters:
+ *
+ * @param memberDeclClass The class in which the member is declared (A)
+ * @param memberRefClass The class used in the member reference (B)
+ *
+ * JVMS 5.4.4 summary: A field or method R is accessible to a class D (destinationClass) iff
+ * (B1) R is public
+ * (B2) R is protected, declared in C (memberDeclClass) and D is a subclass of C.
+ * If R is not static, R must contain a symbolic reference to a class T (memberRefClass),
+ * such that T is either a subclass of D, a superclass of D, or D itself.
+ * (B3) R is either protected or has default access and declared by a class in the same
+ * run-time package as D.
+ * (B4) R is private and is declared in D.
+ */
+ def memberIsAccessible(memberFlags: Int, memberDeclClass: ClassBType, memberRefClass: ClassBType): Either[OptimizerWarning, Boolean] = {
+ // TODO: B3 requires "same run-time package", which seems to be package + classloader (JVMS 5.3). Is the below OK?
+ def samePackageAsDestination = memberDeclClass.packageInternalName == destinationClass.packageInternalName
+
+ val key = (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE) & memberFlags
+ key match {
+ case ACC_PUBLIC => // B1
+ Right(true)
+
+ case ACC_PROTECTED => // B2
+ tryEither {
+ val condB2 = destinationClass.isSubtypeOf(memberDeclClass).orThrow && {
+ val isStatic = (ACC_STATIC & memberFlags) != 0
+ isStatic || memberRefClass.isSubtypeOf(destinationClass).orThrow || destinationClass.isSubtypeOf(memberRefClass).orThrow
+ }
+ Right(condB2 || samePackageAsDestination) // B3 (protected)
+ }
+
+ case 0 => // B3 (default access)
+ Right(samePackageAsDestination)
+
+ case ACC_PRIVATE => // B4
+ Right(memberDeclClass == destinationClass)
+ }
+ }
+
+ /**
+ * Check if `instruction` can be transplanted to `destinationClass`.
+ *
+ * If the instruction references a class, method or field that cannot be found in the
+ * byteCodeRepository, it is considered illegal. This is known to happen in mixed
+ * compilation: for Java classes there is no classfile that could be parsed, nor does the
+ * compiler generate any bytecode.
+ *
+ * Returns a warning message describing the problem if checking the legality for the instruction
+ * failed.
+ */
+ def isLegal(instruction: AbstractInsnNode): Either[OptimizerWarning, Boolean] = instruction match {
+ case ti: TypeInsnNode =>
+ // NEW, ANEWARRAY, CHECKCAST or INSTANCEOF. For these instructions, the reference
+ // "must be a symbolic reference to a class, array, or interface type" (JVMS 6), so
+ // it can be an internal name, or a full array descriptor.
+ classIsAccessible(bTypeForDescriptorOrInternalNameFromClassfile(ti.desc))
+
+ case ma: MultiANewArrayInsnNode =>
+ // "a symbolic reference to a class, array, or interface type"
+ classIsAccessible(bTypeForDescriptorOrInternalNameFromClassfile(ma.desc))
+
+ case fi: FieldInsnNode =>
+ val fieldRefClass = classBTypeFromParsedClassfile(fi.owner)
+ for {
+ (fieldNode, fieldDeclClassNode) <- byteCodeRepository.fieldNode(fieldRefClass.internalName, fi.name, fi.desc): Either[OptimizerWarning, (FieldNode, InternalName)]
+ fieldDeclClass = classBTypeFromParsedClassfile(fieldDeclClassNode)
+ res <- memberIsAccessible(fieldNode.access, fieldDeclClass, fieldRefClass)
+ } yield {
+ res
+ }
+
+ case mi: MethodInsnNode =>
+ if (mi.owner.charAt(0) == '[') Right(true) // array methods are accessible
+ else {
+ def canInlineCall(opcode: Int, methodFlags: Int, methodDeclClass: ClassBType, methodRefClass: ClassBType): Either[OptimizerWarning, Boolean] = {
+ opcode match {
+ case INVOKESPECIAL if mi.name != GenBCode.INSTANCE_CONSTRUCTOR_NAME =>
+ // invokespecial is used for private method calls, super calls and instance constructor calls.
+ // private method and super calls can only be inlined into the same class.
+ Right(destinationClass == calleeDeclarationClass)
+
+ case _ => // INVOKEVIRTUAL, INVOKESTATIC, INVOKEINTERFACE and INVOKESPECIAL of constructors
+ memberIsAccessible(methodFlags, methodDeclClass, methodRefClass)
+ }
+ }
+
+ val methodRefClass = classBTypeFromParsedClassfile(mi.owner)
+ for {
+ (methodNode, methodDeclClassNode) <- byteCodeRepository.methodNode(methodRefClass.internalName, mi.name, mi.desc): Either[OptimizerWarning, (MethodNode, InternalName)]
+ methodDeclClass = classBTypeFromParsedClassfile(methodDeclClassNode)
+ res <- canInlineCall(mi.getOpcode, methodNode.access, methodDeclClass, methodRefClass)
+ } yield {
+ res
+ }
+ }
+
+ case ivd: InvokeDynamicInsnNode =>
+ // TODO @lry check necessary conditions to inline an indy, instead of giving up
+ Right(false)
+
+ case ci: LdcInsnNode => ci.cst match {
+ case t: asm.Type => classIsAccessible(bTypeForDescriptorOrInternalNameFromClassfile(t.getInternalName))
+ case _ => Right(true)
+ }
+
+ case _ => Right(true)
+ }
+
+ val it = instructions.iterator.asScala
+ @tailrec def find: Option[(AbstractInsnNode, Option[OptimizerWarning])] = {
+ if (!it.hasNext) None // all instructions are legal
+ else {
+ val i = it.next()
+ isLegal(i) match {
+ case Left(warning) => Some((i, Some(warning))) // checking isLegal for i failed
+ case Right(false) => Some((i, None)) // an illegal instruction was found
+ case _ => find
+ }
+ }
+ }
+ find
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
new file mode 100644
index 0000000000..5f51a94673
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
@@ -0,0 +1,584 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.switch
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree.analysis.{Analyzer, BasicInterpreter}
+import scala.tools.asm.tree._
+import scala.collection.convert.decorateAsScala._
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+
+/**
+ * Optimizations within a single method.
+ *
+ * unreachable code
+ * - removes instructions of basic blocks to which no branch instruction points
+ * + enables eliminating some exception handlers and local variable descriptors
+ * > eliminating them is required for correctness, as explained in `removeUnreachableCode`
+ *
+ * empty exception handlers
+ * - removes exception handlers whose try block is empty
+ * + eliminating a handler where the try block is empty and reachable will turn the catch block
+ * unreachable. in this case "unreachable code" is invoked recursively until reaching a fixpoint.
+ * > for try blocks that are unreachable, "unreachable code" removes also the instructions of the
+ * catch block, and the recursive invocation is not necessary.
+ *
+ * simplify jumps
+ * - various simplifications, see the doc comments of the individual optimizations
+ * + changing or eliminating jumps may render some code unreachable, therefore "simplify jumps" is
+ * executed in a loop with "unreachable code"
+ *
+ * empty local variable descriptors
+ * - removes entries from the local variable table where the variable is not actually used
+ * + enables eliminating labels that the entry points to (if they are not otherwise referenced)
+ *
+ * empty line numbers
+ * - eliminates line number nodes that describe no executable instructions
+ * + enables eliminating the label of the line number node (if it's not otherwise referenced)
+ *
+ * stale labels
+ * - eliminates labels that are not referenced, merges sequences of label definitions
+ */
+class LocalOpt[BT <: BTypes](val btypes: BT) {
+ import LocalOptImpls._
+ import btypes._
+
+ /**
+ * Remove unreachable code from a method.
+ *
+ * This implementation only removes instructions that are unreachable for an ASM analyzer /
+ * interpreter. This ensures that future analyses will not produce `null` frames. The inliner
+ * and call graph builder depend on this property.
+ *
+ * @return A set containing the eliminated instructions
+ */
+ def minimalRemoveUnreachableCode(method: MethodNode, ownerClassName: InternalName): Set[AbstractInsnNode] = {
+ if (method.instructions.size == 0) return Set.empty // fast path for abstract methods
+ if (unreachableCodeEliminated(method)) return Set.empty // we know there is no unreachable code
+
+ // For correctness, after removing unreachable code, we have to eliminate empty exception
+ * handlers, see scaladoc of def methodOptimizations. Removing a live handler may render more
+ // code unreachable and therefore requires running another round.
+ def removalRound(): Set[AbstractInsnNode] = {
+ val (removedInstructions, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
+ val removedRecursively = if (removedInstructions.nonEmpty) {
+ val liveHandlerRemoved = removeEmptyExceptionHandlers(method).exists(h => liveLabels(h.start))
+ if (liveHandlerRemoved) removalRound()
+ else Set.empty
+ } else Set.empty
+ removedInstructions ++ removedRecursively
+ }
+
+ val removedInstructions = removalRound()
+ if (removedInstructions.nonEmpty) removeUnusedLocalVariableNodes(method)()
+ unreachableCodeEliminated += method
+ removedInstructions
+ }
+
+ /**
+ * Remove unreachable instructions from all (non-abstract) methods and apply various other
+ * cleanups to the bytecode.
+ *
+ * @param clazz The class whose methods are optimized
+ * @return `true` if unreachable code was eliminated in some method, `false` otherwise.
+ */
+ def methodOptimizations(clazz: ClassNode): Boolean = {
+ !compilerSettings.YoptNone && clazz.methods.asScala.foldLeft(false) {
+ case (changed, method) => methodOptimizations(method, clazz.name) || changed
+ }
+ }
+
+ /**
+ * Remove unreachable code from a method.
+ *
+ * We rely on dead code elimination provided by the ASM framework, as described in the ASM User
+ * Guide (http://asm.ow2.org/index.html), Section 8.2.1. It runs a data flow analysis, which only
+ * computes Frame information for reachable instructions. Instructions for which no Frame data is
+ * available after the analysis are unreachable.
+ *
+ * Also simplifies branching instructions, removes unused local variable descriptors, empty
+ * exception handlers, unnecessary label declarations and empty line number nodes.
+ *
+ * Returns `true` if the bytecode of `method` was changed.
+ */
+ def methodOptimizations(method: MethodNode, ownerClassName: InternalName): Boolean = {
+ if (method.instructions.size == 0) return false // fast path for abstract methods
+
+ // unreachable-code also removes unused local variable nodes and empty exception handlers.
+ // This is required for correctness, for example:
+ //
+ // def f = { return 0; try { 1 } catch { case _ => 2 } }
+ //
+ // The result after removeUnreachableCodeImpl:
+ //
+ // TRYCATCHBLOCK L0 L1 L2 java/lang/Exception
+ // L4
+ // ICONST_0
+ // IRETURN
+ // L0
+ // L1
+ // L2
+ //
+ // If we don't eliminate the handler, the ClassWriter emits:
+ //
+ // TRYCATCHBLOCK L0 L0 L0 java/lang/Exception
+ // L1
+ // ICONST_0
+ // IRETURN
+ // L0
+ //
+ // This triggers "ClassFormatError: Illegal exception table range in class file C". Similar
+ // for local variables in dead blocks. Maybe that's a bug in the ASM framework.
+
+ def removalRound(): Boolean = {
+ // unreachable-code, empty-handlers and simplify-jumps run until reaching a fixpoint (see doc on class LocalOpt)
+ val (codeRemoved, handlersRemoved, liveHandlerRemoved) = if (compilerSettings.YoptUnreachableCode) {
+ val (removedInstructions, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
+ val removedHandlers = removeEmptyExceptionHandlers(method)
+ (removedInstructions.nonEmpty, removedHandlers.nonEmpty, removedHandlers.exists(h => liveLabels(h.start)))
+ } else {
+ (false, false, false)
+ }
+
+ val jumpsChanged = if (compilerSettings.YoptSimplifyJumps) simplifyJumps(method) else false
+
+ // Eliminating live handlers and simplifying jump instructions may render more code
+ // unreachable, so we need to run another round.
+ if (liveHandlerRemoved || jumpsChanged) removalRound()
+
+ codeRemoved || handlersRemoved || jumpsChanged
+ }
+
+ val codeHandlersOrJumpsChanged = removalRound()
+
+ // (*) Removing stale local variable descriptors is required for correctness of unreachable-code
+ val localsRemoved =
+ if (compilerSettings.YoptCompactLocals) compactLocalVariables(method) // also removes unused
+ else if (compilerSettings.YoptUnreachableCode) removeUnusedLocalVariableNodes(method)() // (*)
+ else false
+
+ val lineNumbersRemoved = if (compilerSettings.YoptEmptyLineNumbers) removeEmptyLineNumbers(method) else false
+
+ val labelsRemoved = if (compilerSettings.YoptEmptyLabels) removeEmptyLabelNodes(method) else false
+
+ // assert that local variable annotations are empty (we don't emit them) - otherwise we'd have
+ // to eliminate those covering an empty range, similar to removeUnusedLocalVariableNodes.
+ def nullOrEmpty[T](l: java.util.List[T]) = l == null || l.isEmpty
+ assert(nullOrEmpty(method.visibleLocalVariableAnnotations), method.visibleLocalVariableAnnotations)
+ assert(nullOrEmpty(method.invisibleLocalVariableAnnotations), method.invisibleLocalVariableAnnotations)
+
+ unreachableCodeEliminated += method
+
+ codeHandlersOrJumpsChanged || localsRemoved || lineNumbersRemoved || labelsRemoved
+ }
+
+}
+
+object LocalOptImpls {
+ /**
+ * Removes unreachable basic blocks.
+ *
+ * TODO: rewrite, don't use computeMaxLocalsMaxStack (runs a ClassWriter) / Analyzer. Too slow.
+ *
+ * @return A set containing eliminated instructions, and a set containing all live label nodes.
+ */
+ def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: InternalName): (Set[AbstractInsnNode], Set[LabelNode]) = {
+ // The data flow analysis requires the maxLocals / maxStack fields of the method to be computed.
+ computeMaxLocalsMaxStack(method)
+ val a = new Analyzer(new BasicInterpreter)
+ a.analyze(ownerClassName, method)
+ val frames = a.getFrames
+
+ val initialSize = method.instructions.size
+ var i = 0
+ var liveLabels = Set.empty[LabelNode]
+ var removedInstructions = Set.empty[AbstractInsnNode]
+ val itr = method.instructions.iterator()
+ while (itr.hasNext) {
+ itr.next() match {
+ case l: LabelNode =>
+ if (frames(i) != null) liveLabels += l
+
+ case ins =>
+ // label nodes are not removed: they might be referenced for example in a LocalVariableNode
+ if (frames(i) == null || ins.getOpcode == Opcodes.NOP) {
+ // Instruction iterators allow removing during iteration.
+ // Removing is O(1): instructions are doubly linked list elements.
+ itr.remove()
+ removedInstructions += ins
+ }
+ }
+ i += 1
+ }
+ (removedInstructions, liveLabels)
+ }
+
+ /**
+ * Remove exception handlers that cover empty code blocks. A block is considered empty if it
+ * consists only of labels, frames, line numbers, nops and gotos.
+ *
+ * There are no executable instructions that we can assume don't throw (e.g. ILOAD). The JVM spec
+ * basically says that a VirtualMachineError may be thrown at any time:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.3
+ *
+ * Note that no instructions are eliminated.
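+ *
+ * For example, after unreachable code is removed from
+ * `def f = { return 0; try { 1 } catch { case _ => 2 } }`, the handler's range contains only
+ * labels, so the handler is eliminated here (see the comment in `methodOptimizations` above).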
+ *
+ * @return the set of removed handlers
+ */
+ def removeEmptyExceptionHandlers(method: MethodNode): Set[TryCatchBlockNode] = {
+ /** True if there exists code between start and end. */
+ def containsExecutableCode(start: AbstractInsnNode, end: LabelNode): Boolean = {
+ start != end && ((start.getOpcode : @switch) match {
+ // FrameNode, LabelNode and LineNumberNode have opcode == -1.
+ case -1 | Opcodes.GOTO => containsExecutableCode(start.getNext, end)
+ case _ => true
+ })
+ }
+
+ var removedHandlers = Set.empty[TryCatchBlockNode]
+ val handlersIter = method.tryCatchBlocks.iterator()
+ while (handlersIter.hasNext) {
+ val handler = handlersIter.next()
+ if (!containsExecutableCode(handler.start, handler.end)) {
+ removedHandlers += handler
+ handlersIter.remove()
+ }
+ }
+ removedHandlers
+ }
+
+ /**
+ * Remove all non-parameter entries from the local variable table which denote variables that are
+ * not actually read or written.
+ *
+ * Note that each entry in the local variable table has a start, end and index. Two entries with
+ * the same index but distinct start / end ranges are different variables; they need not have the
+ * same type or name.
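+ *
+ * For example, the table may contain two entries with the same index: one for an `Int` named `i`
+ * live in one block, and one for a `String` named `s` live in a disjoint block (hypothetical names).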
+ */
+ def removeUnusedLocalVariableNodes(method: MethodNode)(firstLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = {
+ def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = {
+ start != end && (start match {
+ case v: VarInsnNode if v.`var` == varIndex => true
+ case _ => variableIsUsed(start.getNext, end, varIndex)
+ })
+ }
+
+ val initialNumVars = method.localVariables.size
+ val localsIter = method.localVariables.iterator()
+ while (localsIter.hasNext) {
+ val local = localsIter.next()
+ val index = local.index
+ // parameters and `this` (the lowest indices, starting at 0) are never removed or renumbered
+ if (index >= firstLocalIndex) {
+ if (!variableIsUsed(local.start, local.end, index)) localsIter.remove()
+ else if (renumber(index) != index) local.index = renumber(index)
+ }
+ }
+ method.localVariables.size != initialNumVars
+ }
+
+ /**
+ * The number of local variable slots used for parameters and for the `this` reference.
+ */
+ private def parametersSize(method: MethodNode): Int = {
+ // Double / long parameters occupy two slots, so we sum up the sizes. Since getSize returns 0 for
+ // void, we have to add `max 1`.
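+ // For example, for an instance method with descriptor (IJLjava/lang/String;)V the sum is
+ // 1 (this) + 1 (int) + 2 (long) + 1 (reference) = 5.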
+ val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).iterator.map(_.getSize max 1).sum
+ val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0
+ paramsSize + thisSize
+ }
+
+ /**
+ * Compact the local variable slots used in the method's implementation. This prevents having
+ * unused slots, for example after eliminating unreachable code.
+ *
+ * This transformation reduces the size of the frame for invoking the method. For example, if the
+ * method has an ISTORE instruction to the local variable 3, the maxLocals of the method is at
+ * least 4, even if some local variable slots below 3 are not used by any instruction.
+ *
+ * This could be improved by doing proper register allocation.
+ */
+ def compactLocalVariables(method: MethodNode): Boolean = {
+ // This array is built up to map local variable indices from old to new.
+ val renumber = collection.mutable.ArrayBuffer.empty[Int]
+
+ // Add the index of the local variable used by `varIns` to the `renumber` array.
+ def addVar(varIns: VarInsnNode): Unit = {
+ val index = varIns.`var`
+ val isWide = (varIns.getOpcode: @switch) match {
+ case Opcodes.LLOAD | Opcodes.DLOAD | Opcodes.LSTORE | Opcodes.DSTORE => true
+ case _ => false
+ }
+
+ // Ensure the length of `renumber`. Unused variable indices are mapped to -1.
+ val minLength = if (isWide) index + 2 else index + 1
+ for (i <- renumber.length until minLength) renumber += -1
+
+ renumber(index) = index
+ if (isWide) renumber(index + 1) = index
+ }
+
+ // first phase: collect all used local variables. if the variable at index x is used, set
+ // renumber(x) = x, otherwise renumber(x) = -1. if the variable is wide (long or double), set
+ // renumber(x+1) = x.
+
+ val firstLocalIndex = parametersSize(method)
+ for (i <- 0 until firstLocalIndex) renumber += i // parameters and `this` are always used.
+ method.instructions.iterator().asScala foreach {
+ case VarInstruction(varIns) => addVar(varIns)
+ case _ =>
+ }
+
+ // assign the next free slot to each used local variable.
+ // for example, rewrite (0, 1, -1, 3, -1, 5) to (0, 1, -1, 2, -1, 3).
+
+ var nextIndex = firstLocalIndex
+ for (i <- firstLocalIndex until renumber.length if renumber(i) != -1) {
+ renumber(i) = nextIndex
+ nextIndex += 1
+ }
+
+ // Update the local variable descriptors according to the renumber table, and eliminate stale entries
+ val removedLocalVariableDescriptors = removeUnusedLocalVariableNodes(method)(firstLocalIndex, renumber)
+
+ if (nextIndex == renumber.length) removedLocalVariableDescriptors
+ else {
+ // update variable instructions according to the renumber table
+ method.maxLocals = nextIndex
+ method.instructions.iterator().asScala.foreach {
+ case VarInstruction(varIns) =>
+ val oldIndex = varIns.`var`
+ if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex)
+ varIns.`var` = renumber(varIns.`var`)
+ case _ =>
+ }
+ true
+ }
+ }
+
+ /**
+ * Removes LineNumberNodes that don't describe any executable instructions.
+ *
+ * This method expects (and asserts) that the `start` label of each LineNumberNode is the
+ * lexically preceding label declaration.
+ */
+ def removeEmptyLineNumbers(method: MethodNode): Boolean = {
+ def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match {
+ case null => true
+ case l: LineNumberNode => true
+ case n if n.getOpcode >= 0 => false
+ case n => isEmpty(n)
+ }
+
+ val initialSize = method.instructions.size
+ val iterator = method.instructions.iterator()
+ var previousLabel: LabelNode = null
+ while (iterator.hasNext) {
+ iterator.next match {
+ case label: LabelNode => previousLabel = label
+ case line: LineNumberNode if isEmpty(line) =>
+ assert(line.start == previousLabel)
+ iterator.remove()
+ case _ =>
+ }
+ }
+ method.instructions.size != initialSize
+ }
+
+ /**
+ * Removes unreferenced label declarations, also squashes sequences of label definitions.
+ *
+ * [ops]; Label(a); Label(b); [ops];
+ * => subs([ops], b, a); Label(a); subs([ops], b, a);
+ */
+ def removeEmptyLabelNodes(method: MethodNode): Boolean = {
+ val references = labelReferences(method)
+
+ val initialSize = method.instructions.size
+ val iterator = method.instructions.iterator()
+ var prev: LabelNode = null
+ while (iterator.hasNext) {
+ iterator.next match {
+ case label: LabelNode =>
+ if (!references.contains(label)) iterator.remove()
+ else if (prev != null) {
+ references(label).foreach(substituteLabel(_, label, prev))
+ iterator.remove()
+ } else prev = label
+
+ case instruction =>
+ if (instruction.getOpcode >= 0) prev = null
+ }
+ }
+ method.instructions.size != initialSize
+ }
+
+ /**
+ * Apply various simplifications to branching instructions.
+ */
+ def simplifyJumps(method: MethodNode): Boolean = {
+ var changed = false
+
+ val allHandlers = method.tryCatchBlocks.asScala.toSet
+
+ // A set of all exception handlers that guard the current instruction, required for simplifyGotoReturn
+ var activeHandlers = Set.empty[TryCatchBlockNode]
+
+ // Instructions that need to be removed. simplifyBranchOverGoto returns an instruction to be
+ // removed. It cannot remove it itself because the instruction may be the successor of the current
+ // instruction of the iterator, which is not supported in ASM.
+ var instructionsToRemove = Set.empty[AbstractInsnNode]
+
+ val iterator = method.instructions.iterator()
+ while (iterator.hasNext) {
+ val instruction = iterator.next()
+
+ instruction match {
+ case l: LabelNode =>
+ activeHandlers ++= allHandlers.filter(_.start == l)
+ activeHandlers = activeHandlers.filter(_.end != l)
+ case _ =>
+ }
+
+ if (instructionsToRemove(instruction)) {
+ iterator.remove()
+ instructionsToRemove -= instruction
+ } else if (isJumpNonJsr(instruction)) { // fast path - all of the below only treat jumps
+ var jumpRemoved = simplifyThenElseSameTarget(method, instruction)
+
+ if (!jumpRemoved) {
+ changed = collapseJumpChains(instruction) || changed
+ jumpRemoved = removeJumpToSuccessor(method, instruction)
+
+ if (!jumpRemoved) {
+ val staleGoto = simplifyBranchOverGoto(method, instruction)
+ instructionsToRemove ++= staleGoto
+ changed ||= staleGoto.nonEmpty
+ changed = simplifyGotoReturn(method, instruction, inTryBlock = activeHandlers.nonEmpty) || changed
+ }
+ }
+ changed ||= jumpRemoved
+ }
+ }
+ assert(instructionsToRemove.isEmpty, "some optimization required removing a previously traversed instruction. add `instructionsToRemove.foreach(method.instructions.remove)`")
+ changed
+ }
+
+ /**
+ * Removes a conditional jump if it is followed by a GOTO to the same destination.
+ *
+ * CondJump l; [nops]; GOTO l; [...]
+ * => POP*; [nops]; GOTO l; [...]
+ *
+ * Introduces 1 or 2 POP instructions, depending on the number of values consumed by the CondJump.
+ */
+ private def simplifyThenElseSameTarget(method: MethodNode, instruction: AbstractInsnNode): Boolean = instruction match {
+ case ConditionalJump(jump) =>
+ nextExecutableInstruction(instruction) match {
+ case Some(Goto(elseJump)) if sameTargetExecutableInstruction(jump, elseJump) =>
+ removeJumpAndAdjustStack(method, jump)
+ true
+
+ case _ => false
+ }
+ case _ => false
+ }
+
+ /**
+ * Replace jumps to a sequence of GOTO instructions by a jump to the final destination.
+ *
+ * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...]
+ * => Jump n; [rest unchanged]
+ *
+ * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop.
+ */
+ private def collapseJumpChains(instruction: AbstractInsnNode): Boolean = instruction match {
+ case JumpNonJsr(jump) =>
+ val target = finalJumpTarget(jump)
+ if (jump.label == target) false else {
+ jump.label = target
+ true
+ }
+
+ case _ => false
+ }
+
+ /**
+ * Eliminates unnecessary jump instructions
+ *
+ * Jump l; [nops]; l: [...]
+ * => POP*; [nops]; l: [...]
+ *
+ * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump.
+ */
+ private def removeJumpToSuccessor(method: MethodNode, instruction: AbstractInsnNode) = instruction match {
+ case JumpNonJsr(jump) if nextExecutableInstruction(jump, alsoKeep = Set(jump.label)) == Some(jump.label) =>
+ removeJumpAndAdjustStack(method, jump)
+ true
+ case _ => false
+ }
+
+ /**
+ * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch
+ * and eliminates the GOTO.
+ *
+ * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...]
+ * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...]
+ *
+ * Note that no label definitions are allowed in the first [nops] section. Otherwise, there could
+ * be some other jump to the GOTO, and eliminating it would change behavior.
+ *
+ * For technical reasons, we cannot remove the GOTO here (*). Instead this method returns an Option
+ * containing the GOTO that needs to be eliminated.
+ *
+ * (*) The ASM instruction iterator (used in the caller [[simplifyJumps]]) has an undefined
+ * behavior if the successor of the current instruction is removed, which may be the case here
+ */
+ private def simplifyBranchOverGoto(method: MethodNode, instruction: AbstractInsnNode): Option[JumpInsnNode] = instruction match {
+ case ConditionalJump(jump) =>
+ // don't skip over labels, see doc comment
+ nextExecutableInstruction(jump, alsoKeep = _.isInstanceOf[LabelNode]) match {
+ case Some(Goto(goto)) =>
+ if (nextExecutableInstruction(goto, alsoKeep = Set(jump.label)) == Some(jump.label)) {
+ val newJump = new JumpInsnNode(negateJumpOpcode(jump.getOpcode), goto.label)
+ method.instructions.set(jump, newJump)
+ Some(goto)
+ } else None
+
+ case _ => None
+ }
+ case _ => None
+ }
+
+ /**
+ * Inlines xRETURN and ATHROW
+ *
+ * GOTO l; [any ops]; l: xRETURN/ATHROW
+ * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW
+ *
+ * inlining is only done if the GOTO instruction is not part of a try block, otherwise the
+ * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw
+ * an IllegalMonitorStateException, as described here:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return
+ */
+ private def simplifyGotoReturn(method: MethodNode, instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match {
+ case Goto(jump) =>
+ nextExecutableInstruction(jump.label) match {
+ case Some(target) =>
+ if (isReturn(target) || target.getOpcode == Opcodes.ATHROW) {
+ method.instructions.set(jump, target.clone(null))
+ true
+ } else false
+
+ case _ => false
+ }
+ case _ => false
+ })
+}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index c49f23852f..a866173a88 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -56,11 +56,8 @@ abstract class ClosureElimination extends SubComponent {
case (BOX(t1), UNBOX(t2)) if (t1 == t2) =>
Some(Nil)
- case (LOAD_FIELD(sym, isStatic), DROP(_)) if !sym.hasAnnotation(definitions.VolatileAttr) =>
- if (isStatic)
- Some(Nil)
- else
- Some(DROP(REFERENCE(definitions.ObjectClass)) :: Nil)
+ case (LOAD_FIELD(sym, /* isStatic */false), DROP(_)) if !sym.hasAnnotation(definitions.VolatileAttr) && inliner.isClosureClass(sym.owner) =>
+ Some(DROP(REFERENCE(definitions.ObjectClass)) :: Nil)
case _ => None
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
index 1fadcb8920..0e6ee76eb2 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -7,7 +7,6 @@ package scala
package tools.nsc
package backend.opt
-import scala.tools.nsc.backend.icode.analysis.LubException
import scala.annotation.tailrec
/**
@@ -19,7 +18,7 @@ import scala.annotation.tailrec
*
* With some more work it could be extended to
* - cache stable values (final fields, modules) in locals
- * - replace the copy propagation in ClosureElilmination
+ * - replace the copy propagation in ClosureElimination
* - fold constants
* - eliminate unnecessary stores and loads
* - propagate knowledge gathered from conditionals for further optimization
@@ -438,7 +437,7 @@ abstract class ConstantOptimization extends SubComponent {
// TODO if we do all that we need to be careful in the
// case that success and failure are the same target block
// because we're using a Map and don't want one possible state to clobber the other
- // alternative mayb we should just replace the conditional with a jump if both targets are the same
+ // alternative maybe we should just replace the conditional with a jump if both targets are the same
def mightEqual = val1 mightEqual val2
def mightNotEqual = val1 mightNotEqual val2
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 90c37ba0b3..1b6631e7a4 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -169,9 +169,14 @@ abstract class DeadCodeElimination extends SubComponent {
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) =>
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) | CREATE_ARRAY(_, _) =>
moveToWorkList()
+ case LOAD_FIELD(sym, isStatic) if isStatic || !inliner.isClosureClass(sym.owner) =>
+ // static load may trigger static initialization.
+ // non-static load can throw NPE (but we know closure fields can't be accessed via a
+ // null reference).
+ moveToWorkList()
case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
moveToWorkList()
@@ -186,13 +191,15 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
case _ =>
dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
- debuglog("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
+ debuglog("DROP is inessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
false
}
}
moveToWorkListIf(necessary)
case LOAD_MODULE(sym) if isLoadNeeded(sym) =>
moveToWorkList() // SI-4859 Module initialization might side-effect.
+ case CALL_PRIMITIVE(Arithmetic(DIV | REM, INT | LONG) | ArrayLength(_)) =>
+ moveToWorkList() // SI-8601 Might divide by zero
case _ => ()
moveToWorkListIf(cond = false)
}
@@ -216,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent {
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
- // adds the instrutions that define the stack values about to be consumed to the work list to
+ // adds the instructions that define the stack values about to be consumed to the work list to
// be marked useful
def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
debuglog(s"\t${bb1(idx1)} is consumed by $instr")
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index 235e954f88..425c10d153 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -182,7 +182,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
// in other words: what's on the stack MUST conform to what's in the THROW(..)!
if (!canReplaceHandler) {
- currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler inside incorrect" +
+ reporter.warning(NoPosition, "Unable to inline the exception handler inside incorrect" +
" block:\n" + bblock.iterator.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
"before instruction index " + index)
}
@@ -383,7 +383,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
Some((exceptionLocal, copy))
case _ =>
- currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" +
+ reporter.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" +
handler.iterator.mkString("\n"))
None
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index f6de522d09..8f6fc65706 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -26,7 +26,7 @@ import scala.reflect.internal.util.NoSourceFile
* where `p` is defined in a library L, and is accessed from a library C (for Client),
* where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
* The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
- * (the accesibility of methods and constructors isn't touched by the inliner).
+ * (the accessibility of methods and constructors isn't touched by the inliner).
*
* Thus we add one more goal to our list:
* (c) Compile C (either optimized or not) against any of L or L',
@@ -195,7 +195,7 @@ abstract class Inliners extends SubComponent {
/** The current iclass */
private var currentIClazz: IClass = _
- private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
+ private def warn(pos: Position, msg: String) = currentRun.reporting.inlinerWarning(pos, msg)
private def ownedName(sym: Symbol): String = exitingUncurry {
val count = (
@@ -283,14 +283,14 @@ abstract class Inliners extends SubComponent {
}
val tfa = new analysis.MTFAGrowable()
- tfa.stat = global.settings.Ystatistics.value
+ tfa.stat = global.settings.YstatisticsEnabled
val staleOut = new mutable.ListBuffer[BasicBlock]
val splicedBlocks = mutable.Set.empty[BasicBlock]
val staleIn = mutable.Set.empty[BasicBlock]
/**
* A transformation local to the body of the IMethod received as argument.
- * An linining decision consists in replacing a callsite with the body of the callee.
+ * An inlining decision consists in replacing a callsite with the body of the callee.
* Please notice that, because `analyzeMethod()` itself may modify a method body,
* the particular callee bodies that end up being inlined depend on the particular order in which methods are visited
* (no topological sorting over the call-graph is attempted).
diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
new file mode 100644
index 0000000000..3f06264e3c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.net.URL
+import scala.annotation.tailrec
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import scala.tools.nsc.util.ClassRepresentation
+
+/**
+ * A classpath unifying multiple class- and sourcepath entries.
+ * A flat classpath can obtain entries for classes and sources independently,
+ * so it tries to perform operations efficiently - iterating only those collections
+ * that are needed at a given moment, and only as far as necessary.
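+ *
+ * For example, an aggregate over a class file directory and a source directory could be
+ * created along these lines (paths are illustrative):
+ * {{{
+ * AggregateFlatClassPath(Seq(
+ *   DirectoryFlatClassPath(new java.io.File("out/classes")),
+ *   DirectoryFlatSourcePath(new java.io.File("src"))))
+ * }}}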
+ * @param aggregates classpath instances containing entries which this class processes
+ */
+case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatClassPath {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ @tailrec
+ def find(aggregates: Seq[FlatClassPath]): Option[AbstractFile] =
+ if (aggregates.nonEmpty) {
+ val classFile = aggregates.head.findClassFile(className)
+ if (classFile.isDefined) classFile
+ else find(aggregates.tail)
+ } else None
+
+ find(aggregates)
+ }
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ @tailrec
+ def findEntry[T <: ClassRepClassPathEntry](aggregates: Seq[FlatClassPath], getEntries: FlatClassPath => Seq[T]): Option[T] =
+ if (aggregates.nonEmpty) {
+ val entry = getEntries(aggregates.head)
+ .find(_.name == simpleClassName)
+ if (entry.isDefined) entry
+ else findEntry(aggregates.tail, getEntries)
+ } else None
+
+ val classEntry = findEntry(aggregates, classesGetter(pkg))
+ val sourceEntry = findEntry(aggregates, sourcesGetter(pkg))
+
+ mergeClassesAndSources(classEntry.toList, sourceEntry.toList).headOption
+ }
+
+ override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs)
+
+ override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct
+
+ override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*)
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct
+ aggregatedPackages
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] =
+ getDistinctEntries(classesGetter(inPackage))
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] =
+ getDistinctEntries(sourcesGetter(inPackage))
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip
+ val distinctPackages = packages.flatten.distinct
+ val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*)
+ FlatClassPathEntries(distinctPackages, distinctClassesAndSources)
+ }
+
+ /**
+ * Returns only one entry for each name. If there's both a source and a class entry, it
+ * creates an entry containing both of them. If there is more than one class or source entry
+ * for the same class, the first entry of each type found on the classpath is used.
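+ *
+ * For example, if one aggregate provides a class file entry for `Foo` and another provides a
+ * source entry for `Foo`, the result contains a single ClassAndSourceFilesEntry combining both files.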
+ */
+ private def mergeClassesAndSources(entries: Seq[ClassRepClassPathEntry]*): Seq[ClassRepClassPathEntry] = {
+ // based on the implementation from MergedClassPath
+ var count = 0
+ val indices = collection.mutable.HashMap[String, Int]()
+ val mergedEntries = new ArrayBuffer[ClassRepClassPathEntry](1024)
+
+ for {
+ partOfEntries <- entries
+ entry <- partOfEntries
+ } {
+ val name = entry.name
+ if (indices contains name) {
+ val index = indices(name)
+ val existing = mergedEntries(index)
+
+ if (existing.binary.isEmpty && entry.binary.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get)
+ if (existing.source.isEmpty && entry.source.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get)
+ }
+ else {
+ indices(name) = count
+ mergedEntries += entry
+ count += 1
+ }
+ }
+ mergedEntries.toIndexedSeq
+ }
+
+ private def getDistinctEntries[EntryType <: ClassRepClassPathEntry](getEntries: FlatClassPath => Seq[EntryType]): Seq[EntryType] = {
+ val seenNames = collection.mutable.HashSet[String]()
+ val entriesBuffer = new ArrayBuffer[EntryType](1024)
+ for {
+ cp <- aggregates
+ entry <- getEntries(cp) if !seenNames.contains(entry.name)
+ } {
+ entriesBuffer += entry
+ seenNames += entry.name
+ }
+ entriesBuffer.toIndexedSeq
+ }
+
+ private def classesGetter(pkg: String) = (cp: FlatClassPath) => cp.classes(pkg)
+ private def sourcesGetter(pkg: String) = (cp: FlatClassPath) => cp.sources(pkg)
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
new file mode 100644
index 0000000000..9bf4e3f779
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+
+/**
+ * A trait that contains factory methods for classpath elements of type T.
+ *
+ * The logic has been abstracted from ClassPath#ClassPathContext so it's possible
+ * to have a common trait that supports both recursive and flat classpath representations.
+ *
+ * Therefore, we expect that T will be either ClassPath[U] or FlatClassPath.
+ */
+trait ClassPathFactory[T] {
+
+ /**
+ * Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): T
+
+ /**
+ * Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[T]
+
+ def expandPath(path: String, expandStar: Boolean = true): List[String] = ClassPath.expandPath(path, expandStar)
+
+ def expandDir(extdir: String): List[String] = ClassPath.expandDir(extdir)
+
+ def contentsOfDirsInPath(path: String): List[T] =
+ for {
+ dir <- expandPath(path, expandStar = false)
+ name <- expandDir(dir)
+ entry <- Option(AbstractFile.getDirectory(name))
+ } yield newClassPath(entry)
+
+ def classesInExpandedPath(path: String): IndexedSeq[T] =
+ classesInPathImpl(path, expand = true).toIndexedSeq
+
+ def classesInPath(path: String) = classesInPathImpl(path, expand = false)
+
+ def classesInManifest(useManifestClassPath: Boolean) =
+ if (useManifestClassPath) ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
+ else Nil
+
+ // Internal
+ protected def classesInPathImpl(path: String, expand: Boolean) =
+ for {
+ file <- expandPath(path, expand)
+ dir <- Option(AbstractFile.getDirectory(file))
+ } yield newClassPath(dir)
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
new file mode 100644
index 0000000000..81d2f7320f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.io.FileFilter
+import java.net.URL
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.PlainFile
+import scala.tools.nsc.util.ClassRepresentation
+import FileUtils._
+
+/**
+ * A trait for looking up classpath entries of a given type in directories.
+ * It provides common logic for classes handling class and source files.
+ * It makes use of the fact that, with nested directories, it's easy to find a file
+ * once we know the name of its package.
+ */
+trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+ val dir: File
+ assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(dir.getPath)
+
+ import FlatClassPath.RootPackage
+ private def getDirectory(forPackage: String): Option[File] = {
+ if (forPackage == RootPackage) {
+ Some(dir)
+ } else {
+ val packageDirName = FileUtils.dirPath(forPackage)
+ val packageDir = new File(dir, packageDirName)
+ if (packageDir.exists && packageDir.isDirectory) {
+ Some(packageDir)
+ } else None
+ }
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val dirForPackage = getDirectory(inPackage)
+ val nestedDirs: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles(DirectoryFileLookup.packageDirectoryFileFilter)
+ }
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ val entries = nestedDirs map { file =>
+ PackageEntryImpl(prefix + file.getName)
+ }
+ entries
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles(fileFilter)
+ }
+ val entries = files map { file =>
+ val wrappedFile = new scala.reflect.io.File(file)
+ createFileEntry(new PlainFile(wrappedFile))
+ }
+ entries
+ }
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles()
+ }
+ val packagePrefix = PackageNameUtils.packagePrefix(inPackage)
+ val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ for (file <- files) {
+ if (file.isPackage) {
+ val pkgEntry = PackageEntryImpl(packagePrefix + file.getName)
+ packageBuf += pkgEntry
+ } else if (fileFilter.accept(file)) {
+ val wrappedFile = new scala.reflect.io.File(file)
+ val abstractFile = new PlainFile(wrappedFile)
+ fileBuf += createFileEntry(abstractFile)
+ }
+ }
+ FlatClassPathEntries(packageBuf, fileBuf)
+ }
+
+ protected def createFileEntry(file: AbstractFile): FileEntryType
+ protected def fileFilter: FileFilter
+}
+
+object DirectoryFileLookup {
+
+ private[classpath] object packageDirectoryFileFilter extends FileFilter {
+ override def accept(pathname: File): Boolean = pathname.isPackage
+ }
+}
+
+case class DirectoryFlatClassPath(dir: File)
+ extends DirectoryFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val classFile = new File(s"$dir/$relativePath.class")
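+ // e.g. className "foo.bar.Baz" is looked up as <dir>/foo/bar/Baz.class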
+ if (classFile.exists) {
+ val wrappedClassFile = new scala.reflect.io.File(classFile)
+ val abstractClassFile = new PlainFile(wrappedClassFile)
+ Some(abstractClassFile)
+ } else None
+ }
+
+ override protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def fileFilter: FileFilter = DirectoryFlatClassPath.classFileFilter
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+}
+
+object DirectoryFlatClassPath {
+
+ private val classFileFilter = new FileFilter {
+ override def accept(pathname: File): Boolean = pathname.isClass
+ }
+}
+
+case class DirectoryFlatSourcePath(dir: File)
+ extends DirectoryFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def fileFilter: FileFilter = DirectoryFlatSourcePath.sourceFileFilter
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ findSourceFile(className) map SourceFileEntryImpl
+ }
+
+ private def findSourceFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val sourceFile = Stream("scala", "java")
+ .map(ext => new File(s"$dir/$relativePath.$ext"))
+ .collectFirst { case file if file.exists() => file }
+
+ sourceFile.map { file =>
+ val wrappedSourceFile = new scala.reflect.io.File(file)
+ val abstractSourceFile = new PlainFile(wrappedSourceFile)
+ abstractSourceFile
+ }
+ }
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+}
+
+object DirectoryFlatSourcePath {
+
+ private val sourceFileFilter = new FileFilter {
+ override def accept(pathname: File): Boolean = endsScalaOrJava(pathname.getName)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
new file mode 100644
index 0000000000..ee2528e15c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.{ File => JFile }
+import java.net.URL
+import scala.reflect.internal.FatalError
+import scala.reflect.io.AbstractFile
+
+/**
+ * Common methods related to Java files and abstract files used in the context of classpath
+ */
+object FileUtils {
+ implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name)
+
+ def isClass: Boolean = !file.isDirectory && file.hasExtension("class")
+
+ def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java"))
+
+ // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip?
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip")
+
+ /**
+ * Safe method returning a sequence containing one URL representing this file when the underlying file exists,
+ * and returning the given default value otherwise.
+ */
+ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL)
+ }
+
+ implicit class FileOps(val file: JFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName)
+
+ def isClass: Boolean = file.isFile && file.getName.endsWith(".class")
+ }
+
+ def stripSourceExtension(fileName: String): String = {
+ if (endsScala(fileName)) stripClassExtension(fileName)
+ else if (endsJava(fileName)) stripJavaExtension(fileName)
+ else throw new FatalError("Unexpected source file ending: " + fileName)
+ }
+
+ def dirPath(forPackage: String) = forPackage.replace('.', '/')
+
+ def endsClass(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".class"
+
+ def endsScalaOrJava(fileName: String): Boolean =
+ endsScala(fileName) || endsJava(fileName)
+
+ def endsJava(fileName: String): Boolean =
+ fileName.length > 5 && fileName.substring(fileName.length - 5) == ".java"
+
+ def endsScala(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".scala"
+
+ def stripClassExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length
+
+ def stripJavaExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 5)
+
+ // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed
+ // because then some tests in partest don't pass
+ private def mayBeValidPackage(dirName: String): Boolean =
+ (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.')
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
new file mode 100644
index 0000000000..cb201617d2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, ClassRepresentation }
+
+/**
+ * A base trait for the particular flat classpath representation implementations.
+ *
+ * We call this variant of a classpath representation flat because it's possible to
+ * query the whole classpath using just a single instance extending this trait.
+ *
+ * This is an alternative design compared to scala.tools.nsc.util.ClassPath
+ */
+trait FlatClassPath extends ClassFileLookup[AbstractFile] {
+ /** Empty string represents root package */
+ private[nsc] def packages(inPackage: String): Seq[PackageEntry]
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
+
+ /** Allows getting entries for packages and classes merged with sources, possibly in one pass. */
+ private[nsc] def list(inPackage: String): FlatClassPathEntries
+
+ // A default implementation which should be overridden if a more efficient
+ // solution can be provided for a given type of FlatClassPath
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ val foundClassFromClassFiles = classes(pkg)
+ .find(_.name == simpleClassName)
+
+ def findClassInSources = sources(pkg)
+ .find(_.name == simpleClassName)
+
+ foundClassFromClassFiles orElse findClassInSources
+ }
+
+ override def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ def asClassPathStrings: Seq[String]
+}
+
+object FlatClassPath {
+ val RootPackage = ""
+}
+
+case class FlatClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepClassPathEntry])
+
+object FlatClassPathEntries {
+ import scala.language.implicitConversions
+ // to have a working unzip method
+ implicit def entry2Tuple(entry: FlatClassPathEntries) = (entry.packages, entry.classesAndSources)
+}
+
+sealed trait ClassRepClassPathEntry extends ClassRepresentation[AbstractFile]
+
+trait ClassFileEntry extends ClassRepClassPathEntry {
+ def file: AbstractFile
+}
+
+trait SourceFileEntry extends ClassRepClassPathEntry {
+ def file: AbstractFile
+}
+
+trait PackageEntry {
+ def name: String
+}
+
+private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
+ override def name = FileUtils.stripClassExtension(file.name) // class name
+
+ override def binary: Option[AbstractFile] = Some(file)
+ override def source: Option[AbstractFile] = None
+}
+
+private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
+ override def name = FileUtils.stripSourceExtension(file.name)
+
+ override def binary: Option[AbstractFile] = None
+ override def source: Option[AbstractFile] = Some(file)
+}
+
+private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepClassPathEntry {
+ override def name = FileUtils.stripClassExtension(classFile.name)
+
+ override def binary: Option[AbstractFile] = Some(classFile)
+ override def source: Option[AbstractFile] = Some(srcFile)
+}
+
+private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
+
+private[nsc] trait NoSourcePaths {
+ def asSourcePathString: String = ""
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
+}
+
+private[nsc] trait NoClassPaths {
+ def findClassFile(className: String): Option[AbstractFile] = None
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
new file mode 100644
index 0000000000..7f67381d4d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import FileUtils.AbstractFileOps
+
+/**
+ * Provides factory methods for flat classpath. When creating classpath instances for a given path,
+ * it uses the proper type of classpath depending on the types of the particular files containing sources or classes.
+ */
+class FlatClassPathFactory(settings: Settings) extends ClassPathFactory[FlatClassPath] {
+
+ override def newClassPath(file: AbstractFile): FlatClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarFlatClassPathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryFlatClassPath(file.file)
+ else
+ sys.error(s"Unsupported classpath element: $file")
+
+ override def sourcesInPath(path: String): List[FlatClassPath] =
+ for {
+ file <- expandPath(path, expandStar = false)
+ dir <- Option(AbstractFile getDirectory file)
+ } yield createSourcePath(dir)
+
+ private def createSourcePath(file: AbstractFile): FlatClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarFlatSourcePathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryFlatSourcePath(file.file)
+ else
+ sys.error(s"Unsupported sourcepath element: $file")
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
new file mode 100644
index 0000000000..c907d565d2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+
+/**
+ * Common methods related to package names represented as String
+ */
+object PackageNameUtils {
+
+ /**
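+ * Splits a full class name into its package prefix and simple class name,
+ * e.g. `separatePkgAndClassNames("scala.collection.immutable.List")` returns
+ * `("scala.collection.immutable", "List")`.
+ *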
+ * @param fullClassName full class name with package
+ * @return (package, simple class name)
+ */
+ def separatePkgAndClassNames(fullClassName: String): (String, String) = {
+ val lastDotIndex = fullClassName.lastIndexOf('.')
+ if (lastDotIndex == -1)
+ (RootPackage, fullClassName)
+ else
+ (fullClassName.substring(0, lastDotIndex), fullClassName.substring(lastDotIndex + 1))
+ }
+
+ def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "."
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
new file mode 100644
index 0000000000..84e21a3ccd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -0,0 +1,180 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.annotation.tailrec
+import scala.reflect.io.{ AbstractFile, FileZipArchive, ManifestResources }
+import scala.tools.nsc.Settings
+import FileUtils._
+
+/**
+ * A trait providing an optional cache for classpath entries obtained from zip and jar files.
+ * It's possible to create such a cache assuming that entries in such files won't change (at
+ * least they will be the same each time we load the classpath during the lifetime of the JVM process)
+ * - unlike class and source files in directories, which can be modified and recompiled.
+ * It allows us to e.g. significantly reduce the memory used by PresentationCompilers in Scala IDE
+ * when there are a lot of projects sharing a lot of common dependencies.
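+ *
+ * Caching is bypassed when `settings.YdisableFlatCpCaching` is set (see the `create` method below).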
+ */
+sealed trait ZipAndJarFileLookupFactory {
+
+ private val cache = collection.mutable.Map.empty[AbstractFile, FlatClassPath]
+
+ def create(zipFile: AbstractFile, settings: Settings): FlatClassPath = {
+ if (settings.YdisableFlatCpCaching) createForZipFile(zipFile)
+ else createUsingCache(zipFile, settings)
+ }
+
+ protected def createForZipFile(zipFile: AbstractFile): FlatClassPath
+
+ private def createUsingCache(zipFile: AbstractFile, settings: Settings): FlatClassPath = cache.synchronized {
+ def newClassPathInstance = {
+ if (settings.verbose || settings.Ylogcp)
+ println(s"$zipFile is not yet in the classpath cache")
+ createForZipFile(zipFile)
+ }
+ cache.getOrElseUpdate(zipFile, newClassPathInstance)
+ }
+}
+
+/**
+ * Manages creation of flat classpath for class files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
+
+ private case class ZipArchiveFlatClassPath(zipFile: File)
+ extends ZipArchiveFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass
+ }
+
+ /**
+ * This type of classpath is closely related to the support for JSR-223.
+ * Its usage can be observed e.g. when running:
+ * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala
+ * with a specially prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry:
+ * Name: scala/Function2$mcFJD$sp.class
+ */
+ private case class ManifestResourcesFlatClassPath(file: ManifestResources)
+ extends FlatClassPath
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override def asClassPathStrings: Seq[String] = Seq(file.path)
+
+ override def asURLs: Seq[URL] = file.toURLs()
+
+ import ManifestResourcesFlatClassPath.PackageFileInfo
+ import ManifestResourcesFlatClassPath.PackageInfo
+
+ /**
+ * A cache mapping package name to abstract file for package directory and subpackages of given package.
+ *
+ * ManifestResources can iterate through the collections of entries from e.g. remote jar file.
+ * We can't just specify the path to a concrete directory etc., so we can't just 'jump' into
+ * a given package when it's needed. On the other hand we can iterate over entries to get
+ * AbstractFiles, iterate over entries of these files etc.
+ *
+ * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time,
+ * when we need subpackages of a given package or its classes, we traverse once and cache only packages.
+ * Classes for given package can be then easily loaded when they are needed.
+ */
+ private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = {
+ val packages = collection.mutable.HashMap[String, PackageFileInfo]()
+
+ def getSubpackages(dir: AbstractFile): List[AbstractFile] =
+ (for (file <- dir if file.isPackage) yield file)(collection.breakOut)
+
+ @tailrec
+ def traverse(packagePrefix: String,
+ filesForPrefix: List[AbstractFile],
+ subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match {
+ case pkgFile :: remainingFiles =>
+ val subpackages = getSubpackages(pkgFile)
+ val fullPkgName = packagePrefix + pkgFile.name
+ packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages))
+ val newPackagePrefix = fullPkgName + "."
+ subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages))
+ traverse(packagePrefix, remainingFiles, subpackagesQueue)
+ case Nil if subpackagesQueue.nonEmpty =>
+ val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue()
+ traverse(packagePrefix, filesForPrefix, subpackagesQueue)
+ case _ =>
+ }
+
+ val subpackages = getSubpackages(file)
+ packages.put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages))
+ traverse(FlatClassPath.RootPackage, subpackages, collection.mutable.Queue())
+ packages
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(_, subpackages)) =>
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ subpackages.map(packageFile => PackageEntryImpl(prefix + packageFile.name))
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(pkg, _)) =>
+ (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut)
+ }
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(packages(inPackage), classes(inPackage))
+ }
+
+ private object ManifestResourcesFlatClassPath {
+ case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile])
+ case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath =
+ if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
+ else ZipArchiveFlatClassPath(zipFile.file)
+
+ private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match {
+ case manifestRes: ManifestResources =>
+ ManifestResourcesFlatClassPath(manifestRes)
+ case _ =>
+ val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile"
+ throw new IllegalArgumentException(errorMsg)
+ }
+}
+
+/**
+ * Manages creation of flat classpath for source files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
+
+ private case class ZipArchiveFlatSourcePath(zipFile: File)
+ extends ZipArchiveFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = ZipArchiveFlatSourcePath(zipFile.file)
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
new file mode 100644
index 0000000000..1d0de57779
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.collection.Seq
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.FileZipArchive
+import FileUtils.AbstractFileOps
+
+/**
+ * A trait for looking up classpath entries of a given type in zip and jar files.
+ * It provides common logic for classes handling class and source files.
+ * It's aware of things like e.g. the META-INF directory, which is correctly skipped.
+ */
+trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+ val zipFile: File
+
+ assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath)
+
+ private val archive = new FileZipArchive(zipFile)
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if entry.isPackage
+ } yield PackageEntryImpl(prefix + entry.name)
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] =
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if isRequiredFileType(entry)
+ } yield createFileEntry(entry)
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val foundDirEntry = findDirEntry(inPackage)
+
+ foundDirEntry map { dirEntry =>
+ val pkgBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+
+ for (entry <- dirEntry.iterator) {
+ if (entry.isPackage)
+ pkgBuf += PackageEntryImpl(prefix + entry.name)
+ else if (isRequiredFileType(entry))
+ fileBuf += createFileEntry(entry)
+ }
+ FlatClassPathEntries(pkgBuf, fileBuf)
+ } getOrElse FlatClassPathEntries(Seq.empty, Seq.empty)
+ }
+
+ private def findDirEntry(pkg: String) = {
+ val dirName = s"${FileUtils.dirPath(pkg)}/"
+ archive.allDirs.get(dirName)
+ }
+
+ protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType
+ protected def isRequiredFileType(file: AbstractFile): Boolean
+}
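findDirEntry above delegates to FileUtils.dirPath, which is not part of this hunk; assuming it is a plain dots-to-slashes rewrite of the package name (with the root package mapping to the empty string), the directory keys looked up in FileZipArchive.allDirs would look like this sketch:

    // Hedged sketch of the assumed dirPath behaviour: "a.b.c" -> "a/b/c",
    // so findDirEntry's key becomes "a/b/c/" ("/" alone for the root package).
    object PackagePathSketch {
      def dirPath(inPackage: String): String = inPackage.replace('.', '/')

      def main(args: Array[String]): Unit = {
        println(s"${dirPath("scala.tools.nsc")}/") // scala/tools/nsc/
        println(s"${dirPath("")}/")                // "/" for the root package
      }
    }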
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index a61ad392ee..d34c14be0f 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -13,6 +13,7 @@ import symtab.Flags
import JavaTokens._
import scala.language.implicitConversions
import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.ListOfNil
trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val global : Global
@@ -26,10 +27,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def freshName(prefix: String): Name = freshTermName(prefix)
def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
- def deprecationWarning(off: Int, msg: String) = unit.deprecationWarning(off, msg)
+ def deprecationWarning(off: Int, msg: String) = currentRun.reporting.deprecationWarning(off, msg)
implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset)
- def warning(pos : Int, msg : String) : Unit = unit.warning(pos, msg)
- def syntaxError(pos: Int, msg: String) : Unit = unit.error(pos, msg)
+ def warning(pos : Int, msg : String) : Unit = reporter.warning(pos, msg)
+ def syntaxError(pos: Int, msg: String) : Unit = reporter.error(pos, msg)
}
abstract class JavaParser extends ParserCommon {
@@ -125,7 +126,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
makeParam(nme.syntheticParamName(count), tpt)
def makeParam(name: String, tpt: Tree): ValDef =
- makeParam(name: TermName, tpt)
+ makeParam(TermName(name), tpt)
def makeParam(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
@@ -488,7 +489,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val vparams = formalParams()
if (!isVoid) rtpt = optArrayBrackets(rtpt)
optThrows()
- val bodyOk = !inInterface || (mods hasFlag Flags.DEFAULTMETHOD)
+ val isStatic = mods hasFlag Flags.STATIC
+ val bodyOk = !inInterface || ((mods hasFlag Flags.DEFAULTMETHOD) || isStatic)
val body =
if (bodyOk && in.token == LBRACE) {
methodBody()
@@ -507,7 +509,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
EmptyTree
}
}
- if (inInterface) mods1 |= Flags.DEFERRED
+ if (inInterface && !isStatic) mods1 |= Flags.DEFERRED
List {
atPos(pos) {
DefDef(mods1, name.toTermName, tparams, List(vparams), rtpt, body)
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index c5401219dd..ac86dfd665 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -860,9 +860,9 @@ trait JavaScanners extends ast.parser.ScannersCommon {
class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner {
in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError)
init()
- def error (pos: Int, msg: String) = unit. error(pos, msg)
- def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
- def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
+ def error (pos: Int, msg: String) = reporter.error(pos, msg)
+ def incompleteInputError(pos: Int, msg: String) = currentRun.parsing.incompleteInputError(pos, msg)
+ def deprecationWarning(pos: Int, msg: String) = currentRun.reporting.deprecationWarning(pos, msg)
implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos)
}
}
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 7837f9a11a..1a5529140c 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package plugins
import scala.tools.nsc.io.{ Jar }
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.io.{ Directory, File, Path }
import java.io.InputStream
import java.util.zip.ZipException
@@ -60,6 +60,8 @@ abstract class Plugin {
* @return true to continue, or false to opt out
*/
def init(options: List[String], error: String => Unit): Boolean = {
+ // call to the deprecated method is required here; we must continue to support
+ // code in subclasses that override `processOptions`.
processOptions(options, error)
true
}
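For readers of the comment above: new plugins are expected to override `init` rather than the deprecated `processOptions`. A minimal sketch of such a plugin follows (the option handling and component list are deliberately trivial):

    import scala.tools.nsc.Global
    import scala.tools.nsc.plugins.{Plugin, PluginComponent}

    // Sketch of a plugin using the supported `init` hook instead of the
    // deprecated `processOptions`. Contributes no phases of its own.
    class EchoOptionsPlugin(val global: Global) extends Plugin {
      val name = "echo-options"
      val description = "prints the -P options it was given"
      val components: List[PluginComponent] = Nil

      override def init(options: List[String], error: String => Unit): Boolean = {
        options foreach (opt => global.reporter.echo(s"$name: got option $opt"))
        true // returning false opts the plugin out of this run
      }
    }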
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 12f9aeba27..4b1805479d 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package plugins
-import scala.reflect.io.{ File, Path }
+import scala.reflect.io.Path
import scala.tools.nsc.util.ClassPath
import scala.tools.util.PathResolver.Defaults
@@ -33,7 +33,7 @@ trait Plugins { global: Global =>
}
val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value)
val (goods, errors) = maybes partition (_.isSuccess)
- // Explicit parameterization of recover to suppress -Xlint warning about inferred Any
+ // Explicit parameterization of recover to avoid -Xlint warning about inferred Any
errors foreach (_.recover[Any] {
// legacy behavior ignores altogether, so at least warn devs
case e: MissingPluginException => if (global.isDeveloper) warning(e.getMessage)
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index 16d432438a..5e4914fa83 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -30,6 +30,7 @@ abstract class AbstractReporter extends Reporter {
private def isVerbose = settings.verbose.value
private def noWarnings = settings.nowarnings.value
private def isPromptSet = settings.prompt.value
+ private def isDebug = settings.debug
protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
if (severity == INFO) {
@@ -46,7 +47,7 @@ abstract class AbstractReporter extends Reporter {
severity.count += 1
display(pos, msg, severity)
}
- else if (settings.debug) {
+ else if (isDebug) {
severity.count += 1
display(pos, "[ suppressed ] " + msg, severity)
}
@@ -57,17 +58,19 @@ abstract class AbstractReporter extends Reporter {
}
}
+
/** Logs a position and returns true if it was already logged.
* @note Two positions are considered identical for logging if they have the same point.
*/
private def testAndLog(pos: Position, severity: Severity, msg: String): Boolean =
pos != null && pos.isDefined && {
- val fpos = pos.focus
+ val fpos = pos.focus
val suppress = positions(fpos) match {
- case ERROR => true // already error at position
- case highest if highest > severity => true // already message higher than present severity
- case `severity` => messages(fpos) contains msg // already issued this exact message
- case _ => false // good to go
+ case ERROR => true // already error at position
+ case highest
+ if highest.id > severity.id => true // already message higher than present severity
+ case `severity` => messages(fpos) contains msg // already issued this exact message
+ case _ => false // good to go
}
suppress || {
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 3f210a543c..5bf611a7b0 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -12,8 +12,7 @@ import scala.reflect.internal.util._
import StringOps._
/**
- * This class implements a Reporter that displays messages on a text
- * console.
+ * This class implements a Reporter that displays messages on a text console.
*/
class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: PrintWriter) extends AbstractReporter {
def this(settings: Settings) = this(settings, Console.in, new PrintWriter(Console.err, true))
@@ -30,7 +29,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
case INFO => null
}
- private def clabel(severity: Severity): String = {
+ protected def clabel(severity: Severity): String = {
val label0 = label(severity)
if (label0 eq null) "" else label0 + ": "
}
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 68362c066d..3d688efae1 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -8,76 +8,50 @@ package reporters
import scala.reflect.internal.util._
-/**
- * This interface provides methods to issue information, warning and
- * error messages.
+/** Report information, warnings and errors.
+ *
+ * This describes the internal interface for issuing information, warnings and errors.
+ * The only abstract method in this class must be info0.
+ *
+ * TODO: Move external clients (sbt/ide/partest) to reflect.internal.Reporter,
+ * and remove this class.
*/
-abstract class Reporter {
- protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit
-
- object severity extends Enumeration
- class Severity(val id: Int) extends severity.Value {
- var count: Int = 0
- }
- val INFO = new Severity(0) {
- override def toString: String = "INFO"
- }
- val WARNING = new Severity(1) {
- override def toString: String = "WARNING"
- }
- val ERROR = new Severity(2) {
- override def toString: String = "ERROR"
- }
-
- /** Whether very long lines can be truncated. This exists so important
- * debugging information (like printing the classpath) is not rendered
- * invisible due to the max message length.
- */
- private var _truncationOK: Boolean = true
- def truncationOK = _truncationOK
- def withoutTruncating[T](body: => T): T = {
- val saved = _truncationOK
- _truncationOK = false
- try body
- finally _truncationOK = saved
- }
-
- private var incompleteHandler: (Position, String) => Unit = null
- def incompleteHandled = incompleteHandler != null
- def withIncompleteHandler[T](handler: (Position, String) => Unit)(thunk: => T) = {
- val saved = incompleteHandler
- incompleteHandler = handler
- try thunk
- finally incompleteHandler = saved
- }
-
- var cancelled = false
- def hasErrors = ERROR.count > 0 || cancelled
- def hasWarnings = WARNING.count > 0
+abstract class Reporter extends scala.reflect.internal.Reporter {
+ /** Informational messages. If `!force`, they may be suppressed. */
+ final def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
/** For sending a message which should not be labeled as a warning/error,
* but also shouldn't require -verbose to be visible.
*/
- def echo(msg: String): Unit = info(NoPosition, msg, force = true)
- def echo(pos: Position, msg: String): Unit = info(pos, msg, force = true)
+ def echo(msg: String): Unit = info(NoPosition, msg, force = true)
- /** Informational messages, suppressed unless -verbose or force=true. */
- def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
+ // overridden by sbt, IDE -- should not be in the reporting interface
+ // (IDE receives comments from ScaladocAnalyzer using this hook method)
+ // TODO: IDE should override a hook method in the parser instead
+ def comment(pos: Position, msg: String): Unit = {}
- /** Warnings and errors. */
- def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, force = false))
- def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, force = false))
- def incompleteInputError(pos: Position, msg: String): Unit = {
- if (incompleteHandled) incompleteHandler(pos, msg)
- else error(pos, msg)
- }
+ // used by sbt (via unit.cancel) to cancel a compile (see hasErrors)
+ // TODO: figure out how sbt uses this, come up with a separate interface for controlling the build
+ var cancelled: Boolean = false
- def comment(pos: Position, msg: String) { }
- def flush() { }
- def reset() {
- INFO.count = 0
- ERROR.count = 0
- WARNING.count = 0
- cancelled = false
+ override def hasErrors: Boolean = super.hasErrors || cancelled
+
+ override def reset(): Unit = {
+ super.reset()
+ cancelled = false
}
+
+ // the below is copy/pasted from ReporterImpl for now
+ // partest expects this inner class
+ // TODO: rework partest to use the scala.reflect.internal interface,
+ // remove duplication here, and consolidate reflect.internal.{Reporter & ReporterImpl}
+ class Severity(val id: Int)(name: String) { var count: Int = 0 ; override def toString = name}
+ object INFO extends Severity(0)("INFO")
+ object WARNING extends Severity(1)("WARNING")
+ // reason for copy/paste: this is used by partest (must be a val, not an object)
+ // TODO: use count(ERROR) in scala.tools.partest.nest.DirectCompiler#errorCount, rather than ERROR.count
+ lazy val ERROR = new Severity(2)("ERROR")
+
+ def count(severity: Severity): Int = severity.count
+ def resetCount(severity: Severity): Unit = severity.count = 0
}
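Since `info0` is now the only abstract member, a custom reporter reduces to one method. A minimal sketch (printing to stderr, in the spirit of StoreReporter but without storing anything):

    import scala.reflect.internal.util.Position
    import scala.tools.nsc.reporters.Reporter

    // Minimal sketch of a Reporter subclass: only `info0` needs to be
    // implemented; the severity label comes from the inner Severity objects.
    class StderrReporter extends Reporter {
      protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit =
        Console.err.println(s"[$severity] $pos: $msg")
    }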
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 04c5bdf824..24a61cb171 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -10,8 +10,7 @@ import scala.collection.mutable
import scala.reflect.internal.util.Position
/**
- * This class implements a Reporter that displays messages on a text
- * console.
+ * This class implements a Reporter that stores its reports in the set `infos`.
*/
class StoreReporter extends Reporter {
case class Info(pos: Position, msg: String, severity: Severity) {
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index 8b897b83b2..6b339b2a6d 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -7,19 +7,24 @@ package scala
package tools.nsc
package settings
+import scala.language.higherKinds
+
trait AbsScalaSettings {
self: AbsSettings =>
+ type MultiChoiceEnumeration <: Enumeration
+
type Setting <: AbsSetting
- type BooleanSetting <: Setting { type T = Boolean }
- type ChoiceSetting <: Setting { type T = String }
- type IntSetting <: Setting { type T = Int }
- type MultiStringSetting <: Setting { type T = List[String] }
- type PathSetting <: Setting { type T = String }
- type PhasesSetting <: Setting { type T = List[String] }
- type StringSetting <: Setting { type T = String }
- type PrefixSetting <: Setting { type T = List[String] }
+ type BooleanSetting <: Setting { type T = Boolean }
+ type ChoiceSetting <: Setting { type T = String }
+ type IntSetting <: Setting { type T = Int }
+ type MultiStringSetting <: Setting { type T = List[String] }
+ type MultiChoiceSetting[E <: MultiChoiceEnumeration] <: Setting { type T <: E#ValueSet }
+ type PathSetting <: Setting { type T = String }
+ type PhasesSetting <: Setting { type T = List[String] }
+ type StringSetting <: Setting { type T = String }
+ type PrefixSetting <: Setting { type T = List[String] }
type OutputDirs
type OutputSetting <: Setting
@@ -28,6 +33,7 @@ trait AbsScalaSettings {
def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String): ChoiceSetting
def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]): IntSetting
def MultiStringSetting(name: String, helpArg: String, descr: String): MultiStringSetting
+ def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]]): MultiChoiceSetting[E]
def OutputSetting(outputDirs: OutputDirs, default: String): OutputSetting
def PathSetting(name: String, descr: String, default: String): PathSetting
def PhasesSetting(name: String, descr: String, default: String): PhasesSetting
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 4727e6d867..060a24d8d4 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -35,7 +35,11 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
case s: AbsSettings => this.userSetSettings == s.userSetSettings
case _ => false
}
- override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n")).mkString
+ override def toString() = {
+ val uss = userSetSettings
+ val indent = if (uss.nonEmpty) " " * 2 else ""
+ uss.mkString(f"Settings {%n$indent", f"%n$indent", f"%n}%n")
+ }
def toConciseString = userSetSettings.mkString("(", " ", ")")
def checkDependencies =
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 8c2b510bfd..fffbb4333f 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -22,13 +22,15 @@ class FscSettings(error: String => Unit) extends Settings(error) {
val reset = BooleanSetting("-reset", "Reset compile server caches")
val shutdown = BooleanSetting("-shutdown", "Shutdown compile server")
val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "")
+ val port = IntSetting ("-port", "Search and start compile server in given port only",
+ 0, Some((0, Int.MaxValue)), (_: String) => None)
val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket")
val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)",
30, Some((0, Int.MaxValue)), (_: String) => None)
// For improved help output, separating fsc options from the others.
def fscSpecific = Set[Settings#Setting](
- currentDir, reset, shutdown, server, preferIPv4, idleMins
+ currentDir, reset, shutdown, server, port, preferIPv4, idleMins
)
val isFscSpecific: String => Boolean = fscSpecific map (_.name)
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 3590254128..b4987e1240 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -125,14 +125,26 @@ class MutableSettings(val errorFn: String => Unit)
case Some(cmd) => setter(cmd)(args)
}
- // if arg is of form -Xfoo:bar,baz,quux
- def parseColonArg(s: String): Option[List[String]] = {
- val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None)
-
- // any non-Nil return value means failure and we return s unmodified
- tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
+ // -Xfoo: clears Clearables
+ def clearIfExists(cmd: String): Option[List[String]] = lookupSetting(cmd) match {
+ case Some(c: Clearable) => c.clear() ; Some(Nil)
+ case Some(s) => s.errorAndValue(s"Missing argument to $cmd", None)
+ case None => None
}
+ // if arg is of form -Xfoo:bar,baz,quux
+ // the entire arg is consumed, so return None for failure
+ // any non-Nil return value means failure and we return s unmodified
+ def parseColonArg(s: String): Option[List[String]] =
+ if (s endsWith ":") {
+ clearIfExists(s.init)
+ } else {
+ for {
+ (p, args) <- StringOps.splitWhere(s, _ == ':', doDropIndex = true)
+ rest <- tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
+ } yield rest
+ }
+
// if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
def parseNormalArg(p: String, args: List[String]): Option[List[String]] =
tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _)
@@ -209,12 +221,16 @@ class MutableSettings(val errorFn: String => Unit)
def BooleanSetting(name: String, descr: String) = add(new BooleanSetting(name, descr))
def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String) =
add(new ChoiceSetting(name, helpArg, descr, choices, default))
- def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) = add(new IntSetting(name, descr, default, range, parser))
+ def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) =
+ add(new IntSetting(name, descr, default, range, parser))
def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr))
+ def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]] = None) =
+ add(new MultiChoiceSetting[E](name, helpArg, descr, domain, default))
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
- def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, initial: ScalaVersion, default: Option[ScalaVersion] = None) =
+ add(new ScalaVersionSetting(name, arg, descr, initial, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -363,7 +379,7 @@ class MutableSettings(val errorFn: String => Unit)
def withDeprecationMessage(msg: String): this.type = { _deprecationMessage = Some(msg) ; this }
}
- /** A setting represented by an integer */
+ /** A setting represented by an integer. */
class IntSetting private[nsc](
name: String,
descr: String,
@@ -439,7 +455,7 @@ class MutableSettings(val errorFn: String => Unit)
value = s.equalsIgnoreCase("true")
}
override def tryToSetColon(args: List[String]) = args match {
- case Nil => tryToSet(Nil)
+ case Nil => tryToSet(Nil)
case List(x) =>
if (x.equalsIgnoreCase("true")) {
value = true
@@ -447,7 +463,8 @@ class MutableSettings(val errorFn: String => Unit)
} else if (x.equalsIgnoreCase("false")) {
value = false
Some(Nil)
- } else errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
+ } else errorAndValue(s"'$x' is not a valid choice for '$name'", None)
+ case _ => errorAndValue(s"'$name' accepts only one boolean value", None)
}
}
@@ -490,28 +507,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
- /** A setting represented by a Scala version, (`default` unless set) */
+ /** A setting represented by a Scala version.
+ * The `initial` value is used if the setting is not specified.
+ * The `default` value is used if the option is specified without argument (e.g., `-Xmigration`).
+ */
class ScalaVersionSetting private[nsc](
name: String,
val arg: String,
descr: String,
- default: ScalaVersion)
+ initial: ScalaVersion,
+ default: Option[ScalaVersion])
extends Setting(name, descr) {
type T = ScalaVersion
- protected var v: T = NoScalaVersion
+ protected var v: T = initial
+ // This method is invoked if there are no colonated args. In this case the default value is
+ // used. No arguments are consumed.
override def tryToSet(args: List[String]) = {
- value = default
+ default match {
+ case Some(d) => value = d
+ case None => errorFn(s"$name requires an argument, the syntax is $helpSyntax")
+ }
Some(args)
}
override def tryToSetColon(args: List[String]) = args match {
- case Nil => value = default; Some(Nil)
- case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ case x :: xs => value = ScalaVersion(x, errorFn); Some(xs)
+ case nil => Some(nil)
}
- override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
-
def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
withHelpSyntax(s"${name}:<${arg}>")
@@ -528,6 +552,7 @@ class MutableSettings(val errorFn: String => Unit)
def prepend(s: String) = prependPath.value = join(s, prependPath.value)
def append(s: String) = appendPath.value = join(appendPath.value, s)
+ override def isDefault = super.isDefault && prependPath.isDefault && appendPath.isDefault
override def value = join(
prependPath.value,
super.value,
@@ -548,8 +573,198 @@ class MutableSettings(val errorFn: String => Unit)
}
}
+ /**
+ * Each [[MultiChoiceSetting]] takes a MultiChoiceEnumeration as domain. The enumeration may
+ * use the Choice class to define values, or simply use the default `Value` constructor:
+ *
+ * object SettingDomain extends MultiChoiceEnumeration { val arg1, arg2 = Value }
+ *
+ * Or
+ *
+ * object SettingDomain extends MultiChoiceEnumeration {
+ * val arg1 = Choice("arg1", "help")
+ * val arg2 = Choice("arg2", "help")
+ * }
+ *
+ * Choices with a non-empty `expandsTo` enable other options. Note that expanding choices are
+ * not present in the multiChoiceSetting.value set, only their expansion.
+ */
+ abstract class MultiChoiceEnumeration extends Enumeration {
+ case class Choice(name: String, help: String = "", expandsTo: List[Choice] = Nil) extends Val(name)
+ }
+
+ /**
+ * A Setting that collects string-valued settings from an enumerated domain.
+ * - These choices can be turned on or off: "-option:on,-off"
+ * - If an option is set both on and off, then the option is on
+ * - The choice "_" enables all choices that have not been explicitly disabled
+ *
+ * Arguments can be provided in colonated or non-colonated mode, i.e. "-option a b" or
+ * "-option:a,b". Note that arguments starting with a "-" can only be provided in colonated mode,
+ * otherwise they are interpreted as a new option.
+ *
+ * In non-colonated mode, the setting stops consuming arguments at the first non-choice,
+ * i.e. "-option a b c" only consumes "a" and "b" if "c" is not a valid choice.
+ *
+ * @param name command-line setting name, eg "-Xlint"
+ * @param helpArg help description for the kind of arguments it takes, eg "warning"
+ * @param descr description of the setting
+ * @param domain enumeration of the valid choices; a value defined via the Choice class
+ * carries its own help text, otherwise the value's string representation is used as the name
+ * @param default If Some(args), the default options if none are provided. If None, an
+ * error is printed if there are no arguments.
+ */
+ class MultiChoiceSetting[E <: MultiChoiceEnumeration] private[nsc](
+ name: String,
+ helpArg: String,
+ descr: String,
+ val domain: E,
+ val default: Option[List[String]]
+ ) extends Setting(name, s"$descr: `_' for all, `$name:help' to list") with Clearable {
+
+ withHelpSyntax(s"$name:<_,$helpArg,-$helpArg>")
+
+ object ChoiceOrVal {
+ def unapply(a: domain.Value): Option[(String, String, List[domain.Choice])] = a match {
+ case c: domain.Choice => Some((c.name, c.help, c.expandsTo))
+ case v: domain.Value => Some((v.toString, "", Nil))
+ }
+ }
+
+ type T = domain.ValueSet
+ protected var v: T = domain.ValueSet.empty
+
+ // Explicitly enabled or disabled. Yeas may contain expanding options, nays may not.
+ private var yeas = domain.ValueSet.empty
+ private var nays = domain.ValueSet.empty
+
+ // Asked for help
+ private var sawHelp = false
+ // Wildcard _ encountered
+ private var sawAll = false
+
+ private def badChoice(s: String) = errorFn(s"'$s' is not a valid choice for '$name'")
+ private def isChoice(s: String) = (s == "_") || (choices contains pos(s))
+
+ private def pos(s: String) = s stripPrefix "-"
+ private def isPos(s: String) = !(s startsWith "-")
+
+ override val choices: List[String] = domain.values.toList map {
+ case ChoiceOrVal(name, _, _) => name
+ }
+
+ def descriptions: List[String] = domain.values.toList map {
+ case ChoiceOrVal(_, "", x :: xs) => "Enables the options "+ (x :: xs).map(_.name).mkString(", ")
+ case ChoiceOrVal(_, descr, _) => descr
+ case _ => ""
+ }
+
+ /** (Re)compute from current yeas, nays, wildcard status. */
+ def compute() = {
+ def simple(v: domain.Value) = v match {
+ case ChoiceOrVal(_, _, Nil) => true
+ case _ => false
+ }
+
+ /**
+ * Expand an expanding option, if necessary recursively. Expanding options are not included in
+ * the result (consistent with "_", which is not in `value` either).
+ *
+ * Note: by precondition, options in nays are not expanding, they can only be leaves.
+ */
+ def expand(vs: domain.ValueSet): domain.ValueSet = vs flatMap {
+ case c @ ChoiceOrVal(_, _, Nil) => domain.ValueSet(c)
+ case ChoiceOrVal(_, _, others) => expand(domain.ValueSet(others: _*))
+ }
+
+ // yeas from _ or expansions are weak: an explicit nay will disable them
+ val weakYeas = if (sawAll) domain.values filter simple else expand(yeas filterNot simple)
+ value = (yeas filter simple) | (weakYeas &~ nays)
+ }
+
+ /** Add a named choice to the multichoice value. */
+ def add(arg: String) = arg match {
+ case _ if !isChoice(arg) =>
+ badChoice(arg)
+ case "_" =>
+ sawAll = true
+ compute()
+ case _ if isPos(arg) =>
+ yeas += domain withName arg
+ compute()
+ case _ =>
+ val choice = domain withName pos(arg)
+ choice match {
+ case ChoiceOrVal(_, _, _ :: _) => errorFn(s"'${pos(arg)}' cannot be negated, it enables other arguments")
+ case _ =>
+ }
+ nays += choice
+ compute()
+ }
+
+ def tryToSet(args: List[String]) = tryToSetArgs(args, halting = true)
+ override def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
+
+ /** Try to set args, handling "help" and default.
+ * The "halting" parameter means args were "-option a b c -else" so halt
+ * on "-else" or other non-choice. Otherwise, args were "-option:a,b,c,d",
+ * so process all and report non-choices as errors.
+ * @param args args to process
+ * @param halting stop on non-arg
+ */
+ private def tryToSetArgs(args: List[String], halting: Boolean) = {
+ val added = collection.mutable.ListBuffer.empty[String]
+
+ def tryArg(arg: String) = arg match {
+ case "help" => sawHelp = true
+ case s if isChoice(s) => added += s // this case also adds "_"
+ case s => badChoice(s)
+ }
+ def loop(args: List[String]): List[String] = args match {
+ case arg :: _ if halting && (!isPos(arg) || !isChoice(arg)) => args
+ case arg :: rest => tryArg(arg) ; loop(rest)
+ case Nil => Nil
+ }
+ val rest = loop(args)
+
+ // if no arg consumed, use defaults or error; otherwise, add what they added
+ if (rest.size == args.size) default match {
+ case Some(defaults) => defaults foreach add
+ case None => errorFn(s"'$name' requires an option. See '$name:help'.")
+ } else {
+ added foreach add
+ }
+
+ Some(rest)
+ }
+
+ def contains(choice: domain.Value): Boolean = value contains choice
+
+ def isHelping: Boolean = sawHelp
+
+ def help: String = {
+ val choiceLength = choices.map(_.length).max + 1
+ val formatStr = s" %-${choiceLength}s %s"
+ choices.zipAll(descriptions, "", "").map {
+ case (arg, descr) => formatStr.format(arg, descr)
+ } mkString (f"$descr%n", f"%n", "")
+ }
+
+ def clear(): Unit = {
+ v = domain.ValueSet.empty
+ yeas = domain.ValueSet.empty
+ nays = domain.ValueSet.empty
+ sawAll = false
+ sawHelp = false
+ }
+ def unparse: List[String] = value.toList map (s => s"$name:$s")
+ def contains(s: String) = domain.values.find(_.toString == s).exists(value.contains)
+ }
+
/** A setting that accumulates all strings supplied to it,
- * until it encounters one starting with a '-'. */
+ * until it encounters one starting with a '-'.
+ */
class MultiStringSetting private[nsc](
name: String,
val arg: String,
@@ -557,18 +772,23 @@ class MutableSettings(val errorFn: String => Unit)
extends Setting(name, descr) with Clearable {
type T = List[String]
protected var v: T = Nil
- def appendToValue(str: String) { value ++= List(str) }
-
- def tryToSet(args: List[String]) = {
- val (strings, rest) = args span (x => !x.startsWith("-"))
- strings foreach appendToValue
+ def appendToValue(str: String) = value ++= List(str)
- Some(rest)
+ // try to set. halting means halt at first non-arg
+ protected def tryToSetArgs(args: List[String], halting: Boolean) = {
+ def loop(args: List[String]): List[String] = args match {
+ case arg :: rest => if (halting && (arg startsWith "-")) args else { appendToValue(arg) ; loop(rest) }
+ case Nil => Nil
+ }
+ Some(loop(args))
}
- override def tryToSetColon(args: List[String]) = tryToSet(args)
+ def tryToSet(args: List[String]) = tryToSetArgs(args, halting = true)
+ override def tryToSetColon(args: List[String]) = tryToSetArgs(args, halting = false)
override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
- def clear(): Unit = (v = Nil)
+
+ def clear(): Unit = (v = Nil)
def unparse: List[String] = value map (name + ":" + _)
+ def contains(s: String) = value contains s
withHelpSyntax(name + ":<" + arg + ">")
}
@@ -587,10 +807,8 @@ class MutableSettings(val errorFn: String => Unit)
protected var v: T = default
def indexOfChoice: Int = choices indexOf value
- private def usageErrorMessage = {
- "Usage: %s:<%s>\n where <%s> choices are %s (default: %s)\n".format(
- name, helpArg, helpArg, choices mkString ", ", default)
- }
+ private def usageErrorMessage = f"Usage: $name:<$helpArg>%n where <$helpArg> choices are ${choices mkString ", "} (default: $default)%n"
+
def tryToSet(args: List[String]) = errorAndValue(usageErrorMessage, None)
override def tryToSetColon(args: List[String]) = args match {
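The yeas/nays/wildcard bookkeeping in MultiChoiceSetting above is easier to see outside the diff. The following standalone sketch reproduces the resolution rule (explicit enables always win, `_` is a weak enable-all that explicit disables can override) without touching the compiler's classes or the expansion of grouped options:

    // Standalone sketch of the MultiChoiceSetting resolution rule:
    //   - an explicit positive ("a") is always kept,
    //   - "_" weakly enables every choice,
    //   - an explicit negative ("-a") beats the weak enables only,
    // mirroring `value = (yeas filter simple) | (weakYeas &~ nays)` above,
    // minus the `expandsTo` handling.
    object MultiChoiceSketch {
      def resolve(choices: Set[String], args: List[String]): Set[String] = {
        val yeas   = args.filter(choices).toSet
        val nays   = args.collect { case a if a.startsWith("-") && choices(a.tail) => a.tail }.toSet
        val sawAll = args contains "_"
        val weak   = if (sawAll) choices else Set.empty[String]
        yeas | (weak -- nays)
      }

      def main(args: Array[String]): Unit = {
        val domain = Set("unreachable-code", "simplify-jumps", "compact-locals")
        println(resolve(domain, List("_", "-simplify-jumps")))              // everything except simplify-jumps
        println(resolve(domain, List("simplify-jumps", "-simplify-jumps"))) // explicitly enabled, so it stays on
      }
    }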
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index a643a08614..630276e412 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -42,10 +42,13 @@ trait ScalaSettings extends AbsScalaSettings
def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
/** If any of these settings is enabled, the compiler should print a message and exit. */
- def infoSettings = List[Setting](help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph)
+ def infoSettings = List[Setting](version, help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph)
+
+ /** Any -multichoice:help? Nicer if any option could report that it had help to offer. */
+ private def multihelp = allSettings exists { case s: MultiChoiceSetting[_] => s.isHelping case _ => false }
/** Is an info setting set? */
- def isInfo = infoSettings exists (_.isSetByUser)
+ def isInfo = (infoSettings exists (_.isSetByUser)) || multihelp
/** Disable a setting */
def disable(s: Setting) = allSettings -= s
@@ -62,14 +65,35 @@ trait ScalaSettings extends AbsScalaSettings
/*val argfiles = */ BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
- val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
- val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
+ val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
+
+ // Would be nice to build this dynamically from scala.languageFeature.
+ // The two requirements: delay error checking until you have symbols, and let compiler command build option-specific help.
+ object languageFeatures extends MultiChoiceEnumeration {
+ val dynamics = Choice("dynamics", "Allow direct or indirect subclasses of scala.Dynamic")
+ val postfixOps = Choice("postfixOps", "Allow postfix operator notation, such as `1 to 10 toList'")
+ val reflectiveCalls = Choice("reflectiveCalls", "Allow reflective access to members of structural types")
+ val implicitConversions = Choice("implicitConversions", "Allow definition of implicit functions called views")
+ val higherKinds = Choice("higherKinds", "Allow higher-kinded types")
+ val existentials = Choice("existentials", "Existential types (besides wildcard types) can be written and inferred")
+ val macros = Choice("experimental.macros", "Allow macro definition (besides implementation and application)")
+ }
+ val language = {
+ val description = "Enable or disable language features"
+ MultiChoiceSetting(
+ name = "-language",
+ helpArg = "feature",
+ descr = description,
+ domain = languageFeatures
+ )
+ }
/*
* The previous "-source" option is intended to be used mainly
 * through this helper.
*/
- lazy val isScala211: Boolean = (source.value >= ScalaVersion("2.11.0"))
+ def isScala211: Boolean = source.value >= ScalaVersion("2.11.0")
+ def isScala212: Boolean = source.value >= ScalaVersion("2.12.0")
/**
* -X "Advanced" settings
@@ -88,7 +112,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
+ val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", initial = NoScalaVersion, default = Some(AnyScalaVersion))
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -110,11 +134,23 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types")
- val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", ScalaVersion("2.11")) withPostSetHook ( _ => isScala211)
+ val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", initial = ScalaVersion("2.11"))
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
+ // XML parsing options
+ object XxmlSettings extends MultiChoiceEnumeration {
+ val coalescing = Choice("coalescing", "Convert PCData to Text and coalesce sibling nodes")
+ def isCoalescing = (Xxml contains coalescing) || (!isScala212 && !Xxml.isSetByUser)
+ }
+ val Xxml = MultiChoiceSetting(
+ name = "-Xxml",
+ helpArg = "property",
+ descr = "Configure XML parsing",
+ domain = XxmlSettings
+ )
+
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
@@ -144,7 +180,7 @@ trait ScalaSettings extends AbsScalaSettings
val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.")
- val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)")
+ val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)")
val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
val log = PhasesSetting ("-Ylog", "Log operations during")
val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
@@ -165,7 +201,6 @@ trait ScalaSettings extends AbsScalaSettings
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
@@ -176,10 +211,12 @@ trait ScalaSettings extends AbsScalaSettings
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects")
val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
- val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
+ val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
- val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
+ val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive)
+ val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.")
+
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.")
@@ -188,8 +225,100 @@ trait ScalaSettings extends AbsScalaSettings
// the current standard is "inline" but we are moving towards "method"
val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
+ val YskipInlineInfoAttribute = BooleanSetting("-Yskip-inline-info-attribute", "Do not add the ScalaInlineInfo attribute to classfiles generated by -Ybackend:GenASM")
+
+ object YoptChoices extends MultiChoiceEnumeration {
+ val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, debug information of eliminated variables.")
+ val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.")
+ val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
+ val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
+ val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
+ val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled")
+ val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath")
+
+ val lNone = Choice("l:none", "Don't enable any optimizations.")
+
+ private val defaultChoices = List(unreachableCode)
+ val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
+
+ private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals)
+ val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
+
+ private val projectChoices = List(lMethod, inlineProject)
+ val lProject = Choice("l:project", "Enable cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices)
+
+ private val classpathChoices = List(lProject, inlineGlobal)
+ val lClasspath = Choice("l:classpath", "Enable cross-method optimizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices)
+ }
+
+ val Yopt = MultiChoiceSetting(
+ name = "-Yopt",
+ helpArg = "optimization",
+ descr = "Enable optimizations",
+ domain = YoptChoices)
+
+ def YoptNone = Yopt.isSetByUser && Yopt.value.isEmpty
+ def YoptUnreachableCode = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode)
+ def YoptSimplifyJumps = Yopt.contains(YoptChoices.simplifyJumps)
+ def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
+ def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
+ def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
+
+ def YoptInlineProject = Yopt.contains(YoptChoices.inlineProject)
+ def YoptInlineGlobal = Yopt.contains(YoptChoices.inlineGlobal)
+ def YoptInlinerEnabled = YoptInlineProject || YoptInlineGlobal
+
+ val YoptInlineHeuristics = ChoiceSetting(
+ name = "-Yopt-inline-heuristics",
+ helpArg = "strategy",
+ descr = "Set the heuristics for inlining decisions.",
+ choices = List("at-inline-annotated", "everything"),
+ default = "at-inline-annotated")
+
+ object YoptWarningsChoices extends MultiChoiceEnumeration {
+ val none = Choice("none" , "No optimizer warnings.")
+ val atInlineFailedSummary = Choice("at-inline-failed-summary" , "One-line summary if there were @inline method calls that could not be inlined.")
+ val atInlineFailed = Choice("at-inline-failed" , "A detailed warning for each @inline method call that could not be inlined.")
+ val noInlineMixed = Choice("no-inline-mixed" , "In mixed compilation, warn at callsites of methods defined in java sources (the inlining decision cannot be made without bytecode).")
+ val noInlineMissingBytecode = Choice("no-inline-missing-bytecode" , "Warn if an inlining decision cannot be made because the bytecode of a class or member cannot be found on the compilation classpath.")
+ val noInlineMissingScalaInlineInfoAttr = Choice("no-inline-missing-attribute", "Warn if an inlining decision cannot be made because a Scala classfile does not have a ScalaInlineInfo attribute.")
+ }
+
+ val YoptWarnings = MultiChoiceSetting(
+ name = "-Yopt-warnings",
+ helpArg = "warning",
+ descr = "Enable optimizer warnings",
+ domain = YoptWarningsChoices,
+ default = Some(List(YoptWarningsChoices.atInlineFailed.name))) withPostSetHook (self => {
+ if (self.value subsetOf Set(YoptWarningsChoices.none, YoptWarningsChoices.atInlineFailedSummary)) YinlinerWarnings.value = false
+ else YinlinerWarnings.value = true
+ })
+
+ def YoptWarningEmitAtInlineFailed =
+ !YoptWarnings.isSetByUser ||
+ YoptWarnings.contains(YoptWarningsChoices.atInlineFailedSummary) ||
+ YoptWarnings.contains(YoptWarningsChoices.atInlineFailed)
+
+ def YoptWarningNoInlineMixed = YoptWarnings.contains(YoptWarningsChoices.noInlineMixed)
+ def YoptWarningNoInlineMissingBytecode = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingBytecode)
+ def YoptWarningNoInlineMissingScalaInlineInfoAttr = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingScalaInlineInfoAttr)
+
private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
+ object YstatisticsPhases extends MultiChoiceEnumeration { val parser, typer, patmat, erasure, cleanup, jvm = Value }
+ val Ystatistics = {
+ val description = "Print compiler statistics for specific phases"
+ MultiChoiceSetting(
+ name = "-Ystatistics",
+ helpArg = "phase",
+ descr = description,
+ domain = YstatisticsPhases,
+ default = Some(List("_"))
+ ) withPostSetHook { _ => scala.reflect.internal.util.Statistics.enabled = true }
+ }
+
+ def YstatisticsEnabled = Ystatistics.value.nonEmpty
+
/** Area-specific debug output.
*/
val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
@@ -201,6 +330,8 @@ trait ScalaSettings extends AbsScalaSettings
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+ val YpatmatExhaustdepth = IntSetting("-Ypatmat-exhaust-depth", "off", 20, Some((10, Int.MaxValue)),
+ str => Some(if(str.equalsIgnoreCase("off")) Int.MaxValue else str.toInt))
val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.")
// TODO 2.12 Remove
@@ -242,12 +373,7 @@ trait ScalaSettings extends AbsScalaSettings
/** Test whether this is scaladoc we're looking at */
def isScaladoc = false
- /**
- * Helper utilities for use by checkConflictingSettings()
- */
- def isBCodeActive = !isICodeAskedFor
- def isBCodeAskedFor = (Ybackend.value != "GenASM")
- def isICodeAskedFor = ((Ybackend.value == "GenASM") || optimiseSettings.exists(_.value) || writeICode.isSetByUser)
+ def isBCodeActive = Ybackend.value == "GenBCode"
object MacroExpand {
val None = "none"
@@ -255,3 +381,8 @@ trait ScalaSettings extends AbsScalaSettings
val Discard = "discard"
}
}
+
+object ClassPathRepresentationType {
+ val Flat = "flat"
+ val Recursive = "recursive"
+}
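One consequence of the ScalaVersionSetting change worth spelling out: `-Xmigration` now distinguishes "flag absent" (initial = NoScalaVersion, no migration warnings) from "given without an argument" (default = AnyScalaVersion, warn regardless of version) and "given with a version". A hedged sketch of that three-way decision, using stand-in values rather than the compiler's ScalaVersion types:

    // Sketch of the -Xmigration resolution after this change; the case values
    // are stand-ins for NoScalaVersion / AnyScalaVersion / a parsed version.
    object MigrationFlagSketch {
      sealed trait Version
      case object NoVersion           extends Version // like NoScalaVersion: no migration warnings
      case object AnyVersion          extends Version // like AnyScalaVersion: warn for every change
      case class  Specific(v: String) extends Version // like ScalaVersion("2.10")

      // None               -> flag absent, use `initial`
      // Some(None)         -> bare -Xmigration, use `default`
      // Some(Some("2.10")) -> -Xmigration:2.10
      def migrationValue(flag: Option[Option[String]]): Version = flag match {
        case None          => NoVersion
        case Some(None)    => AnyVersion
        case Some(Some(v)) => Specific(v)
      }
    }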
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index 4f45043c5e..43bdad5882 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -34,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
* to segregate builds
*/
case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
- def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+ def unparse = s"${major}.${minor}.${rev}${build.unparse}"
def compare(that: ScalaVersion): Int = that match {
case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
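The one-character change to `unparse` above drops a stray dot before the build suffix. Assuming the build's own `unparse` already starts with its separator (e.g. "-RC1", "-M2", or the empty string for a final release), the before/after rendering is:

    // Illustrative only: `build` stands for build.unparse and is assumed to
    // carry its own leading separator ("-RC1", "-M2", "" for final builds).
    object UnparseSketch {
      def before(major: Int, minor: Int, rev: Int, build: String) = s"${major}.${minor}.${rev}.${build}"
      def after(major: Int, minor: Int, rev: Int, build: String)  = s"${major}.${minor}.${rev}${build}"

      def main(args: Array[String]): Unit = {
        println(before(2, 11, 0, "-RC1")) // 2.11.0.-RC1  (spurious dot)
        println(after(2, 11, 0, "-RC1"))  // 2.11.0-RC1
      }
    }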
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index 37dfafb01c..d42c0dd730 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -39,7 +39,7 @@ trait StandardScalaSettings {
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
- List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6")
+ List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"), "jvm-1.6")
val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 1509ad13b8..41ce0837cb 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -17,41 +17,99 @@ trait Warnings {
// Warning semantics.
val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.")
- // These warnings are all so noisy as to be useless in their
- // present form, but have the potential to offer useful info.
- protected def allWarnings = lintWarnings ++ List(
- warnDeadCode,
- warnValueDiscard,
- warnNumericWiden
- )
- // These warnings should be pretty quiet unless you're doing
- // something inadvisable.
- protected def lintWarnings = List(
- warnInaccessible,
- warnNullaryOverride,
- warnNullaryUnit,
- warnAdaptedArgs,
- warnInferAny
- // warnUnused SI-7712, SI-7707 warnUnused not quite ready for prime-time
- // warnUnusedImport currently considered too noisy for general use
- )
+ // Non-lint warnings
- private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.")
+ val warnDeadCode = BooleanSetting("-Ywarn-dead-code", "Warn when dead code is identified.")
+ val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
+ val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.")
+ // SI-7712, SI-7707 warnUnused not quite ready for prime-time
+ val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are unused.")
+ // currently considered too noisy for general use
+ val warnUnusedImport = BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.")
+
+ // Experimental lint warnings that are turned off, but which could be turned on programmatically.
+ // They are not activated by -Xlint and can't be enabled on the command line because they are not
+ // created using the standard factory methods.
+
+ val warnValueOverrides = {
+ val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation.")
+ flag.value = false
+ flag
+ }
+
+ // Lint warnings
+
+ object LintWarnings extends MultiChoiceEnumeration {
+ class LintWarning(name: String, help: String, val yAliased: Boolean) extends Choice(name, help)
+ def LintWarning(name: String, help: String, yAliased: Boolean = false) = new LintWarning(name, help, yAliased)
+
+ val AdaptedArgs = LintWarning("adapted-args", "Warn if an argument list is modified to match the receiver.", true)
+ val NullaryUnit = LintWarning("nullary-unit", "Warn when nullary methods return Unit.", true)
+ val Inaccessible = LintWarning("inaccessible", "Warn about inaccessible types in method signatures.", true)
+ val NullaryOverride = LintWarning("nullary-override", "Warn when non-nullary `def f()' overrides nullary `def f'.", true)
+ val InferAny = LintWarning("infer-any", "Warn when a type argument is inferred to be `Any`.", true)
+ val MissingInterpolator = LintWarning("missing-interpolator", "A string literal appears to be missing an interpolator id.")
+ val DocDetached = LintWarning("doc-detached", "A ScalaDoc comment appears to be detached from its element.")
+ val PrivateShadow = LintWarning("private-shadow", "A private field (or class parameter) shadows a superclass field.")
+ val TypeParameterShadow = LintWarning("type-parameter-shadow", "A local type parameter shadows a type already in scope.")
+ val PolyImplicitOverload = LintWarning("poly-implicit-overload", "Parameterized overloaded implicit methods are not visible as view bounds.")
+ val OptionImplicit = LintWarning("option-implicit", "Option.apply used implicit view.")
+ val DelayedInitSelect = LintWarning("delayedinit-select", "Selecting member of DelayedInit.")
+ val ByNameRightAssociative = LintWarning("by-name-right-associative", "By-name parameter of right associative operator.")
+ val PackageObjectClasses = LintWarning("package-object-classes", "Class or object defined in package object.")
+ val UnsoundMatch = LintWarning("unsound-match", "Pattern match may not be typesafe.")
+ val StarsAlign = LintWarning("stars-align", "Pattern sequence wildcard must align with sequence component.")
- // Individual warnings.
- val warnAdaptedArgs = BooleanSetting ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.")
- val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.")
- val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
- val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.")
- val warnNullaryUnit = BooleanSetting ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.")
- val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.")
- val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
- val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.")
- val warnUnused = BooleanSetting ("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are are unused")
- val warnUnusedImport = BooleanSetting ("-Ywarn-unused-import", "Warn when imports are unused")
-
- // Warning groups.
- val lint = BooleanSetting("-Xlint", "Enable recommended additional warnings.") enablingIfNotSetByUser lintWarnings
+ def allLintWarnings = values.toSeq.asInstanceOf[Seq[LintWarning]]
+ }
+ import LintWarnings._
+
+ def warnAdaptedArgs = lint contains AdaptedArgs
+ def warnNullaryUnit = lint contains NullaryUnit
+ def warnInaccessible = lint contains Inaccessible
+ def warnNullaryOverride = lint contains NullaryOverride
+ def warnInferAny = lint contains InferAny
+ def warnMissingInterpolator = lint contains MissingInterpolator
+ def warnDocDetached = lint contains DocDetached
+ def warnPrivateShadow = lint contains PrivateShadow
+ def warnTypeParameterShadow = lint contains TypeParameterShadow
+ def warnPolyImplicitOverload = lint contains PolyImplicitOverload
+ def warnOptionImplicit = lint contains OptionImplicit
+ def warnDelayedInit = lint contains DelayedInitSelect
+ def warnByNameRightAssociative = lint contains ByNameRightAssociative
+ def warnPackageObjectClasses = lint contains PackageObjectClasses
+ def warnUnsoundMatch = lint contains UnsoundMatch
+ def warnStarsAlign = lint contains StarsAlign
+
+ // Lint warnings that are currently -Y, but deprecated in that usage
+ @deprecated("Use warnAdaptedArgs", since="2.11.2")
+ def YwarnAdaptedArgs = warnAdaptedArgs
+ @deprecated("Use warnNullaryUnit", since="2.11.2")
+ def YwarnNullaryUnit = warnNullaryUnit
+ @deprecated("Use warnInaccessible", since="2.11.2")
+ def YwarnInaccessible = warnInaccessible
+ @deprecated("Use warnNullaryOverride", since="2.11.2")
+ def YwarnNullaryOverride = warnNullaryOverride
+ @deprecated("Use warnInferAny", since="2.11.2")
+ def YwarnInferAny = warnInferAny
+
+ // The Xlint warning group.
+ val lint = MultiChoiceSetting(
+ name = "-Xlint",
+ helpArg = "warning",
+ descr = "Enable or disable specific warnings",
+ domain = LintWarnings,
+ default = Some(List("_")))
+
+ allLintWarnings foreach {
+ case w if w.yAliased =>
+ BooleanSetting(s"-Ywarn-${w.name}", {w.help}) withPostSetHook { s =>
+ lint.add(if (s) w.name else s"-${w.name}")
+ } // withDeprecationMessage s"Enable -Xlint:${c._1}"
+ case _ =>
+ }
+
+ private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.")
// Backward compatibility.
@deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 8b739958ff..8fd2ea45e4 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -6,13 +6,15 @@
package scala.tools.nsc
package symtab
+import classfile.ClassfileParser
import java.io.IOException
import scala.compat.Platform.currentTime
-import scala.tools.nsc.util.{ ClassPath }
-import classfile.ClassfileParser
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
import scala.reflect.io.{ AbstractFile, NoAbstractFile }
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.{ ClassPath, ClassRepresentation }
/** This class ...
*
@@ -86,8 +88,7 @@ abstract class SymbolLoaders {
// require yjp.jar at runtime. See SI-2089.
if (settings.termConflict.isDefault)
throw new TypeError(
- root+" contains object and package with same name: "+
- name+"\none of them needs to be removed from classpath"
+ s"$root contains object and package with same name: $name\none of them needs to be removed from classpath"
)
else if (settings.termConflict.value == "package") {
warning(
@@ -154,7 +155,7 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation[AbstractFile]) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
@@ -169,7 +170,7 @@ abstract class SymbolLoaders {
}
/** Create a new loader from a binary classfile.
- * This is intented as a hook allowing to support loading symbols from
+ * This is intended as a hook allowing to support loading symbols from
* files other than .class files.
*/
protected def newClassLoader(bin: AbstractFile): SymbolLoader =
@@ -240,24 +241,68 @@ abstract class SymbolLoaders {
}
}
+ private def phaseBeforeRefchecks: Phase = {
+ var resPhase = phase
+ while (resPhase.refChecked) resPhase = resPhase.prev
+ resPhase
+ }
+
/**
* Load contents of a package
*/
class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
- protected def description = "package loader "+ classpath.name
+ protected def description = s"package loader ${classpath.name}"
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
- root.setInfo(new PackageClassInfoType(newScope, root))
+ // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule`
+ // creates a module symbol and invokes `companionModule` while the `infos` field is
+ // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks.
+ enteringPhase(phaseBeforeRefchecks) {
+ root.setInfo(new PackageClassInfoType(newScope, root))
+
+ if (!root.isRoot) {
+ for (classRep <- classpath.classes) {
+ initializeFromClassPath(root, classRep)
+ }
+ }
+ if (!root.isEmptyPackageClass) {
+ for (pkg <- classpath.packages) {
+ enterPackage(root, pkg.name, new PackageLoader(pkg))
+ }
- if (!root.isRoot) {
- for (classRep <- classpath.classes if platform.doLoad(classRep)) {
- initializeFromClassPath(root, classRep)
+ openPackageModule(root)
}
}
+ }
+ }
+
+ /**
+ * Loads contents of a package
+ */
+ class PackageLoaderUsingFlatClassPath(packageName: String, classPath: FlatClassPath) extends SymbolLoader with FlagAgnosticCompleter {
+ protected def description = {
+ val shownPackageName = if (packageName == FlatClassPath.RootPackage) "<root package>" else packageName
+ s"package loader $shownPackageName"
+ }
+
+ protected def doComplete(root: Symbol) {
+ assert(root.isPackageClass, root)
+ root.setInfo(new PackageClassInfoType(newScope, root))
+
+ val classPathEntries = classPath.list(packageName)
+
+ if (!root.isRoot)
+ for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry)
if (!root.isEmptyPackageClass) {
- for (pkg <- classpath.packages) {
- enterPackage(root, pkg.name, new PackageLoader(pkg))
+ for (pkg <- classPathEntries.packages) {
+ val fullName = pkg.name
+
+ val name =
+ if (packageName == FlatClassPath.RootPackage) fullName
+ else fullName.substring(packageName.length + 1)
+ val packageLoader = new PackageLoaderUsingFlatClassPath(fullName, classPath)
+ enterPackage(root, name, packageLoader)
}
openPackageModule(root)
@@ -282,15 +327,26 @@ abstract class SymbolLoaders {
*
*/
private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type }
+
val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
- val classPath = platform.classPath
+
+ override def classFileLookup: util.ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Recursive => platform.classPath
+ case ClassPathRepresentationType.Flat => platform.flatClassPath
+ }
}
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
- classfileParser.parse(classfile, root)
+
+ // Running the classfile parser after refchecks can lead to "illegal class file dependency"
+ // errors. More concretely, the classfile parser calls "sym.companionModule", which calls
+ // "isModuleNotMethod" on the companion. After refchecks, this method forces the info, which
+ // may run the classfile parser. This produces the error.
+ enteringPhase(phaseBeforeRefchecks)(classfileParser.parse(classfile, root))
+
if (root.associatedFile eq NoAbstractFile) {
root match {
// In fact, the ModuleSymbol forwards its setter to the module class
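The classFileLookup override above dispatches on -YclasspathImpl to pick either the recursive or the flat class-file lookup behind a common interface. A standalone sketch of that dispatch, with illustrative types rather than the compiler's ClassFileLookup:

object LookupSelectorDemo {
  trait FileLookup { def findClassFile(internalName: String): Option[String] }

  object RecursiveLookup extends FileLookup {
    def findClassFile(internalName: String) = Some(s"recursive:$internalName.class")
  }
  object FlatLookup extends FileLookup {
    def findClassFile(internalName: String) = Some(s"flat:$internalName.class")
  }

  // choose an implementation from the setting's string value
  def lookupFor(classpathImpl: String): FileLookup = classpathImpl match {
    case "recursive" => RecursiveLookup
    case "flat"      => FlatLookup
    case other       => sys.error(s"unknown classpath representation: $other")
  }
}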
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index ea600bc586..518a402230 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -15,9 +15,9 @@ import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
+import scala.reflect.io.NoAbstractFile
import scala.tools.nsc.io.AbstractFile
-
-import util.ClassPath
+import scala.tools.nsc.util.ClassFileLookup
/** This abstract class implements a class file parser.
*
@@ -43,8 +43,8 @@ abstract class ClassfileParser {
*/
protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
- /** The compiler classpath. */
- def classPath: ClassPath[AbstractFile]
+ /** The class file lookup mechanism used by the compiler. */
+ def classFileLookup: ClassFileLookup[AbstractFile]
import definitions._
import scala.reflect.internal.ClassfileConstants._
@@ -53,6 +53,7 @@ abstract class ClassfileParser {
protected type ThisConstantPool <: ConstantPool
protected def newConstantPool: ThisConstantPool
+ protected var file: AbstractFile = _ // the class file
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
@@ -97,14 +98,14 @@ abstract class ClassfileParser {
private def handleMissing(e: MissingRequirementError) = {
if (settings.debug) e.printStackTrace
- throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
+ throw new IOException(s"Missing dependency '${e.req}', required by $file")
}
private def handleError(e: Exception) = {
if (settings.debug) e.printStackTrace()
- throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
+ throw new IOException(s"class file '$file' is broken\n(${e.getClass}/${e.getMessage})")
}
private def mismatchError(c: Symbol) = {
- throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
+ throw new IOException(s"class file '$file' has location not matching its contents: contains $c")
}
private def parseErrorHandler[T]: PartialFunction[Throwable, T] = {
@@ -131,6 +132,7 @@ abstract class ClassfileParser {
def parse(file: AbstractFile, root: Symbol): Unit = {
debuglog("[class] >> " + root.fullName)
+ this.file = file
pushBusy(root) {
this.in = new AbstractFileReader(file)
this.clazz = if (root.isModule) root.companionClass else root
@@ -352,13 +354,17 @@ abstract class ClassfileParser {
}
private def loadClassSymbol(name: Name): Symbol = {
- val file = classPath findSourceFile ("" +name) getOrElse {
+ val file = classFileLookup findClassFile name.toString getOrElse {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- warning(s"Class $name not found - continuing with a stub.")
- return NoSymbol.newClass(name.toTypeName)
+
+ // TODO More consistency with use of stub symbols in `Unpickler`
+ // - better owner than `NoSymbol`
+ // - remove eager warning
+ val msg = s"Class $name not found - continuing with a stub."
+ if (!settings.isScaladoc) warning(msg)
+ return NoSymbol.newStubSymbol(name.toTypeName, msg)
}
val completer = new loaders.ClassfileLoader(file)
var owner: Symbol = rootMirror.RootClass
@@ -584,7 +590,7 @@ abstract class ClassfileParser {
info = MethodType(newParams, clazz.tpe)
}
- // Note: the info may be overrwritten later with a generic signature
+ // Note: the info may be overwritten later with a generic signature
// parsed from SignatureATTR
sym setInfo info
propagatePackageBoundary(jflags, sym)
@@ -765,7 +771,7 @@ abstract class ClassfileParser {
classTParams = tparams
val parents = new ListBuffer[Type]()
while (index < end) {
- parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
}
ClassInfoType(parents.toList, instanceScope, sym)
}
@@ -804,10 +810,10 @@ abstract class ClassfileParser {
val c = pool.getConstant(u2)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
- else debugwarn(s"failure to convert $c to $symtype")
+ else devWarning(s"failure to convert $c to $symtype")
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
- debugwarn(s"symbol ${sym.fullName} has pickled signature in attribute")
+ devWarning(s"symbol ${sym.fullName} has pickled signature in attribute")
unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
@@ -919,6 +925,7 @@ abstract class ClassfileParser {
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
+ // TODO SI-9296 duplicated code, refactor
/* Parse and return a single annotation. If it is malformed,
* return None.
*/
@@ -1019,11 +1026,18 @@ abstract class ClassfileParser {
val sflags = jflags.toScalaFlags
val owner = ownerForFlags(jflags)
val scope = getScope(jflags)
- val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
- val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
+ def newStub(name: Name) =
+ owner.newStubSymbol(name, s"Class file for ${entry.externalName} not found").setFlag(JAVA)
- innerModule.moduleClass setInfo loaders.moduleClassLoader
- List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
+ val (innerClass, innerModule) = if (file == NoAbstractFile) {
+ (newStub(name.toTypeName), newStub(name.toTermName))
+ } else {
+ val cls = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
+ val mod = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
+ mod.moduleClass setInfo loaders.moduleClassLoader
+ List(cls, mod.moduleClass) foreach (_.associatedFile = file)
+ (cls, mod)
+ }
scope enter innerClass
scope enter innerModule
@@ -1043,10 +1057,8 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = classPath.findSourceFile(entry.externalName.toString) getOrElse {
- throw new AssertionError(entry.externalName)
- }
- enterClassAndModule(entry, file)
+ val file = classFileLookup.findClassFile(entry.externalName.toString)
+ enterClassAndModule(entry, file.getOrElse(NoAbstractFile))
}
}
}
@@ -1100,7 +1112,7 @@ abstract class ClassfileParser {
def enclosing = if (jflags.isStatic) enclModule else enclClass
// The name of the outer class, without its trailing $ if it has one.
- private def strippedOuter = nme stripModuleSuffix outerName
+ private def strippedOuter = outerName.dropModule
private def isInner = innerClasses contains strippedOuter
private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
private def enclModule = enclClass.companionModule
@@ -1120,7 +1132,7 @@ abstract class ClassfileParser {
def add(entry: InnerClassEntry): Unit = {
inners get entry.externalName foreach (existing =>
- debugwarn(s"Overwriting inner class entry! Was $existing, now $entry")
+ devWarning(s"Overwriting inner class entry! Was $existing, now $entry")
)
inners(entry.externalName) = entry
}
@@ -1132,16 +1144,12 @@ abstract class ClassfileParser {
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
val enclosing = entry.enclosing
- def getMember = (
+ val member = (
if (enclosing == clazz) entry.scope lookup name
else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
)
- getMember
- /* There used to be an assertion that this result is not NoSymbol; changing it to an error
- * revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
- * in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
- * thought it wasn't triggering, I removed it entirely.
- */
+ def newStub = enclosing.newStubSymbol(name, s"Unable to locate class corresponding to inner class entry for $name in owner ${entry.outerName}")
+ member.orElse(newStub)
}
}
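Several of the changes above replace hard failures (assertions, bare NoSymbol results) with stub symbols that carry the diagnostic message instead of failing on the spot. A standalone sketch of that lookup-or-stub pattern, with an illustrative Sym type rather than compiler Symbols:

object StubFallbackDemo {
  // Illustrative stand-in for a compiler symbol; `missingMessage` records why
  // the symbol could not be resolved, for reporting later if it is ever used.
  final case class Sym(name: String, missingMessage: Option[String] = None) {
    def isStub: Boolean = missingMessage.isDefined
  }

  def lookupOrStub(scope: Map[String, Sym], name: String): Sym =
    scope.getOrElse(name,
      Sym(name, Some(s"Unable to locate class corresponding to inner class entry for $name")))

  def main(args: Array[String]): Unit = {
    val scope = Map("Outer$Inner" -> Sym("Outer$Inner"))
    println(lookupOrStub(scope, "Outer$Gone").isStub) // true: resolved to a stub, not an error
  }
}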
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 6ca2205881..ea46116976 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -74,7 +74,7 @@ abstract class ICodeReader extends ClassfileParser {
first != CONSTANT_METHODREF &&
first != CONSTANT_INTFMETHODREF) errorBadTag(start)
val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
- debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
+ debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.unexpandedName)
val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
@@ -130,7 +130,7 @@ abstract class ICodeReader extends ClassfileParser {
log("ICodeReader reading " + cls)
val name = cls.javaClassName
- classPath.findSourceFile(name) match {
+ classFileLookup.findClassFile(name) match {
case Some(classFile) => parse(classFile, cls)
case _ => MissingRequirementError.notFound("Could not find bytecode for " + cls)
}
@@ -780,32 +780,40 @@ abstract class ICodeReader extends ClassfileParser {
bb = otherBlock
// Console.println("\t> entering bb: " + bb)
}
- instr match {
- case LJUMP(target) =>
- otherBlock = blocks(target)
- bb.emitOnly(JUMP(otherBlock))
- case LCJUMP(success, failure, cond, kind) =>
- otherBlock = blocks(success)
- val failBlock = blocks(failure)
- bb.emitOnly(CJUMP(otherBlock, failBlock, cond, kind))
+ if (bb.closed) {
+ // the basic block is closed, i.e. the previous instruction was a jump, return or throw,
+ // but the next instruction is not a jump target. this means that the next instruction is
+ // dead code. we can therefore advance until the next jump target.
+ debuglog(s"ICode reader skipping dead instruction $instr in classfile $instanceCode")
+ } else {
+ instr match {
+ case LJUMP(target) =>
+ otherBlock = blocks(target)
+ bb.emitOnly(JUMP(otherBlock))
+
+ case LCJUMP(success, failure, cond, kind) =>
+ otherBlock = blocks(success)
+ val failBlock = blocks(failure)
+ bb.emitOnly(CJUMP(otherBlock, failBlock, cond, kind))
- case LCZJUMP(success, failure, cond, kind) =>
- otherBlock = blocks(success)
- val failBlock = blocks(failure)
- bb.emitOnly(CZJUMP(otherBlock, failBlock, cond, kind))
+ case LCZJUMP(success, failure, cond, kind) =>
+ otherBlock = blocks(success)
+ val failBlock = blocks(failure)
+ bb.emitOnly(CZJUMP(otherBlock, failBlock, cond, kind))
- case LSWITCH(tags, targets) =>
- bb.emitOnly(SWITCH(tags, targets map blocks))
+ case LSWITCH(tags, targets) =>
+ bb.emitOnly(SWITCH(tags, targets map blocks))
- case RETURN(_) =>
- bb emitOnly instr
+ case RETURN(_) =>
+ bb emitOnly instr
- case THROW(clasz) =>
- bb emitOnly instr
+ case THROW(clasz) =>
+ bb emitOnly instr
- case _ =>
- bb emit instr
+ case _ =>
+ bb emit instr
+ }
}
}
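The new guard above skips instructions that follow a block-closing instruction (jump, return, throw) until the next jump target, since such bytecode is unreachable. A standalone sketch of that skip loop over a simplified instruction stream:

object DeadCodeSkipDemo {
  // Drop instructions between a block-closing instruction and the next jump target.
  def dropDead(instrs: List[String], jumpTargets: Set[Int]): List[String] = {
    var closed = false
    val kept = List.newBuilder[String]
    for ((instr, pc) <- instrs.zipWithIndex) {
      if (jumpTargets(pc)) closed = false     // a jump target starts a new, reachable block
      if (!closed) {
        kept += instr
        if (instr == "return" || instr == "throw" || instr.startsWith("goto"))
          closed = true                       // everything until the next target is dead
      }
    }
    kept.result()
  }

  def main(args: Array[String]): Unit =
    // pc 2 ("nop") is dead; pc 3 is a jump target and is kept
    println(dropDead(List("load", "return", "nop", "load"), jumpTargets = Set(3)))
}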
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 592c5497b5..25e13a1314 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -69,7 +69,7 @@ abstract class Pickler extends SubComponent {
// OPT: do this only as a recovery after fatal error. Checking in advance was expensive.
if (t.isErroneous) {
if (settings.debug) e.printStackTrace()
- unit.error(t.pos, "erroneous or inaccessible type")
+ reporter.error(t.pos, "erroneous or inaccessible type")
return
}
}
@@ -186,7 +186,16 @@ abstract class Pickler extends SubComponent {
val (locals, globals) = sym.children partition (_.isLocalClass)
val children =
if (locals.isEmpty) globals
- else globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, List(sym.tpe), EmptyScope, pos = sym.pos)
+ else {
+ // The LOCAL_CHILD was introduced in 12a2b3b to fix Aladdin bug 1055. When a sealed
+ // class/trait has local subclasses, a single <local child> class symbol is added
+ // as pickled child (instead of a reference to the anonymous class; that was done
+ // initially, but seems not to work, as the bug shows).
+ // Adding the LOCAL_CHILD is necessary to retain exhaustivity warnings under separate
+ // compilation. See test neg/aladdin1055.
+ val parents = (if (sym.isTrait) List(definitions.ObjectTpe) else Nil) ::: List(sym.tpe)
+ globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, parents, EmptyScope, pos = sym.pos)
+ }
putChildren(sym, children.toList sortBy (_.sealedSortName))
}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 2b7c6cca2c..79776485de 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,6 +8,7 @@ package transform
import symtab._
import Flags._
+import scala.tools.nsc.util.ClassPath
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -54,7 +55,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
)
/** Does symbol need an implementation method? */
- private def needsImplMethod(sym: Symbol) = (
+ def needsImplMethod(sym: Symbol) = (
sym.isMethod
&& isInterfaceMember(sym)
&& (!sym.hasFlag(DEFERRED | SUPERACCESSOR) || (sym hasFlag lateDEFERRED))
@@ -67,25 +68,30 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
val implName = tpnme.implClassName(iface.name)
val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS
- val impl0 = (
+ val impl0 = {
if (!inClass) NoSymbol
- else iface.owner.info.decl(implName) match {
- case NoSymbol => NoSymbol
- case implSym =>
- // Unlink a pre-existing symbol only if the implementation class is
- // visible on the compilation classpath. In general this is true under
- // -optimise and not otherwise, but the classpath can use arbitrary
- // logic so the classpath must be queried.
- if (classPath.context.isValidName(implName + ".class")) {
- iface.owner.info.decls unlink implSym
- NoSymbol
- }
- else {
- log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
- implSym
- }
+ else {
+ val typeInfo = iface.owner.info
+ typeInfo.decl(implName) match {
+ case NoSymbol => NoSymbol
+ case implSym =>
+ // Unlink a pre-existing symbol only if the implementation class is
+ // visible on the compilation classpath. In general this is true under
+ // -optimise and not otherwise, but the classpath can use arbitrary
+ // logic so the classpath must be queried.
+ // TODO this is not taken into account by flat classpath yet
+ classPath match {
+ case cp: ClassPath[_] if !cp.context.isValidName(implName + ".class") =>
+ log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
+ implSym
+ case _ =>
+ typeInfo.decls unlink implSym
+ NoSymbol
+ }
+ }
}
- )
+ }
+
val impl = impl0 orElse {
val impl = iface.owner.newImplClass(implName, iface.pos, implFlags)
if (iface.thisSym != iface) {
@@ -201,7 +207,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
def transformMixinInfo(tp: Type): Type = tp match {
- case ClassInfoType(parents, decls, clazz) =>
+ case ClassInfoType(parents, decls, clazz) if clazz.isPackageClass || !clazz.isJavaDefined =>
if (clazz.needsImplClass)
implClass(clazz setFlag lateINTERFACE) // generate an impl class
@@ -345,6 +351,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
while (owner != sym && owner != impl) owner = owner.owner;
if (owner == impl) This(impl) setPos tree.pos
else tree
+ //TODO what about this commented out code?
/* !!!
case Super(qual, mix) =>
val mix1 = mix
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index f14fce5de9..c29826551b 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -76,7 +76,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
val qual0 = ad.qual
val params = ad.args
if (settings.logReflectiveCalls)
- unit.echo(ad.pos, "method invocation uses reflection")
+ reporter.echo(ad.pos, "method invocation uses reflection")
val typedPos = typedWithPos(ad.pos) _
@@ -360,13 +360,13 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
assert(params.length == mparams.length, ((params, mparams)))
(mparams, resType)
case tpe @ OverloadedType(pre, alts) =>
- unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ reporter.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
case mt @ MethodType(mparams, resType) :: Nil =>
- unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ reporter.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
(mparams, resType)
case _ =>
- unit.error(ad.pos, "Cannot resolve overload.")
+ reporter.error(ad.pos, "Cannot resolve overload.")
(Nil, NoType)
}
}
@@ -520,7 +520,9 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
* And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
* have little in common.
*/
- case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
+ case Apply(fn @ Select(qual, _), (arg @ Literal(Constant(symname: String))) :: Nil)
+ if treeInfo.isQualifierSafeToElide(qual) && fn.symbol == Symbol_apply && !currentClass.isTrait =>
+
def transformApply = {
// add the symbol name to a map if it's not there already
val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 391bce5abb..86685d46de 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -8,6 +8,7 @@ package transform
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.ListOfNil
import symtab.Flags._
/** This phase converts classes with parameters into Java-like classes with
@@ -54,7 +55,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
def check(tree: Tree) = {
for (t <- tree) t match {
case t: RefTree if uninitializedVals(t.symbol.accessedOrSelf) && t.qualifier.symbol == clazz =>
- unit.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}")
+ reporter.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}")
case _ =>
}
}
@@ -314,7 +315,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
}
def rewriteDelayedInit() {
- /* XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
+ /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
* but excluding it includes too much. The constructor sequence being mimicked
* needs to be reproduced with total fidelity.
*
@@ -535,7 +536,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* whether `sym` denotes a param-accessor (ie a field) that fulfills all of:
* (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and
* (b) isn't subject to specialization. We might be processing statements for:
- * (b.1) the constructur in the generic (super-)class; or
+ * (b.1) the constructor in the generic (super-)class; or
* (b.2) the constructor in the specialized (sub-)class.
* (c) isn't part of a DelayedInit subclass.
*/
@@ -685,7 +686,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
// mangling before we introduce more of it.
val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
if (conflict ne NoSymbol)
- unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
+ reporter.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
copyParam(acc, parameter(acc))
}
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 1468680fe0..92db57c533 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -9,34 +9,48 @@ import scala.reflect.internal.Symbols
import scala.collection.mutable.LinkedHashMap
/**
- * This transformer is responisble for turning lambdas into anonymous classes.
+ * This transformer is responsible for preparing lambdas for runtime, by either translating to anonymous classes
+ * or to a tree that will be converted to invokedynamic by the JVM 1.8+ backend.
+ *
* The main assumption it makes is that a lambda {args => body} has been turned into
* {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda.
* Currently Uncurry is responsible for that transformation.
*
- * From a lambda, Delambdafy will create
- * 1) a static forwarder at the top level of the class that contained the lambda
- * 2) a new top level class that
- a) has fields and a constructor taking the captured environment (including possbily the "this"
+ * From a lambda, Delambdafy will create:
+ *
+ * Under -target:jvm-1.7 and below:
+ *
+ * 1) a new top level class that
+ a) has fields and a constructor taking the captured environment (including possibly the "this"
* reference)
- * b) an apply method that calls the static forwarder
+ * b) an apply method that calls the target method
* c) if needed a bridge method for the apply method
- * 3) an instantiation of the newly created class which replaces the lambda
+ * 2) an instantiation of the newly created class which replaces the lambda
+ *
+ * Under -target:jvm-1.8 with GenBCode:
*
- * TODO the main work left to be done is to plug into specialization. Primarily that means choosing a
- * specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the
- * appropriately named applysp method
+ * 1) An application of the captured arguments to a fictional symbol representing the lambda factory.
+ * This will be translated by the backend into an invokedynamic using a bootstrap method in JDK8's `LambdaMetaFactory`.
+ * The captured arguments include `this` if `liftedBody` cannot be made STATIC.
*/
abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
import global._
import definitions._
- import CODE._
val analyzer: global.analyzer.type = global.analyzer
/** the following two members override abstract members in Transform */
val phaseName: String = "delambdafy"
+ override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
+ if (settings.Ydelambdafy.value == "method") new Phase(prev)
+ else new SkipPhase(prev)
+ }
+
+ class SkipPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+ def apply(unit: global.CompilationUnit): Unit = ()
+ }
+
protected def newTransformer(unit: CompilationUnit): Transformer =
new DelambdafyTransformer(unit)
@@ -68,36 +82,29 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
referrers
}
- val accessorMethods = mutable.ArrayBuffer[Tree]()
-
- // the result of the transformFunction method. A class definition for the lambda, an expression
- // insantiating the lambda class, and an accessor method for the lambda class to be able to
- // call the implementation
- case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree)
+ // the result of the transformFunction method.
+ sealed abstract class TransformedFunction
+ // A class definition for the lambda, an expression instantiating the lambda class
+ case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction
+ case class InvokeDynamicLambda(tree: Apply) extends TransformedFunction
// here's the main entry point of the transform
override def transform(tree: Tree): Tree = tree match {
// the main thing we care about is lambdas
case fun @ Function(_, _) =>
- // a lambda beccomes a new class, an instantiation expression, and an
- // accessor method
- val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun)
- // we'll add accessor methods to the current template later
- accessorMethods += accessorMethod
- val pkg = lambdaClassDef.symbol.owner
-
- // we'll add the lambda class to the package later
- lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
-
- super.transform(newExpr)
- // when we encounter a template (basically the thing that holds body of a class/trait)
- // we need to updated it to include newly created accesor methods after transforming it
- case Template(_, _, _) =>
- try {
- // during this call accessorMethods will be populated from the Function case
- val Template(parents, self, body) = super.transform(tree)
- Template(parents, self, body ++ accessorMethods)
- } finally accessorMethods.clear()
+ transformFunction(fun) match {
+ case DelambdafyAnonClass(lambdaClassDef, newExpr) =>
+ // a lambda becomes a new class, an instantiation expression
+ val pkg = lambdaClassDef.symbol.owner
+
+ // we'll add the lambda class to the package later
+ lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
+
+ super.transform(newExpr)
+ case InvokeDynamicLambda(apply) =>
+ // ... or an invokedynamic call
+ super.transform(apply)
+ }
case _ => super.transform(tree)
}
@@ -105,13 +112,14 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// after working on the entire compilation until we'll have a set of
// new class definitions to add to the top level
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- super.transformStats(stats, exprOwner) ++ lambdaClassDefs(exprOwner)
+ // Need to remove from the lambdaClassDefs map: there may be multiple PackageDefs for the same
+ // package when defining a package object. We only add the lambda class to one. See SI-9097.
+ super.transformStats(stats, exprOwner) ++ lambdaClassDefs.remove(exprOwner).getOrElse(Nil)
}
private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
- // turns a lambda into a new class def, a New expression instantiating that class, and an
- // accessor method fo the body of the lambda
+ // turns a lambda into a new class def, a New expression instantiating that class
private def transformFunction(originalFunction: Function): TransformedFunction = {
val functionTpe = originalFunction.tpe
val targs = functionTpe.typeArgs
@@ -122,46 +130,17 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// passed into the constructor of the anonymous function class
val captures = FreeVarTraverser.freeVarsOf(originalFunction)
- /**
- * Creates the apply method for the anonymous subclass of FunctionN
- */
- def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = {
- val target = targetMethod(fun)
- if (!thisProxy.exists) {
- target setFlag STATIC
- }
- val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef.apply)).toList
-
- val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString()), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC)
+ val target = targetMethod(originalFunction)
+ target.makeNotPrivate(target.owner)
+ if (!thisReferringMethods.contains(target))
+ target setFlag STATIC
- val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) }
-
- params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
- params foreach (_.symbol.owner = methSym)
-
- val methodType = MethodType(paramSyms, restpe)
- methSym setInfo methodType
-
- oldClass.info.decls enter methSym
-
- val body = localTyper.typed {
- val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target)
- val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident
- Apply(newTarget, newParams)
- } setPos fun.pos
- val methDef = DefDef(methSym, List(params), body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- // TODO probably don't need packedType
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
+ val isStatic = target.hasFlag(STATIC)
/**
* Creates the apply method for the anonymous subclass of FunctionN
*/
- def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = {
+ def createApplyMethod(newClass: Symbol, fun: Function, thisProxy: Symbol): DefDef = {
val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
val params = fun.vparams map (_.duplicate)
@@ -177,8 +156,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
newClass.info.decls enter methSym
val Apply(_, oldParams) = fun.body
+ val qual = if (thisProxy.exists)
+ Select(gen.mkAttributedThis(newClass), thisProxy)
+ else
+ gen.mkAttributedThis(oldClass) // sort of a lie, EmptyTree.<static method> would be more honest, but the backend chokes on that.
- val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams)
+ val body = localTyper typed Apply(Select(qual, target), oldParams)
body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
body changeOwner (fun.symbol -> methSym)
@@ -228,7 +211,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val abstractFunctionErasedType = AbstractFunctionClass(formals.length).tpe
// anonymous subclass of FunctionN with an apply method
- def makeAnonymousClass = {
+ def makeAnonymousClass: ClassDef = {
val parents = addSerializable(abstractFunctionErasedType)
val funOwner = originalFunction.symbol.owner
@@ -236,55 +219,59 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
// - make `anonClass.isAnonymousClass` true.
// - use `newAnonymousClassSymbol` or push the required variations into a similar factory method
// - reinstate the assertion in `Erasure.resolveAnonymousBridgeClash`
- val suffix = "$lambda$" + (
+ val suffix = nme.DELAMBDAFY_LAMBDA_CLASS_NAME + "$" + (
if (funOwner.isPrimaryConstructor) ""
- else "$" + funOwner.name
+ else "$" + funOwner.name + "$"
)
- val name = unit.freshTypeName(s"${oldClass.name.decode}$suffix")
+ val oldClassPart = oldClass.name.decode
+ // make sure the class name doesn't contain $anon, otherwise isAnonymousClass/Function may be true
+ val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon"))
- val anonClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+ val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
+ // make sure currentRun.compiles(lambdaClass) is true (AddInterfaces does the same for trait impl classes)
+ currentRun.symSource(lambdaClass) = funOwner.sourceFile
+ lambdaClass setInfo ClassInfoType(parents, newScope, lambdaClass)
+ assert(!lambdaClass.isAnonymousClass && !lambdaClass.isAnonymousFunction, "anonymous class name: "+ lambdaClass.name)
+ assert(lambdaClass.isDelambdafyFunction, "not lambda class name: " + lambdaClass.name)
val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol]
captures foreach {capture =>
- val sym = anonClass.newVariable(capture.name.toTermName, capture.pos, SYNTHETIC)
+ val sym = lambdaClass.newVariable(unit.freshTermName(capture.name.toString + "$"), capture.pos, SYNTHETIC)
sym setInfo capture.info
captureProxies2 += ((capture, sym))
}
- // the Optional proxy that will hold a reference to the 'this'
- // object used by the lambda, if any. NoSymbol if there is no this proxy
- val thisProxy = {
- val target = targetMethod(originalFunction)
- if (thisReferringMethods contains target) {
- val sym = anonClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
- sym.info = oldClass.tpe
- sym
- } else NoSymbol
- }
-
- val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, anonClass, originalFunction.symbol.pos, thisProxy)
+ // the Optional proxy that will hold a reference to the 'this'
+ // object used by the lambda, if any. NoSymbol if there is no this proxy
+ val thisProxy = {
+ if (isStatic)
+ NoSymbol
+ else {
+ val sym = lambdaClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
+ sym.setInfo(oldClass.tpe)
+ }
+ }
- val accessorMethod = createAccessorMethod(thisProxy, originalFunction)
+ val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, lambdaClass, originalFunction.symbol.pos, thisProxy)
- val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
+ val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
- val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
- anonClass.info.decls enter member
- ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos
- }
+ val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
+ lambdaClass.info.decls enter member
+ ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos
+ }
- // constructor
- val constr = createConstructor(anonClass, members)
+ // constructor
+ val constr = createConstructor(lambdaClass, members)
- // apply method with same arguments and return type as original lambda.
- val applyMethodDef = createApplyMethod(anonClass, decapturedFunction, accessorMethod, thisProxy)
+ // apply method with same arguments and return type as original lambda.
+ val applyMethodDef = createApplyMethod(lambdaClass, decapturedFunction, thisProxy)
- val bridgeMethod = createBridgeMethod(anonClass, originalFunction, applyMethodDef)
+ val bridgeMethod = createBridgeMethod(lambdaClass, originalFunction, applyMethodDef)
- def fulldef(sym: Symbol) =
- if (sym == NoSymbol) sym.toString
- else s"$sym: ${sym.tpe} in ${sym.owner}"
+ def fulldef(sym: Symbol) =
+ if (sym == NoSymbol) sym.toString
+ else s"$sym: ${sym.tpe} in ${sym.owner}"
bridgeMethod foreach (bm =>
// TODO SI-6260 maybe just create the apply method with the signature (Object => Object) in all cases
@@ -296,22 +283,39 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
// TODO if member fields are private this complains that they're not accessible
- (localTyper.typedPos(decapturedFunction.pos)(ClassDef(anonClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod)
+ localTyper.typedPos(decapturedFunction.pos)(ClassDef(lambdaClass, body)).asInstanceOf[ClassDef]
}
- val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass
-
- pkg.info.decls enter anonymousClassDef.symbol
-
- val thisArg = optionSymbol(thisProxy) map (_ => gen.mkAttributedThis(oldClass) setPos originalFunction.pos)
- val captureArgs = captures map (capture => Ident(capture) setPos originalFunction.pos)
-
- val newStat =
- Typed(New(anonymousClassDef.symbol, (thisArg.toList ++ captureArgs): _*), TypeTree(abstractFunctionErasedType))
-
- val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
+ val allCaptureArgs: List[Tree] = {
+ val thisArg = if (isStatic) Nil else (gen.mkAttributedThis(oldClass) setPos originalFunction.pos) :: Nil
+ val captureArgs = captures.iterator.map(capture => gen.mkAttributedRef(capture) setPos originalFunction.pos).toList
+ thisArg ::: captureArgs
+ }
- TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod)
+ val functionalInterface = java8CompatFunctionalInterface(target, originalFunction.tpe)
+ if (functionalInterface.exists) {
+ // Create a symbol representing a fictional lambda factory method that accepts the captured
+ // arguments and returns a Function.
+ val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT)
+ val argTypes: List[Type] = allCaptureArgs.map(_.tpe)
+ val params = msym.newSyntheticValueParams(argTypes)
+ msym.setInfo(MethodType(params, originalFunction.tpe))
+ val arity = originalFunction.vparams.length
+
+ // We then apply this symbol to the captures.
+ val apply = localTyper.typedPos(originalFunction.pos)(Apply(Ident(msym), allCaptureArgs)).asInstanceOf[Apply]
+
+ // The backend needs to know the target of the lambda and the functional interface in order
+ // to emit the invokedynamic instruction. We pass this information as tree attachment.
+ apply.updateAttachment(LambdaMetaFactoryCapable(target, arity, functionalInterface))
+ InvokeDynamicLambda(apply)
+ } else {
+ val anonymousClassDef = makeAnonymousClass
+ pkg.info.decls enter anonymousClassDef.symbol
+ val newStat = Typed(New(anonymousClassDef.symbol, allCaptureArgs: _*), TypeTree(abstractFunctionErasedType))
+ val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
+ DelambdafyAnonClass(anonymousClassDef, typedNewStat)
+ }
}
/**
@@ -422,7 +426,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
}
/**
- * Get the symbol of the target lifted lambad body method from a function. I.e. if
+ * Get the symbol of the target lifted lambda body method from a function. I.e. if
* the function is {args => anonfun(args)} then this method returns anonfun's symbol
*/
private def targetMethod(fun: Function): Symbol = fun match {
@@ -461,4 +465,38 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
super.traverse(tree)
}
}
+
+ final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol)
+
+ // The functional interface that can be used to adapt the lambda target method `target` to the
+ // given function type. Returns `NoSymbol` if the compiler settings are unsuitable, or `LambdaMetaFactory`
+ // would be unable to generate the correct implementation (e.g. functions referring to derived value classes)
+ private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): Symbol = {
+ val canUseLambdaMetafactory: Boolean = {
+ val hasValueClass = exitingErasure {
+ val methodType: Type = target.info
+ methodType.exists(_.isInstanceOf[ErasedValueType])
+ }
+ val isTarget18 = settings.target.value.contains("jvm-1.8")
+ settings.isBCodeActive && isTarget18 && !hasValueClass
+ }
+
+ def functionalInterface: Symbol = {
+ val sym = functionType.typeSymbol
+ val pack = currentRun.runDefinitions.Scala_Java8_CompatPackage
+ val name1 = specializeTypes.specializedFunctionName(sym, functionType.typeArgs)
+ val paramTps :+ restpe = functionType.typeArgs
+ val arity = paramTps.length
+ if (name1.toTypeName == sym.name) {
+ val returnUnit = restpe.typeSymbol == UnitClass
+ val functionInterfaceArray =
+ if (returnUnit) currentRun.runDefinitions.Scala_Java8_CompatPackage_JProcedure
+ else currentRun.runDefinitions.Scala_Java8_CompatPackage_JFunction
+ functionInterfaceArray.apply(arity)
+ } else {
+ pack.info.decl(name1.toTypeName.prepend("J"))
+ }
+ }
+ if (canUseLambdaMetafactory) functionalInterface else NoSymbol
+ }
}
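Under the jvm-1.8 path described in the header comment, the backend emits an invokedynamic whose bootstrap method is JDK 8's LambdaMetafactory; at link time that bootstrap spins up a class analogous to what the jvm-1.7 path builds at compile time. A small standalone sketch of what such a bootstrap resolves to, using only the plain JDK API (the implementation method here, Integer.parseInt, is just an illustrative stand-in for the lifted lambda body):

import java.lang.invoke.{CallSite, LambdaMetafactory, MethodHandles, MethodType}
import java.util.function.Function

object LambdaMetafactoryDemo {
  def main(args: Array[String]): Unit = {
    val lookup = MethodHandles.lookup()
    // The "lifted body": an existing static method with the desired signature.
    val impl = lookup.findStatic(classOf[Integer], "parseInt",
      MethodType.methodType(classOf[Int], classOf[String]))           // (String)int

    val site: CallSite = LambdaMetafactory.metafactory(
      lookup,
      "apply",                                                         // SAM method name
      MethodType.methodType(classOf[Function[_, _]]),                  // factory type: () -> Function
      MethodType.methodType(classOf[Object], classOf[Object]),         // erased SAM signature
      impl,                                                            // implementation handle
      MethodType.methodType(classOf[Integer], classOf[String]))        // instantiated signature

    val f = site.getTarget.invokeWithArguments().asInstanceOf[Function[String, Integer]]
    println(f.apply("42")) // 42
  }
}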
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index e036035397..9fdc3a9d72 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -98,7 +98,7 @@ abstract class Erasure extends AddInterfaces
val len = sig.length
val copy: Array[Char] = sig.toCharArray
var changed = false
- while (i < sig.length) {
+ while (i < len) {
val ch = copy(i)
if (ch == '.' && last != '>') {
copy(i) = '$'
@@ -185,6 +185,27 @@ abstract class Erasure extends AddInterfaces
private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+ /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
+ */
+ def minimizeParents(parents: List[Type]): List[Type] = if (parents.isEmpty) parents else {
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ var rest = parents.tail
+ var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head
+ while(rest.nonEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
+ if(!nonLeaf) {
+ leaves = leaves filterNot { t => isInterfaceOrTrait(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) }
+ leaves += candidate
+ }
+ rest = rest.tail
+ }
+ leaves.toList
+ }
+
+
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
@@ -192,16 +213,24 @@ abstract class Erasure extends AddInterfaces
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = {
- val ps = (
- if (isTraitSignature) {
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ // a signature should always start with a class
+ def ensureClassAsFirstParent(tps: List[Type]) = tps match {
+ case Nil => ObjectTpe :: Nil
+ case head :: tail if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps
+ case _ => tps
+ }
+
+ val minParents = minimizeParents(parents)
+ val validParents =
+ if (isTraitSignature)
// java is unthrilled about seeing interfaces inherit from classes
- val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
- // traits should always list Object.
- if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
- else ok
- }
- else parents
- )
+ minParents filter (p => isInterfaceOrTrait(p.typeSymbol))
+ else minParents
+
+ val ps = ensureClassAsFirstParent(validParents)
+
(ps map boxedSig).mkString
}
def boxedSig(tp: Type) = jsig(tp, primitiveOK = false)
@@ -403,14 +432,13 @@ abstract class Erasure extends AddInterfaces
* a name clash. The present method guards against these name clashes.
*
* @param member The original member
- * @param other The overidden symbol for which the bridge was generated
+ * @param other The overridden symbol for which the bridge was generated
* @param bridge The bridge
*/
def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = {
def fulldef(sym: Symbol) =
if (sym == NoSymbol) sym.toString
else s"$sym: ${sym.tpe} in ${sym.owner}"
- var noclash = true
val clashErrors = mutable.Buffer[(Position, String)]()
def clashError(what: String) = {
val pos = if (member.owner == root) member.pos else root.pos
@@ -468,8 +496,12 @@ abstract class Erasure extends AddInterfaces
if (!bridgeNeeded)
return
- val newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
- val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
+ var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ // If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we
+ // end up with two module symbols with the same name in the same scope, which is surprising
+ // when implementing later phases.
+ if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | lateMETHOD | STABLE)
+ val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
debuglog("generating bridge from %s (%s): %s to %s: %s".format(
other, flagsToString(newFlags),
@@ -488,7 +520,7 @@ abstract class Erasure extends AddInterfaces
|| (checkBridgeOverrides(member, other, bridge) match {
case Nil => true
case es if member.owner.isAnonymousClass => resolveAnonymousBridgeClash(member, bridge); true
- case es => for ((pos, msg) <- es) unit.error(pos, msg); false
+ case es => for ((pos, msg) <- es) reporter.error(pos, msg); false
})
)
@@ -724,7 +756,7 @@ abstract class Erasure extends AddInterfaces
)
val when = if (exitingRefchecks(lowType matches highType)) "" else " after erasure: " + exitingPostErasure(highType)
- unit.error(pos,
+ reporter.error(pos,
s"""|$what:
|${exitingRefchecks(highString)} and
|${exitingRefchecks(lowString)}
@@ -865,7 +897,7 @@ abstract class Erasure extends AddInterfaces
fn match {
case TypeApply(sel @ Select(qual, name), List(targ)) =>
if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe)
- unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
+ reporter.error(sel.pos, "isInstanceOf cannot test if value types are references.")
def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
Apply(
@@ -952,7 +984,7 @@ abstract class Erasure extends AddInterfaces
case nme.length => nme.array_length
case nme.update => nme.array_update
case nme.clone_ => nme.array_clone
- case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
+ case _ => reporter.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
}
gen.mkRuntimeCall(arrayMethodName, qual :: args)
}
@@ -1006,7 +1038,7 @@ abstract class Erasure extends AddInterfaces
// See SI-5568.
tree setSymbol Object_getClass
} else {
- debugwarn(s"The symbol '${fn.symbol}' was interecepted but didn't match any cases, that means the intercepted methods set doesn't match the code")
+ devWarning(s"The symbol '${fn.symbol}' was interecepted but didn't match any cases, that means the intercepted methods set doesn't match the code")
tree
}
} else qual match {
@@ -1050,20 +1082,18 @@ abstract class Erasure extends AddInterfaces
}
}
- def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
- if (!isAccessible(owner) && qual.tpe != null) {
+ def isJvmAccessible(sym: Symbol) = (sym.isClass && !sym.isJavaDefined) || localTyper.context.isAccessible(sym, sym.owner.thisType)
+ if (!isJvmAccessible(owner) && qual.tpe != null) {
qual match {
case Super(_, _) =>
- // Insert a cast here at your peril -- see SI-5162. Bail out if the target method is defined in
- // Java, otherwise, we'd get an IllegalAccessError at runtime. If the target method is defined in
- // Scala, however, we should have access.
- if (owner.isJavaDefined) unit.error(tree.pos, s"Unable to access ${tree.symbol.fullLocationString} with a super reference.")
+ // Insert a cast here at your peril -- see SI-5162.
+ reporter.error(tree.pos, s"Unable to access ${tree.symbol.fullLocationString} with a super reference.")
tree
case _ =>
// Todo: Figure out how qual.tpe could be null in the check above (it does appear in build where SwingWorker.this
// has a null type).
val qualSym = qual.tpe.widen.typeSymbol
- if (isAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) {
+ if (isJvmAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) {
// insert cast to prevent illegal access error (see #4283)
// util.trace("insert erasure cast ") (*/
treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name) //)
@@ -1125,7 +1155,7 @@ abstract class Erasure extends AddInterfaces
}
}
- /** The main transform function: Pretransfom the tree, and then
+ /** The main transform function: Pretransform the tree, and then
* re-type it at phase erasure.next.
*/
override def transform(tree: Tree): Tree = {
@@ -1135,7 +1165,7 @@ abstract class Erasure extends AddInterfaces
val tree2 = mixinTransformer.transform(tree1)
// debuglog("tree after addinterfaces: \n" + tree2)
- newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2)
+ newTyper(rootContextPostTyper(unit, tree)).typed(tree2)
}
}
}
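
For context on the bridge logic above, a minimal sketch (illustrative, not from this patch) of when erasure must emit a bridge: whenever an overriding member's erased signature differs from that of the member it overrides.

    trait Box[A] { def get: A }
    class IntBox extends Box[Int] {
      def get: Int = 42
      // After erasure, Box#get returns Object while IntBox#get returns Int, so the
      // compiler synthesizes a bridge roughly equivalent to:
      //   def get: Object = Int.box(this.get)
    }
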
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 0447e23e9e..540de2cfe1 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -207,7 +207,7 @@ abstract class ExplicitOuter extends InfoTransform
// class needs to have a common naming scheme, independently of whether
// the field was accessed from an inner class or not. See #2946
if (sym.owner.isTrait && sym.isLocalToThis &&
- (sym.getter(sym.owner.toInterface) == NoSymbol))
+ (sym.getterIn(sym.owner.toInterface) == NoSymbol))
sym.makeNotPrivate(sym.owner)
tp
}
@@ -441,7 +441,7 @@ abstract class ExplicitOuter extends InfoTransform
else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5)
case Select(qual, name) =>
- // make not private symbol acessed from inner classes, as well as
+ // make not private symbol accessed from inner classes, as well as
// symbols accessed from @inline methods
//
// See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass`
@@ -481,7 +481,7 @@ abstract class ExplicitOuter extends InfoTransform
// since we can't fix SI-4440 properly (we must drop the outer accessors of final classes when there's no immediate reference to them in sight)
// at least don't crash... this duplicates maybeOmittable from constructors
(acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
- unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
+ currentRun.reporting.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
} else {
// println("(base, acc)= "+(base, acc))
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 2235a93ca4..116047a2ad 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -127,7 +127,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
if (seen contains clazz)
- unit.error(pos, "value class may not unbox to itself")
+ reporter.error(pos, "value class may not unbox to itself")
else {
val unboxed = definitions.underlyingOfValueClass(clazz).typeSymbol
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
@@ -208,7 +208,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
def makeExtensionMethodSymbol = {
val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
- companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~LOCAL | FINAL)
setAnnotations origMeth.annotations
)
origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index c3fbfae322..fbb0307773 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -76,8 +76,20 @@ abstract class Flatten extends InfoTransform {
for (sym <- decls) {
if (sym.isTerm && !sym.isStaticModule) {
decls1 enter sym
- if (sym.isModule)
+ if (sym.isModule) {
+ // In theory, we could assert(sym.isMethod), because nested, non-static modules are
+ // transformed to methods (lateMETHOD flag added in RefChecks). But this requires
+ // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols
+ // too eagerly (SI-8907).
+
+ // Note that module classes are not entered into the 'decls' of the ClassInfoType
+ // of the outer class, only the module symbols are. So the current loop does
+ // not visit module classes. Therefore we set the LIFTED flag here for module
+ // classes.
+ // TODO: should we also set the LIFTED flag for static, nested module classes?
+ // currently they don't get the flag, even though they are lifted to the package
sym.moduleClass setFlag LIFTED
+ }
} else if (sym.isClass)
liftSymbol(sym)
}
@@ -154,7 +166,7 @@ abstract class Flatten extends InfoTransform {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val stats1 = super.transformStats(stats, exprOwner)
if (currentOwner.isPackageClass) {
- val lifted = liftedDefs(currentOwner).toList
+ val lifted = liftedDefs.remove(currentOwner).toList.flatten
stats1 ::: lifted
}
else stats1
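
For context on the LIFTED flag handling above, a sketch (illustrative, not from this patch) of what the flatten phase does: nested classes and module classes are lifted out of their enclosing class to the package level, under mangled names.

    class Outer {
      class Inner    // after flatten, emitted as the top-level class Outer$Inner
      object Nested  // the module's class is lifted too, hence the LIFTED flag set above
    }
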
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index e38c034f4d..d1be1558b9 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -250,21 +250,30 @@ abstract class LambdaLift extends InfoTransform {
debuglog("renaming in %s: %s => %s".format(sym.owner.fullLocationString, originalName, sym.name))
}
+ // make sure that the name doesn't make the symbol accidentally `isAnonymousClass` (et al.) by
+ // introducing `$anon` in its name. To be cautious, we don't make this change in the default
+ // backend under 2.11.x, so only in GenBCode.
+ def nonAnon(s: String) = if (settings.Ybackend.value == "GenBCode") nme.ensureNonAnon(s) else s
+
def newName(sym: Symbol): Name = {
val originalName = sym.name
def freshen(prefix: String): Name =
if (originalName.isTypeName) unit.freshTypeName(prefix)
else unit.freshTermName(prefix)
+ val join = nme.NAME_JOIN_STRING
if (sym.isAnonymousFunction && sym.owner.isMethod) {
- freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING)
+ freshen(sym.name + join + nonAnon(sym.owner.name.toString) + join)
} else {
+ val name = freshen(sym.name + join)
// SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
- // Generating a unique name, mangled with the enclosing class name, avoids a VerifyError
- // in the case that a sub-class happens to lifts out a method with the *same* name.
- val name = freshen("" + sym.name + nme.NAME_JOIN_STRING)
- if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass)
- else name
+ // Generating a unique name, mangled with the enclosing full class name (including
+ // package - subclass might have the same name), avoids a VerifyError in the case
+ // that a sub-class happens to lift out a method with the *same* name.
+ if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym))
+ newTermNameCached(nonAnon(sym.enclClass.fullName('$')) + nme.EXPAND_SEPARATOR_STRING + name)
+ else
+ name
}
}
@@ -339,7 +348,7 @@ abstract class LambdaLift extends InfoTransform {
if (clazz.isStaticOwner) clazz.fullLocationString
else s"the unconstructed `this` of ${clazz.fullLocationString}"
val msg = s"Implementation restriction: access of ${sym.fullLocationString} from ${currentClass.fullLocationString}, would require illegal premature access to $what"
- currentUnit.error(curTree.pos, msg)
+ reporter.error(curTree.pos, msg)
}
val qual =
if (clazz == currentClass) gen.mkAttributedThis(clazz)
@@ -367,7 +376,7 @@ abstract class LambdaLift extends InfoTransform {
private def addFreeArgs(pos: Position, sym: Symbol, args: List[Tree]) = {
free get sym match {
- case Some(fvs) => args ++ (fvs.toList map (fv => atPos(pos)(proxyRef(fv))))
+ case Some(fvs) => addFree(sym, free = fvs.toList map (fv => atPos(pos)(proxyRef(fv))), original = args)
case _ => args
}
}
@@ -379,9 +388,9 @@ abstract class LambdaLift extends InfoTransform {
case DefDef(_, _, _, vparams :: _, _, _) =>
val addParams = cloneSymbols(ps).map(_.setFlag(PARAM))
sym.updateInfo(
- lifted(MethodType(sym.info.params ::: addParams, sym.info.resultType)))
+ lifted(MethodType(addFree(sym, free = addParams, original = sym.info.params), sym.info.resultType)))
- copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
+ copyDefDef(tree)(vparamss = List(addFree(sym, free = freeParams, original = vparams)))
case ClassDef(_, _, _, _) =>
// SI-6231
// Disabled attempt to to add getters to freeParams
@@ -402,7 +411,7 @@ abstract class LambdaLift extends InfoTransform {
}
/* SI-6231: Something like this will be necessary to eliminate the implementation
- * restiction from paramGetter above:
+ * restriction from paramGetter above:
* We need to pass getters to the interface of an implementation class.
private def fixTraitGetters(lifted: List[Tree]): List[Tree] =
for (stat <- lifted) yield stat match {
@@ -449,6 +458,8 @@ abstract class LambdaLift extends InfoTransform {
if (sym.isClass) sym.owner = sym.owner.toInterface
if (sym.isMethod) sym setFlag LIFTED
liftedDefs(sym.owner) ::= tree
+ // TODO: this modifies the ClassInfoType of the enclosing class, which is associated with another phase (explicitouter).
+ // This breaks type history: in a phase travel to before lambda lift, the ClassInfoType will contain lifted classes.
sym.owner.info.decls enterUnique sym
debuglog("lifted: " + sym + " from " + oldOwner + " to " + sym.owner)
EmptyTree
@@ -537,12 +548,11 @@ abstract class LambdaLift extends InfoTransform {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
case ClassDef(_, _, _, _) =>
- val lifted = liftedDefs get stat.symbol match {
+ val lifted = liftedDefs remove stat.symbol match {
case Some(xs) => xs reverseMap addLifted
case _ => log("unexpectedly no lifted defs for " + stat.symbol) ; Nil
}
- try deriveClassDef(stat)(impl => deriveTemplate(impl)(_ ::: lifted))
- finally liftedDefs -= stat.symbol
+ deriveClassDef(stat)(impl => deriveTemplate(impl)(_ ::: lifted))
case DefDef(_, _, _, _, _, Block(Nil, expr)) if !stat.symbol.isConstructor =>
deriveDefDef(stat)(_ => expr)
@@ -561,4 +571,12 @@ abstract class LambdaLift extends InfoTransform {
}
} // class LambdaLifter
+ private def addFree[A](sym: Symbol, free: List[A], original: List[A]): List[A] = {
+ val prependFree = (
+ !sym.isConstructor // this condition is redundant for now. It will be needed if we remove the second condition in 2.12.x
+ && (settings.Ydelambdafy.value == "method" && sym.isDelambdafyTarget) // SI-8359 Makes the lambda body a viable as the target MethodHandle for a call to LambdaMetafactory
+ )
+ if (prependFree) free ::: original
+ else original ::: free
+ }
}
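
For context on the renaming and `addFree` changes above, a sketch (illustrative, not from this patch) of what lambdalift does: local methods are lifted to the enclosing class under fresh mangled names, and their free variables become extra parameters.

    class C {
      def outer(base: Int): List[Int] = {
        def add(x: Int) = x + base   // `base` is free in `add`
        List(1, 2, 3).map(add)
      }
      // conceptually, after lambdalift (names illustrative):
      //   def add$1(x: Int, base$1: Int): Int = x + base$1
      // and the call site passes `base` along explicitly.
    }
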
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index b71d14a04f..df622d4d1d 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -192,13 +192,15 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): Tree = {
+ // Q: is there a reason to first set the owner to `clazz` (by using clazz.newMethod), and then
+ // change it to lzyVal.owner very soon after? Could we just do lzyVal.owner.newMethod?
val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE)
defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
defSym.owner = lzyVal.owner
debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
if (bitmaps.contains(lzyVal))
bitmaps(lzyVal).map(_.owner = defSym)
- val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
+ val rhs: Tree = gen.mkSynchronizedCheck(clazz, cond, syncBody, stats).changeOwner(currentOwner -> defSym)
DefDef(defSym, addBitmapDefs(lzyVal, BLOCK(rhs, retVal)))
}
@@ -217,7 +219,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* l$
* } or
* <rhs> when the lazy value has type Unit (for which there is no field
- * to cache it's value.
+ * to cache its value.
*
* Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
* { if ((bitmap&n & MASK) == 0) this.l$compute()
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 673bc04bd9..11f9483f77 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -26,7 +26,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
- /** Map a lazy, mixedin field accessor to it's trait member accessor */
+ /** Map a lazy, mixedin field accessor to its trait member accessor */
private val initializer = perRunCaches.newMap[Symbol, Symbol]()
// --------- helper functions -----------------------------------------------
@@ -232,13 +232,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
for (member <- impl.info.decls) {
if (!member.isMethod && !member.isModule && !member.isModuleVar) {
assert(member.isTerm && !member.isDeferred, member)
- if (member.getter(impl).isPrivate) {
+ if (member.getterIn(impl).isPrivate) {
member.makeNotPrivate(clazz) // this will also make getter&setter not private
}
- val getter = member.getter(clazz)
+ val getter = member.getterIn(clazz)
if (getter == NoSymbol) addMember(clazz, newGetter(member))
if (!member.tpe.isInstanceOf[ConstantType] && !member.isLazy) {
- val setter = member.setter(clazz)
+ val setter = member.setterIn(clazz)
if (setter == NoSymbol) addMember(clazz, newSetter(member))
}
}
@@ -267,7 +267,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/* Mix in members of implementation class mixinClass into class clazz */
def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
- if (!mixinClass.isImplClass) debugwarn ("Impl class flag is not set " +
+ if (!mixinClass.isImplClass) devWarning ("Impl class flag is not set " +
((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
for (member <- mixinClass.info.decls ; if isForwarded(member)) {
@@ -336,7 +336,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
rebindSuper(clazz, mixinMember.alias, mixinClass) match {
case NoSymbol =>
- unit.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format(
+ reporter.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format(
mixinMember.alias, mixinClass))
case alias1 =>
superAccessor.asInstanceOf[TermSymbol] setAlias alias1
@@ -391,7 +391,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else {
sourceModule setPos sym.pos
if (sourceModule.flags != MODULE) {
- log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString))
+ log(s"!!! Directly setting sourceModule flags for $sourceModule from ${sourceModule.flagString} to MODULE")
sourceModule.flags = MODULE
}
}
@@ -872,7 +872,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
- val sym = fieldSym.getter(fieldSym.owner)
+ val sym = fieldSym.getterIn(fieldSym.owner)
val bitmapSym = bitmapFor(clazz, offset, sym)
val kind = bitmapKind(sym)
val mask = maskForOffset(offset, sym, kind)
@@ -886,7 +886,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
/* Complete lazy field accessors. Applies only to classes,
- * for it's own (non inherited) lazy fields. If 'checkinit'
+ * for its own (non inherited) lazy fields. If 'checkinit'
* is enabled, getters that check for the initialized bit are
* generated, and the class constructor is changed to set the
* initialized bits.
@@ -921,7 +921,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
deriveDefDef(stat)(addInitBits(clazz, _))
}
else if (settings.checkInit && !clazz.isTrait && sym.isSetter) {
- val getter = sym.getter(clazz)
+ val getter = sym.getterIn(clazz)
if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
else stat
@@ -1004,28 +1004,60 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
buildBitmapOffsets()
var stats1 = addCheckedGetters(clazz, stats)
- def accessedReference(sym: Symbol) = sym.tpe match {
- case MethodType(Nil, ConstantType(c)) => Literal(c)
- case _ =>
- // if it is a mixed-in lazy value, complete the accessor
- if (sym.isLazy && sym.isGetter) {
- val isUnit = sym.tpe.resultType.typeSymbol == UnitClass
- val initCall = Apply(staticRef(initializer(sym)), gen.mkAttributedThis(clazz) :: Nil)
- val selection = Select(This(clazz), sym.accessed)
- val init = if (isUnit) initCall else atPos(sym.pos)(Assign(selection, initCall))
- val returns = if (isUnit) UNIT else selection
-
- mkLazyDef(clazz, sym, List(init), returns, fieldOffset(sym))
- }
- else sym.getter(sym.owner).tpe.resultType.typeSymbol match {
- case UnitClass => UNIT
- case _ => Select(This(clazz), sym.accessed)
- }
+ def getterBody(getter: Symbol) = {
+ assert(getter.isGetter)
+ val readValue = getter.tpe match {
+ // A field "final val f = const" in a trait generates a getter with a ConstantType.
+ case MethodType(Nil, ConstantType(c)) =>
+ Literal(c)
+ case _ =>
+ // if it is a mixed-in lazy value, complete the accessor
+ if (getter.isLazy) {
+ val isUnit = isUnitGetter(getter)
+ val initCall = Apply(staticRef(initializer(getter)), gen.mkAttributedThis(clazz) :: Nil)
+ val selection = fieldAccess(getter)
+ val init = if (isUnit) initCall else atPos(getter.pos)(Assign(selection, initCall))
+ val returns = if (isUnit) UNIT else selection
+ mkLazyDef(clazz, getter, List(init), returns, fieldOffset(getter))
+ }
+ // For a field of type Unit in a trait, no actual field is generated when being mixed in.
+ else if (isUnitGetter(getter)) UNIT
+ else fieldAccess(getter)
+ }
+ if (!needsInitFlag(getter)) readValue
+ else mkCheckedAccessor(clazz, readValue, fieldOffset(getter), getter.pos, getter)
+ }
+
+ def setterBody(setter: Symbol) = {
+ val getter = setter.getterIn(clazz)
+
+ // A trait with a field of type Unit creates a trait setter (invoked by the
+ // implementation class constructor), like for any other trait field.
+ // However, no actual field is created in the class that mixes in the trait.
+ // Therefore the setter does nothing (except setting the -Xcheckinit flag).
+
+ val setInitFlag =
+ if (!needsInitFlag(getter)) Nil
+ else List(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))
+
+ val fieldInitializer =
+ if (isUnitGetter(getter)) Nil
+ else List(Assign(fieldAccess(setter), Ident(setter.firstParam)))
+
+ (fieldInitializer ::: setInitFlag) match {
+ case Nil => UNIT
+ // If there's only one statement, the Block factory does not actually create a Block.
+ case stats => Block(stats: _*)
+ }
}
+
+ def isUnitGetter(getter: Symbol) = getter.tpe.resultType.typeSymbol == UnitClass
+ def fieldAccess(accessor: Symbol) = Select(This(clazz), accessor.accessed)
+
def isOverriddenSetter(sym: Symbol) =
nme.isTraitSetterName(sym.name) && {
val other = sym.nextOverriddenSymbol
- isOverriddenAccessor(other.getter(other.owner), clazz.info.baseClasses)
+ isOverriddenAccessor(other.getterIn(other.owner), clazz.info.baseClasses)
}
// for all symbols `sym` in the class definition, which are mixed in:
@@ -1036,27 +1068,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
// if class is not a trait add accessor definitions
else if (!clazz.isTrait) {
- // This needs to be a def to avoid sharing trees
- def accessedRef = accessedReference(sym)
if (isConcreteAccessor(sym)) {
// add accessor definitions
addDefDef(sym, {
if (sym.isSetter) {
+ // If this is a setter of a mixed-in field which is overridden by another mixin,
+ // the trait setter of the overridden one does not need to do anything - the
+ // trait setter of the overriding field will initialize the field.
if (isOverriddenSetter(sym)) UNIT
- else accessedRef match {
- case ref @ Literal(_) => ref
- case ref =>
- val init = Assign(ref, Ident(sym.firstParam))
- val getter = sym.getter(clazz)
-
- if (!needsInitFlag(getter)) init
- else Block(init, mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)), UNIT)
- }
+ else setterBody(sym)
}
- else if (needsInitFlag(sym))
- mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym)
- else
- accessedRef
+ else getterBody(sym)
})
}
else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
@@ -1210,7 +1232,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
- val ifaceGetter = sym getter iface
+ val ifaceGetter = sym getterIn iface
if (ifaceGetter == NoSymbol) abort("No getter for " + sym + " in " + iface)
else typedPos(tree.pos)((qual DOT ifaceGetter)())
@@ -1218,7 +1240,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
// assign to fields in some implementation class via an abstract
// setter in the interface.
- def setter = lhs.symbol.setter(toInterface(lhs.symbol.owner.tpe).typeSymbol) setPos lhs.pos
+ def setter = lhs.symbol.setterIn(toInterface(lhs.symbol.owner.tpe).typeSymbol) setPos lhs.pos
typedPos(tree.pos)((qual DOT setter)(rhs))
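
For context on `getterBody`/`setterBody` above, a sketch (illustrative, not from this patch) of the accessors mixin synthesizes when a trait field is inherited by a class:

    trait HasCount { var count: Int = 0 }
    class Counter extends HasCount
    // Conceptually, mixin adds to Counter (names illustrative):
    //   private[this] var count: Int = _
    //   def count: Int = this.count                // built by getterBody
    //   def count_=(x: Int): Unit = this.count = x // built by setterBody
    // and the field is initialized from Counter's constructor via the trait's initializer.
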
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index bbd11efa7e..e4082eb376 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package transform
-import symtab.Flags._
import scala.reflect.internal.SymbolPairs
/** A class that yields a kind of iterator (`Cursor`),
@@ -36,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs {
*/
override protected def matches(lo: Symbol, high: Symbol) = lo.isType || (
(lo.owner != high.owner) // don't try to form pairs from overloaded members
- && !high.isPrivate // private or private[this] members never are overriden
+ && !high.isPrivate // private or private[this] members never are overridden
&& !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member.
&& relatively.matches(lo, high)
) // TODO we don't call exclude(high), should we?
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index cffb483072..ba303f7c2b 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -27,7 +27,7 @@ abstract class SampleTransform extends Transform {
tree1 match {
case Block(List(), expr) => // a simple optimization
expr
- case Block(defs, sup @ Super(qual, mix)) => // A hypthothetic transformation, which replaces
+ case Block(defs, sup @ Super(qual, mix)) => // A hypothetical transformation, which replaces
// {super} by {super.sample}
treeCopy.Block( // `copy` is the usual lazy tree copier
tree1, defs,
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 02e55241b3..53a1347a48 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -303,6 +303,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
+ def specializedFunctionName(sym: Symbol, args: List[Type]) = exitingSpecialize {
+ require(isFunctionSymbol(sym), sym)
+ val env: TypeEnv = TypeEnv.fromSpecialization(sym, args)
+ specializedClass.get((sym, env)) match {
+ case Some(x) =>
+ x.name
+ case None =>
+ sym.name
+ }
+ }
+
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
@@ -315,10 +326,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (sym.isClass) env.keySet
else specializedTypeVars(sym).intersect(env.keySet)
)
+ specializedName(sym.name, tvars, env)
+ }
+
+ private def specializedName(name: Name, tvars: immutable.Set[Symbol], env: TypeEnv): TermName = {
val (methparams, others) = tvars.toList sortBy ("" + _.name) partition (_.owner.isMethod)
// debuglog("specName(" + sym + ") env: " + env + " tvars: " + tvars)
- specializedName(sym.name, methparams map env, others map env)
+ specializedName(name, methparams map env, others map env)
}
/** Specialize name for the two list of types. The first one denotes
@@ -538,6 +553,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
bytecodeClazz.info
val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
+ sClass.setAnnotations(clazz.annotations) // SI-8574 important that the subclass picks up @SerialVersionUID, @strictfp, etc.
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName)
@@ -609,7 +625,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
exitingSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
- /* Enter 'sym' in the scope of the current specialized class. It's type is
+ /* Enter 'sym' in the scope of the current specialized class. Its type is
* mapped through the active environment, binding type variables to concrete
* types. The existing typeEnv for `sym` is composed with the current active
* environment
@@ -698,7 +714,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else if (m.isValue && !m.isMethod && !m.hasFlag(LAZY)) { // concrete value definition
def mkAccessor(field: Symbol, name: Name) = {
- val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
+ val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
// we rely on the super class to initialize param accessors
val sym = sClass.newMethod(name.toTermName, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
@@ -719,7 +735,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (nme.isLocalName(m.name)) {
val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info)
- val origGetter = overrideIn(sClass, m.getter(clazz))
+ val origGetter = overrideIn(sClass, m.getterIn(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
enterMember(origGetter)
@@ -732,12 +748,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode))
}
- if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
+ if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) {
val specSetter = mkAccessor(specVal, specGetter.setterName)
.resetFlag(STABLE)
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
UnitTpe))
- val origSetter = overrideIn(sClass, m.setter(clazz))
+ val origSetter = overrideIn(sClass, m.setterIn(clazz))
info(origSetter) = Forward(specSetter)
enterMember(specSetter)
enterMember(origSetter)
@@ -860,11 +876,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
newOverload(sym, specMember, env)
- // if this is a class, we insert the normalized member in scope,
- // if this is a method, there's no attached scope for it (EmptyScope)
- val decls = owner.info.decls
- if (decls != EmptyScope)
- decls.enter(specMember)
specMember
}
}
@@ -898,7 +909,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
- owner.info.decls.enter(specMember)
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -1295,7 +1305,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* // even in the specialized variant, the local X class
* // doesn't extend Parent$mcI$sp, since its symbol has
* // been created after specialization and was not seen
- * // by specialzation's info transformer.
+ * // by specialization's info transformer.
* ...
* }
* }
@@ -1373,7 +1383,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
)
def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) {
- /** Map a specializable method to it's rhs, when not deferred. */
+ /** Map a specializable method to its rhs, when not deferred. */
val body = perRunCaches.newMap[Symbol, Tree]()
/** Map a specializable method to its value parameter symbols. */
@@ -1503,20 +1513,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val residualTargs = symbol.info.typeParams zip baseTargs collect {
case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
}
- // See SI-5583. Don't know why it happens now if it didn't before.
- if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
- baseTree
- }
- else {
- ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
- )
+ ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
+ )
- val tree1 = gen.mkTypeApply(specTree, residualTargs)
- debuglog("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
- }
+ val tree1 = gen.mkTypeApply(specTree, residualTargs)
+ debuglog("rewrote " + tree + " to " + tree1)
+ localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
curTree = tree
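
For context on `specializedName`/`specializedFunctionName` above, a minimal sketch (illustrative, not from this patch) of the `$mc…$sp` naming scheme, assuming one type character per specialized primitive (I for Int, D for Double, and so on):

    object SpecializedNameSketch {
      // simplified: append the type characters of the specialized type arguments
      def specializedSuffix(typeChars: String): String = "$mc" + typeChars + "$sp"

      def main(args: Array[String]): Unit =
        // e.g. the Int => Int apply method of a specialized Function1
        println("apply" + specializedSuffix("II"))   // prints apply$mcII$sp
    }
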
diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala
index e2508b8d08..4673be6de7 100644
--- a/src/compiler/scala/tools/nsc/transform/Statics.scala
+++ b/src/compiler/scala/tools/nsc/transform/Statics.scala
@@ -1,9 +1,6 @@
package scala.tools.nsc
package transform
-import symtab._
-import Flags._
-
import collection.mutable.Buffer
abstract class Statics extends Transform with ast.TreeDSL {
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 714f189ead..16ea3ea90f 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -96,7 +96,7 @@ abstract class TailCalls extends Transform {
val failReason = failReasons(ctx)
val failPos = failPositions(ctx)
- unit.error(failPos, s"could not optimize @tailrec annotated $method: $failReason")
+ reporter.error(failPos, s"could not optimize @tailrec annotated $method: $failReason")
}
/** Has the label been accessed? Then its symbol is in this set. */
@@ -129,6 +129,13 @@ abstract class TailCalls extends Transform {
}
override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+ final def noTailContext() = clonedTailContext(false)
+ final def yesTailContext() = clonedTailContext(true)
+ protected def clonedTailContext(tailPos: Boolean): TailContext = this match {
+ case _ if this.tailPos == tailPos => this
+ case clone: ClonedTailContext => clone.that.clonedTailContext(tailPos)
+ case _ => new ClonedTailContext(this, tailPos)
+ }
}
object EmptyTailContext extends TailContext {
@@ -174,7 +181,7 @@ abstract class TailCalls extends Transform {
}
def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
}
- class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+ class ClonedTailContext(val that: TailContext, override val tailPos: Boolean) extends TailContext {
def method = that.method
def tparams = that.tparams
def methodPos = that.methodPos
@@ -183,9 +190,6 @@ abstract class TailCalls extends Transform {
}
private var ctx: TailContext = EmptyTailContext
- private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
- private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
-
override def transformUnit(unit: CompilationUnit): Unit = {
try {
@@ -206,16 +210,16 @@ abstract class TailCalls extends Transform {
finally this.ctx = saved
}
- def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
- def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
+ def yesTailTransform(tree: Tree): Tree = transform(tree, ctx.yesTailContext())
+ def noTailTransform(tree: Tree): Tree = transform(tree, ctx.noTailContext())
def noTailTransforms(trees: List[Tree]) = {
- val nctx = noTailContext()
- trees map (t => transform(t, nctx))
+ val nctx = ctx.noTailContext()
+ trees mapConserve (t => transform(t, nctx))
}
override def transform(tree: Tree): Tree = {
/* A possibly polymorphic apply to be considered for tail call transformation. */
- def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
+ def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree], mustTransformArgs: Boolean = true) = {
val receiver: Tree = fun match {
case Select(qual, _) => qual
case _ => EmptyTree
@@ -223,7 +227,7 @@ abstract class TailCalls extends Transform {
def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen
def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen
def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos
- def transformArgs = noTailTransforms(args)
+ def transformArgs = if (mustTransformArgs) noTailTransforms(args) else args
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
/* Records failure reason in Context for reporting.
@@ -265,17 +269,21 @@ abstract class TailCalls extends Transform {
!(sym.hasAccessorFlag || sym.isConstructor)
}
+ // intentionally shadowing imports from definitions for performance
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Boolean_or, Boolean_and}
+
tree match {
case ValDef(_, _, _, _) =>
if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass))
- unit.error(tree.pos, "lazy vals are not tailcall transformed")
+ reporter.error(tree.pos, "lazy vals are not tailcall transformed")
super.transform(tree)
case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if isEligible(dd) =>
val newCtx = new DefDefTailContext(dd)
if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
- unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
+ reporter.error(tree.pos, "@tailrec annotated method contains no recursive calls")
debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}")
val newRHS = transform(rhs0, newCtx)
@@ -312,8 +320,13 @@ abstract class TailCalls extends Transform {
// the assumption is once we encounter a case, the remainder of the block will consist of cases
// the prologue may be empty, usually it is the valdef that stores the scrut
val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ val transformedPrologue = noTailTransforms(prologue)
+ val transformedCases = transformTrees(cases)
+ val transformedStats =
+ if ((prologue eq transformedPrologue) && (cases eq transformedCases)) stats // allow reuse of `tree` if the subtransform was an identity
+ else transformedPrologue ++ transformedCases
treeCopy.Block(tree,
- noTailTransforms(prologue) ++ transformTrees(cases),
+ transformedStats,
transform(expr)
)
@@ -328,11 +341,14 @@ abstract class TailCalls extends Transform {
)
case CaseDef(pat, guard, body) =>
+ // CaseDefs are already translated and guards were moved into the body.
+ // If this was not the case, guards would have to be transformed here as well.
+ assert(guard.isEmpty)
deriveCaseDef(tree)(transform)
case If(cond, thenp, elsep) =>
treeCopy.If(tree,
- cond,
+ noTailTransform(cond),
transform(thenp),
transform(elsep)
)
@@ -363,7 +379,7 @@ abstract class TailCalls extends Transform {
rewriteApply(tapply, fun, targs, vargs)
case Apply(fun, args) if fun.symbol == Boolean_or || fun.symbol == Boolean_and =>
- treeCopy.Apply(tree, fun, transformTrees(args))
+ treeCopy.Apply(tree, noTailTransform(fun), transformTrees(args))
// this is to detect tailcalls in translated matches
// it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
@@ -377,7 +393,7 @@ abstract class TailCalls extends Transform {
if (res ne arg)
treeCopy.Apply(tree, fun, res :: Nil)
else
- rewriteApply(fun, fun, Nil, args)
+ rewriteApply(fun, fun, Nil, args, mustTransformArgs = false)
case Apply(fun, args) =>
rewriteApply(fun, fun, Nil, args)
@@ -418,6 +434,10 @@ abstract class TailCalls extends Transform {
def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
+ // intentionally shadowing imports from definitions for performance
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Boolean_or, Boolean_and}
+
override def traverse(tree: Tree) = tree match {
// we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)`
case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol =>
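
For context on the tail contexts above, a sketch (illustrative, not from this patch) of the kind of method this phase rewrites: a self-recursive call in tail position becomes a jump back to the start of the method, so deep recursion uses constant stack.

    import scala.annotation.tailrec

    object TailDemo {
      @tailrec def sum(xs: List[Int], acc: Int = 0): Int = xs match {
        case Nil     => acc
        case x :: tl => sum(tl, acc + x)   // tail call, rewritten into a loop by this phase
      }
    }
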
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
index f83b6f857e..3b23306386 100644
--- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -1,7 +1,6 @@
package scala.tools.nsc
package transform
-import scala.reflect.internal._
import scala.tools.nsc.ast.TreeDSL
import scala.tools.nsc.Global
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index 3feadcd9b2..dc3313e2e4 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -17,9 +17,9 @@ trait TypingTransformers {
abstract class TypingTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer =
if (phase.erasedTypes)
- erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer]
- else
- analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true))
+ erasure.newTyper(erasure.rootContextPostTyper(unit, EmptyTree)).asInstanceOf[analyzer.Typer]
+ else // TODO: AM: should some phases use a regular rootContext instead of a post-typer one??
+ analyzer.newTyper(analyzer.rootContextPostTyper(unit, EmptyTree))
protected var curTree: Tree = _
override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index d77c6b54a9..836ea808ac 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -10,6 +10,7 @@ package transform
import symtab.Flags._
import scala.collection.{ mutable, immutable }
import scala.language.postfixOps
+import scala.reflect.internal.util.ListOfNil
/*<export> */
/** - uncurry all symbol and tree types (@see UnCurryPhase) -- this includes normalizing all proper types.
@@ -69,7 +70,14 @@ abstract class UnCurry extends InfoTransform
private val byNameArgs = mutable.HashSet[Tree]()
private val noApply = mutable.HashSet[Tree]()
private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]()
- private val repeatedParams = mutable.Map[Symbol, List[ValDef]]()
+
+ private lazy val forceSpecializationInfoTransformOfFunctionN: Unit = {
+ if (currentRun.specializePhase != NoPhase) { // be robust in case of -Ystop-after:uncurry
+ exitingSpecialize {
+ FunctionClass.seq.foreach(cls => cls.info)
+ }
+ }
+ }
/** Add a new synthetic member for `currentOwner` */
private def addNewMember(t: Tree): Unit =
@@ -93,7 +101,7 @@ abstract class UnCurry extends InfoTransform
override def transform(tree: Tree): Tree = (
try postTransform(mainTransform(tree))
catch { case ex: TypeError =>
- unit.error(ex.pos, ex.msg)
+ reporter.error(ex.pos, ex.msg)
debugStack(ex)
EmptyTree
}
@@ -174,7 +182,7 @@ abstract class UnCurry extends InfoTransform
cdef <- catches
if catchesThrowable(cdef) && !isSyntheticCase(cdef)
} {
- unit.warning(body.pos, "catch block may intercept non-local return from " + meth)
+ reporter.warning(body.pos, "catch block may intercept non-local return from " + meth)
}
Block(List(keyDef), tryCatch)
@@ -207,7 +215,7 @@ abstract class UnCurry extends InfoTransform
// (() => Int) { def apply(): Int @typeConstraint }
case RefinedType(List(funTp), decls) =>
debuglog(s"eliminate refinement from function type ${fun.tpe}")
- fun.tpe = funTp
+ fun.setType(funTp)
case _ =>
()
}
@@ -221,11 +229,23 @@ abstract class UnCurry extends InfoTransform
def mkMethod(owner: Symbol, name: TermName, additionalFlags: FlagSet = NoFlags): DefDef =
gen.mkMethodFromFunction(localTyper)(fun, owner, name, additionalFlags)
- val canUseDelamdafyMethod = (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation
+ def isSpecialized = {
+ forceSpecializationInfoTransformOfFunctionN
+ val specialized = specializeTypes.specializedType(fun.tpe)
+ !(specialized =:= fun.tpe)
+ }
+ def canUseDelamdafyMethod = (
+ (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation
+ && (!isSpecialized || (settings.target.value == "jvm-1.8")) // DelambdafyTransformer currently only emits generic FunctionN-s, use the old style in the meantime
+ )
if (inlineFunctionExpansion || !canUseDelamdafyMethod) {
val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation
+ // The original owner is used in the backend for the EnclosingMethod attribute. If fun is
+ // nested in a value-class method, its owner was already changed to the extension method.
+ // Saving the original owner allows getting the source structure from the class symbol.
+ defineOriginalOwner(anonClass, fun.symbol.originalOwner)
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
val applyMethodDef = mkMethod(anonClass, nme.apply)
@@ -428,7 +448,7 @@ abstract class UnCurry extends InfoTransform
treeCopy.ValDef(p, p.mods, p.name, p.tpt, EmptyTree)
})
- if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
+ if (dd.symbol hasAnnotation VarargsClass) validateVarargs(dd)
withNeedLift(needLift = false) {
if (dd.symbol.isClassConstructor) {
@@ -460,7 +480,7 @@ abstract class UnCurry extends InfoTransform
case UnApply(fn, args) =>
val fn1 = transform(fn)
val args1 = fn.symbol.name match {
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(tree).expectedTypes)
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(global.typer.context, tree).expectedTypes)
case _ => args
}
treeCopy.UnApply(tree, fn1, args1)
@@ -699,19 +719,12 @@ abstract class UnCurry extends InfoTransform
}
}
-
- /* Analyzes repeated params if method is annotated as `varargs`.
- * If the repeated params exist, it saves them into the `repeatedParams` map,
- * which is used later.
- */
- private def saveRepeatedParams(dd: DefDef): Unit =
+ private def validateVarargs(dd: DefDef): Unit =
if (dd.symbol.isConstructor)
- unit.error(dd.symbol.pos, "A constructor cannot be annotated with a `varargs` annotation.")
- else treeInfo.repeatedParams(dd) match {
- case Nil =>
- unit.error(dd.symbol.pos, "A method without repeated parameters cannot be annotated with the `varargs` annotation.")
- case reps =>
- repeatedParams(dd.symbol) = reps
+ reporter.error(dd.symbol.pos, "A constructor cannot be annotated with a `varargs` annotation.")
+ else {
+ val hasRepeated = mexists(dd.symbol.paramss)(sym => definitions.isRepeatedParamType(sym.tpe))
+ if (!hasRepeated) reporter.error(dd.symbol.pos, "A method without repeated parameters cannot be annotated with the `varargs` annotation.")
}
/* Called during post transform, after the method argument lists have been flattened.
@@ -719,7 +732,7 @@ abstract class UnCurry extends InfoTransform
* varargs forwarder.
*/
private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef): DefDef = {
- if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
+ if (!dd.symbol.hasAnnotation(VarargsClass) || !enteringUncurry(mexists(dd.symbol.paramss)(sym => definitions.isRepeatedParamType(sym.tpe))))
return flatdd
def toArrayType(tp: Type): Type = {
@@ -735,19 +748,18 @@ abstract class UnCurry extends InfoTransform
)
}
- val reps = repeatedParams(dd.symbol)
- val rpsymbols = reps.map(_.symbol).toSet
val theTyper = typer.atOwner(dd, currentClass)
- val flatparams = flatdd.vparamss.head
+ val flatparams = flatdd.symbol.paramss.head
+ val isRepeated = enteringUncurry(dd.symbol.info.paramss.flatten.map(sym => definitions.isRepeatedParamType(sym.tpe)))
// create the type
- val forwformals = flatparams map {
- case p if rpsymbols(p.symbol) => toArrayType(p.symbol.tpe)
- case p => p.symbol.tpe
+ val forwformals = map2(flatparams, isRepeated) {
+ case (p, true) => toArrayType(p.tpe)
+ case (p, false)=> p.tpe
}
val forwresult = dd.symbol.tpe_*.finalResultType
val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
- currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp)
+ currentClass.newValueParameter(oldparam.name.toTermName, oldparam.pos).setInfo(tp)
)
def mono = MethodType(forwformsyms, forwresult)
val forwtype = dd.symbol.tpe match {
@@ -761,13 +773,13 @@ abstract class UnCurry extends InfoTransform
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
- val locals = map2(forwParams, flatparams) {
- case (_, fp) if !rpsymbols(fp.symbol) => null
- case (argsym, fp) =>
+ val locals = map3(forwParams, flatparams, isRepeated) {
+ case (_, fp, false) => null
+ case (argsym, fp, true) =>
Block(Nil,
gen.mkCast(
gen.mkWrapArray(Ident(argsym), elementType(ArrayClass, argsym.tpe)),
- seqType(elementType(SeqClass, fp.symbol.tpe))
+ seqType(elementType(SeqClass, fp.tpe))
)
)
}
@@ -782,7 +794,7 @@ abstract class UnCurry extends InfoTransform
// check if the method with that name and those arguments already exists in the template
currentClass.info.member(forwsym.name).alternatives.find(s => s != forwsym && s.tpe.matches(forwsym.tpe)) match {
- case Some(s) => unit.error(dd.symbol.pos,
+ case Some(s) => reporter.error(dd.symbol.pos,
"A method with a varargs annotation produces a forwarder method with the same signature "
+ s.tpe + " as an existing method.")
case None =>
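
For context on `validateVarargs` and `addJavaVarargsForwarders` above, a sketch (illustrative, not from this patch) of the `@varargs` feature they implement: a Scala repeated parameter gets a Java-friendly forwarder taking an array.

    import scala.annotation.varargs

    class Greeter {
      @varargs def greet(names: String*): Unit = names.foreach(println)
      // uncurry adds a forwarder roughly like:
      //   def greet(names: Array[String]): Unit = greet(names: _*)
      // so Java code can call greeter.greet("a", "b") directly.
    }
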
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index e0bc478fad..227c45b3a7 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -9,16 +9,15 @@ package tools.nsc.transform.patmat
import scala.language.postfixOps
import scala.collection.mutable
-import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.HashSet
+import scala.reflect.internal.util.{NoPosition, Position, Statistics, HashSet}
+import scala.tools.nsc.Global
trait Logic extends Debugging {
import PatternMatchingStats._
private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
- private def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
- def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ private def alignedColumns(cols: Seq[Any]): Seq[String] = {
+ def toString(x: Any) = if (x == null) "" else x.toString
if (cols.isEmpty || cols.tails.isEmpty) cols map toString
else {
val colLens = cols map (c => toString(c).length)
@@ -33,7 +32,7 @@ trait Logic extends Debugging {
}
}
- def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ def alignAcrossRows(xss: List[List[Any]], sep: String, lineSep: String = "\n"): String = {
val maxLen = max(xss map (_.length))
val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
@@ -47,7 +46,7 @@ trait Logic extends Debugging {
type Tree
class Prop
- case class Eq(p: Var, q: Const) extends Prop
+ final case class Eq(p: Var, q: Const) extends Prop
type Const
@@ -72,6 +71,10 @@ trait Logic extends Debugging {
def unapply(v: Var): Some[Tree]
}
+ def uncheckedWarning(pos: Position, msg: String): Unit
+
+ def reportWarning(message: String): Unit
+
// resets hash consing -- only supposed to be called by TreeMakersToProps
def prepareNewAnalysis(): Unit
@@ -88,6 +91,8 @@ trait Logic extends Debugging {
// compute the domain and return it (call registerNull first!)
def domainSyms: Option[Set[Sym]]
+ def groupedDomains: List[Set[Sym]]
+
// the symbol for this variable being equal to its statically known type
// (only available if registerEquality has been called for that type before)
def symForStaticTp: Option[Sym]
@@ -104,43 +109,162 @@ trait Logic extends Debugging {
// would be nice to statically check whether a prop is equational or pure,
// but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
- case class And(a: Prop, b: Prop) extends Prop
- case class Or(a: Prop, b: Prop) extends Prop
- case class Not(a: Prop) extends Prop
+ final case class And(ops: Set[Prop]) extends Prop
+ object And {
+ def apply(ops: Prop*) = new And(ops.toSet)
+ }
+
+ final case class Or(ops: Set[Prop]) extends Prop
+ object Or {
+ def apply(ops: Prop*) = new Or(ops.toSet)
+ }
+
+ final case class Not(a: Prop) extends Prop
+
+ // mutually exclusive (i.e., not more than one symbol is set)
+ final case class AtMostOne(ops: List[Sym]) extends Prop
case object True extends Prop
case object False extends Prop
// symbols are propositions
- abstract case class Sym(variable: Var, const: Const) extends Prop {
+ final class Sym private[PropositionalLogic] (val variable: Var, val const: Const) extends Prop {
+
+ override def equals(other: scala.Any): Boolean = other match {
+ case that: Sym => this.variable == that.variable &&
+ this.const == that.const
+ case _ => false
+ }
+
+ override def hashCode(): Int = {
+ variable.hashCode * 41 + const.hashCode
+ }
+
private val id: Int = Sym.nextSymId
- override def toString = variable +"="+ const +"#"+ id
+ override def toString = s"$variable=$const#$id"
}
- class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+
object Sym {
private val uniques: HashSet[Sym] = new HashSet("uniques", 512)
def apply(variable: Var, const: Const): Sym = {
- val newSym = new UniqueSym(variable, const)
+ val newSym = new Sym(variable, const)
(uniques findEntryOrUpdate newSym)
}
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ def nextSymId = {_symId += 1; _symId}; private var _symId = 0
implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id)
}
- def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
- def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
+ def /\(props: Iterable[Prop]) = if (props.isEmpty) True else And(props.toSeq: _*)
+ def \/(props: Iterable[Prop]) = if (props.isEmpty) False else Or(props.toSeq: _*)
+
+ /**
+ * Simplifies a propositional formula according to the following rules:
+ * - eliminate double negation (avoids unnecessary Tseitin variables)
+ * - flatten trees of same connectives (avoids unnecessary Tseitin variables)
+ * - remove constants and connectives that are in fact constant because of their operands
+ * - eliminate duplicate operands
+ * - convert the formula into NNF: all sub-expressions have a positive polarity,
+ * which makes them amenable to the subsequent Plaisted transformation
+ * and increases the chances of figuring out that the formula is already in CNF
+ *
+ * Complexity: DFS over formula tree
+ *
+ * See http://www.decision-procedures.org/slides/propositional_logic-2x3.pdf
+ */
+ def simplify(f: Prop): Prop = {
+
+ // limit size to avoid blow up
+ def hasImpureAtom(ops: Seq[Prop]): Boolean = ops.size < 10 &&
+ ops.combinations(2).exists {
+ case Seq(a, Not(b)) if a == b => true
+ case Seq(Not(a), b) if a == b => true
+ case _ => false
+ }
+
+ // push negation inside formula
+ def negationNormalFormNot(p: Prop): Prop = p match {
+ case And(ops) => Or(ops.map(negationNormalFormNot)) // De Morgan
+ case Or(ops) => And(ops.map(negationNormalFormNot)) // De Morgan
+ case Not(p) => negationNormalForm(p)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop): Prop = p match {
+ case And(ops) => And(ops.map(negationNormalForm))
+ case Or(ops) => Or(ops.map(negationNormalForm))
+ case Not(negated) => negationNormalFormNot(negated)
+ case True
+ | False
+ | (_: Sym)
+ | (_: AtMostOne) => p
+ }
+
+ def simplifyProp(p: Prop): Prop = p match {
+ case And(fv) =>
+ // recurse for nested And (pulls all Ands up)
+ val ops = fv.map(simplifyProp) - True // ignore `True`
+
+ // build up Set in order to remove duplicates
+ val opsFlattened = ops.flatMap {
+ case And(fv) => fv
+ case f => Set(f)
+ }.toSeq
+
+ if (hasImpureAtom(opsFlattened) || opsFlattened.contains(False)) {
+ False
+ } else {
+ opsFlattened match {
+ case Seq() => True
+ case Seq(f) => f
+ case ops => And(ops: _*)
+ }
+ }
+ case Or(fv) =>
+ // recurse for nested Or (pulls all Ors up)
+ val ops = fv.map(simplifyProp) - False // ignore `False`
+
+ val opsFlattened = ops.flatMap {
+ case Or(fv) => fv
+ case f => Set(f)
+ }.toSeq
+
+ if (hasImpureAtom(opsFlattened) || opsFlattened.contains(True)) {
+ True
+ } else {
+ opsFlattened match {
+ case Seq() => False
+ case Seq(f) => f
+ case ops => Or(ops: _*)
+ }
+ }
+ case Not(Not(a)) =>
+ simplify(a)
+ case Not(p) =>
+ Not(simplify(p))
+ case p =>
+ p
+ }
+
+ val nnf = negationNormalForm(f)
+ simplifyProp(nnf)
+ }
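+
+ // Illustrative reductions (informal sketch; `a`, `b`, `c` stand for Syms):
+ //   simplify(Not(And(a, b)))    == Or(Not(a), Not(b))   // pushed into NNF via De Morgan
+ //   simplify(And(a, And(b, c))) == And(a, b, c)         // nested conjunctions are flattened
+ //   simplify(And(a, Not(a)))    == False                // contradictory operands detected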
trait PropTraverser {
def apply(x: Prop): Unit = x match {
- case And(a, b) => apply(a); apply(b)
- case Or(a, b) => apply(a); apply(b)
+ case And(ops) => ops foreach apply
+ case Or(ops) => ops foreach apply
case Not(a) => apply(a)
case Eq(a, b) => applyVar(a); applyConst(b)
+ case s: Sym => applySymbol(s)
+ case AtMostOne(ops) => ops.foreach(applySymbol)
case _ =>
}
def applyVar(x: Var): Unit = {}
def applyConst(x: Const): Unit = {}
+ def applySymbol(x: Sym): Unit = {}
}
def gatherVariables(p: Prop): Set[Var] = {
@@ -151,10 +275,18 @@ trait Logic extends Debugging {
vars.toSet
}
+ def gatherSymbols(p: Prop): Set[Sym] = {
+ val syms = new mutable.HashSet[Sym]()
+ (new PropTraverser {
+ override def applySymbol(s: Sym) = syms += s
+ })(p)
+ syms.toSet
+ }
+
trait PropMap {
def apply(x: Prop): Prop = x match { // TODO: mapConserve
- case And(a, b) => And(apply(a), apply(b))
- case Or(a, b) => Or(apply(a), apply(b))
+ case And(ops) => And(ops map apply)
+ case Or(ops) => Or(ops map apply)
case Not(a) => Not(apply(a))
case p => p
}
@@ -162,20 +294,27 @@ trait Logic extends Debugging {
// to govern how much time we spend analyzing matches for unreachability/exhaustivity
object AnalysisBudget {
- private val budgetProp = scala.sys.Prop[Int]("scalac.patmat.analysisBudget")
- private val budgetOff = "off"
- val max: Int = {
- val DefaultBudget = 256
- budgetProp.option.getOrElse(if (budgetProp.get.equalsIgnoreCase("off")) Integer.MAX_VALUE else DefaultBudget)
- }
+ val maxDPLLdepth = global.settings.YpatmatExhaustdepth.value
+ val maxFormulaSize = 100 * math.min(Int.MaxValue / 100, maxDPLLdepth)
+
+ private def advice =
+ s"Please try with scalac -Ypatmat-exhaust-depth ${maxDPLLdepth * 2} or -Ypatmat-exhaust-depth off."
+
+ def recursionDepthReached =
+ s"Exhaustivity analysis reached max recursion depth, not all missing cases are reported.\n($advice)"
abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
- object exceeded extends Exception(
- s"(The analysis required more space than allowed. Please try with scalac -D${budgetProp.key}=${AnalysisBudget.max*2} or -D${budgetProp.key}=${budgetOff}.)")
+ object formulaSizeExceeded extends Exception(s"The analysis required more space than allowed.\n$advice")
}
+ // TODO: remove since deprecated
+ val budgetProp = scala.sys.Prop[String]("scalac.patmat.analysisBudget")
+ if (budgetProp.isSet) {
+ reportWarning(s"Please remove -D${budgetProp.key}, it is ignored.")
+ }
+
// convert finite domain propositional logic with subtyping to pure boolean propositional logic
// a type test or a value equality test are modelled as a variable being equal to some constant
// a variable V may be assigned multiple constants, as long as they do not contradict each other
@@ -194,10 +333,10 @@ trait Logic extends Debugging {
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
// may throw an AnalysisBudget.Exception
- def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
- val vars = new scala.collection.mutable.HashSet[Var]
+ val vars = new mutable.HashSet[Var]
object gatherEqualities extends PropTraverser {
override def apply(p: Prop) = p match {
@@ -218,10 +357,10 @@ trait Logic extends Debugging {
props foreach gatherEqualities.apply
if (modelNull) vars foreach (_.registerNull())
- val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
+ val pure = props map (p => rewriteEqualsToProp(p))
- val eqAxioms = formulaBuilder
- @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
+ val eqAxioms = mutable.ArrayBuffer[Prop]()
+ @inline def addAxiom(p: Prop) = eqAxioms += p
debug.patmat("removeVarEq vars: "+ vars)
vars.foreach { v =>
@@ -243,53 +382,51 @@ trait Logic extends Debugging {
// when sym is true, what must hold...
implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
// ... and what must not?
- excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
+ excluded foreach {
+ excludedSym =>
+ val exclusive = v.groupedDomains.exists {
+ domain => domain.contains(sym) && domain.contains(excludedSym)
+ }
+
+ // TODO: populate `v.exclusiveDomains` with `Set`s from the start, and optimize to:
+ // val exclusive = v.exclusiveDomains.exists { inDomain => inDomain(sym) && inDomain(excludedSym) }
+ if (!exclusive)
+ addAxiom(Or(Not(sym), Not(excludedSym)))
+ }
+ }
+
+ // all symbols in a domain are mutually exclusive
+ v.groupedDomains.foreach {
+ syms => if (syms.size > 1) addAxiom(AtMostOne(syms.toList))
}
}
- debug.patmat("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
- debug.patmat("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
+ debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}")
+ debug.patmat(s"pure:${pure.mkString("\n")}")
if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
- (toFormula(eqAxioms), pure)
+ (And(eqAxioms: _*), pure)
}
+ type Solvable
- // an interface that should be suitable for feeding a SAT solver when the time comes
- type Formula
- type FormulaBuilder
-
- // creates an empty formula builder to which more formulae can be added
- def formulaBuilder: FormulaBuilder
-
- // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
- // toFormula(f) == andFormula(f1, andFormula(..., fN))
- def addFormula(buff: FormulaBuilder, f: Formula): Unit
- def toFormula(buff: FormulaBuilder): Formula
-
- // the conjunction of formulae `a` and `b`
- def andFormula(a: Formula, b: Formula): Formula
-
- // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
- def simplifyFormula(a: Formula): Formula
-
- // may throw an AnalysisBudget.Exception
- def propToSolvable(p: Prop): Formula = {
- val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
- andFormula(eqAxioms, pure)
+ def propToSolvable(p: Prop): Solvable = {
+ val (eqAxiom, pure :: Nil) = removeVarEq(List(p), modelNull = false)
+ eqFreePropToSolvable(And(eqAxiom, pure))
}
- // may throw an AnalysisBudget.Exception
- def eqFreePropToSolvable(p: Prop): Formula
- def cnfString(f: Formula): String
+ def eqFreePropToSolvable(f: Prop): Solvable
- type Model = collection.immutable.SortedMap[Sym, Boolean]
+ type Model = Map[Sym, Boolean]
val EmptyModel: Model
val NoModel: Model
- def findModelFor(f: Formula): Model
- def findAllModelsFor(f: Formula): List[Model]
+ final case class Solution(model: Model, unassigned: List[Sym])
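+ // `unassigned` holds the Syms the DPLL solver left unconstrained in `model`;
+ // expandModel (in MatchAnalysis) later enumerates their feasible assignments.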
+
+ def findModelFor(solvable: Solvable): Model
+
+ def findAllModelsFor(solvable: Solvable, pos: Position = NoPosition): List[Solution]
}
}
@@ -335,7 +472,9 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
lazy val domain: Option[Set[Const]] = {
- val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ val subConsts =
+ enumerateSubtypes(staticTp, grouped = false)
+ .headOption.map { tps =>
tps.toSet[Type].map{ tp =>
val domainC = TypeConst(tp)
registerEquality(domainC)
@@ -353,6 +492,15 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
observed(); allConsts
}
+ lazy val groupedDomains: List[Set[Sym]] = {
+ val subtypes = enumerateSubtypes(staticTp, grouped = true)
+ subtypes.map {
+ subTypes =>
+ val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).toSet
+ if (mayBeNull) syms + symForEqualsTo(NullConst) else syms
+ }.filter(_.nonEmpty)
+ }
+
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
@@ -498,7 +646,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
- import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
+ import global.{ConstantType, Constant, EmptyScope, SingletonType, Literal, Ident, refinedType, singleType, TypeBounds, NoSymbol}
import global.definitions._
@@ -531,23 +679,30 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
private val trees = mutable.HashSet.empty[Tree]
// hashconsing trees (modulo value-equality)
- private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type =
- // a new type for every unstable symbol -- only stable value are uniqued
- // technically, an unreachable value may change between cases
- // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding
- // (and thuuuuus, the latter case must be considered reachable)
- if (!t.symbol.isStable) t.tpe.narrow
+ private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type = {
+ def freshExistentialSubtype(tp: Type): Type = {
+ // SI-8611 tp.narrow is tempting, but unsuitable. See `testRefinedTypeSI8611` for an explanation.
+ NoSymbol.freshExistential("").setInfo(TypeBounds.upper(tp)).tpe
+ }
+
+ if (!t.symbol.isStable) {
+ // Create a fresh type for each unstable value, since we can never correlate it to another value.
+ // For example `case X => case X =>` should not complain about the second case being unreachable,
+ // if X is mutable.
+ freshExistentialSubtype(t.tpe)
+ }
else trees find (a => a.correspondsStructure(t)(sameValue)) match {
case Some(orig) =>
- debug.patmat("unique tp for tree: "+ ((orig, orig.tpe)))
+ debug.patmat("unique tp for tree: " + ((orig, orig.tpe)))
orig.tpe
case _ =>
// duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
- val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
+ val treeWithNarrowedType = t.duplicate setType freshExistentialSubtype(t.tpe)
debug.patmat("uniqued: "+ ((t, t.tpe, treeWithNarrowedType.tpe)))
trees += treeWithNarrowedType
treeWithNarrowedType.tpe
}
+ }
}
sealed abstract class Const {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index 2893cbdf45..a11906ace1 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -6,10 +6,11 @@
package scala.tools.nsc.transform.patmat
+import scala.annotation.tailrec
+import scala.collection.immutable.{IndexedSeq, Iterable}
import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.util.Position
trait TreeAndTypeAnalysis extends Debugging {
import global._
@@ -50,8 +51,8 @@ trait TreeAndTypeAnalysis extends Debugging {
// but the annotation didn't bubble up...
// This is a pretty poor approximation.
def unsoundAssumptionUsed = binder.name != nme.WILDCARD && !(pt <:< pat.tpe)
- if (settings.lint && unsoundAssumptionUsed)
- global.currentUnit.warning(pat.pos,
+ if (settings.warnUnsoundMatch && unsoundAssumptionUsed)
+ reporter.warning(pat.pos,
sm"""The value matched by $pat is bound to ${binder.name}, which may be used under the
|unsound assumption that it has type ${pat.tpe}, whereas we can only safely
|count on it having type $pt, as the pattern is matched using `==` (see SI-1503).""")
@@ -94,48 +95,84 @@ trait TreeAndTypeAnalysis extends Debugging {
val typer: Typer
// TODO: domain of other feasibly enumerable built-in types (char?)
- def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ def enumerateSubtypes(tp: Type, grouped: Boolean): List[List[Type]] =
tp.typeSymbol match {
// TODO case _ if tp.isTupleType => // recurse into component types?
- case UnitClass =>
- Some(List(UnitTpe))
- case BooleanClass =>
- Some(ConstantTrue :: ConstantFalse :: Nil)
+ case UnitClass if !grouped =>
+ List(List(UnitTpe))
+ case BooleanClass if !grouped =>
+ List(ConstantTrue :: ConstantFalse :: Nil)
// TODO case _ if tp.isTupleType => // recurse into component types
- case modSym: ModuleClassSymbol =>
- Some(List(tp))
+ case modSym: ModuleClassSymbol if !grouped =>
+ List(List(tp))
+ case sym: RefinementClassSymbol =>
+ val parentSubtypes = tp.parents.flatMap(parent => enumerateSubtypes(parent, grouped))
+ if (parentSubtypes exists (_.nonEmpty)) {
+ // If any of the parents is enumerable, then the refinement type is enumerable.
+ // We must only include subtypes of the parents that conform to `tp`.
+ // See neg/virtpatmat_exhaust_compound.scala for an example.
+ parentSubtypes map (_.filter(_ <:< tp))
+ }
+ else Nil
// make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
- None
- case sym =>
- val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- sym.sealedDescendants.toList
- sortBy (_.sealedSortName)
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
- )
+ case sym if sym.isSealed =>
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
- Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
- // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- subclasses flatMap { sym =>
+ def filterChildren(children: List[Symbol]): List[Type] = {
+ children flatMap { sym =>
// have to filter out children which cannot match: see ticket #3683 for an example
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
// however, must approximate abstract types in
- val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
- val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
// debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
}
- })
+ }
+
+ if (grouped) {
+ def enumerateChildren(sym: Symbol) = {
+ sym.children.toList
+ .sortBy(_.sealedSortName)
+ .filterNot(x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ }
+
+ // enumerate only direct subclasses;
+ // subclasses of subclasses are enumerated in the next iteration
+ // and added to a new group
+ def groupChildren(wl: List[Symbol],
+ acc: List[List[Type]]): List[List[Type]] = wl match {
+ case hd :: tl =>
+ val children = enumerateChildren(hd)
+ groupChildren(tl ++ children, acc :+ filterChildren(children))
+ case Nil => acc
+ }
+
+ groupChildren(sym :: Nil, Nil)
+ } else {
+ val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be and they cannot otherwise be created.
+ sym.sealedDescendants.toList
+ sortBy (_.sealedSortName)
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ )
+
+ List(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ filterChildren(subclasses)
+ })
+ }
+
+ case sym =>
+ debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
+ Nil
}
// approximate a type to the static type that is fully checkable at run time,
@@ -149,23 +186,23 @@ trait TreeAndTypeAnalysis extends Debugging {
object typeArgsToWildcardsExceptArray extends TypeMap {
// SI-6771 dealias would be enough today, but future proofing with the dealiasWiden.
// See neg/t6771b.scala for elaboration
- def apply(tp: Type): Type = tp.dealiasWiden match {
+ def apply(tp: Type): Type = tp.dealias match {
case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
TypeRef(pre, sym, args map (_ => WildcardType))
case _ =>
mapOver(tp)
}
}
- debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp))
+ val result = typeArgsToWildcardsExceptArray(tp)
+ debug.patmatResult(s"checkableType($tp)")(result)
}
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
// we consider tuple types with at least one component of a checkable type as a checkable type
def uncheckableType(tp: Type): Boolean = {
- def tupleComponents(tp: Type) = tp.normalize.typeArgs
val checkable = (
(isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
- || enumerateSubtypes(tp).nonEmpty)
+ || enumerateSubtypes(tp, grouped = false).nonEmpty)
// if (!checkable) debug.patmat("deemed uncheckable: "+ tp)
!checkable
}
@@ -257,7 +294,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
// the type of the binder passed to the first invocation
// determines the type of the tree that'll be returned for that binder as of then
final def binderToUniqueTree(b: Symbol) =
- unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+ unique(accumSubst(normalize(gen.mkAttributedStableRef(b))), b.tpe)
// note that the sequencing of operations is important: must visit in same order as match execution
// binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
@@ -354,7 +391,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
def handleUnknown(tm: TreeMaker) = handler(tm)
}
- // used for CSE -- rewrite all unknowns to False (the most conserative option)
+ // used for CSE -- rewrite all unknowns to False (the most conservative option)
object conservative extends TreeMakerToProp {
def handleUnknown(tm: TreeMaker) = False
}
@@ -387,8 +424,9 @@ trait MatchAnalysis extends MatchApproximation {
import global.definitions._
trait MatchAnalyzer extends MatchApproximator {
- def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
+ def uncheckedWarning(pos: Position, msg: String) = currentRun.reporting.uncheckedWarning(pos, msg)
def warn(pos: Position, ex: AnalysisBudget.Exception, kind: String) = uncheckedWarning(pos, s"Cannot check match for $kind.\n${ex.advice}")
+ def reportWarning(message: String) = global.reporter.warning(typer.context.tree.pos, message)
// TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
// right now hackily implement this by pruning counter-examples
@@ -420,19 +458,19 @@ trait MatchAnalysis extends MatchApproximation {
try {
val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
- val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
+ val eqAxioms = simplify(And(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
- val prefix = formulaBuilder
- addFormula(prefix, eqAxioms)
+ val prefix = mutable.ArrayBuffer[Prop]()
+ prefix += eqAxioms
var prefixRest = symbolicCasesFail
- var current = symbolicCasesOk
- var reachable = true
- var caseIndex = 0
+ var current = symbolicCasesOk
+ var reachable = true
+ var caseIndex = 0
- debug.patmat("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
- debug.patmat("equality axioms:\n"+ cnfString(eqAxiomsOk))
+ debug.patmat("reachability, vars:\n" + ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
+ debug.patmat(s"equality axioms:\n$eqAxiomsOk")
// invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
// termination: prefixRest.length decreases by 1
@@ -442,9 +480,10 @@ trait MatchAnalysis extends MatchApproximation {
prefixRest = prefixRest.tail
if (prefixRest.isEmpty) reachable = true
else {
- addFormula(prefix, prefHead)
+ prefix += prefHead
current = current.tail
- val model = findModelFor(andFormula(current.head, toFormula(prefix)))
+ val and = And((current.head +: prefix): _*)
+ val model = findModelFor(eqFreePropToSolvable(and))
// debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
// if (NoModel ne model) debug.patmat("reached: "+ modelString(model))
@@ -501,7 +540,7 @@ trait MatchAnalysis extends MatchApproximation {
// when does the match fail?
val matchFails = Not(\/(symbolicCases))
- // debug output:
+ // debug output:
debug.patmat("analysing:")
showTreeMakers(cases)
@@ -510,17 +549,26 @@ trait MatchAnalysis extends MatchApproximation {
try {
// find the models (under which the match fails)
- val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
+ val matchFailModels = findAllModelsFor(propToSolvable(matchFails), prevBinder.pos)
val scrutVar = Var(prevBinderTree)
- val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+ val counterExamples = {
+ matchFailModels.flatMap {
+ model =>
+ val varAssignments = expandModel(model)
+ varAssignments.flatMap(modelToCounterExample(scrutVar) _)
+ }
+ }
- val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
+ // sorting before pruning is important here in order to
+ // keep neg/t7020.scala stable
+ // since e.g. List(_, _) would cover List(1, _)
+ val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString)
if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
pruned
} catch {
- case ex : AnalysisBudget.Exception =>
+ case ex: AnalysisBudget.Exception =>
warn(prevBinder.pos, ex, "exhaustivity")
Nil // CNF budget exceeded
}
@@ -588,6 +636,8 @@ trait MatchAnalysis extends MatchApproximation {
case object WildcardExample extends CounterExample { override def toString = "_" }
case object NoExample extends CounterExample { override def toString = "??" }
+ // returns a mapping from each variable to the constants
+ // it must be equal to / not equal to according to the model
def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
val (trues, falses) = xs.partition(_._2)
@@ -601,20 +651,110 @@ trait MatchAnalysis extends MatchApproximation {
v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
}.mkString("\n")
- // return constructor call when the model is a true counter example
- // (the variables don't take into account type information derived from other variables,
- // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
- // since we didn't realize the tail of the outer cons was a Nil)
- def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ /**
+ * The models we get from the DPLL solver need to be mapped back to counter examples.
+ * However, there is no precalculated mapping model -> counter example. Even worse,
+ * not every valid model corresponds to a valid counter example.
+ * The reason is that restricting the valid models further would, for example, require
+ * a quadratic number of additional clauses. So, to keep the optimistic case fast
+ * (i.e., all cases are covered in the pattern match), the infeasible counter examples
+ * are filtered out later.
+ *
+ * The DPLL procedure keeps the literals that do not contribute to the solution
+ * unassigned, e.g., for `(a \/ b)`
+ * only {a = true} or {b = true} is required and the other variable can have any value.
+ *
+ * This function does a smart expansion of the model and avoids models that
+ * have conflicting mappings.
+ *
+ * For example, given the following set of symbols (taken from `t7020.scala`):
+ * "V2=2#16"
+ * "V2=6#19"
+ * "V2=5#18"
+ * "V2=4#17"
+ * "V2=7#20"
+ *
+ * One possibility would be to group the symbols by domain but
+ * this would only work for equality tests and would not be compatible
+ * with type tests.
+ * Another observation leads to a much simpler algorithm:
+ * Only one of these symbols can be set to true,
+ * since `V2` can at most be equal to one of {2,6,5,4,7}.
+ */
+ def expandModel(solution: Solution): List[Map[Var, (Seq[Const], Seq[Const])]] = {
+
+ val model = solution.model
+
// x1 = ...
// x1.hd = ...
// x1.tl = ...
// x1.hd.hd = ...
// ...
val varAssignment = modelToVarAssignment(model)
+ debug.patmat("var assignment for model " + model + ":\n" + varAssignmentString(varAssignment))
- debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+ // group symbols that assign values to the same variables (i.e., symbols are mutually exclusive)
+ // (thus the groups are sets of disjoint assignments to variables)
+ val groupedByVar: Map[Var, List[Sym]] = solution.unassigned.groupBy(_.variable)
+ val expanded = for {
+ (variable, syms) <- groupedByVar.toList
+ } yield {
+
+ val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil)
+
+ def addVarAssignment(equalTo: List[Const], notEqualTo: List[Const]) = {
+ Map(variable ->(equal ++ equalTo, notEqual ++ notEqualTo))
+ }
+
+ // this assignment is needed in case
+ // an assignment already exists for this variable
+ val allNotEqual = addVarAssignment(Nil, syms.map(_.const))
+
+ // this assignment is conflicting on purpose:
+ // a list counter example could contain wildcards: e.g. `List(_,_)`
+ val allEqual = addVarAssignment(syms.map(_.const), Nil)
+
+ if (equal.isEmpty) {
+ val oneHot = for {
+ s <- syms
+ } yield {
+ addVarAssignment(List(s.const), syms.filterNot(_ == s).map(_.const))
+ }
+ allEqual :: allNotEqual :: oneHot
+ } else {
+ allEqual :: allNotEqual :: Nil
+ }
+ }
+
+ if (expanded.isEmpty) {
+ List(varAssignment)
+ } else {
+ // we need the cartesian product here,
+ // since we want to report all missing cases
+ // (i.e., combinations)
+ val cartesianProd = expanded.reduceLeft((xs, ys) =>
+ for {map1 <- xs
+ map2 <- ys} yield {
+ map1 ++ map2
+ })
+
+ // add expanded variables
+ // note that we can just use `++`
+ // since the Maps have disjoint keySets
+ for {
+ m <- cartesianProd
+ } yield {
+ varAssignment ++ m
+ }
+ }
+ }
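+ // Illustrative sketch: if the solver left the symbols `V2=4` and `V2=5` unassigned
+ // and the model constrains V2 in no other way, the expansion above yields
+ //   - V2 equal to {4, 5}      (allEqual; intentionally conflicting, pruned later)
+ //   - V2 notEqual to {4, 5}   (allNotEqual)
+ //   - V2 = 4 (and != 5), V2 = 5 (and != 4)   (one assignment per unassigned symbol)
+ // and the cartesian product combines such choices across all affected variables.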
+
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(varAssignment: Map[Var, (Seq[Const], Seq[Const])]): Option[CounterExample] = {
// chop a path into a list of symbols
def chop(path: Tree): List[Symbol] = path match {
case Ident(_) => List(path.symbol)
@@ -663,6 +803,7 @@ trait MatchAnalysis extends MatchApproximation {
private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
+ private lazy val inSameDomain = uniqueEqualTo forall (const => variable.domainSyms.exists(_.exists(_.const.tp =:= const.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
@@ -683,13 +824,13 @@ trait MatchAnalysis extends MatchApproximation {
// NoExample if the constructor call is ill-typed
// (thus statically impossible -- can we incorporate this into the formula?)
// beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
- def toCounterExample(beBrief: Boolean = false): CounterExample =
- if (!allFieldAssignmentsLegal) NoExample
+ def toCounterExample(beBrief: Boolean = false): Option[CounterExample] =
+ if (!allFieldAssignmentsLegal) Some(NoExample)
else {
debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal)))
val res = prunedEqualTo match {
// a definite assignment to a value
- case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
+ case List(eq: ValueConst) if fields.isEmpty => Some(ValueExample(eq))
// constructor call
// or we did not gather any information about equality but we have information about the fields
@@ -702,30 +843,50 @@ trait MatchAnalysis extends MatchApproximation {
// figure out the constructor arguments from the field assignment
val argLen = (caseFieldAccs.length min ctorParams.length)
- (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ val examples = (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse Some(WildcardExample)).toList
+ sequence(examples)
}
cls match {
- case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true))
- case _ => ConstructorExample(cls, args())
+ case ConsClass =>
+ args().map {
+ case List(NoExample, l: ListExample) =>
+ // special case for neg/t7020.scala:
+ // if we find a counter example `??::*` we report `*::*` instead
+ // since the `??` originates from uniqueEqualTo containing several instances of the same type
+ List(WildcardExample, l)
+ case args => args
+ }.map(ListExample)
+ case _ if isTupleSymbol(cls) => args(brevity = true).map(TupleExample)
+ case _ if cls.isSealed && cls.isAbstractClass =>
+ // don't report sealed abstract classes, since
+ // 1) they can't be instantiated
+ // 2) we are already reporting any missing subclass (since we know the full domain)
+ // (see patmatexhaust.scala)
+ None
+ case _ => args().map(ConstructorExample(cls, _))
}
// a definite assignment to a type
- case List(eq) if fields.isEmpty => TypeExample(eq)
+ case List(eq) if fields.isEmpty => Some(TypeExample(eq))
// negative information
case Nil if nonTrivialNonEqualTo.nonEmpty =>
// negation tends to get pretty verbose
- if (beBrief) WildcardExample
+ if (beBrief) Some(WildcardExample)
else {
val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
- NegativeExample(eqTo, nonTrivialNonEqualTo)
+ Some(NegativeExample(eqTo, nonTrivialNonEqualTo))
}
+ // if uniqueEqualTo contains more than one symbol of the same domain
+ // then we can safely ignore these counter examples since we will eventually encounter
+ // both counter examples separately
+ case _ if inSameDomain => None
+
// not a valid counter-example, possibly since we have a definite type but there was a field mismatch
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
- case _ => NoExample
+ case _ => Some(NoExample)
}
debug.patmatResult("described as")(res)
}
@@ -741,12 +902,12 @@ trait MatchAnalysis extends MatchApproximation {
}
def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {
- if (!suppression.unreachable) {
+ if (!suppression.suppressUnreachable) {
unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
reportUnreachable(cases(caseIndex).last.pos)
}
}
- if (!suppression.exhaustive) {
+ if (!suppression.suppressExhaustive) {
val counterExamples = exhaustive(prevBinder, cases, pt)
if (counterExamples.nonEmpty)
reportMissingCases(prevBinder.pos, counterExamples)
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index 8ff7824159..b3aef8a20e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -46,16 +46,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
val cond = test.prop
def simplify(c: Prop): Set[Prop] = c match {
- case And(a, b) => simplify(a) ++ simplify(b)
- case Or(_, _) => Set(False) // TODO: make more precise
- case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
+ case And(ops) => ops.toSet flatMap simplify
+ case Or(ops) => Set(False) // TODO: make more precise
+ case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
case _ => Set(c)
}
val conds = simplify(cond)
if (conds(False)) false // stop when we encounter a definite "no" or a "not sure"
else {
- val nonTrivial = conds filterNot (_ == True)
+ val nonTrivial = conds - True
if (nonTrivial nonEmpty) {
tested ++= nonTrivial
@@ -442,7 +442,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
val distinctAlts = distinctBy(switchableAlts)(extractConst)
if (distinctAlts.size < switchableAlts.size) {
val duplicated = switchableAlts.groupBy(extractConst).flatMap(_._2.drop(1).take(1)) // report the first duplicated
- global.currentUnit.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}")
+ reporter.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}")
}
CaseDef(Alternative(distinctAlts), guard, body)
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 4cf8980689..6302e34ac9 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -154,7 +154,7 @@ trait MatchTranslation {
case SymbolBound(sym, expr) => bindingStep(sym, expr)
case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep()
case Alternative(alts) => alternativesStep(alts)
- case _ => context.unit.error(pos, unsupportedPatternMsg) ; noStep()
+ case _ => reporter.error(pos, unsupportedPatternMsg) ; noStep()
}
def translate(): List[TreeMaker] = nextStep() merge (_.translate())
@@ -208,7 +208,7 @@ trait MatchTranslation {
case _ => (cases, None)
}
- checkMatchVariablePatterns(nonSyntheticCases)
+ if (!settings.XnoPatmatAnalysis) checkMatchVariablePatterns(nonSyntheticCases)
// we don't transform after uncurry
// (that would require more sophistication when generating trees,
@@ -248,7 +248,10 @@ trait MatchTranslation {
if (caseDefs forall treeInfo.isCatchCase) caseDefs
else {
val swatches = { // switch-catches
- val bindersAndCases = caseDefs map { caseDef =>
+ // SI-7459 must duplicate here as we haven't committed to switch emission, and merely figuring out
+ // whether we can ends up mutating `caseDefs` down in the use of `substituteSymbols` in
+ // `TypedSubstitution#Substitution`. That is called indirectly by `emitTypeSwitch`.
+ val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
val caseScrutSym = freshSym(pos, pureType(ThrowableTpe))
@@ -377,8 +380,8 @@ trait MatchTranslation {
object ExtractorCall {
// TODO: check unargs == args
def apply(tree: Tree): ExtractorCall = tree match {
- case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(tree), unfun, args) // extractor
- case Apply(fun, args) => new ExtractorCallProd(alignPatterns(tree), fun, args) // case class
+ case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(context, tree), unfun, args) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(alignPatterns(context, tree), fun, args) // case class
}
}
@@ -518,7 +521,7 @@ trait MatchTranslation {
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors = binder.caseFieldAccessors
- if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
+ if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
}
@@ -544,10 +547,17 @@ trait MatchTranslation {
// wrong when isSeq, and resultInMonad should always be correct since it comes
// directly from the extractor's result type
val binder = freshSym(pos, pureType(resultInMonad))
+ val potentiallyMutableBinders: Set[Symbol] =
+ if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !aligner.isSeq)
+ Set.empty
+ else
+ // Ensures we capture unstable bound variables eagerly. These can arise under name based patmat or by indexing into mutable Seqs. See run t9003.scala
+ subPatBinders.toSet
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
subPatBinders,
subPatRefs(binder),
+ potentiallyMutableBinders,
aligner.isBool,
checkedLength,
patBinderOrCasted,
@@ -573,7 +583,7 @@ trait MatchTranslation {
// duplicated with the extractor Unapplied
case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
treeCopy.Apply(t, x, binderRef(i.pos) :: Nil)
- // SI-7868 Account for numeric widening, e.g. <unappplySelector>.toInt
+ // SI-7868 Account for numeric widening, e.g. <unapplySelector>.toInt
case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) =>
treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil)
case _ =>
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 5d8a9fecef..e1fe220556 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -21,9 +21,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
import global._
import definitions._
- final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
+ final case class Suppression(suppressExhaustive: Boolean, suppressUnreachable: Boolean)
object Suppression {
val NoSuppression = Suppression(false, false)
+ val FullSuppression = Suppression(true, true)
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -166,8 +167,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
val usedBinders = new mutable.HashSet[Symbol]()
// all potentially stored subpat binders
val potentiallyStoredBinders = stored.unzip._1.toSet
+ def ref(sym: Symbol) =
+ if (potentiallyStoredBinders(sym)) usedBinders += sym
// compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
- in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+ in.foreach {
+ case tt: TypeTree =>
+ tt.tpe foreach { // SI-7459 e.g. case Prod(t) => new t.u.Foo
+ case SingleType(_, sym) => ref(sym)
+ case _ =>
+ }
+ case t => ref(t.symbol)
+ }
if (usedBinders.isEmpty) in
else {
@@ -192,13 +202,14 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
+ val potentiallyMutableBinders: Set[Symbol],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
val prevBinder: Symbol,
val ignoredSubPatBinders: Set[Symbol]
) extends FunTreeMaker with PreserveSubPatBinders {
- def extraStoredBinders: Set[Symbol] = Set()
+ def extraStoredBinders: Set[Symbol] = potentiallyMutableBinders
debug.patmat(s"""
|ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
@@ -516,7 +527,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
// a foldLeft to accumulate the localSubstitution left-to-right
- // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
var accumSubst: Substitution = initial
treeMakers foreach { maker =>
@@ -541,7 +552,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (suppression, requireSwitch): (Suppression, Boolean) =
- if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false)
+ if (settings.XnoPatmatAnalysis) (Suppression.FullSuppression, false)
else scrut match {
case Typed(tree, tpt) =>
val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
@@ -550,15 +561,29 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case _ => false
}
val suppression = Suppression(suppressExhaustive, supressUnreachable)
+ val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe)
// matches with two or fewer cases need not apply for switchiness (if-then-else will do)
- val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
+ // `case 1 | 2` is considered as two cases.
+ def exceedsTwoCasesOrAlts = {
+ // avoids traversing the entire list if there are more than 3 elements
+ def lengthMax3[T](l: List[T]): Int = l match {
+ case a :: b :: c :: _ => 3
+ case cases =>
+ cases.map({
+ case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts)
+ case c => 1
+ }).sum
+ }
+ lengthMax3(casesNoSubstOnly) > 2
+ }
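+ // e.g. a lone `case 1 | 2 =>` plus a default case already counts as 3,
+ // so such a @switch-annotated match still requests switch emission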
+ val requireSwitch = hasSwitchAnnotation && exceedsTwoCasesOrAlts
(suppression, requireSwitch)
case _ =>
(Suppression.NoSuppression, false)
}
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
- if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse{
+ if (requireSwitch) reporter.warning(scrut.pos, "could not emit switch for @switch annotated match")
if (casesNoSubstOnly nonEmpty) {
// before optimizing, check casesNoSubstOnly for presence of a default case,
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
index a7d7680db1..9e9372f709 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -67,7 +67,7 @@ trait MatchWarnings {
val cdef = it.next()
// If a default case has been seen, then every succeeding case is unreachable.
if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ reporter.warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat)) // TODO: make configurable whether this is an error
// If this is a default case and more cases follow, warn about this one so
// we have a reason to mention its pattern variable name and any corresponding
// symbol in scope. Errors will follow from the remaining cases, at least
@@ -78,7 +78,7 @@ trait MatchWarnings {
case _ => ""
}
vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ reporter.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index f6c960d089..b2f2516b5b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -12,7 +12,7 @@ import scala.language.postfixOps
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.Types
+import scala.reflect.internal.{Mode, Types}
import scala.reflect.internal.util.Position
/** Translate pattern matching.
@@ -65,7 +65,7 @@ trait PatternMatching extends Transform
} catch {
case x: (Types#TypeError) =>
// TODO: this should never happen; error should've been reported during type checking
- unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
+ reporter.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
translated
}
case Try(block, catches, finalizer) =>
@@ -175,13 +175,13 @@ trait Interface extends ast.TreeDSL {
val matchOwner = typer.context.owner
def pureType(tp: Type): Type = tp
- def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
+ def reportUnreachable(pos: Position) = reporter.warning(pos, "unreachable code")
def reportMissingCases(pos: Position, counterExamples: List[String]) = {
val ceString =
if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
else "inputs: " + counterExamples.mkString(", ")
- typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ reporter.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
}
}
@@ -198,33 +198,62 @@ trait Interface extends ast.TreeDSL {
}
class Substitution(val from: List[Symbol], val to: List[Tree]) {
- import global.{Transformer, Ident, NoType}
+ import global.{Transformer, Ident, NoType, TypeTree, SingleType}
// We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
// and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
def apply(tree: Tree): Tree = {
// according to -Ystatistics 10% of translateMatch's time is spent in this method...
// since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
- if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
- else (new Transformer {
+ val toIdents = to.forall(_.isInstanceOf[Ident])
+ val containsSym = tree.exists {
+ case i@Ident(_) => from contains i.symbol
+ case tt: TypeTree => tt.tpe.exists {
+ case SingleType(_, sym) =>
+ (from contains sym) && {
+ if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree `$tt`, subst= $this")
+ true
+ }
+ case _ => false
+ }
+ case _ => false
+ }
+ val toSyms = to.map(_.symbol)
+ object substIdentsForTrees extends Transformer {
private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
if (origTp == null || origTp == NoType) to
// important: only type when actually substing and when original tree was typed
// (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
else typer.typed(to)
+ def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode)
+ lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe)
+
override def transform(tree: Tree): Tree = {
def subst(from: List[Symbol], to: List[Tree]): Tree =
if (from.isEmpty) tree
- else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
+ else if (tree.symbol == from.head) typedIfOrigTyped(typedStable(to.head).setPos(tree.pos), tree.tpe)
else subst(from.tail, to.tail)
- tree match {
+ val tree1 = tree match {
case Ident(_) => subst(from, to)
case _ => super.transform(tree)
}
+ tree1 match {
+ case _: DefTree =>
+ tree1.symbol.modifyInfo(_.substituteTypes(from, toTypes))
+ case _ =>
+ }
+ tree1.modifyType(_.substituteTypes(from, toTypes))
}
- }).transform(tree)
+ }
+ if (containsSym) {
+ if (to.forall(_.isInstanceOf[Ident]))
+ tree.duplicate.substituteSymbols(from, to.map(_.symbol)) // SI-7459 catches `case t => new t.Foo`
+ else
+ substIdentsForTrees.transform(tree)
+ }
+ else tree
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
index d10eff1d8d..2753baa51d 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -18,6 +18,7 @@ trait ScalacPatternExpanders {
import global._
import definitions._
import treeInfo._
+ import analyzer._
type PatternAligned = ScalacPatternExpander#Aligned
@@ -72,9 +73,7 @@ trait ScalacPatternExpanders {
* Unfortunately the MethodType does not carry the information of whether
* it was unapplySeq, so we have to funnel that information in separately.
*/
- def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = {
- val whole = firstParamType(method)
- val result = method.finalResultType
+ def unapplyMethodTypes(whole: Type, result: Type, isSeq: Boolean): Extractor = {
val expanded = (
if (result =:= BooleanTpe) Nil
else typeOfMemberNamedGet(result) match {
@@ -94,7 +93,7 @@ trait ScalacPatternExpanders {
def tupleExtractor(extractor: Extractor): Extractor =
extractor.copy(fixed = tupleType(extractor.fixed) :: Nil)
- private def validateAligned(tree: Tree, aligned: Aligned): Aligned = {
+ private def validateAligned(context: Context, tree: Tree, aligned: Aligned): Aligned = {
import aligned._
def owner = tree.symbol.owner
@@ -103,31 +102,33 @@ trait ScalacPatternExpanders {
def offerString = if (extractor.isErroneous) "" else s" offering $offering"
def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity
- def err(msg: String) = currentUnit.error(tree.pos, msg)
- def warn(msg: String) = currentUnit.warning(tree.pos, msg)
+ def err(msg: String) = context.error(tree.pos, msg)
+ def warn(msg: String) = context.warning(tree.pos, msg)
def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity")
if (isStar && !isSeq)
err("Star pattern must correspond with varargs or unapplySeq")
else if (elementArity < 0)
arityError("not enough")
- else if (elementArity > 0 && !extractor.hasSeq)
+ else if (elementArity > 0 && !isSeq)
arityError("too many")
+ else if (settings.warnStarsAlign && isSeq && productArity > 0 && (elementArity > 0 || !isStar))
+ warn("A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).")
aligned
}
- def apply(sel: Tree, args: List[Tree]): Aligned = {
+ def apply(context: Context, sel: Tree, args: List[Tree]): Aligned = {
val fn = sel match {
case Unapplied(fn) => fn
case _ => sel
}
val patterns = newPatterns(args)
- val isSeq = sel.symbol.name == nme.unapplySeq
val isUnapply = sel.symbol.name == nme.unapply
+
val extractor = sel.symbol.name match {
- case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false)
- case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true)
+ case nme.unapply => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = false)
+ case nme.unapplySeq => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = true)
case _ => applyMethodTypes(fn.tpe)
}
@@ -139,16 +140,18 @@ trait ScalacPatternExpanders {
def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}"
val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1
- if (requiresTupling && effectivePatternArity(args) == 1)
- currentUnit.deprecationWarning(sel.pos, s"${sel.symbol.owner} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+ if (requiresTupling && effectivePatternArity(args) == 1) {
+ val sym = sel.symbol.owner
+ currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+ }
val normalizedExtractor = if (requiresTupling) tupleExtractor(extractor) else extractor
- validateAligned(fn, Aligned(patterns, normalizedExtractor))
+ validateAligned(context, fn, Aligned(patterns, normalizedExtractor))
}
- def apply(tree: Tree): Aligned = tree match {
- case Apply(fn, args) => apply(fn, args)
- case UnApply(fn, args) => apply(fn, args)
+ def apply(context: Context, tree: Tree): Aligned = tree match {
+ case Apply(fn, args) => apply(context, fn, args)
+ case UnApply(fn, args) => apply(context, fn, args)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 1902606d86..9710c5c66b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -6,234 +6,526 @@
package scala.tools.nsc.transform.patmat
-import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Statistics
import scala.language.postfixOps
+import scala.collection.mutable
import scala.reflect.internal.util.Collections._
+import scala.reflect.internal.util.Position
+
+// a literal is a (possibly negated) variable
+class Lit(val v: Int) extends AnyVal {
+ def unary_- : Lit = Lit(-v)
+
+ def variable: Int = Math.abs(v)
+
+ def positive = v >= 0
+
+ override def toString(): String = s"Lit#$v"
+}
-// naive CNF translation and simple DPLL solver
+object Lit {
+ def apply(v: Int): Lit = new Lit(v)
+
+ implicit val LitOrdering: Ordering[Lit] = Ordering.by(_.v)
+}
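+
+// Sketch: Lit(3) is the positive occurrence of SAT variable 3 and -Lit(3) its negation;
+// e.g. (-Lit(3)).variable == 3 and (-Lit(3)).positive == false.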
+
+/** Solve the pattern matcher exhaustivity problem via DPLL.
+ */
trait Solving extends Logic {
+
import PatternMatchingStats._
+
trait CNF extends PropositionalLogic {
- import scala.collection.mutable.ArrayBuffer
- type FormulaBuilder = ArrayBuffer[Clause]
- def formulaBuilder = ArrayBuffer[Clause]()
- def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
- def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
- def toFormula(buff: FormulaBuilder): Formula = buff
- // CNF: a formula is a conjunction of clauses
- type Formula = FormulaBuilder
- def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
+ type Clause = Set[Lit]
- type Clause = collection.Set[Lit]
// a clause is a disjunction of distinct literals
- def clause(l: Lit*): Clause = (
- // neg/t7020.scala changes output 1% of the time, the non-determinism is quelled with this linked set
- mutable.LinkedHashSet(l: _*)
- )
-
- type Lit
- def Lit(sym: Sym, pos: Boolean = true): Lit
-
- def andFormula(a: Formula, b: Formula): Formula = a ++ b
- def simplifyFormula(a: Formula): Formula = a.distinct
-
- private def merge(a: Clause, b: Clause) = a ++ b
-
- // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
- // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
- def eqFreePropToSolvable(p: Prop): Formula = {
- def negationNormalFormNot(p: Prop, budget: Int): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Not(p) => negationNormalForm(p, budget - 1)
- case True => False
- case False => True
- case s: Sym => Not(s)
+ def clause(l: Lit*): Clause = l.toSet
+
+ /** Conjunctive normal form (of a Boolean formula).
+ * A formula in this form is amenable to a SAT solver
+ * (i.e., solver that decides satisfiability of a formula).
+ */
+ type Cnf = Array[Clause]
+
+ class SymbolMapping(symbols: Set[Sym]) {
+ val variableForSymbol: Map[Sym, Int] = {
+ symbols.zipWithIndex.map {
+ case (sym, i) => sym -> (i + 1)
+ }.toMap
+ }
+
+ val symForVar: Map[Int, Sym] = variableForSymbol.map(_.swap)
+
+ val relevantVars: Set[Int] = symForVar.keySet.map(math.abs)
+
+ def lit(sym: Sym): Lit = Lit(variableForSymbol(sym))
+
+ def size = symbols.size
+ }
+
+ def cnfString(f: Array[Clause]): String
+
+ final case class Solvable(cnf: Cnf, symbolMapping: SymbolMapping) {
+ def ++(other: Solvable) = {
+ require(this.symbolMapping eq other.symbolMapping)
+ Solvable(cnf ++ other.cnf, symbolMapping)
+ }
+
+ override def toString: String = {
+ "Solvable\nLiterals:\n" +
+ (for {
+ (lit, sym) <- symbolMapping.symForVar.toSeq.sortBy(_._1)
+ } yield {
+ s"$lit -> $sym"
+ }).mkString("\n") + "Cnf:\n" + cnfString(cnf)
+ }
+ }
+
+ trait CnfBuilder {
+ private[this] val buff = ArrayBuffer[Clause]()
+
+ var literalCount: Int
+
+ /**
+ * @return new Tseitin variable
+ */
+ def newLiteral(): Lit = {
+ literalCount += 1
+ Lit(literalCount)
+ }
+
+ lazy val constTrue: Lit = {
+ val constTrue = newLiteral()
+ addClauseProcessed(clause(constTrue))
+ constTrue
+ }
+
+ def constFalse: Lit = -constTrue
+
+ def isConst(l: Lit): Boolean = l == constTrue || l == constFalse
+
+ def addClauseProcessed(clause: Clause) {
+ if (clause.nonEmpty) {
+ buff += clause
+ }
+ }
+
+ def buildCnf: Array[Clause] = {
+ val cnf = buff.toArray
+ buff.clear()
+ cnf
+ }
+
+ }
+
+ /** Plaisted transformation: used for conversion of a
+ * propositional formula into conjunctive normal form (CNF)
+ * (input format for SAT solver).
+ * A simple conversion into CNF via Shannon expansion would
+ * also be possible but its worst-case complexity is exponential
+ * (in the number of variables) and thus even simple problems
+ * could become intractable.
+ * The Plaisted transformation results in an _equisatisfiable_
+ * CNF-formula (it generates auxiliary variables)
+ * but runs with linear complexity.
+ * The commonly known Tseitin transformation uses bi-implication,
+ * whereas the Plaisted transformation uses implication only, thus
+ * the resulting CNF formula has (on average) only half of the clauses
+ * of a Tseitin transformation.
+ * The Plaisted transformation uses the polarities of sub-expressions
+ * to figure out which part of the bi-implication can be omitted.
+ * However, if all sub-expressions have positive polarity
+ * (e.g., after transformation into negation normal form)
+ * then the conversion is rather simple and the pseudo-normalization
+ * via NNF increases the chances that only one side of the bi-implication
+ * is needed.
+ */
+ class TransformToCnf(symbolMapping: SymbolMapping) extends CnfBuilder {
+
+ // new literals start after formula symbols
+ var literalCount: Int = symbolMapping.size
+
+ def convertSym(sym: Sym): Lit = symbolMapping.lit(sym)
+
+ def apply(p: Prop): Solvable = {
+
+ def convert(p: Prop): Option[Lit] = {
+ p match {
+ case And(fv) =>
+ Some(and(fv.flatMap(convert)))
+ case Or(fv) =>
+ Some(or(fv.flatMap(convert)))
+ case Not(a) =>
+ convert(a).map(not)
+ case sym: Sym =>
+ Some(convertSym(sym))
+ case True =>
+ Some(constTrue)
+ case False =>
+ Some(constFalse)
+ case AtMostOne(ops) =>
+ atMostOne(ops)
+ None
+ case _: Eq =>
+ throw new MatchError(p)
+ }
}
- def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Not(negated) => negationNormalFormNot(negated, budget - 1)
- case True
- | False
- | (_ : Sym) => p
+ def and(bv: Set[Lit]): Lit = {
+ if (bv.isEmpty) {
+ // this case can actually happen because `removeVarEq` could add no constraints
+ constTrue
+ } else if (bv.size == 1) {
+ bv.head
+ } else if (bv.contains(constFalse)) {
+ constFalse
+ } else {
+ // op1 /\ op2 /\ ... /\ opx <==>
+ // (o -> op1) /\ (o -> op2) ... (o -> opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o)
+ // (!o \/ op1) /\ (!o \/ op2) ... (!o \/ opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o)
+ val new_bv = bv - constTrue // ignore `True`
+ val o = newLiteral() // auxiliary Tseitin variable
+ new_bv.map(op => addClauseProcessed(clause(op, -o)))
+ o
+ }
+ }
+
+ def or(bv: Set[Lit]): Lit = {
+ if (bv.isEmpty) {
+ constFalse
+ } else if (bv.size == 1) {
+ bv.head
+ } else if (bv.contains(constTrue)) {
+ constTrue
+ } else {
+ // op1 \/ op2 \/ ... \/ opx <==>
+ // (op1 -> o) /\ (op2 -> o) ... (opx -> o) /\ (op1 \/ op2 \/... \/ opx \/ !o)
+ // (!op1 \/ o) /\ (!op2 \/ o) ... (!opx \/ o) /\ (op1 \/ op2 \/... \/ opx \/ !o)
+ val new_bv = bv - constFalse // ignore `False`
+ val o = newLiteral() // auxiliary Tseitin variable
+ addClauseProcessed(new_bv + (-o))
+ o
+ }
}
- val TrueF = formula()
- val FalseF = formula(clause())
- def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, pos = false)))
-
- def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
- def distribute(a: Formula, b: Formula, budget: Int): Formula =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else
- (a, b) match {
- // true \/ _ = true
- // _ \/ true = true
- case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
- // lit \/ lit
- case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
- // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
- // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
- case (cs, ds) =>
- val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
- big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
+ // no need for auxiliary variable
+ def not(a: Lit): Lit = -a
+
+ /**
+ * This encoding adds 3n-4 clauses (and n-1 auxiliary variables)
+ * to encode that at most 1 symbol can be set.
+ * See also "Towards an Optimal CNF Encoding of Boolean Cardinality Constraints"
+ * http://www.carstensinz.de/papers/CP-2005.pdf
+ */
+ def atMostOne(ops: List[Sym]) {
+ (ops: @unchecked) match {
+ case hd :: Nil => convertSym(hd)
+ case x1 :: tail =>
+ // sequential counter: 3n-4 clauses
+ // pairwise encoding: n*(n-1)/2 clauses
+ // thus pays off only if n > 5
+ if (ops.lengthCompare(5) > 0) {
+
+ @inline
+ def /\(a: Lit, b: Lit) = addClauseProcessed(clause(a, b))
+
+ val (mid, xn :: Nil) = tail.splitAt(tail.size - 1)
+
+ // x1 + ... + xn <= 1 <==>
+ //
+ // (!x1 \/ s1) /\ (!xn \/ !sn-1) /\
+ //
+ // /\
+ // / \ (!xi \/ si) /\ (!si-1 \/ si) /\ (!xi \/ !si-1)
+ // 1 < i < n
+ val s1 = newLiteral()
+ /\(-convertSym(x1), s1)
+ val snMinus = mid.foldLeft(s1) {
+ case (siMinus, sym) =>
+ val xi = convertSym(sym)
+ val si = newLiteral()
+ /\(-xi, si)
+ /\(-siMinus, si)
+ /\(-xi, -siMinus)
+ si
+ }
+ /\(-convertSym(xn), -snMinus)
+ } else {
+ ops.map(convertSym).combinations(2).foreach {
+ case a :: b :: Nil =>
+ addClauseProcessed(clause(-a, -b))
+ case _ =>
+ }
+ }
+ }
+ }
+
+ // add intermediate variable since we want the formula to be SAT!
+ addClauseProcessed(convert(p).toSet)
+
+ Solvable(buildCnf, symbolMapping)
+ }
+ }
+
+ class AlreadyInCNF(symbolMapping: SymbolMapping) {
+
+ object ToLiteral {
+ def unapply(f: Prop): Option[Lit] = f match {
+ case Not(ToLiteral(lit)) => Some(-lit)
+ case sym: Sym => Some(symbolMapping.lit(sym))
+ case _ => None
+ }
+ }
+
+ object ToDisjunction {
+ def unapply(f: Prop): Option[Array[Clause]] = f match {
+ case Or(fv) =>
+ val cl = fv.foldLeft(Option(clause())) {
+ case (Some(clause), ToLiteral(lit)) =>
+ Some(clause + lit)
+ case (_, _) =>
+ None
+ }
+ cl.map(Array(_))
+ case True => Some(Array()) // empty, no clauses needed
+ case False => Some(Array(clause())) // empty clause can't be satisfied
+ case ToLiteral(lit) => Some(Array(clause(lit)))
+ case _ => None
+ }
+ }
+
+ /**
+ * Checks if propositional formula is already in CNF
+ */
+ object ToCnf {
+ def unapply(f: Prop): Option[Solvable] = f match {
+ case ToDisjunction(clauses) => Some(Solvable(clauses, symbolMapping) )
+ case And(fv) =>
+ val clauses = fv.foldLeft(Option(mutable.ArrayBuffer[Clause]())) {
+ case (Some(cnf), ToDisjunction(clauses)) =>
+ Some(cnf ++= clauses)
+ case (_, _) =>
+ None
}
+ clauses.map(c => Solvable(c.toArray, symbolMapping))
+ case _ => None
+ }
+ }
+ }
- if (budget <= 0) throw AnalysisBudget.exceeded
+ def eqFreePropToSolvable(p: Prop): Solvable = {
+ def doesFormulaExceedSize(p: Prop): Boolean = {
p match {
- case True => TrueF
- case False => FalseF
- case s: Sym => lit(s)
- case Not(s: Sym) => negLit(s)
- case And(a, b) =>
- val cnfA = conjunctiveNormalForm(a, budget - 1)
- val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
- cnfA ++ cnfB
- case Or(a, b) =>
- val cnfA = conjunctiveNormalForm(a)
- val cnfB = conjunctiveNormalForm(b)
- distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
+ case And(ops) =>
+ if (ops.size > AnalysisBudget.maxFormulaSize) {
+ true
+ } else {
+ ops.exists(doesFormulaExceedSize)
+ }
+ case Or(ops) =>
+ if (ops.size > AnalysisBudget.maxFormulaSize) {
+ true
+ } else {
+ ops.exists(doesFormulaExceedSize)
+ }
+ case Not(a) => doesFormulaExceedSize(a)
+ case _ => false
}
}
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
- val res = conjunctiveNormalForm(negationNormalForm(p))
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
+ val simplified = simplify(p)
+ if (doesFormulaExceedSize(simplified)) {
+ throw AnalysisBudget.formulaSizeExceeded
+ }
- //
- if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
+ // collect all variables since after simplification / CNF conversion
+ // they could have been removed from the formula
+ val symbolMapping = new SymbolMapping(gatherSymbols(p))
+ val cnfExtractor = new AlreadyInCNF(symbolMapping)
+ val cnfTransformer = new TransformToCnf(symbolMapping)
+
+ def cnfFor(prop: Prop): Solvable = {
+ prop match {
+ case cnfExtractor.ToCnf(solvable) =>
+ // this is needed because t6942 would generate too many clauses with Tseitin
+ // already in CNF, just add clauses
+ solvable
+ case p =>
+ cnfTransformer.apply(p)
+ }
+ }
-// debug.patmat("cnf for\n"+ p +"\nis:\n"+cnfString(res))
- res
+ simplified match {
+ case And(props) =>
+ // SI-6942:
+ // CNF(P1 /\ ... /\ PN) == CNF(P1) ++ CNF(...) ++ CNF(PN)
+ props.map(cnfFor).reduce(_ ++ _)
+ case p =>
+ cnfFor(p)
+ }
}
}
// simple solver using DPLL
trait Solver extends CNF {
- // a literal is a (possibly negated) variable
- def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
- class Lit(val sym: Sym, val pos: Boolean) {
- override def toString = if (!pos) "-"+ sym.toString else sym.toString
- override def equals(o: Any) = o match {
- case o: Lit => (o.sym eq sym) && (o.pos == pos)
- case _ => false
- }
- override def hashCode = sym.hashCode + pos.hashCode
+ import scala.collection.mutable.ArrayBuffer
- def unary_- = Lit(sym, !pos)
+ def cnfString(f: Array[Clause]): String = {
+ val lits: Array[List[String]] = f map (_.map(_.toString).toList)
+ val xss: List[List[String]] = lits toList
+ val aligned: String = alignAcrossRows(xss, "\\/", " /\\\n")
+ aligned
}
- def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
-
// adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
- val EmptyModel = collection.immutable.SortedMap.empty[Sym, Boolean]
+
+ // empty set of clauses is trivially satisfied
+ val EmptyModel = Map.empty[Sym, Boolean]
+
+ // no model: originates from the encounter of an empty clause, i.e.,
+ // happens if all variables have been assigned in a way that makes the corresponding literals false
+ // thus there is no possibility to satisfy that clause, so the whole formula is UNSAT
val NoModel: Model = null
+ // this model contains the auxiliary variables as well
+ type TseitinModel = Set[Lit]
+ val EmptyTseitinModel = Set.empty[Lit]
+ val NoTseitinModel: TseitinModel = null
+
// returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
- def findAllModelsFor(f: Formula): List[Model] = {
- val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
+ def findAllModelsFor(solvable: Solvable, pos: Position): List[Solution] = {
+ debug.patmat("find all models for\n"+ cnfString(solvable.cnf))
+
+ // we must take all vars from non simplified formula
+ // otherwise if we get `T` as formula, we don't expand the variables
+ // that are not in the formula...
+ val relevantVars: Set[Int] = solvable.symbolMapping.relevantVars
+
// debug.patmat("vars "+ vars)
// the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
- def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+ // (i.e. the blocking clause - used for ALL-SAT)
+ def negateModel(m: TseitinModel) = {
+ // filter out auxiliary Tseitin variables
+ val relevantLits = m.filter(l => relevantVars.contains(l.variable))
+ relevantLits.map(lit => -lit)
+ }
+
+ final case class TseitinSolution(model: TseitinModel, unassigned: List[Int]) {
+ def projectToSolution(symForVar: Map[Int, Sym]) = Solution(projectToModel(model, symForVar), unassigned map symForVar)
+ }
- def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
- if (recursionDepthAllowed == 0) models
- else {
- debug.patmat("find all models for\n"+ cnfString(f))
- val model = findModelFor(f)
+ def findAllModels(clauses: Array[Clause],
+ models: List[TseitinSolution],
+ recursionDepthAllowed: Int = AnalysisBudget.maxDPLLdepth): List[TseitinSolution]=
+ if (recursionDepthAllowed == 0) {
+ uncheckedWarning(pos, AnalysisBudget.recursionDepthReached)
+ models
+ } else {
+ debug.patmat("find all models for\n" + cnfString(clauses))
+ val model = findTseitinModelFor(clauses)
// if we found a solution, conjunct the formula with the model's negation and recurse
- if (model ne NoModel) {
- val unassigned = (vars -- model.keySet).toList
+ if (model ne NoTseitinModel) {
+ // note that we should not expand the auxiliary variables (from Tseitin transformation)
+ // since they are existentially quantified in the final solution
+ val unassigned: List[Int] = (relevantVars -- model.map(lit => lit.variable)).toList
debug.patmat("unassigned "+ unassigned +" in "+ model)
- def force(lit: Lit) = {
- val model = withLit(findModelFor(dropUnit(f, lit)), lit)
- if (model ne NoModel) List(model)
- else Nil
- }
- val forced = unassigned flatMap { s =>
- force(Lit(s, pos = true)) ++ force(Lit(s, pos = false))
- }
- debug.patmat("forced "+ forced)
+
+ val solution = TseitinSolution(model, unassigned)
val negated = negateModel(model)
- findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
+ findAllModels(clauses :+ negated, solution :: models, recursionDepthAllowed - 1)
}
else models
}
- findAllModels(f, Nil)
+ val tseitinSolutions = findAllModels(solvable.cnf, Nil)
+ tseitinSolutions.map(_.projectToSolution(solvable.symbolMapping.symForVar))
}
- private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit): Formula = {
+ private def withLit(res: TseitinModel, l: Lit): TseitinModel = {
+ if (res eq NoTseitinModel) NoTseitinModel else res + l
+ }
+
+ /** Drop trivially true clauses, simplify others by dropping negation of `unitLit`.
+ *
+ * Disjunctions that contain the literal we're making true in the returned model are trivially true.
+ * Clauses can be simplified by dropping the negation of the literal we're making true
+ * (since False \/ X == X)
+ */
+ private def dropUnit(clauses: Array[Clause], unitLit: Lit): Array[Clause] = {
val negated = -unitLit
- // drop entire clauses that are trivially true
- // (i.e., disjunctions that contain the literal we're making true in the returned model),
- // and simplify clauses by dropping the negation of the literal we're making true
- // (since False \/ X == X)
- val dropped = formulaBuilderSized(f.size)
- for {
- clause <- f
- if !(clause contains unitLit)
- } dropped += (clause - negated)
- dropped
+ val simplified = new ArrayBuffer[Clause](clauses.size)
+ clauses foreach {
+ case trivial if trivial contains unitLit => // drop
+ case clause => simplified += clause - negated
+ }
+ simplified.toArray
}
- def findModelFor(f: Formula): Model = {
- @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
+ def findModelFor(solvable: Solvable): Model = {
+ projectToModel(findTseitinModelFor(solvable.cnf), solvable.symbolMapping.symForVar)
+ }
+
+ def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = {
+ @inline def orElse(a: TseitinModel, b: => TseitinModel) = if (a ne NoTseitinModel) a else b
- debug.patmat("DPLL\n"+ cnfString(f))
+ debug.patmat(s"DPLL\n${cnfString(clauses)}")
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
- val satisfiableWithModel: Model =
- if (f isEmpty) EmptyModel
- else if(f exists (_.isEmpty)) NoModel
- else f.find(_.size == 1) match {
+ val satisfiableWithModel: TseitinModel =
+ if (clauses isEmpty) EmptyTseitinModel
+ else if (clauses exists (_.isEmpty)) NoTseitinModel
+ else clauses.find(_.size == 1) match {
case Some(unitClause) =>
val unitLit = unitClause.head
- // debug.patmat("unit: "+ unitLit)
- withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
+ withLit(findTseitinModelFor(dropUnit(clauses, unitLit)), unitLit)
case _ =>
// partition symbols according to whether they appear in positive and/or negative literals
- // SI-7020 Linked- for deterministic counter examples.
- val pos = new mutable.LinkedHashSet[Sym]()
- val neg = new mutable.LinkedHashSet[Sym]()
- mforeach(f)(lit => if (lit.pos) pos += lit.sym else neg += lit.sym)
+ val pos = new mutable.HashSet[Int]()
+ val neg = new mutable.HashSet[Int]()
+ mforeach(clauses)(lit => if (lit.positive) pos += lit.variable else neg += lit.variable)
// appearing in both positive and negative
- val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
+ val impures = pos intersect neg
// appearing only in either positive/negative positions
- val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures
+ val pures = (pos ++ neg) -- impures
if (pures nonEmpty) {
- val pureSym = pures.head
+ val pureVar = pures.head
// turn it back into a literal
// (since equality on literals is in terms of equality
// of the underlying symbol and its positivity, simply construct a new Lit)
- val pureLit = Lit(pureSym, pos(pureSym))
+ val pureLit = Lit(if (neg(pureVar)) -pureVar else pureVar)
// debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
- val simplified = f.filterNot(_.contains(pureLit))
- withLit(findModelFor(simplified), pureLit)
+ val simplified = clauses.filterNot(_.contains(pureLit))
+ withLit(findTseitinModelFor(simplified), pureLit)
} else {
- val split = f.head.head
+ val split = clauses.head.head
// debug.patmat("split: "+ split)
- orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
+ orElse(findTseitinModelFor(clauses :+ clause(split)), findTseitinModelFor(clauses :+ clause(-split)))
}
}
if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
satisfiableWithModel
}
+
+ private def projectToModel(model: TseitinModel, symForVar: Map[Int, Sym]): Model =
+ if (model == NoTseitinModel) NoModel
+ else if (model == EmptyTseitinModel) EmptyModel
+ else {
+ val mappedModels = model.toList collect {
+ case lit if symForVar isDefinedAt lit.variable => (symForVar(lit.variable), lit.positive)
+ }
+ if (mappedModels.isEmpty) {
+ // could get an empty model if the formula was a constant like `True`
+ EmptyModel
+ } else {
+ mappedModels.toMap
+ }
+ }
}
}
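
As a standalone illustration of the new clause representation, the sketch below (assumptions: plain Ints stand in for `Lit`, `Set[Int]` for `Clause`; it is not the compiler's code) shows the signed-literal encoding and the simplification performed by `dropUnit` in the DPLL loop above.

    object DpllSketch extends App {
      type Clause = Set[Int]          // a clause is a disjunction of literals
      def neg(l: Int): Int = -l       // negation flips the sign; math.abs(l) is the variable

      // Drop clauses satisfied by `unitLit`, and remove its negation from the
      // remaining clauses (since False \/ X == X) -- the dropUnit simplification.
      def dropUnit(clauses: Array[Clause], unitLit: Int): Array[Clause] =
        clauses.collect { case c if !c.contains(unitLit) => c - neg(unitLit) }

      // (x1 \/ x2) /\ (!x1 \/ x3) /\ (x1): the unit clause forces x1 = true.
      val cnf: Array[Clause] = Array(Set(1, 2), Set(-1, 3), Set(1))
      println(dropUnit(cnf, 1).toList) // List(Set(3)) -- only (x3) is left to satisfy
    }
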
diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
index 1e544e54f6..2f4d228347 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
@@ -77,12 +77,13 @@ trait Adaptations {
val msg = "Adaptation of argument list by inserting () has been deprecated: " + (
if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
else "this is unlikely to be what you want.")
- context.unit.deprecationWarning(t.pos, adaptWarningMessage(msg))
+ context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg))
}
} else if (settings.warnAdaptedArgs)
context.warning(t.pos, adaptWarningMessage(s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want."))
- !settings.noAdaptedArgs || !(args.isEmpty && settings.future)
+ // return `true` if the adaptation should be kept
+ !(settings.noAdaptedArgs || (args.isEmpty && settings.future))
}
}
}
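
The changed return value above is a plain boolean fix; the sketch below (hypothetical booleans, not the actual compiler settings) contrasts the two expressions: the old OR-of-negations only rejected the adaptation when both conditions held, while the new form rejects it as soon as either one does.

    object AdaptationReturnSketch extends App {
      def old(noAdaptedArgs: Boolean, emptyArgsUnderFuture: Boolean): Boolean =
        !noAdaptedArgs || !emptyArgsUnderFuture              // old expression
      def fixed(noAdaptedArgs: Boolean, emptyArgsUnderFuture: Boolean): Boolean =
        !(noAdaptedArgs || emptyArgsUnderFuture)             // new expression

      // -Yno-adapted-args is set, but the call is not an empty-args call under -Xfuture:
      println(old(true, false))   // true  -- the adaptation was still kept
      println(fixed(true, false)) // false -- the adaptation is now rejected
    }
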
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index fa6e5399eb..0574869714 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -57,7 +57,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* `analyzer.transformed` hash map, indexed by the definition's rhs tree.
*
* NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
- * method is called from the type completer of a recursive method, type checking the mehtod
+ * method is called from the type completer of a recursive method, type checking the method
* rhs will invoke the same completer again. It might be possible to avoid this situation by
* assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
* will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
@@ -190,6 +190,16 @@ trait AnalyzerPlugins { self: Analyzer =>
def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = None
/**
+ * Figures out whether the given macro definition is blackbox or whitebox.
+ *
+ * Default implementation provided in `self.standardIsBlackbox` loads the macro impl binding
+ * and fetches boxity from the "isBlackbox" field of the macro signature.
+ *
+ * $nonCumulativeReturnValueDoc.
+ */
+ def pluginsIsBlackbox(macroDef: Symbol): Option[Boolean] = None
+
+ /**
* Expands an application of a def macro (i.e. of a symbol that has the MACRO flag set),
* possibly using the current typer mode and the provided prototype.
*
@@ -375,6 +385,14 @@ trait AnalyzerPlugins { self: Analyzer =>
def custom(plugin: MacroPlugin) = plugin.pluginsTypedMacroBody(typer, ddef)
})
+ /** @see MacroPlugin.pluginsIsBlackbox */
+ def pluginsIsBlackbox(macroDef: Symbol): Boolean = invoke(new NonCumulativeOp[Boolean] {
+ def position = macroDef.pos
+ def description = "compute boxity for this macro definition"
+ def default = standardIsBlackbox(macroDef)
+ def custom(plugin: MacroPlugin) = plugin.pluginsIsBlackbox(macroDef)
+ })
+
/** @see MacroPlugin.pluginsMacroExpand */
def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = invoke(new NonCumulativeOp[Tree] {
def position = expandee.pos
@@ -429,6 +447,6 @@ trait AnalyzerPlugins { self: Analyzer =>
// performance opt
if (macroPlugins.isEmpty) stats
else macroPlugins.foldLeft(stats)((current, plugin) =>
- if (!plugin.isActive()) current else plugin.pluginsEnterStats(typer, stats))
+ if (!plugin.isActive()) current else plugin.pluginsEnterStats(typer, current))
}
}
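
The last hunk fixes a fold that passed the original `stats` to every plugin instead of the accumulated result. A small sketch (hypothetical `Plugin` trait, not the compiler's `MacroPlugin`) of why that matters:

    object EnterStatsFoldSketch extends App {
      trait Plugin { def enterStats(stats: List[String]): List[String] }
      val plugins = List[Plugin](
        new Plugin { def enterStats(s: List[String]) = s :+ "fromA" },
        new Plugin { def enterStats(s: List[String]) = s :+ "fromB" })

      val stats = List("orig")
      val buggy = plugins.foldLeft(stats)((current, p) => p.enterStats(stats)) // ignores `current`
      val fixed = plugins.foldLeft(stats)((current, p) => p.enterStats(current))
      println(buggy) // List(orig, fromB)        -- plugin A's contribution was dropped
      println(fixed) // List(orig, fromA, fromB) -- contributions accumulate
    }
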
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 13884404b3..fc632e0d0d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -11,12 +11,28 @@ import scala.language.postfixOps
/** On pattern matcher checkability:
*
+ * The spec says that case _: List[Int] should always issue
+ * an unchecked warning:
+ *
+ * > Types which are not of one of the forms described above are
+ * > also accepted as type patterns. However, such type patterns
+ * > will be translated to their erasure (§3.7). The Scala compiler
+ * > will issue an “unchecked” warning for these patterns to flag
+ * > the possible loss of type-safety.
+ *
+ * But the implementation goes a little further to omit warnings
+ * based on the static type of the scrutinee. As a trivial example:
+ *
+ * def foo(s: Seq[Int]) = s match { case _: List[Int] => }
+ *
+ * need not issue this warning.
+ *
* Consider a pattern match of this form: (x: X) match { case _: P => }
*
* There are four possibilities to consider:
* [P1] X will always conform to P
* [P2] x will never conform to P
- * [P3] X <: P if some runtime test is true
+ * [P3] X will conform to P if some runtime test is true
* [P4] X cannot be checked against P
*
* The first two cases correspond to those when there is enough
@@ -28,6 +44,11 @@ import scala.language.postfixOps
* which is essentially the intersection of X and |P|, where |P| is
* the erasure of P. If XR <: P, then no warning is emitted.
*
+ * We evaluate "X will conform to P" by checking `X <: P_wild`, where
+ * P_wild is the result of substituting wildcard types in place of
+ * pattern type variables. This is intentionally stricter than
+ * (X matchesPattern P), see SI-8597 for motivating test cases.
+ *
* Examples of how this info is put to use:
* sealed trait A[T] ; class B[T] extends A[T]
* def f(x: B[Int]) = x match { case _: A[Int] if true => }
@@ -100,7 +121,7 @@ trait Checkable {
private def typeArgsInTopLevelType(tp: Type): List[Type] = {
val tps = tp match {
case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType
- case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg)
+ case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg)
case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args
case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying)
case _ => Nil
@@ -108,14 +129,31 @@ trait Checkable {
tps filterNot isUnwarnableTypeArg
}
+ private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = {
+ def typeVarToWildcard(tp: Type) = {
+ // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala
+ if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp
+ }
+ val pattTpWild = pattTp.map(typeVarToWildcard)
+ scrut <:< pattTpWild
+ }
+
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
+ def PErased = {
+ P match {
+ case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P)
+ case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*)
+ }
+ }
+ def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym)
+
+
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
- def P1 = X matchesPattern P
+ def P1 = scrutConformsToPatternType(X, P)
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
- def P3 = isNonRefinementClassType(P) && (XR matchesPattern P)
+ def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P)
def P4 = !(P1 || P2 || P3)
def summaryString = f"""
@@ -275,7 +313,7 @@ trait Checkable {
;
// Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
case RefinedType(_, decls) if !decls.isEmpty =>
- getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+ reporter.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
case RefinedType(parents, _) =>
parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy))
case _ =>
@@ -285,14 +323,14 @@ trait Checkable {
if (checker.neverMatches) {
val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)"
- getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum")
+ reporter.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum")
}
else if (checker.isUncheckable) {
val msg = (
if (checker.uncheckableType =:= P) s"abstract type $where$PString"
else s"${checker.uncheckableMessage} in type $where$PString"
)
- getContext.unit.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure")
+ reporter.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure")
}
}
}
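
Hypothetical user code (not part of this patch) illustrating the checkability cases the comments above discuss: matching a `Seq[Int]` scrutinee against `List[Int]` needs no unchecked warning because the type argument is statically known to conform, whereas matching `Any` against `List[Int]` is unchecked since `Int` is erased.

    object CheckabilitySketch extends App {
      def quiet(s: Seq[Int]) = s match {
        case _: List[Int] => "list"
        case _            => "other"
      }
      // Compiles with an unchecked warning along the lines of: "non-variable type
      // argument Int in type pattern List[Int] is unchecked since it is eliminated
      // by erasure".
      def noisy(a: Any) = a match {
        case _: List[Int] => "list"
        case _            => "other"
      }

      println(quiet(List(1, 2)))          // list
      println(noisy(Vector("not ints")))  // other (only the List erasure is tested at runtime)
    }
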
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 9715fdaf00..c80aaea160 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -27,6 +27,16 @@ trait ContextErrors {
override def toString() = "[Type error at:" + errPos + "] " + errMsg
}
+ abstract class AbsAmbiguousTypeError extends AbsTypeError
+
+ case class AmbiguousTypeError(errPos: Position, errMsg: String)
+ extends AbsAmbiguousTypeError
+
+ case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String)
+ extends AbsAmbiguousTypeError {
+ def errPos = underlyingTree.pos
+ }
+
sealed abstract class TreeTypeError extends AbsTypeError {
def underlyingTree: Tree
def errPos = underlyingTree.pos
@@ -35,12 +45,17 @@ trait ContextErrors {
case class NormalTypeError(underlyingTree: Tree, errMsg: String)
extends TreeTypeError
+ /**
+ * Marks a TypeError that was constructed from a CyclicReference (under silent).
+ * This is used for named arguments, where we need to know if an assignment expression
+ * failed with a cyclic reference or some other type error.
+ */
+ class NormalTypeErrorFromCyclicReference(underlyingTree: Tree, errMsg: String)
+ extends NormalTypeError(underlyingTree, errMsg)
+
case class AccessTypeError(underlyingTree: Tree, errMsg: String)
extends TreeTypeError
- case class AmbiguousTypeError(errPos: Position, errMsg: String)
- extends AbsTypeError
-
case class SymbolTypeError(underlyingSym: Symbol, errMsg: String)
extends AbsTypeError {
@@ -66,7 +81,7 @@ trait ContextErrors {
// 2) provide the type of the implicit parameter for which we got diverging expansion
// (pt at the point of divergence gives less information to the user)
// Note: it is safe to delay error message generation in this case
- // becasue we don't modify implicits' infos.
+ // because we don't modify implicits' infos.
case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol)
extends TreeTypeError {
def errMsg: String = errMsgForPt(pt0)
@@ -75,8 +90,6 @@ trait ContextErrors {
s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}"
}
- case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String)
- extends TreeTypeError
case class PosAndMsgTypeError(errPos: Position, errMsg: String)
extends AbsTypeError
@@ -90,10 +103,6 @@ trait ContextErrors {
issueTypeError(SymbolTypeError(sym, msg))
}
- def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
- context.issueAmbiguousError(pre, sym1, sym2, err)
- }
-
def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req)
@@ -123,6 +132,36 @@ trait ContextErrors {
import ErrorUtils._
+ private def MacroIncompatibleEngineError(friendlyMessage: String, internalMessage: String) = {
+ def debugDiagnostic = s"(internal diagnostic: $internalMessage)"
+ val message = if (macroDebugLite || macroDebugVerbose) s"$friendlyMessage $debugDiagnostic" else friendlyMessage
+ // TODO: clean this up! (This is a more explicit version of what the code used to do, to reveal the issue.)
+ throw new TypeError(analyzer.lastTreeToTyper.pos, message)
+ }
+
+ def MacroCantExpand210xMacrosError(internalMessage: String) =
+ MacroIncompatibleEngineError("can't expand macros compiled by previous versions of Scala", internalMessage)
+
+ def MacroCantExpandIncompatibleMacrosError(internalMessage: String) =
+ MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage)
+
+ def NoImplicitFoundError(tree: Tree, param: Symbol)(implicit context: Context): Unit = {
+ def errMsg = {
+ val paramName = param.name
+ val paramTp = param.tpe
+ def evOrParam = (
+ if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX)
+ "evidence parameter of type"
+ else
+ s"parameter $paramName:")
+ paramTp.typeSymbolDirect match {
+ case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
+ case _ => s"could not find implicit value for $evOrParam $paramTp"
+ }
+ }
+ issueNormalTypeError(tree, errMsg)
+ }
+
trait TyperContextErrors {
self: Typer =>
@@ -141,24 +180,6 @@ trait ContextErrors {
setError(tree)
}
- def NoImplicitFoundError(tree: Tree, param: Symbol) = {
- def errMsg = {
- val paramName = param.name
- val paramTp = param.tpe
- def evOrParam = (
- if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX)
- "evidence parameter of type"
- else
- s"parameter $paramName:"
- )
- paramTp.typeSymbolDirect match {
- case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
- case _ => s"could not find implicit value for $evOrParam $paramTp"
- }
- }
- issueNormalTypeError(tree, errMsg)
- }
-
def AdaptTypeError(tree: Tree, found: Type, req: Type) = {
// SI-3971 unwrapping to the outermost Apply helps prevent confusion with the
// error message point.
@@ -733,17 +754,6 @@ trait ContextErrors {
NormalTypeError(expandee, "too many argument lists for " + fun)
}
- private def MacroIncompatibleEngineError(friendlyMessage: String, internalMessage: String) = {
- def debugDiagnostic = s"(internal diagnostic: $internalMessage)"
- val message = if (macroDebugLite || macroDebugVerbose) s"$friendlyMessage $debugDiagnostic" else friendlyMessage
- issueNormalTypeError(lastTreeToTyper, message)
- }
-
- def MacroCantExpand210xMacrosError(internalMessage: String) =
- MacroIncompatibleEngineError("can't expand macros compiled by previous versions of Scala", internalMessage)
-
- def MacroCantExpandIncompatibleMacrosError(internalMessage: String) =
- MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage)
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
@@ -883,20 +893,31 @@ trait ContextErrors {
val WrongNumber, NoParams, ArgsDoNotConform = Value
}
- private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) =
- if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) {
- val methodName = nme.defaultGetterToMethod(sym1.name)
- (sym1.enclClass.pos,
- "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName +
- " define default arguments")
- } else {
- (pos,
- ("ambiguous reference to overloaded definition,\n" +
- "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) +
- "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) +
- "\nmatch " + rest)
- )
- }
+ private def issueAmbiguousTypeErrorUnlessErroneous(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String): Unit = {
+ // To avoid stack overflows (SI-8890), we MUST (at least) report when either `validTargets` OR `ambiguousBuffered`
+ // More details:
+ // If `!context.ambiguousErrors`, `reporter.issueAmbiguousError` (which `context.issueAmbiguousError` forwards to)
+ // buffers ambiguous errors. In this case, to avoid looping, we must issue even if `!validTargets`. (TODO: why?)
+ // When not buffering (and thus reporting to the user), we shouldn't issue unless `validTargets`,
+ // otherwise we report two different errors that trace back to the same root cause,
+ // and unless `validTargets`, we don't know for sure the ambiguity is real anyway.
+ val validTargets = !(pre.isErroneous || sym1.isErroneous || sym2.isErroneous)
+ val ambiguousBuffered = !context.ambiguousErrors
+ if (validTargets || ambiguousBuffered)
+ context.issueAmbiguousError(
+ if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) {
+ val methodName = nme.defaultGetterToMethod(sym1.name)
+ AmbiguousTypeError(sym1.enclClass.pos,
+ s"in ${sym1.enclClass}, multiple overloaded alternatives of $methodName define default arguments")
+
+ } else {
+ AmbiguousTypeError(pos,
+ "ambiguous reference to overloaded definition,\n" +
+ s"both ${sym1.fullLocationString} of type ${pre.memberType(sym1)}\n" +
+ s"and ${sym2.fullLocationString} of type ${pre.memberType(sym2)}\n" +
+ s"match $rest")
+ })
+ }
def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError =
AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation)
@@ -952,8 +973,7 @@ trait ContextErrors {
val msg0 =
"argument types " + argtpes.mkString("(", ",", ")") +
(if (pt == WildcardType) "" else " and expected result type " + pt)
- val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
+ issueAmbiguousTypeErrorUnlessErroneous(tree.pos, pre, best, firstCompeting, msg0)
setErrorOnLastTry(lastTry, tree)
} else setError(tree) // do not even try further attempts because they should all fail
// even if this is not the last attempt (because of the SO's possibility on the horizon)
@@ -966,8 +986,7 @@ trait ContextErrors {
}
def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
- val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
+ issueAmbiguousTypeErrorUnlessErroneous(tree.pos, pre, best, firstCompeting, "expected type " + pt)
setErrorOnLastTry(lastTry, tree)
}
@@ -1076,8 +1095,9 @@ trait ContextErrors {
// hence we (together with reportTypeError in TypeDiagnostics) make sure that this CyclicReference
// evades all the handlers on its way and successfully reaches `isCyclicOrErroneous` in Implicits
throw ex
- case CyclicReference(sym, info: TypeCompleter) =>
- issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+ case c @ CyclicReference(sym, info: TypeCompleter) =>
+ val error = new NormalTypeErrorFromCyclicReference(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage)
+ issueTypeError(error)
case _ =>
contextNamerErrorGen.issue(TypeErrorWithUnderlyingTree(tree, ex))
}
@@ -1264,8 +1284,8 @@ trait ContextErrors {
}
def WarnAfterNonSilentRecursiveInference(param: Symbol, arg: Tree)(implicit context: Context) = {
- val note = "type-checking the invocation of "+ param.owner +" checks if the named argument expression '"+ param.name + " = ...' is a valid assignment\n"+
- "in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for "+ param.name +"."
+ val note = "failed to determine if '"+ param.name + " = ...' is a named argument or an assignment expression.\n"+
+ "an explicit type is required for the definition mentioned in the error message above."
context.warning(arg.pos, note)
}
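
A minimal sketch (hypothetical declarations, not from this patch) of the situation the default-arguments branch of the new message describes; scalac rejects such overloads with essentially the wording built above:

    object AmbiguousDefaultsSketch {
      def m(x: Int = 1): Int = x
      // error: in object AmbiguousDefaultsSketch, multiple overloaded alternatives
      // of method m define default arguments
      def m(s: String = "a"): String = s
    }
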
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 8e1ceffecd..542f58795a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -9,6 +9,7 @@ package typechecker
import scala.collection.{ immutable, mutable }
import scala.annotation.tailrec
import scala.reflect.internal.util.shortClassOfInstance
+import scala.tools.nsc.reporters.Reporter
/**
* @author Martin Odersky
@@ -66,7 +67,7 @@ trait Contexts { self: Analyzer =>
def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
- unit.warning(imp posOf sel, "Unused import")
+ reporter.warning(imp posOf sel, "Unused import")
}
}
allUsedSelectors --= imps
@@ -98,12 +99,12 @@ trait Contexts { self: Analyzer =>
}
- def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, erasedTypes: Boolean = false): Context = {
+ def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, throwing: Boolean = false, checking: Boolean = false): Context = {
val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym)))
// there must be a scala.xml package when xml literals were parsed in this unit
if (unit.hasXml && ScalaXmlPackage == NoSymbol)
- unit.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see http://docs.scala-lang.org/overviews/core/scala-2.11.html#scala-xml.")
+ reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala-xml for details.")
// scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
// We detect `scala-xml` by looking for `scala.xml.TopScope` and
@@ -113,18 +114,21 @@ trait Contexts { self: Analyzer =>
else rootImportsContext.make(gen.mkImport(ScalaXmlPackage, nme.TopScope, nme.dollarScope))
val c = contextWithXML.make(tree, unit = unit)
- if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
- c(EnrichmentEnabled | ImplicitsEnabled) = !erasedTypes
+
+ c.initRootContext(throwing, checking)
c
}
+ def rootContextPostTyper(unit: CompilationUnit, tree: Tree = EmptyTree): Context =
+ rootContext(unit, tree, throwing = true)
+
def resetContexts() {
startContext.enclosingContextChain foreach { context =>
context.tree match {
case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
case _ =>
}
- context.reportBuffer.clearAll()
+ context.reporter.clearAll()
}
}
@@ -141,7 +145,7 @@ trait Contexts { self: Analyzer =>
* - A variety of bits that track the current error reporting policy (more on this later);
* whether or not implicits/macros are enabled, whether we are in a self or super call or
* in a constructor suffix. These are represented as bits in the mask `contextMode`.
- * - Some odds and ends: undetermined type pararameters of the current line of type inference;
+ * - Some odds and ends: undetermined type parameters of the current line of type inference;
* contextual augmentation for error messages, tracking of the nesting depth.
*
* And behaviour:
@@ -150,20 +154,20 @@ trait Contexts { self: Analyzer =>
* to buffer these for use in 'silent' type checking, when some recovery might be possible.
* - `Context` is something of a Zipper for the tree we are typechecking: its `enclosingContextChain`
* is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`)
- * and to collect in-scope implicit defintions (`implicitss`)
+ * and to collect in-scope implicit definitions (`implicitss`)
* Supporting these are `imports`, which represents all `Import` trees in the enclosing context chain.
- * - In a similar vein, we can assess accessiblity (`isAccessible`.)
+ * - In a similar vein, we can assess accessibility (`isAccessible`.)
*
* More on error buffering:
* When are type errors recoverable? In quite a few places, it turns out. Some examples:
* trying to type an application with/without the expected type, or with/without implicit views
* enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`.
*
- * Intially, starting from the `typer` phase, the contexts either buffer or report errors;
+ * Initially, starting from the `typer` phase, the contexts either buffer or report errors;
* afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
* fine grained control is needed based on the kind of error; ambiguity errors are often
- * suppressed during exploraratory typing, such as determining whether `a == b` in an argument
- * position is an assignment or a named argument, when `Infererencer#isApplicableSafe` type checks
+ * suppressed during exploratory typing, such as determining whether `a == b` in an argument
+ * position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
* applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to
* a function type with/without implicit views.
*
@@ -178,7 +182,8 @@ trait Contexts { self: Analyzer =>
* @param _outer The next outer context.
*/
class Context private[typechecker](val tree: Tree, val owner: Symbol, val scope: Scope,
- val unit: CompilationUnit, _outer: Context) {
+ val unit: CompilationUnit, _outer: Context,
+ private[this] var _reporter: ContextReporter = new ThrowingReporter) {
private def outerIsNoContext = _outer eq null
final def outer: Context = if (outerIsNoContext) NoContext else _outer
@@ -254,8 +259,6 @@ trait Contexts { self: Analyzer =>
def macrosEnabled = this(MacrosEnabled)
def enrichmentEnabled_=(value: Boolean) = this(EnrichmentEnabled) = value
def enrichmentEnabled = this(EnrichmentEnabled)
- def checking_=(value: Boolean) = this(Checking) = value
- def checking = this(Checking)
def retyping_=(value: Boolean) = this(ReTyping) = value
def retyping = this(ReTyping)
def inSecondTry = this(SecondTry)
@@ -265,8 +268,9 @@ trait Contexts { self: Analyzer =>
def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode
- /** These messages are printed when issuing an error */
- var diagnostic: List[String] = Nil
+ /** To enrich error messages involving default arguments.
+ When extending the notion, group diagnostics in an object. */
+ var diagUsedDefaults: Boolean = false
/** Saved type bounds for type parameters which are narrowed in a GADT. */
var savedTypeBounds: List[(Symbol, Type)] = List()
@@ -310,7 +314,7 @@ trait Contexts { self: Analyzer =>
*/
def savingUndeterminedTypeParams[A](reportAmbiguous: Boolean = ambiguousErrors)(body: => A): A = {
withMode() {
- this(AmbiguousErrors) = reportAmbiguous
+ setAmbiguousErrors(reportAmbiguous)
val saved = extractUndetparams()
try body
finally undetparams = saved
@@ -321,54 +325,59 @@ trait Contexts { self: Analyzer =>
// Error reporting policies and buffer.
//
- private var _reportBuffer: ReportBuffer = new ReportBuffer
- /** A buffer for errors and warnings, used with `this.bufferErrors == true` */
- def reportBuffer = _reportBuffer
- /** Discard the current report buffer, and replace with an empty one */
- def useFreshReportBuffer() = _reportBuffer = new ReportBuffer
- /** Discard the current report buffer, and replace with `other` */
- def restoreReportBuffer(other: ReportBuffer) = _reportBuffer = other
-
- /** The first error, if any, in the report buffer */
- def firstError: Option[AbsTypeError] = reportBuffer.firstError
- def errors: Seq[AbsTypeError] = reportBuffer.errors
- /** Does the report buffer contain any errors? */
- def hasErrors = reportBuffer.hasErrors
-
- def reportErrors = this(ReportErrors)
- def bufferErrors = this(BufferErrors)
+ // the reporter for this context
+ def reporter: ContextReporter = _reporter
+
+ // if set, errors will not be reported/thrown
+ def bufferErrors = reporter.isBuffering
+ def reportErrors = !(bufferErrors || reporter.isThrowing)
+
+ // whether to *report* (which is separate from buffering/throwing) ambiguity errors
def ambiguousErrors = this(AmbiguousErrors)
- def throwErrors = contextMode.inNone(ReportErrors | BufferErrors)
-
- def setReportErrors(): Unit = set(enable = ReportErrors | AmbiguousErrors, disable = BufferErrors)
- def setBufferErrors(): Unit = set(enable = BufferErrors, disable = ReportErrors | AmbiguousErrors)
- def setThrowErrors(): Unit = this(ReportErrors | AmbiguousErrors | BufferErrors) = false
- def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report
-
- /** Append the given errors to the report buffer */
- def updateBuffer(errors: Traversable[AbsTypeError]) = reportBuffer ++= errors
- /** Clear all errors from the report buffer */
- def flushBuffer() { reportBuffer.clearAllErrors() }
- /** Return and clear all errors from the report buffer */
- def flushAndReturnBuffer(): immutable.Seq[AbsTypeError] = {
- val current = reportBuffer.errors
- reportBuffer.clearAllErrors()
- current
- }
- /** Issue and clear all warnings from the report buffer */
- def flushAndIssueWarnings() {
- reportBuffer.warnings foreach {
- case (pos, msg) => unit.warning(pos, msg)
+ private def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report
+
+ /**
+ * Try inference twice: once without views and once with views,
+ * unless views are already disabled.
+ */
+ abstract class TryTwice {
+ def tryOnce(isLastTry: Boolean): Unit
+
+ final def apply(): Unit = {
+ val doLastTry =
+ // do first try if implicits are enabled
+ if (implicitsEnabled) {
+ // We create a new BufferingReporter to
+ // distinguish errors that occurred before entering tryTwice
+ // and our first attempt in 'withImplicitsDisabled'. If the
+ // first attempt fails, we try with implicits on
+ // and the original reporter.
+ // immediate reporting of ambiguous errors is suppressed, so that they are buffered
+ inSilentMode {
+ try {
+ set(disable = ImplicitsEnabled | EnrichmentEnabled) // restored by inSilentMode
+ tryOnce(false)
+ reporter.hasErrors
+ } catch {
+ case ex: CyclicReference => throw ex
+ case ex: TypeError => true // recoverable cyclic references?
+ }
+ }
+ } else true
+
+ // do last try if try with implicits enabled failed
+ // (or if it was not attempted because they were disabled)
+ if (doLastTry)
+ tryOnce(true)
}
- reportBuffer.clearAllWarnings()
}
//
// Temporary mode adjustment
//
- @inline def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = {
+ @inline final def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = {
val saved = contextMode
set(enabled, disabled)
try op
@@ -402,12 +411,18 @@ trait Contexts { self: Analyzer =>
// See comment on FormerNonStickyModes.
@inline final def withOnlyStickyModes[T](op: => T): T = withMode(disabled = FormerNonStickyModes)(op)
- /** @return true if the `expr` evaluates to true within a silent Context that incurs no errors */
+ // inliner note: this has to be a simple method for inlining to work -- moved the `&& !reporter.hasErrors` out
@inline final def inSilentMode(expr: => Boolean): Boolean = {
- withMode() { // withMode with no arguments to restore the mode mutated by `setBufferErrors`.
- setBufferErrors()
- try expr && !hasErrors
- finally reportBuffer.clearAll()
+ val savedContextMode = contextMode
+ val savedReporter = reporter
+
+ setAmbiguousErrors(false)
+ _reporter = new BufferingReporter
+
+ try expr
+ finally {
+ contextMode = savedContextMode
+ _reporter = savedReporter
}
}
@@ -423,7 +438,8 @@ trait Contexts { self: Analyzer =>
* `Context#imports`.
*/
def make(tree: Tree = tree, owner: Symbol = owner,
- scope: Scope = scope, unit: CompilationUnit = unit): Context = {
+ scope: Scope = scope, unit: CompilationUnit = unit,
+ reporter: ContextReporter = this.reporter): Context = {
val isTemplateOrPackage = tree match {
case _: Template | _: PackageDef => true
case _ => false
@@ -446,16 +462,15 @@ trait Contexts { self: Analyzer =>
// The blank canvas
val c = if (isImport)
- new Context(tree, owner, scope, unit, this) with ImportContext
+ new Context(tree, owner, scope, unit, this, reporter) with ImportContext
else
- new Context(tree, owner, scope, unit, this)
+ new Context(tree, owner, scope, unit, this, reporter)
// Fields that are directly propagated
c.variance = variance
- c.diagnostic = diagnostic
+ c.diagUsedDefaults = diagUsedDefaults
c.openImplicits = openImplicits
c.contextMode = contextMode // note: ConstructorSuffix, a bit within `mode`, is conditionally overwritten below.
- c._reportBuffer = reportBuffer
// Fields that may take on a different value in the child
c.prefix = prefixInChild
@@ -465,27 +480,46 @@ trait Contexts { self: Analyzer =>
// SI-8245 `isLazy` need to skip lazy getters to ensure `return` binds to the right place
c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod
+ if (tree != outer.tree)
+ c(TypeConstructorAllowed) = false
+
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
}
+ /** Use reporter (possibly buffered) for errors/warnings and enable implicit conversion **/
+ def initRootContext(throwing: Boolean = false, checking: Boolean = false): Unit = {
+ _reporter =
+ if (checking) new CheckingReporter
+ else if (throwing) new ThrowingReporter
+ else new ImmediateReporter
+
+ setAmbiguousErrors(!throwing)
+ this(EnrichmentEnabled | ImplicitsEnabled) = !throwing
+ }
+
def make(tree: Tree, owner: Symbol, scope: Scope): Context =
// TODO SI-7345 Moving this optimization into the main overload of `make` causes all tests to fail.
- // even if it is extened to check that `unit == this.unit`. Why is this?
+ // even if it is extended to check that `unit == this.unit`. Why is this?
if (tree == this.tree && owner == this.owner && scope == this.scope) this
else make(tree, owner, scope, unit)
/** Make a child context that represents a new nested scope */
- def makeNewScope(tree: Tree, owner: Symbol): Context =
- make(tree, owner, newNestedScope(scope))
+ def makeNewScope(tree: Tree, owner: Symbol, reporter: ContextReporter = this.reporter): Context =
+ make(tree, owner, newNestedScope(scope), reporter = reporter)
/** Make a child context that buffers errors and warnings into a fresh report buffer. */
def makeSilent(reportAmbiguousErrors: Boolean = ambiguousErrors, newtree: Tree = tree): Context = {
- val c = make(newtree)
- c.setBufferErrors()
+ // A fresh buffer so as not to leak errors/warnings into `this`.
+ val c = make(newtree, reporter = new BufferingReporter)
c.setAmbiguousErrors(reportAmbiguousErrors)
- c._reportBuffer = new ReportBuffer // A fresh buffer so as not to leak errors/warnings into `this`.
+ c
+ }
+
+ def makeNonSilent(newtree: Tree): Context = {
+ val c = make(newtree, reporter = reporter.makeImmediate)
+ c.setAmbiguousErrors(true)
c
}
@@ -508,7 +542,9 @@ trait Contexts { self: Analyzer =>
*/
def makeConstructorContext = {
val baseContext = enclClass.outer.nextEnclosing(!_.tree.isInstanceOf[Template])
- val argContext = baseContext.makeNewScope(tree, owner)
+ // must propagate reporter!
+ // (caught by neg/t3649 when refactoring reporting to be specified only by this.reporter and not also by this.contextMode)
+ val argContext = baseContext.makeNewScope(tree, owner, reporter = this.reporter)
argContext.contextMode = contextMode
argContext.inSelfSuperCall = true
def enterElems(c: Context) {
@@ -533,65 +569,25 @@ trait Contexts { self: Analyzer =>
// Error and warning issuance
//
- private def addDiagString(msg: String) = {
- val ds =
- if (diagnostic.isEmpty) ""
- else diagnostic.mkString("\n","\n", "")
- if (msg endsWith ds) msg else msg + ds
- }
-
- private def unitError(pos: Position, msg: String): Unit =
- if (checking) onTreeCheckerError(pos, msg) else unit.error(pos, msg)
-
- @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
- // TODO: are errors allowed to have pos == NoPosition??
- // if not, Jason suggests doing: val pos = err.errPos.orElse( { devWarning("Que?"); context.tree.pos })
- if (settings.Yissuedebug) {
- log("issue error: " + err.errMsg)
- (new Exception).printStackTrace()
- }
- if (pf isDefinedAt err) pf(err)
- else if (bufferErrors) { reportBuffer += err }
- else throw new TypeError(err.errPos, err.errMsg)
- }
-
/** Issue/buffer/throw the given type error according to the current mode for error reporting. */
- def issue(err: AbsTypeError) {
- issueCommon(err) { case _ if reportErrors =>
- unitError(err.errPos, addDiagString(err.errMsg))
- }
- }
-
+ private[typechecker] def issue(err: AbsTypeError) = reporter.issue(err)(this)
/** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
- def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
- issueCommon(err) { case _ if ambiguousErrors =>
- if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
- unitError(err.errPos, err.errMsg)
- }
- }
+ private[typechecker] def issueAmbiguousError(err: AbsAmbiguousTypeError) = reporter.issueAmbiguousError(err)(this)
+ /** Issue/throw the given error message according to the current mode for error reporting. */
+ def error(pos: Position, msg: String) = reporter.error(pos, msg)
+    /** Issue/throw the given warning message according to the current mode for error reporting. */
+ def warning(pos: Position, msg: String) = reporter.warning(pos, msg)
+ def echo(pos: Position, msg: String) = reporter.echo(pos, msg)
- /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
- def issueAmbiguousError(err: AbsTypeError) {
- issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
- }
- /** Issue/throw the given `err` according to the current mode for error reporting. */
- def error(pos: Position, err: Throwable) =
- if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
- else throw err
+ def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit =
+ currentRun.reporting.deprecationWarning(pos, sym, msg)
+ def deprecationWarning(pos: Position, sym: Symbol): Unit =
+ currentRun.reporting.deprecationWarning(pos, sym) // TODO: allow this to escalate to an error, and implicit search will ignore deprecated implicits
- /** Issue/throw the given error message according to the current mode for error reporting. */
- def error(pos: Position, msg: String) = {
- val msg1 = addDiagString(msg)
- if (reportErrors) unitError(pos, msg1)
- else throw new TypeError(pos, msg1)
- }
+ def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean): Unit =
+ currentRun.reporting.featureWarning(pos, featureName, featureDesc, featureTrait, construct, required)
- /** Issue/throw the given error message according to the current mode for error reporting. */
- def warning(pos: Position, msg: String, force: Boolean = false) {
- if (reportErrors || force) unit.warning(pos, msg)
- else if (bufferErrors) reportBuffer += (pos -> msg)
- }
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
@@ -803,7 +799,7 @@ trait Contexts { self: Analyzer =>
isAccessible(sym, pre) &&
!(imported && {
val e = scope.lookupEntry(name)
- (e ne null) && (e.owner == scope)
+ (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists)
})
private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
@@ -1112,10 +1108,10 @@ trait Contexts { self: Analyzer =>
//
// A type-import-on-demand declaration never causes any other declaration to be shadowed.
//
- // Scala: Bindings of different kinds have a precedence defined on them:
+ // Scala: Bindings of different kinds have a precedence defined on them:
//
- // 1) Definitions and declarations that are local, inherited, or made available by a
- // package clause in the same compilation unit where the definition occurs have
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
// highest precedence.
// 2) Explicit imports have next highest precedence.
def depthOk(imp: ImportInfo) = (
@@ -1228,61 +1224,178 @@ trait Contexts { self: Analyzer =>
override final def toString = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
}
- /** A buffer for warnings and errors that are accumulated during speculative type checking. */
- final class ReportBuffer {
+ /** A reporter for use during type checking. It has multiple modes for handling errors.
+ *
+ * The default (immediate mode) is to send the error to the global reporter.
+   * When switched into buffering mode via makeBuffering, errors and warnings are buffered, not reported
+   * (there's a special case for ambiguity errors for some reason: those are forced to the reporter when context.ambiguousErrors,
+ * or else they are buffered -- TODO: can we simplify this?)
+ *
+ * When using the type checker after typers, an error results in a TypeError being thrown. TODO: get rid of this mode.
+ *
+ * To handle nested contexts, reporters share buffers. TODO: only buffer in BufferingReporter, emit immediately in ImmediateReporter
+ */
+ abstract class ContextReporter(private[this] var _errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, private[this] var _warningBuffer: mutable.LinkedHashSet[(Position, String)] = null) extends Reporter {
type Error = AbsTypeError
type Warning = (Position, String)
- private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results.
+ def issue(err: AbsTypeError)(implicit context: Context): Unit = handleError(err.errPos, addDiagString(err.errMsg))
- // [JZ] Contexts, pre- the SI-7345 refactor, avoided allocating the buffers until needed. This
- // is replicated here out of conservatism.
- private var _errorBuffer: mutable.LinkedHashSet[Error] = _
- private def errorBuffer = {if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer}
- def errors: immutable.Seq[Error] = errorBuffer.toVector
+ protected def handleError(pos: Position, msg: String): Unit
+ protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = ()
+ protected def handleWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
- private var _warningBuffer: mutable.LinkedHashSet[Warning] = _
- private def warningBuffer = {if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer}
- def warnings: immutable.Seq[Warning] = warningBuffer.toVector
+ def makeImmediate: ContextReporter = this
+ def makeBuffering: ContextReporter = this
+ def isBuffering: Boolean = false
+ def isThrowing: Boolean = false
- def +=(error: AbsTypeError): this.type = {
- errorBuffer += error
- this
- }
- def ++=(errors: Traversable[AbsTypeError]): this.type = {
- errorBuffer ++= errors
- this
- }
- def +=(warning: Warning): this.type = {
- warningBuffer += warning
- this
+ /** Emit an ambiguous error according to context.ambiguousErrors
+ *
+ * - when true, use global.reporter regardless of whether we're buffering (TODO: can we change this?)
+ * - else, let this context reporter decide
+ */
+ final def issueAmbiguousError(err: AbsAmbiguousTypeError)(implicit context: Context): Unit =
+ if (context.ambiguousErrors) reporter.error(err.errPos, addDiagString(err.errMsg)) // force reporting... see TODO above
+ else handleSuppressedAmbiguous(err)
+
+ @inline final def withFreshErrorBuffer[T](expr: => T): T = {
+ val previousBuffer = _errorBuffer
+ _errorBuffer = newBuffer
+ val res = expr // expr will read _errorBuffer
+ _errorBuffer = previousBuffer
+ res
}
- def clearAll(): this.type = {
- clearAllErrors(); clearAllWarnings();
+ @inline final def propagatingErrorsTo[T](target: ContextReporter)(expr: => T): T = {
+ val res = expr // TODO: make sure we're okay skipping the try/finally overhead
+ if ((this ne target) && hasErrors) { // `this eq target` in e.g., test/files/neg/divergent-implicit.scala
+ // assert(target.errorBuffer ne _errorBuffer)
+ target ++= errors
+ // TODO: is clearAllErrors necessary? (no tests failed when dropping it)
+ // NOTE: even though `this ne target`, it may still be that `target.errorBuffer eq _errorBuffer`,
+ // so don't clear the buffer, but null out the reference so that a new one will be created when necessary (should be never??)
+ // (we should refactor error buffering to avoid mutation on shared buffers)
+ clearAllErrors()
+ }
+ res
}
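    // Editorial sketch, not part of this patch: how the two combinators above are
    // meant to compose. `outer` is some enclosing reporter and `attempt` a
    // speculative step typed against this reporter; both are placeholders.
    final def sketchSpeculate[T](outer: ContextReporter)(attempt: => T): T =
      withFreshErrorBuffer {                // give `attempt` a clean error buffer
        propagatingErrorsTo(outer)(attempt) // copy any surviving errors into `outer`
      }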
- def clearAllErrors(): this.type = {
- errorBuffer.clear()
- this
- }
- def clearErrors(removeF: PartialFunction[AbsTypeError, Boolean]): this.type = {
- errorBuffer.retain(!PartialFunction.cond(_)(removeF))
- this
+ protected final def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit =
+ severity match {
+ case ERROR => handleError(pos, msg)
+ case WARNING => handleWarning(pos, msg)
+ case INFO => reporter.echo(pos, msg)
+ }
+
+ final override def hasErrors = super.hasErrors || errorBuffer.nonEmpty
+
+ // TODO: everything below should be pushed down to BufferingReporter (related to buffering)
+ // Implicit relies on this most heavily, but there you know reporter.isInstanceOf[BufferingReporter]
+ // can we encode this statically?
+
+    // have to pass in context because multiple contexts may share the same underlying error buffer
+ def reportFirstDivergentError(fun: Tree, param: Symbol, paramTp: Type)(implicit context: Context): Unit =
+ errors.collectFirst {
+ case dte: DivergentImplicitTypeError => dte
+ } match {
+ case Some(divergent) =>
+ // DivergentImplicit error has higher priority than "no implicit found"
+ // no need to issue the problem again if we are still in silent mode
+ if (context.reportErrors) {
+ context.issue(divergent.withPt(paramTp))
+ errorBuffer.retain {
+ case dte: DivergentImplicitTypeError => false
+ case _ => true
+ }
+ }
+ case _ =>
+ NoImplicitFoundError(fun, param)(context)
+ }
+
+ def retainDivergentErrorsExcept(saved: DivergentImplicitTypeError) =
+ errorBuffer.retain {
+ case err: DivergentImplicitTypeError => err ne saved
+ case _ => false
+ }
+
+ def propagateImplicitTypeErrorsTo(target: ContextReporter) = {
+ errors foreach {
+ case err@(_: DivergentImplicitTypeError | _: AmbiguousImplicitTypeError) =>
+ target.errorBuffer += err
+ case _ =>
+ }
+ // debuglog("propagateImplicitTypeErrorsTo: " + errors)
}
- def retainErrors(leaveF: PartialFunction[AbsTypeError, Boolean]): this.type = {
- errorBuffer.retain(PartialFunction.cond(_)(leaveF))
- this
+
+ protected def addDiagString(msg: String)(implicit context: Context): String = {
+ val diagUsedDefaultsMsg = "Error occurred in an application involving default arguments."
+ if (context.diagUsedDefaults && !(msg endsWith diagUsedDefaultsMsg)) msg + "\n" + diagUsedDefaultsMsg
+ else msg
}
- def clearAllWarnings(): this.type = {
- warningBuffer.clear()
- this
+
+ final def emitWarnings() = if (_warningBuffer != null) {
+ _warningBuffer foreach {
+ case (pos, msg) => reporter.warning(pos, msg)
+ }
+ _warningBuffer = null
}
- def hasErrors = errorBuffer.nonEmpty
- def firstError = errorBuffer.headOption
+    // [JZ] Contexts, prior to the SI-7345 refactor, avoided allocating the buffers until needed. This
+ // is replicated here out of conservatism.
+ private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results.
+ final protected def errorBuffer = { if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer }
+ final protected def warningBuffer = { if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer }
+
+ final def errors: immutable.Seq[Error] = errorBuffer.toVector
+ final def warnings: immutable.Seq[Warning] = warningBuffer.toVector
+ final def firstError: Option[AbsTypeError] = errorBuffer.headOption
+
+ // TODO: remove ++= and clearAll* entirely in favor of more high-level combinators like withFreshErrorBuffer
+ final private[typechecker] def ++=(errors: Traversable[AbsTypeError]): Unit = errorBuffer ++= errors
+
+ // null references to buffers instead of clearing them,
+ // as the buffers may be shared between different reporters
+ final def clearAll(): Unit = { _errorBuffer = null; _warningBuffer = null }
+ final def clearAllErrors(): Unit = { _errorBuffer = null }
+ }
+
+ private[typechecker] class ImmediateReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[(Position, String)] = null) extends ContextReporter(_errorBuffer, _warningBuffer) {
+ override def makeBuffering: ContextReporter = new BufferingReporter(errorBuffer, warningBuffer)
+ protected def handleError(pos: Position, msg: String): Unit = reporter.error(pos, msg)
+ }
+
+
+ private[typechecker] class BufferingReporter(_errorBuffer: mutable.LinkedHashSet[AbsTypeError] = null, _warningBuffer: mutable.LinkedHashSet[(Position, String)] = null) extends ContextReporter(_errorBuffer, _warningBuffer) {
+ override def isBuffering = true
+
+ override def issue(err: AbsTypeError)(implicit context: Context): Unit = errorBuffer += err
+
+ // this used to throw new TypeError(pos, msg) -- buffering lets us report more errors (test/files/neg/macro-basic-mamdmi)
+ // the old throwing behavior was relied on by diagnostics in manifestOfType
+ protected def handleError(pos: Position, msg: String): Unit = errorBuffer += TypeErrorWrapper(new TypeError(pos, msg))
+ override protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = errorBuffer += err
+ override protected def handleWarning(pos: Position, msg: String): Unit = warningBuffer += ((pos, msg))
+
+ // TODO: emit all buffered errors, warnings
+ override def makeImmediate: ContextReporter = new ImmediateReporter(errorBuffer, warningBuffer)
}
+ /** Used after typer (specialization relies on TypeError being thrown, among other post-typer phases).
+ *
+ * TODO: get rid of it, use ImmediateReporter and a check for reporter.hasErrors where necessary
+ */
+ private[typechecker] class ThrowingReporter extends ContextReporter {
+ override def isThrowing = true
+ protected def handleError(pos: Position, msg: String): Unit = throw new TypeError(pos, msg)
+ }
+
+ /** Used during a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */
+ private[typechecker] class CheckingReporter extends ContextReporter {
+ protected def handleError(pos: Position, msg: String): Unit = onTreeCheckerError(pos, msg)
+ }
+
+
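  // Editorial sketch, not part of this patch: how the four reporters introduced
  // above divide the work. `silent`, `postTyper` and `treeChecking` are
  // hypothetical flags standing in for the decisions the callers actually make.
  private def sketchPickReporter(silent: Boolean, postTyper: Boolean, treeChecking: Boolean): ContextReporter =
    if (treeChecking) new CheckingReporter      // errors are routed to onTreeCheckerError
    else if (postTyper) new ThrowingReporter    // errors throw TypeError (legacy post-typer behaviour)
    else if (silent) new BufferingReporter      // errors and warnings are buffered
    else new ImmediateReporter                  // errors go straight to the global reporter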
class ImportInfo(val tree: Import, val depth: Int) {
def pos = tree.pos
def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
@@ -1375,8 +1488,6 @@ object ContextMode {
def apply(bits: Int): ContextMode = new ContextMode(bits)
final val NOmode: ContextMode = 0
- final val ReportErrors: ContextMode = 1 << 0
- final val BufferErrors: ContextMode = 1 << 1
final val AmbiguousErrors: ContextMode = 1 << 2
/** Are we in a secondary constructor after the this constructor call? */
@@ -1399,8 +1510,6 @@ object ContextMode {
/** To selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed */
final val EnrichmentEnabled: ContextMode = 1 << 8
- /** Are we in a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */
- final val Checking: ContextMode = 1 << 9
/** Are we retypechecking arguments independently from the function applied to them? See `Typer.tryTypedApply`
* TODO - iron out distinction/overlap with SecondTry.
@@ -1437,17 +1546,14 @@ object ContextMode {
PatternAlternative | StarPatterns | SuperInit | SecondTry | ReturnExpr | TypeConstructorAllowed
)
- final val DefaultMode: ContextMode = MacrosEnabled
+ final val DefaultMode: ContextMode = MacrosEnabled
private val contextModeNameMap = Map(
- ReportErrors -> "ReportErrors",
- BufferErrors -> "BufferErrors",
AmbiguousErrors -> "AmbiguousErrors",
ConstructorSuffix -> "ConstructorSuffix",
SelfSuperCall -> "SelfSuperCall",
ImplicitsEnabled -> "ImplicitsEnabled",
MacrosEnabled -> "MacrosEnabled",
- Checking -> "Checking",
ReTyping -> "ReTyping",
PatternAlternative -> "PatternAlternative",
StarPatterns -> "StarPatterns",
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index d87090fa46..5ecca5abce 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -3,7 +3,7 @@
* @author Martin Odersky
*/
-//todo: rewrite or disllow new T where T is a mixin (currently: <init> not a member of T)
+//todo: rewrite or disallow new T where T is a mixin (currently: <init> not a member of T)
//todo: use inherited type info also for vars and values
//todo: disallow C#D in superclass
//todo: treat :::= correctly
@@ -71,13 +71,10 @@ trait Implicits {
typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
- if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
- context.updateBuffer(implicitSearchContext.reportBuffer.errors.collect {
- case dte: DivergentImplicitTypeError => dte
- case ate: AmbiguousImplicitTypeError => ate
- })
- debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors)
- }
+
+ if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.reporter.hasErrors)
+ implicitSearchContext.reporter.propagateImplicitTypeErrorsTo(context.reporter)
+
// SI-7944 undetermined type parameters that result from inference within typedImplicit land in
// `implicitSearchContext.undetparams`, *not* in `context.undetparams`
// Here, we copy them up to parent context (analogously to the way the errors are copied above),
@@ -99,7 +96,7 @@ trait Implicits {
def wrapper(inference: => SearchResult) = wrapper1(inference)
val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos))
if (result.isFailure && !silent) {
- val err = context.firstError
+ val err = context.reporter.firstError
val errPos = err.map(_.errPos).getOrElse(pos)
val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
onError(errPos, errMsg)
@@ -162,7 +159,7 @@ trait Implicits {
* @param tree The tree representing the implicit
* @param subst A substituter that represents the undetermined type parameters
* that were instantiated by the winning implicit.
- * @param undetparams undeterminted type parameters
+ * @param undetparams undetermined type parameters
*/
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) {
override def toString = "SearchResult(%s, %s)".format(tree,
@@ -279,7 +276,7 @@ trait Implicits {
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
- val dummyMethod = NoSymbol.newTermSymbol("typer$dummy") setInfo NullaryMethodType(AnyTpe)
+ val dummyMethod = NoSymbol.newTermSymbol(TermName("typer$dummy")) setInfo NullaryMethodType(AnyTpe)
def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
@@ -612,7 +609,7 @@ trait Implicits {
val itree2 = if (!isView) fallback else pt match {
case Function1(arg1, arg2) =>
typed1(
- atPos(itree0.pos)(Apply(itree1, List(Ident("<argument>") setType approximate(arg1)))),
+ atPos(itree0.pos)(Apply(itree1, List(Ident(nme.argument) setType approximate(arg1)))),
EXPRmode,
approximate(arg2)
) match {
@@ -635,7 +632,7 @@ trait Implicits {
}
case _ => fallback
}
- context.firstError match { // using match rather than foreach to avoid non local return.
+ context.reporter.firstError match { // using match rather than foreach to avoid non local return.
case Some(err) =>
log("implicit adapt failed: " + err.errMsg)
return fail(err.errMsg)
@@ -658,8 +655,8 @@ trait Implicits {
}
}
- if (context.hasErrors)
- fail("hasMatchingSymbol reported error: " + context.firstError.get.errMsg)
+ if (context.reporter.hasErrors)
+ fail("hasMatchingSymbol reported error: " + context.reporter.firstError.get.errMsg)
else if (itree3.isErroneous)
fail("error typechecking implicit candidate")
else if (isLocalToCallsite && !hasMatchingSymbol(itree2))
@@ -677,7 +674,7 @@ trait Implicits {
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree3, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
- context.firstError match {
+ context.reporter.firstError match {
case Some(err) =>
return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + err.errMsg)
case None =>
@@ -716,7 +713,7 @@ trait Implicits {
case t => t
}
- context.firstError match {
+ context.reporter.firstError match {
case Some(err) =>
fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg)
case None =>
@@ -857,13 +854,11 @@ trait Implicits {
SearchFailure
} else {
if (search.isFailure) {
- // We don't want errors that occur during checking implicit info
+ // Discard the divergentError we saved (if any), as well as all errors that are not of type DivergentImplicitTypeError
+ // We don't want errors that occur while checking the implicit info
// to influence the check of further infos, but we should retain divergent implicit errors
// (except for the one we already squirreled away)
- val saved = divergentError.getOrElse(null)
- context.reportBuffer.retainErrors {
- case err: DivergentImplicitTypeError => err ne saved
- }
+ context.reporter.retainDivergentErrorsExcept(divergentError.getOrElse(null))
}
search
}
@@ -898,7 +893,7 @@ trait Implicits {
try improves(firstPending, alt)
catch {
case e: CyclicReference =>
- debugwarn(s"Discarding $firstPending during implicit search due to cyclic reference.")
+ devWarning(s"Discarding $firstPending during implicit search due to cyclic reference.")
true
}
)
@@ -909,7 +904,7 @@ trait Implicits {
// the first `DivergentImplicitTypeError` that is being propagated
// from a nested implicit search; this one will be
// re-issued if this level of the search fails.
- DivergentImplicitRecovery(typedFirstPending, firstPending, context.errors) match {
+ DivergentImplicitRecovery(typedFirstPending, firstPending, context.reporter.errors) match {
case sr if sr.isDivergent => Nil
case sr if sr.isFailure => rankImplicits(otherPending, acc)
case newBest =>
@@ -923,7 +918,7 @@ trait Implicits {
/** Returns all eligible ImplicitInfos and their SearchResults in a map.
*/
- def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocalToCallsite))
+ def findAll() = linkedMapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocalToCallsite))
/** Returns the SearchResult of the best match.
*/
@@ -968,7 +963,7 @@ trait Implicits {
* symbols of the same name in succeeding lists.
* @return map from infos to search results
*/
- def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): Map[ImplicitInfo, SearchResult] = {
+ def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): mutable.LinkedHashMap[ImplicitInfo, SearchResult] = {
val start = if (Statistics.canEnable) Statistics.startCounter(subtypeAppInfos) else null
val computation = new ImplicitComputation(iss, isLocalToCallsite) { }
val applicable = computation.findAll()
@@ -1146,7 +1141,7 @@ trait Implicits {
try {
val tree1 = typedPos(pos.focus)(arg)
- context.firstError match {
+ context.reporter.firstError match {
case Some(err) => processMacroExpansionError(err.errPos, err.errMsg)
case None => new SearchResult(tree1, EmptyTreeTypeSubstituter, Nil)
}
@@ -1278,19 +1273,20 @@ trait Implicits {
if (tagInScope.isEmpty) mot(tp, Nil, Nil)
else {
if (ReflectRuntimeUniverse == NoSymbol) {
- // todo. write a test for this
- context.error(pos,
+ // TODO: write a test for this (the next error message is already checked by neg/interop_typetags_without_classtags_arenot_manifests.scala)
+ // TODO: this was using context.error, and implicit search always runs in silent mode, thus it was actually throwing a TypeError
+ // with the new strategy-based reporting, a BufferingReporter buffers instead of throwing
+ // it would be good to rework this logic to fit into the regular context.error mechanism
+ throw new TypeError(pos,
sm"""to create a manifest here, it is necessary to interoperate with the type tag `$tagInScope` in scope.
|however typetag -> manifest conversion requires Scala reflection, which is not present on the classpath.
|to proceed put scala-reflect.jar on your compilation classpath and recompile.""")
- return SearchFailure
}
if (resolveClassTag(pos, tp, allowMaterialization = true) == EmptyTree) {
- context.error(pos,
+ throw new TypeError(pos,
sm"""to create a manifest here, it is necessary to interoperate with the type tag `$tagInScope` in scope.
|however typetag -> manifest conversion requires a class tag for the corresponding type to be present.
|to proceed add a class tag to the type `$tp` (e.g. by introducing a context bound) and recompile.""")
- return SearchFailure
}
val cm = typed(Ident(ReflectRuntimeCurrentMirror))
val internal = gen.mkAttributedSelect(gen.mkAttributedRef(ReflectRuntimeUniverse), UniverseInternal)
@@ -1306,7 +1302,7 @@ trait Implicits {
}
def wrapResult(tree: Tree): SearchResult =
- if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter, Nil)
+ if (tree == EmptyTree) SearchFailure else new SearchResult(atPos(pos.focus)(tree), EmptyTreeTypeSubstituter, Nil)
/** Materializes implicits of predefined types (currently, manifests and tags).
* Will be replaced by implicit macros once we fix them.
@@ -1346,52 +1342,66 @@ trait Implicits {
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
- val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
+ val stats = Statistics.canEnable
+ val failstart = if (stats) Statistics.startTimer(inscopeFailNanos) else null
+ val succstart = if (stats) Statistics.startTimer(inscopeSucceedNanos) else null
var result = searchImplicit(context.implicitss, isLocalToCallsite = true)
- if (result.isFailure) {
- if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
- } else {
- if (Statistics.canEnable) Statistics.stopTimer(inscopeSucceedNanos, succstart)
- if (Statistics.canEnable) Statistics.incCounter(inscopeImplicitHits)
+ if (stats) {
+ if (result.isFailure) Statistics.stopTimer(inscopeFailNanos, failstart)
+ else {
+ Statistics.stopTimer(inscopeSucceedNanos, succstart)
+ Statistics.incCounter(inscopeImplicitHits)
+ }
}
+
if (result.isFailure) {
- val previousErrs = context.flushAndReturnBuffer()
- val failstart = if (Statistics.canEnable) Statistics.startTimer(oftypeFailNanos) else null
- val succstart = if (Statistics.canEnable) Statistics.startTimer(oftypeSucceedNanos) else null
+ val failstart = if (stats) Statistics.startTimer(oftypeFailNanos) else null
+ val succstart = if (stats) Statistics.startTimer(oftypeSucceedNanos) else null
+
+ // SI-6667, never search companions after an ambiguous error in in-scope implicits
+ val wasAmbigious = result.isAmbiguousFailure
+
+ // TODO: encapsulate
+ val previousErrs = context.reporter.errors
+ context.reporter.clearAllErrors()
- val wasAmbigious = result.isAmbiguousFailure // SI-6667, never search companions after an ambiguous error in in-scope implicits
result = materializeImplicit(pt)
+
// `materializeImplicit` does some preprocessing for `pt`
// is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
if (result.isFailure && !wasAmbigious)
result = searchImplicit(implicitsOfExpectedType, isLocalToCallsite = false)
- if (result.isFailure) {
- context.updateBuffer(previousErrs)
- if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
- } else {
- if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
- if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
+ if (result.isFailure)
+ context.reporter ++= previousErrs
+
+ if (stats) {
+ if (result.isFailure) Statistics.stopTimer(oftypeFailNanos, failstart)
+ else {
+ Statistics.stopTimer(oftypeSucceedNanos, succstart)
+ Statistics.incCounter(oftypeImplicitHits)
+ }
}
}
if (result.isSuccess && isView) {
def maybeInvalidConversionError(msg: String) {
// We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError"
// which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690.
+ // AM: I would guess it's because ambiguous errors will be buffered in silent mode if they are not reported
if (context.ambiguousErrors)
context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
}
pt match {
case Function1(_, out) =>
- def prohibit(sym: Symbol) = if (sym.tpe <:< out) {
- maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}")
- result = SearchFailure
+ // must inline to avoid capturing result
+ def prohibit(sym: Symbol) = (sym.tpe <:< out) && {
+ maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}")
+ true
}
- prohibit(AnyRefClass)
- if (settings.isScala211) prohibit(AnyValClass)
+ if (prohibit(AnyRefClass) || (settings.isScala211 && prohibit(AnyValClass)))
+ result = SearchFailure
case _ => false
}
if (settings.isScala211 && isInvalidConversionSource(pt)) {
@@ -1399,8 +1409,9 @@ trait Implicits {
result = SearchFailure
}
}
- if (result.isFailure)
- debuglog("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
+
+ if (result.isFailure && settings.debug) // debuglog is not inlined for some reason
+ log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
result
}
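    // Editorial sketch, not part of this patch: the save/clear/restore idiom used
    // above when falling back from in-scope implicits to materialization and
    // companion-object implicits. `search` stands for any of the searches performed
    // by bestImplicit; only reporter members visible in this diff are used.
    private def sketchWithSavedErrors(search: => SearchResult): SearchResult = {
      val previousErrs = context.reporter.errors               // remember errors from earlier attempts
      context.reporter.clearAllErrors()
      val result = search
      if (result.isFailure) context.reporter ++= previousErrs  // restore the earlier errors on failure
      result
    }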
@@ -1422,20 +1433,19 @@ trait Implicits {
val eligible = new ImplicitComputation(iss, isLocalToCallsite).eligible
eligible.toList.flatMap {
(ii: ImplicitInfo) =>
- // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
- // thus, start each type var off with a fresh for every typedImplicit
- resetTVars()
- // any previous errors should not affect us now
- context.flushBuffer()
-
- val res = typedImplicit(ii, ptChecked = false, isLocalToCallsite)
- if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
- else Nil
+ // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
+ // thus, start each type var off with a fresh for every typedImplicit
+ resetTVars()
+ // any previous errors should not affect us now
+ context.reporter.clearAllErrors()
+ val res = typedImplicit(ii, ptChecked = false, isLocalToCallsite)
+ if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
+ else Nil
+ }
}
- }
eligibleInfos(context.implicitss, isLocalToCallsite = true) ++
eligibleInfos(implicitsOfExpectedType, isLocalToCallsite = false)
- }
+ }
}
object ImplicitNotFoundMsg {
@@ -1468,8 +1478,10 @@ trait Implicits {
})
private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
+ private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs
+
+ def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString))
- def format(paramName: Name, paramTp: Type): String = format(paramTp.typeArgs map (_.toString))
def format(typeArgs: List[String]): String =
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index fc0e2c7c80..f9582a54ff 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -199,8 +199,6 @@ trait Infer extends Checkable {
def getContext = context
- def issue(err: AbsTypeError): Unit = context.issue(err)
-
def explainTypes(tp1: Type, tp2: Type) = {
if (context.reportErrors)
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -297,11 +295,17 @@ trait Infer extends Checkable {
&& !isByNameParamType(tp)
&& isCompatible(tp, dropByName(pt))
)
+ def isCompatibleSam(tp: Type, pt: Type): Boolean = {
+ val samFun = typer.samToFunctionType(pt)
+ (samFun ne NoType) && isCompatible(tp, samFun)
+ }
+
val tp1 = normalize(tp)
( (tp1 weak_<:< pt)
|| isCoercible(tp1, pt)
|| isCompatibleByName(tp, pt)
+ || isCompatibleSam(tp, pt)
)
}
def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
@@ -372,7 +376,7 @@ trait Infer extends Checkable {
}
/** Overload which allocates fresh type vars.
* The other one exists because apparently inferExprInstance needs access to the typevars
- * after the call, and its wasteful to return a tuple and throw it away almost every time.
+ * after the call, and it's wasteful to return a tuple and throw it away almost every time.
*/
private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] =
exprTypeArgs(tparams map freshVar, tparams, restpe, pt, useWeaklyCompatible)
@@ -546,7 +550,13 @@ trait Infer extends Checkable {
val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes))
// Can warn about inferring Any/AnyVal as long as they don't appear
// explicitly anywhere amongst the formal, argument, result, or expected type.
- def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass)))
+ // ...or lower bound of a type param, since they're asking for it.
+ def canWarnAboutAny = {
+ val loBounds = tparams map (_.info.bounds.lo)
+ def containsAny(t: Type) = (t contains AnyClass) || (t contains AnyValClass)
+ val hasAny = pt :: restpe :: formals ::: argtpes ::: loBounds exists (_.dealiasWidenChain exists containsAny)
+ !hasAny
+ }
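      // Editorial sketch, not part of this patch: the two situations the refined
      // check distinguishes, assuming the infer-any warning (settings.warnInferAny)
      // is enabled:
      //
      //   def pair[T](a: T, b: T) = (a, b)
      //   pair(1, "one")                    // T is inferred as Any -> warn
      //
      //   def pairAny[T >: Any](a: T, b: T) = (a, b)
      //   pairAny(1, "one")                 // Any appears in T's lower bound -> no warning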
def argumentPosition(idx: Int): Position = context.tree match {
case x: ValOrDefDef => x.rhs match {
case Apply(fn, args) if idx < args.size => args(idx).pos
@@ -554,11 +564,11 @@ trait Infer extends Checkable {
}
case _ => context.tree.pos
}
- if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) {
+ if (settings.warnInferAny && context.reportErrors && canWarnAboutAny) {
foreachWithIndex(targs) ((targ, idx) =>
targ.typeSymbol match {
case sym @ (AnyClass | AnyValClass) =>
- context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
+ reporter.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
case _ =>
}
)
@@ -781,7 +791,7 @@ trait Infer extends Checkable {
def applicableExpectingPt(pt: Type): Boolean = {
val silent = context.makeSilent(reportAmbiguousErrors = false)
val result = newTyper(silent).infer.isApplicable(undetparams, ftpe, argtpes0, pt)
- if (silent.hasErrors && !pt.isWildcard)
+ if (silent.reporter.hasErrors && !pt.isWildcard)
applicableExpectingPt(WildcardType) // second try
else
result
@@ -1006,7 +1016,7 @@ trait Infer extends Checkable {
/** Substitute free type variables `undetparams` of type constructor
* `tree` in pattern, given prototype `pt`.
*
- * @param tree the constuctor that needs to be instantiated
+ * @param tree the constructor that needs to be instantiated
* @param undetparams the undetermined type parameters
* @param pt0 the expected result type of the instance
*/
@@ -1266,33 +1276,36 @@ trait Infer extends Checkable {
* If no alternative matches `pt`, take the parameterless one anyway.
*/
def inferExprAlternative(tree: Tree, pt: Type): Tree = {
- def tryOurBests(pre: Type, alts: List[Symbol], isSecondTry: Boolean): Unit = {
- val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt))
- val alts1 = if (alts0.isEmpty) alts else alts0
- val bests = bestAlternatives(alts1) { (sym1, sym2) =>
- val tp1 = pre memberType sym1
- val tp2 = pre memberType sym2
-
- ( (tp2 eq ErrorType)
- || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt)
- || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
- )
- }
- // todo: missing test case for bests.isEmpty
- bests match {
- case best :: Nil => tree setSymbol best setType (pre memberType best)
- case best :: competing :: _ if alts0.nonEmpty =>
- // SI-6912 Don't give up and leave an OverloadedType on the tree.
- // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
- // unless an error is issued. We're not issuing an error, in the assumption that it would be
- // spurious in light of the erroneous expected type
- if (pt.isErroneous) setError(tree)
- else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
- case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
+ val c = context
+ class InferTwice(pre: Type, alts: List[Symbol]) extends c.TryTwice {
+ def tryOnce(isSecondTry: Boolean): Unit = {
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt))
+ val alts1 = if (alts0.isEmpty) alts else alts0
+ val bests = bestAlternatives(alts1) { (sym1, sym2) =>
+ val tp1 = pre memberType sym1
+ val tp2 = pre memberType sym2
+
+ ( (tp2 eq ErrorType)
+ || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt)
+ || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
+ )
+ }
+ // todo: missing test case for bests.isEmpty
+ bests match {
+ case best :: Nil => tree setSymbol best setType (pre memberType best)
+ case best :: competing :: _ if alts0.nonEmpty =>
+ // SI-6912 Don't give up and leave an OverloadedType on the tree.
+ // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
+            // unless an error is issued. We're not issuing an error, on the assumption that it would be
+ // spurious in light of the erroneous expected type
+ if (pt.isErroneous) setError(tree)
+ else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
+ case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
+ }
}
}
tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice(tryOurBests(pre, alts, _)) ; tree
+ case OverloadedType(pre, alts) => (new InferTwice(pre, alts)).apply() ; tree
case _ => tree
}
}
@@ -1370,70 +1383,41 @@ trait Infer extends Checkable {
* @pre tree.tpe is an OverloadedType.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = {
- val OverloadedType(pre, alts) = tree.tpe
- var varargsStar = false
- val argtpes = argtpes0 mapConserve {
- case RepeatedType(tp) => varargsStar = true ; tp
- case tp => tp
- }
- def followType(sym: Symbol) = followApply(pre memberType sym)
- def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
- val applicable0 = alts filter (alt => context inSilentMode isApplicable(undetparams, followType(alt), argtpes, pt))
- val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar)
- val ranked = bestAlternatives(applicable)((sym1, sym2) =>
- isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
- )
- ranked match {
- case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
- case best :: Nil => tree setSymbol best setType (pre memberType best) // success
- case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
- case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
- }
- }
- // This potentially makes up to four attempts: tryTwice may execute
+ // This potentially makes up to four attempts: tryOnce may execute
// with and without views enabled, and bestForExpectedType will try again
// with pt = WildcardType if it fails with pt != WildcardType.
- tryTwice { isLastTry =>
- val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
- bestForExpectedType(pt, isLastTry)
- }
- }
+ val c = context
+ class InferMethodAlternativeTwice extends c.TryTwice {
+ private[this] val OverloadedType(pre, alts) = tree.tpe
+ private[this] var varargsStar = false
+ private[this] val argtpes = argtpes0 mapConserve {
+ case RepeatedType(tp) => varargsStar = true ; tp
+ case tp => tp
+ }
- /** Try inference twice, once without views and once with views,
- * unless views are already disabled.
- */
- def tryTwice(infer: Boolean => Unit): Unit = {
- if (context.implicitsEnabled) {
- val savedContextMode = context.contextMode
- var fallback = false
- context.setBufferErrors()
- // We cache the current buffer because it is impossible to
- // distinguish errors that occurred before entering tryTwice
- // and our first attempt in 'withImplicitsDisabled'. If the
- // first attempt fails we try with implicits on *and* clean
- // buffer but that would also flush any pre-tryTwice valid
- // errors, hence some manual buffer tweaking is necessary.
- val errorsToRestore = context.flushAndReturnBuffer()
- try {
- context.withImplicitsDisabled(infer(false))
- if (context.hasErrors) {
- fallback = true
- context.contextMode = savedContextMode
- context.flushBuffer()
- infer(true)
+ private def followType(sym: Symbol) = followApply(pre memberType sym)
+ // separate method to help the inliner
+ private def isAltApplicable(pt: Type)(alt: Symbol) = context inSilentMode { isApplicable(undetparams, followType(alt), argtpes, pt) && !context.reporter.hasErrors }
+ private def rankAlternatives(sym1: Symbol, sym2: Symbol) = isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
+ private def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
+ val applicable = overloadsToConsiderBySpecificity(alts filter isAltApplicable(pt), argtpes, varargsStar)
+ val ranked = bestAlternatives(applicable)(rankAlternatives)
+ ranked match {
+ case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
+ case best :: Nil => tree setSymbol best setType (pre memberType best) // success
+ case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
+ case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
}
- } catch {
- case ex: CyclicReference => throw ex
- case ex: TypeError => // recoverable cyclic references
- context.contextMode = savedContextMode
- if (!fallback) infer(true) else ()
- } finally {
- context.contextMode = savedContextMode
- context.updateBuffer(errorsToRestore)
+ }
+
+ private[this] val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
+ def tryOnce(isLastTry: Boolean): Unit = {
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
+ bestForExpectedType(pt, isLastTry)
}
}
- else infer(true)
+
+ (new InferMethodAlternativeTwice).apply()
}
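  // Editorial sketch, not part of this patch: a standalone model of the TryTwice
  // control flow the two classes above plug into, reconstructed from the removed
  // `tryTwice` method. `hasBufferedErrors` and `RecoverableTypeError` are
  // illustrative stand-ins, not compiler API.
  abstract class SketchTryTwice(hasBufferedErrors: () => Boolean) {
    class RecoverableTypeError extends Exception
    def tryOnce(isLastTry: Boolean): Unit
    def apply(): Unit = {
      val secondAttemptNeeded =
        try { tryOnce(isLastTry = false); hasBufferedErrors() } // first try, e.g. with implicit views disabled
        catch { case _: RecoverableTypeError => true }          // a recoverable type error also triggers the retry
      if (secondAttemptNeeded) tryOnce(isLastTry = true)        // second and last attempt
    }
  }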
/** Assign `tree` the type of all polymorphic alternatives
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 9cf92ca5b9..10aefae20b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -3,13 +3,14 @@ package typechecker
import java.lang.Math.min
import symtab.Flags._
-import scala.tools.nsc.util._
+import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
import scala.reflect.ClassTag
import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
import scala.util.control.ControlThrowable
+import scala.reflect.internal.util.ListOfNil
import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes}
import scala.reflect.runtime.{universe => ru}
import scala.reflect.macros.compiler.DefaultMacroCompiler
@@ -42,7 +43,7 @@ import Fingerprint._
* (Expr(elems))
* (TypeTag(Int))
*/
-trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
+trait Macros extends MacroRuntimes with Traces with Helpers {
self: Analyzer =>
import global._
@@ -50,6 +51,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
import treeInfo.{isRepeatedParamType => _, _}
import MacrosStats._
+ lazy val fastTrack = new FastTrack[self.type](self)
+
def globalSettings = global.settings
protected def findMacroClassLoader(): ClassLoader = {
@@ -224,7 +227,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
val Apply(_, pickledPayload) = wrapped
val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
- import typer.TyperErrorGen._
+ // TODO: refactor error handling: fail always throws a TypeError,
+ // and uses global state (analyzer.lastTreeToTyper) to determine the position for the error
def fail(msg: String) = MacroCantExpandIncompatibleMacrosError(msg)
def unpickle[T](field: String, clazz: Class[T]): T = {
def failField(msg: String) = fail(s"$field $msg")
@@ -261,7 +265,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
}
def isBlackbox(expandee: Tree): Boolean = isBlackbox(dissectApplied(expandee).core.symbol)
- def isBlackbox(macroDef: Symbol): Boolean = {
+ def isBlackbox(macroDef: Symbol): Boolean = pluginsIsBlackbox(macroDef)
+
+ /** Default implementation of `isBlackbox`.
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsIsBlackbox for more details)
+ */
+ def standardIsBlackbox(macroDef: Symbol): Boolean = {
val fastTrackBoxity = fastTrack.get(macroDef).map(_.isBlackbox)
val bindingBoxity = loadMacroImplBinding(macroDef).map(_.isBlackbox)
fastTrackBoxity orElse bindingBoxity getOrElse false
@@ -415,9 +424,10 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
val wrappedArgs = mapWithIndex(args)((arg, j) => {
val fingerprint = implParams(min(j, implParams.length - 1))
+ val duplicatedArg = duplicateAndKeepPositions(arg)
fingerprint match {
- case LiftedTyped => context.Expr[Nothing](arg.duplicate)(TypeTag.Nothing) // TODO: SI-5752
- case LiftedUntyped => arg.duplicate
+ case LiftedTyped => context.Expr[Nothing](duplicatedArg)(TypeTag.Nothing) // TODO: SI-5752
+ case LiftedUntyped => duplicatedArg
case _ => abort(s"unexpected fingerprint $fingerprint in $binding with paramss being $paramss " +
s"corresponding to arg $arg in $argss")
}
@@ -568,7 +578,10 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
// also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
- if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) expandee.setType(expanded1.tpe)
+ if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) {
+ suppressMacroExpansion(expandee)
+ expandee.setType(expanded1.tpe)
+ }
else expanded1
case Fallback(fallback) => onFallback(fallback)
case Delayed(delayed) => onDelayed(delayed)
@@ -613,7 +626,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
// `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
// therefore we need to re-enable the conversions back temporarily
val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt))
- if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}")
+ if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reporter.errors}")
result
}
}
@@ -706,7 +719,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
sealed abstract class MacroStatus(val result: Tree)
case class Success(expanded: Tree) extends MacroStatus(expanded)
- case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
+ case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.reporting.seenMacroExpansionsFallingBack = true }
case class Delayed(delayed: Tree) extends MacroStatus(delayed)
case class Skipped(skipped: Tree) extends MacroStatus(skipped)
case class Failure(failure: Tree) extends MacroStatus(failure)
@@ -780,7 +793,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
}
} catch {
case ex: Throwable =>
- popMacroContext()
+ if (openMacros.nonEmpty) popMacroContext() // weirdly we started popping on an empty stack when refactoring fatalWarnings logic
val realex = ReflectionUtils.unwrapThrowable(ex)
realex match {
case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex)
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index ba183fe3e6..f90e32ce8a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -8,6 +8,7 @@ package typechecker
import symtab.Flags._
import scala.reflect.internal.util.StringOps.{ ojoin }
import scala.reflect.ClassTag
+import scala.reflect.internal.util.ListOfNil
import scala.reflect.runtime.{ universe => ru }
import scala.language.higherKinds
@@ -212,7 +213,9 @@ trait MethodSynthesis {
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
- abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", "))
+ context.error(cd.pos, s"Internal error: Symbol for synthetic factory method not found among ${context.unit.synthetics.keys.mkString(", ")}")
+ // Soldier on for the sake of the presentation compiler
+ List(cd)
}
case _ =>
stat :: Nil
@@ -355,8 +358,9 @@ trait MethodSynthesis {
def derivedSym: Symbol = {
// Only methods will do! Don't want to pick up any stray
// companion objects of the same name.
- val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic)
- assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
+ val result = enclClass.info decl name filter (x => x.isMethod && x.isSynthetic)
+ if (result == NoSymbol || result.isOverloaded)
+ context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}")
result
}
def derivedTree: DefDef =
@@ -381,7 +385,7 @@ trait MethodSynthesis {
}
}
case class Getter(tree: ValDef) extends BaseGetter(tree) {
- override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getter(enclClass)
+ override def derivedSym = if (mods.isDeferred) basisSym else basisSym.getterIn(enclClass)
private def derivedRhs = if (mods.isDeferred) EmptyTree else fieldSelection
private def derivedTpt = {
// For existentials, don't specify a type for the getter, even one derived
@@ -448,7 +452,7 @@ trait MethodSynthesis {
def flagsMask = SetterFlags
def flagsExtra = ACCESSOR
- override def derivedSym = basisSym.setter(enclClass)
+ override def derivedSym = basisSym.setterIn(enclClass)
}
case class Field(tree: ValDef) extends DerivedFromValDef {
def name = tree.localName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 23dc57d5b9..77c49a862a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -10,6 +10,7 @@ import scala.collection.mutable
import scala.annotation.tailrec
import symtab.Flags._
import scala.language.postfixOps
+import scala.reflect.internal.util.ListOfNil
/** This trait declares methods to create symbols and to enter them into scopes.
*
@@ -171,7 +172,7 @@ trait Namers extends MethodSynthesis {
val newFlags = (sym.flags & LOCKED) | flags
sym.rawInfo match {
case tr: TypeRef =>
- // !!! needed for: pos/t5954d; the uniques type cache will happilly serve up the same TypeRef
+ // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef
// over this mutated symbol, and we witness a stale cache for `parents`.
tr.invalidateCaches()
case _ =>
@@ -296,7 +297,7 @@ trait Namers extends MethodSynthesis {
}
tree.symbol match {
case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context)
- case sym => enterExistingSym(sym)
+ case sym => enterExistingSym(sym, tree)
}
}
@@ -413,6 +414,7 @@ trait Namers extends MethodSynthesis {
if (isRedefinition) {
updatePosFlags(existing, tree.pos, tree.mods.flags)
setPrivateWithin(tree, existing)
+ clearRenamedCaseAccessors(existing)
existing
}
else assignAndEnterSymbol(tree) setFlag inConstructorFlag
@@ -443,7 +445,7 @@ trait Namers extends MethodSynthesis {
&& clazz.exists
)
if (fails) {
- context.unit.error(tree.pos, (
+ reporter.error(tree.pos, (
s"Companions '$clazz' and '$module' must be defined in same file:\n"
+ s" Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}")
)
@@ -463,7 +465,7 @@ trait Namers extends MethodSynthesis {
def enterModuleSymbol(tree : ModuleDef): Symbol = {
var m: Symbol = context.scope lookupModule tree.name
val moduleFlags = tree.mods.flags | MODULE
- if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
+ if (m.isModule && !m.hasPackageFlag && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
// This code accounts for the way the package objects found in the classpath are opened up
// early by the completer of the package itself. If the `packageobjects` phase then finds
// the same package object in sources, we have to clean the slate and remove package object
@@ -486,7 +488,7 @@ trait Namers extends MethodSynthesis {
m.moduleClass setFlag moduleClassFlags(moduleFlags)
setPrivateWithin(tree, m.moduleClass)
}
- if (m.isTopLevel && !m.isPackage) {
+ if (m.isTopLevel && !m.hasPackageFlag) {
m.moduleClass.associatedFile = contextFile
currentRun.symSource(m) = m.moduleClass.sourceFile
registerTopLevelSym(m)
@@ -583,7 +585,7 @@ trait Namers extends MethodSynthesis {
// more than one hidden name, the second will not be warned.
// So it is the position of the actual hidden name.
//
- // Note: java imports have precence over definitions in the same package
+ // Note: java imports have precedence over definitions in the same package
// so don't warn for them. There is a corresponding special treatment
// in the shadowing rules in typedIdent to (SI-7232). In any case,
// we shouldn't be emitting warnings for .java source files.
@@ -711,17 +713,14 @@ trait Namers extends MethodSynthesis {
val m = ensureCompanionObject(tree, caseModuleDef)
m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
}
- val hasDefault = impl.body exists {
- case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
- case _ => false
- }
+ val hasDefault = impl.body exists treeInfo.isConstructorWithDefault
if (hasDefault) {
val m = ensureCompanionObject(tree)
m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
- if (settings.lint && owner.isPackageObjectClass && !mods.isImplicit) {
- context.unit.warning(tree.pos,
+ if (settings.warnPackageObjectClasses && owner.isPackageObjectClass && !mods.isImplicit) {
+ reporter.warning(tree.pos,
"it is not recommended to define classes/objects inside of package objects.\n" +
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
)
@@ -733,13 +732,15 @@ trait Namers extends MethodSynthesis {
log("enter implicit wrapper "+tree+", owner = "+owner)
enterImplicitWrapper(tree)
}
- else context.unit.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter")
+ else reporter.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter")
}
validateCompanionDefs(tree)
}
// Hooks which are overridden in the presentation compiler
- def enterExistingSym(sym: Symbol): Context = this.context
+ def enterExistingSym(sym: Symbol, tree: Tree): Context = {
+ this.context
+ }
def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
@@ -844,7 +845,7 @@ trait Namers extends MethodSynthesis {
private def widenIfNecessary(sym: Symbol, tpe: Type, pt: Type): Type = {
val getter =
if (sym.isValue && sym.owner.isClass && sym.isPrivate)
- sym.getter(sym.owner)
+ sym.getterIn(sym.owner)
else sym
def isHidden(tp: Type): Boolean = tp match {
case SingleType(pre, sym) =>
@@ -1043,10 +1044,10 @@ trait Namers extends MethodSynthesis {
* so the resulting type is a valid external method type, it does not contain (references to) skolems.
*/
def thisMethodType(restpe: Type) = {
- val checkDependencies = new DependentTypeChecker(context)(this)
- checkDependencies check vparamSymss
- // DEPMETTODO: check not needed when they become on by default
- checkDependencies(restpe)
+ if (vparamSymss.lengthCompare(0) > 0) { // OPT fast path for methods of 0-1 parameter lists
+ val checkDependencies = new DependentTypeChecker(context)(this)
+ checkDependencies check vparamSymss
+ }
val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
// TODODEPMET: check that we actually don't need to do anything here
@@ -1124,7 +1125,7 @@ trait Namers extends MethodSynthesis {
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
val overriddenParamTp = overriddenParams.head.tpe
- // references to type parameteres in overriddenParamTp link to the type skolems, so the
+ // references to type parameters in overriddenParamTp link to the type skolems, so the
// assigned type is consistent with the other / existing parameter types in vparamSymss.
vparam.symbol setInfo overriddenParamTp
vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
@@ -1180,7 +1181,13 @@ trait Namers extends MethodSynthesis {
}
}
- addDefaultGetters(meth, ddef, vparamss, tparams, overriddenSymbol(methResTp))
+ val overridden = {
+ val isConstr = meth.isConstructor
+ if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol(methResTp)
+ }
+ val hasDefaults = mexists(vparamss)(_.symbol.hasDefault) || mexists(overridden.paramss)(_.hasDefault)
+ if (hasDefaults)
+ addDefaultGetters(meth, ddef, vparamss, tparams, overridden)
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
@@ -1222,7 +1229,7 @@ trait Namers extends MethodSynthesis {
* typechecked, the corresponding param would not yet have the "defaultparam"
* flag.
*/
- private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
+ private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overridden: Symbol) {
val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate)
// having defs here is important to make sure that there's no sneaky tree sharing
// in methods with multiple default parameters
@@ -1230,7 +1237,6 @@ trait Namers extends MethodSynthesis {
def rvparamss = rvparamss0.map(_.map(_.duplicate))
val methOwner = meth.owner
val isConstr = meth.isConstructor
- val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
@@ -1300,7 +1306,7 @@ trait Namers extends MethodSynthesis {
// by martin: the null case can happen in IDE; this is really an ugly hack on top of an ugly hack but it seems to work
case Some(cda) =>
if (cda.companionModuleClassNamer == null) {
- debugwarn(s"SI-6576 The companion module namer for $meth was unexpectedly null")
+ devWarning(s"SI-6576 The companion module namer for $meth was unexpectedly null")
return
}
val p = (cda.classWithDefault, cda.companionModuleClassNamer)
@@ -1492,8 +1498,7 @@ trait Namers extends MethodSynthesis {
case defn: MemberDef =>
val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann =>
val ctx = typer.context
- val annCtx = ctx.make(ann)
- annCtx.setReportErrors()
+ val annCtx = ctx.makeNonSilent(ann)
// need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
AnnotationInfo lazily {
enteringTyper(newTyper(annCtx) typedAnnotation ann)
@@ -1642,6 +1647,7 @@ trait Namers extends MethodSynthesis {
def symbolAllowsDeferred = (
sym.isValueParameter
|| sym.isTypeParameterOrSkolem
+ || (sym.isAbstractType && sym.owner.isClass)
|| context.tree.isInstanceOf[ExistentialTypeTree]
)
// Does the symbol owner require no undefined members?
@@ -1748,7 +1754,6 @@ trait Namers extends MethodSynthesis {
for (p <- vps)
this(p.info)
// can only refer to symbols in earlier parameter sections
- // (if the extension is enabled)
okParams ++= vps
}
}
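
A minimal sketch of user code (hypothetical names) that triggers the implicit-class arity error now routed through reporter.error in the Namers hunk above:

    object ImplicitClassExample {
      // error: implicit classes must accept exactly one primary constructor parameter
      implicit class Pair(val a: Int, val b: Int)
      // compiles: exactly one primary constructor parameter
      implicit class RichInt(val self: Int) { def twice: Int = self * 2 }
    }
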
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index dceb0a47d8..39cd610b1c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -174,8 +174,8 @@ trait NamesDefaults { self: Analyzer =>
// assigning the correct method symbol, typedSelect will just assign the type. the reason
// to still call 'typed' is to correctly infer singleton types, SI-5259.
val selectPos =
- if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end))
- else baseFun.pos
+ if(qual.pos.isRange && baseFun1.pos.isRange) qual.pos.union(baseFun1.pos).withStart(Math.min(qual.pos.end, baseFun1.pos.end))
+ else baseFun1.pos
val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos))
if (funTargs.isEmpty) f
else TypeApply(f, funTargs).setType(baseFun.tpe)
@@ -379,18 +379,37 @@ trait NamesDefaults { self: Analyzer =>
def makeNamedTypes(syms: List[Symbol]) = syms map (sym => NamedType(sym.name, sym.tpe))
- def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = {
- val namedArgs = args.dropWhile(arg => {
- val n = argName(arg)
- n.isEmpty || params.forall(p => p.name != n.get)
- })
- val namedParams = params.drop(args.length - namedArgs.length)
- // missing: keep those with a name which doesn't exist in namedArgs
- val missingParams = namedParams.filter(p => namedArgs.forall(arg => {
+ /**
+ * Returns the parameter symbols of an invocation expression that are not defined by the list
+ * of arguments.
+ *
+ * @param args The list of arguments
+ * @param params The list of parameter symbols of the invoked method
+ * @param argName A function that extracts the name of an argument expression, if it is a named argument.
+ */
+ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name]): (List[Symbol], Boolean) = {
+ // The argument list contains first a mix of positional args and named args that are on the
+ // right parameter position, and then a number of named args on different positions.
+
+ // collect all named arguments whose position does not match the parameter they define
+ val namedArgsOnChangedPosition = args.zip(params) dropWhile {
+ case (arg, param) =>
+ val n = argName(arg)
+ // drop the argument if
+ // - it's not named, or
+ // - it's named, but defines the parameter at its current position, or
+ // - it's named, but none of the parameter names matches (treated as a positional argument, an assignment expression)
+ n.isEmpty || n.get == param.name || params.forall(_.name != n.get)
+ } map (_._1)
+
+ val paramsWithoutPositionalArg = params.drop(args.length - namedArgsOnChangedPosition.length)
+
+ // missing parameters: those with a name which is not specified in one of the namedArgsOnChangedPosition
+ val missingParams = paramsWithoutPositionalArg.filter(p => namedArgsOnChangedPosition.forall { arg =>
val n = argName(arg)
n.isEmpty || n.get != p.name
- }))
- val allPositional = missingParams.length == namedParams.length
+ })
+ val allPositional = missingParams.length == paramsWithoutPositionalArg.length
(missingParams, allPositional)
}
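
A hedged sketch (hypothetical user code) of the argument shape the rewritten missingParams documents above: positional args first, then named args moved off their parameter positions.

    object MissingParamsExample {
      def resize(width: Int, height: Int = 10, depth: Int = 1): Int = width * height * depth
      // `height` is the missing parameter here; allPositional is false because `depth` is named,
      // and the default getter resize$default$2 supplies 10.
      val v = resize(3, depth = 2)
    }
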
@@ -407,7 +426,7 @@ trait NamesDefaults { self: Analyzer =>
previousArgss: List[List[Tree]], params: List[Symbol],
pos: scala.reflect.internal.util.Position, context: Context): (List[Tree], List[Symbol]) = {
if (givenArgs.length < params.length) {
- val (missing, positional) = missingParams(givenArgs, params)
+ val (missing, positional) = missingParams(givenArgs, params, nameOfNamedArg)
if (missing forall (_.hasDefault)) {
val defaultArgs = missing flatMap (p => {
val defGetter = defaultGetter(p, context)
@@ -502,8 +521,22 @@ trait NamesDefaults { self: Analyzer =>
WarnAfterNonSilentRecursiveInference(param, arg)(context)
res
} match {
- case SilentResultValue(t) => !t.isErroneous // #4041
- case _ => false
+ case SilentResultValue(t) =>
+ !t.isErroneous // #4041
+ case SilentTypeError(e: NormalTypeErrorFromCyclicReference) =>
+ // If we end up here, the CyclicReference was reported in a silent context. This can
+ // happen for local definitions, when the completer for a definition is created during
+ // type checking in silent mode. ContextErrors.TypeSigError catches that cyclic reference
+ // and transforms it into a NormalTypeErrorFromCyclicReference.
+ // The cycle needs to be reported, because the program cannot be typed: we don't know
+ // if we have an assignment or a named arg.
+ context.issue(e)
+ // 'err = true' is required because we're in a silent context
+ WarnAfterNonSilentRecursiveInference(param, arg)(context)
+ false
+ case _ =>
+ // We got a type error, so it cannot be an assignment (it doesn't type check as one).
+ false
}
catch {
// `silent` only catches and returns TypeErrors which are not
@@ -536,8 +569,8 @@ trait NamesDefaults { self: Analyzer =>
def matchesName(param: Symbol) = !param.isSynthetic && (
(param.name == name) || (param.deprecatedParamName match {
case Some(`name`) =>
- context0.unit.deprecationWarning(arg.pos,
- "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.")
+ context0.deprecationWarning(arg.pos, param,
+ s"the parameter name $name has been deprecated. Use ${param.name} instead.")
true
case _ => false
})
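
A small hypothetical example of the deprecated-parameter-name path updated above, using the standard scala.deprecatedName annotation:

    object DeprecatedNameExample {
      def greet(@deprecatedName('who) name: String): String = s"hello, $name"
      // warning: the parameter name who has been deprecated. Use name instead.
      val g = greet(who = "world")
    }
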
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index cf3f265f0c..8a66c7d274 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -261,14 +261,14 @@ trait PatternTypers {
def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
- def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+ def duplErrorTree(err: AbsTypeError) = { context.issue(err); duplErrTree }
if (args.length > MaxTupleArity)
return duplErrorTree(TooManyArgsPatternError(fun))
def freshArgType(tp: Type): Type = tp match {
case MethodType(param :: _, _) => param.tpe
- case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(polyType)
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(genPolyType)
case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType
case _ => UnapplyWithSingleArgError(fun) ; ErrorType
}
@@ -309,7 +309,7 @@ trait PatternTypers {
// the union of the expected type and the inferred type of the argument to unapply
val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
- val formals = patmat.alignPatterns(fun1, args).unexpandedFormals
+ val formals = patmat.alignPatterns(context.asInstanceOf[analyzer.Context], fun1, args).unexpandedFormals
val args1 = typedArgsForFormals(args, formals, mode)
val result = UnApply(fun1, args1) setPos tree.pos setType glbType
@@ -336,7 +336,7 @@ trait PatternTypers {
val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
// must call doTypedUnapply directly, as otherwise we get undesirable rewrites
// and re-typechecks of the target of the unapply call in PATTERNmode,
- // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
+ // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
// but an arbitrary tree as is the case here
val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index b166bf988d..4b30b4e436 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -132,13 +132,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
defaultMethodNames.toList.distinct foreach { name =>
val methods = clazz.info.findMember(name, 0L, requiredFlags = METHOD, stableOnly = false).alternatives
- val haveDefaults = methods filter (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name))
+ def hasDefaultParam(tpe: Type): Boolean = tpe match {
+ case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
+ case _ => false
+ }
+ val haveDefaults = methods filter (
+ if (settings.isScala211)
+ (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name))
+ else
+ (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name))
+ )
if (haveDefaults.lengthCompare(1) > 0) {
val owners = haveDefaults map (_.owner)
// constructors of different classes are allowed to have defaults
if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size) {
- unit.error(clazz.pos,
+ reporter.error(clazz.pos,
"in "+ clazz +
", multiple overloaded alternatives of "+ haveDefaults.head +
" define default arguments" + (
@@ -153,17 +162,17 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Check for doomed attempt to overload applyDynamic
if (clazz isSubClass DynamicClass) {
for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) {
- unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)")
+ reporter.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)")
}
}
// This has become noisy with implicit classes.
- if (settings.lint && settings.developer) {
+ if (settings.warnPolyImplicitOverload && settings.developer) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
// implicit classes leave both a module symbol and a method symbol as residue
val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
if (alts.size > 1)
- alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
+ alts foreach (x => reporter.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
}
}
}
@@ -272,10 +281,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
mixinOverrideErrors.toList match {
case List() =>
case List(MixinOverrideError(_, msg)) =>
- unit.error(clazz.pos, msg)
+ reporter.error(clazz.pos, msg)
case MixinOverrideError(member, msg) :: others =>
val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).distinct
- unit.error(
+ reporter.error(
clazz.pos,
msg+(if (others1.isEmpty) ""
else ";\n other members with override errors are: "+(others1 mkString ", ")))
@@ -338,7 +347,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
)
}
def emitOverrideError(fullmsg: String) {
- if (member.owner == clazz) unit.error(member.pos, fullmsg)
+ if (member.owner == clazz) reporter.error(member.pos, fullmsg)
else mixinOverrideErrors += new MixinOverrideError(member, fullmsg)
}
@@ -455,7 +464,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkOverrideDeprecated()
if (settings.warnNullaryOverride) {
if (other.paramss.isEmpty && !member.paramss.isEmpty) {
- unit.warning(member.pos, "non-nullary method overrides nullary method")
+ reporter.warning(member.pos, "non-nullary method overrides nullary method")
}
}
}
@@ -487,7 +496,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
typer.infer.checkKindBounds(high :: Nil, lowType :: Nil, rootType, low.owner) match { // (1.7.2)
case Nil =>
case kindErrors =>
- unit.error(member.pos,
+ reporter.error(member.pos,
"The kind of "+member.keyString+" "+member.varianceString + member.nameString+
" does not conform to the expected kind of " + other.defString + other.locationString + "." +
kindErrors.toList.mkString("\n", ", ", ""))
@@ -498,7 +507,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
typer.infer.checkKindBounds(low :: Nil, lowType.normalize :: Nil, rootType, low.owner) match {
case Nil =>
case kindErrors =>
- unit.error(member.pos,
+ reporter.error(member.pos,
"The kind of the right-hand side "+lowType.normalize+" of "+low.keyString+" "+
low.varianceString + low.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
@@ -534,10 +543,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def checkOverrideDeprecated() {
- if (other.hasDeprecatedOverridingAnnotation) {
+ if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
- unit.deprecationWarning(member.pos, msg)
+ currentRun.reporting.deprecationWarning(member.pos, other, msg)
}
}
}
@@ -575,7 +584,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
!other.isDeferred && other.isJavaDefined && !sym.enclClass.isSubClass(other.enclClass) && {
// #3622: erasure operates on uncurried types --
// note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
- // !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erreneous of inaccessible type - check whether that's still the case!
+ // !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erroneous or inaccessible type - check whether that's still the case!
def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
@@ -709,7 +718,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Check the remainder for invalid absoverride.
for (member <- rest ; if (member.isAbstractOverride && member.isIncompleteIn(clazz))) {
- val other = member.superSymbol(clazz)
+ val other = member.superSymbolIn(clazz)
val explanation =
if (other != NoSymbol) " and overrides incomplete superclass member " + infoString(other)
else ", but no concrete implementation could be found in a base class"
@@ -745,7 +754,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkNoAbstractDecls(clazz)
if (abstractErrors.nonEmpty)
- unit.error(clazz.pos, abstractErrorMessage)
+ reporter.error(clazz.pos, abstractErrorMessage)
}
else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) {
// For non-AnyVal classes, prevent abstract methods in interfaces that override
@@ -756,7 +765,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// override a concrete method in Object. The jvm, however, does not.
val overridden = decl.matchingSymbol(ObjectClass, ObjectTpe)
if (overridden.isFinal)
- unit.error(decl.pos, "trait cannot redefine final method from class AnyRef")
+ reporter.error(decl.pos, "trait cannot redefine final method from class AnyRef")
}
}
@@ -809,7 +818,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
- def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix)
+ def issueError(suffix: String) = reporter.error(member.pos, member.toString() + " overrides nothing" + suffix)
nonMatching match {
case Nil =>
issueError("")
@@ -862,7 +871,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ :: Nil =>
;// OK
case tp1 :: tp2 :: _ =>
- unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
+ reporter.error(clazz.pos, "illegal inheritance;\n " + clazz +
" inherits different type instances of " + baseClass +
":\n" + tp1 + " and " + tp2)
explainTypes(tp1, tp2)
@@ -879,7 +888,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ => "type "+tp
}
override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) {
- currentRun.currentUnit.error(base.pos,
+ reporter.error(base.pos,
s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base")
}
}
@@ -945,9 +954,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def apply(tp: Type) = mapOver(tp).normalize
}
- def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint) (fn, args) match {
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.warnOptionImplicit) (fn, args) match {
case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == currentRun.runDefinitions.Option_apply =>
- unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
+ reporter.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
case _ =>
}
@@ -1022,7 +1031,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
- unit.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg")
+ reporter.warning(pos, s"comparing $what using `${name.decode}' will always yield $msg")
isNonSensible = true
}
def nonSensible(pre: String, alwaysEqual: Boolean) =
@@ -1037,7 +1046,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def unrelatedTypes() = if (!isNonSensible) {
val weaselWord = if (isEitherValueClass) "" else " most likely"
- unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
+ reporter.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
}
if (nullCount == 2) // null == null
@@ -1086,7 +1095,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectTpe <:< common)
+ if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe && ObjectTpe <:< receiver.tpe))
unrelatedTypes()
}
// warn if actual has a case parent that is not same as receiver's;
@@ -1113,7 +1122,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
/** Sensibility check examines flavors of equals. */
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) && !currentOwner.isSynthetic =>
checkSensibleEquals(pos, qual, name, fn.symbol, args.head)
case _ =>
}
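
A hypothetical one-liner that reaches checkSensibleEquals through the (now synthetic-owner-aware) match above:

    object SensibleEqualsExample {
      // warning: comparing values of types String and Int using `==' will always yield false
      val b = "abc" == 1
    }
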
@@ -1132,7 +1141,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
&& callsSelf
)
if (trivialInifiniteLoop)
- unit.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively")
+ reporter.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively")
}
// Transformation ------------------------------------------------------------
@@ -1222,7 +1231,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
finally if (currentLevel.maxindex > 0) {
// An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
debuglog("refsym = " + currentLevel.refsym)
- unit.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation")
+ reporter.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation")
}
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
@@ -1232,7 +1241,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val lazySym = tree.symbol.lazyAccessorOrSelf
if (lazySym.isLocalToBlock && index <= currentLevel.maxindex) {
debuglog("refsym = " + currentLevel.refsym)
- unit.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
+ reporter.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
}
tree1 :: Nil
}
@@ -1246,7 +1255,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
try typer.infer.checkBounds(tree0, pre, owner, tparams, argtps, "")
catch {
case ex: TypeError =>
- unit.error(tree0.pos, ex.getMessage())
+ reporter.error(tree0.pos, ex.getMessage())
if (settings.explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
(argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
@@ -1278,11 +1287,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def checkUndesiredProperties(sym: Symbol, pos: Position) {
// If symbol is deprecated, and the point of reference is not enclosed
// in either a deprecated member or a scala bridge method, issue a warning.
- if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
- unit.deprecationWarning(pos, "%s%s is deprecated%s".format(
- sym, sym.locationString, sym.deprecationMessage map (": " + _) getOrElse "")
- )
- }
+ // TODO: x.hasBridgeAnnotation doesn't seem to be needed here...
+ if (sym.isDeprecated && !currentOwner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation))
+ currentRun.reporting.deprecationWarning(pos, sym)
+
// Similar to deprecation: check if the symbol is marked with @migration
// indicating it has changed semantics between versions.
if (sym.hasMigrationAnnotation && settings.Xmigration.value != NoScalaVersion) {
@@ -1290,20 +1298,20 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
catch {
case e : NumberFormatException =>
- unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ reporter.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
// if we can't parse the format on the migration annotation just conservatively assume it changed
true
}
if (changed)
- unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ reporter.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
}
// See an explanation of compileTimeOnly in its scaladoc at scala.annotation.compileTimeOnly.
- if (sym.isCompileTimeOnly) {
+ if (sym.isCompileTimeOnly && !currentOwner.ownerChain.exists(x => x.isCompileTimeOnly)) {
def defaultMsg =
sm"""Reference to ${sym.fullLocationString} should not have survived past type checking,
|it should have been processed and eliminated during expansion of an enclosing macro."""
// The getOrElse part should never happen, it's just here as a backstop.
- unit.error(pos, sym.compileTimeOnlyMessage getOrElse defaultMsg)
+ reporter.error(pos, sym.compileTimeOnlyMessage getOrElse defaultMsg)
}
}
@@ -1313,8 +1321,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
&& !qual.tpe.isInstanceOf[ThisType]
&& sym.accessedOrSelf.isVal
)
- if (settings.lint.value && isLikelyUninitialized)
- unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
+ if (settings.warnDelayedInit && isLikelyUninitialized)
+ reporter.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
}
private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
@@ -1346,7 +1354,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (memberSym.isDeferred) "may be unable to provide a concrete implementation of"
else "may be unable to override"
- unit.warning(memberSym.pos,
+ reporter.warning(memberSym.pos,
"%s%s references %s %s.".format(
memberSym.fullLocationString, comparison,
accessFlagsToString(otherSym), otherSym
@@ -1380,8 +1388,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def checkByNameRightAssociativeDef(tree: DefDef) {
tree match {
case DefDef(_, name, _, params :: _, _, _) =>
- if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
- unit.warning(tree.pos,
+ if (settings.warnByNameRightAssociative && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
+ reporter.warning(tree.pos,
"by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.")
case _ =>
}
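
A hypothetical definition that trips the by-name / right-associative lint (SI-1980) checked above:

    class ByNameRightAssocExample[A] {
      // warning: by-name parameters will be evaluated eagerly when called as a
      // right-associative infix operator. For more details, see SI-1980.
      def #::(hd: => A): ByNameRightAssocExample[A] = this
    }
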
@@ -1396,12 +1404,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (symbol.isDeprecated) {
val concrOvers =
symbol.allOverriddenSymbols.filter(sym =>
- !sym.isDeprecated && !sym.isDeferred)
+ !sym.isDeprecated && !sym.isDeferred && !sym.hasDeprecatedOverridingAnnotation && !sym.enclClass.hasDeprecatedInheritanceAnnotation)
if(!concrOvers.isEmpty)
- unit.deprecationWarning(
+ currentRun.reporting.deprecationWarning(
tree.pos,
- symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
- concrOvers.map(_.name.decode).mkString(" ", ", ", ""))
+ symbol,
+ s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}")
}
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
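
A hypothetical pair of classes for the deprecation check in the hunk above: a deprecated member overriding a concrete, non-deprecated one.

    class Base { def size: Int = 0 }
    class Sub extends Base {
      // warning: method size overrides concrete, non-deprecated symbol(s): size
      @deprecated("use length instead", "2.11.0") override def size: Int = 1
    }
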
@@ -1462,7 +1470,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
applyChecks(sym.annotations)
// validate implicitNotFoundMessage
analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
- unit.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
+ reporter.warning(tree.pos, f"Invalid implicitNotFound message for ${sym}%s${sym.locationString}%s:%n$warn")
}
case tpt@TypeTree() =>
@@ -1587,7 +1595,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
|| sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
)
if (!isOk)
- unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
+ reporter.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
case _ => ()
}
@@ -1595,15 +1603,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def checkAnyValSubclass(clazz: Symbol) = {
if (clazz.isDerivedValueClass) {
if (clazz.isTrait)
- unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
+ reporter.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
else if (clazz.hasAbstractFlag)
- unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
+ reporter.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
}
}
private def checkUnexpandedMacro(t: Tree) =
if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro)
- unit.error(t.pos, "macro has not been expanded")
+ reporter.error(t.pos, "macro has not been expanded")
override def transform(tree: Tree): Tree = {
val savedLocalTyper = localTyper
@@ -1626,7 +1634,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (settings.warnNullaryUnit)
checkNullaryMethodReturnType(sym)
if (settings.warnInaccessible) {
- if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
+ if (!sym.isConstructor && !sym.isEffectivelyFinalOrNotOverridden && !sym.isSynthetic)
checkAccessibilityOfReferencedTypes(tree)
}
tree match {
@@ -1676,9 +1684,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ =>
}
if (skipBounds) {
- tree.tpe = tree.tpe.map {
+ tree.setType(tree.tpe.map {
_.filterAnnotations(_.symbol != UncheckedBoundsClass)
- }
+ })
}
tree
@@ -1698,7 +1706,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
- unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
+ reporter.error(tree.pos, "no `: _*' annotation allowed here\n"+
"(such annotations are only allowed in arguments to *-parameters)")
tree
@@ -1771,7 +1779,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
} catch {
case ex: TypeError =>
if (settings.debug) ex.printStackTrace()
- unit.error(tree.pos, ex.getMessage())
+ reporter.error(tree.pos, ex.getMessage())
tree
} finally {
localTyper = savedLocalTyper
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 57f27a05fd..ea44b9dc39 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -61,7 +61,7 @@ trait StdAttachments {
val metadata = MacroExpansionAttachment(expandee, expanded)
expandee updateAttachment metadata
expanded match {
- case expanded: Tree => expanded updateAttachment metadata
+ case expanded: Tree if !expanded.isEmpty => expanded updateAttachment metadata
case _ => // do nothing
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 87da565142..e0d96df062 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -12,21 +12,47 @@ import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
import symtab.Flags._
-/** This phase adds super accessors for all super calls that either
+/** This phase performs the following functions, each of which could be split out in a
+ * mini-phase:
+ *
+ * (1) Adds super accessors for all super calls that either
* appear in a trait or have as a target a member of some outer class.
- * It also replaces references to parameter accessors with aliases
- * by super references to these aliases. The phase also checks that
- * symbols accessed from super are not abstract, or are overridden by
- * an abstract override. Finally, the phase also mangles the names
- * of class-members which are private up to an enclosing non-package
- * class, in order to avoid overriding conflicts.
*
- * This phase also sets SPECIALIZED flag on type parameters with
+ * (2) Converts references to parameter fields that have the same name as a corresponding
+ * public parameter field in a superclass to a reference to the superclass
+ * field (corresponding = super class field is initialized with subclass field).
+ * This info is pre-computed by the `alias` field in Typer. `dotc` follows a different
+ * route; it computes everything in SuperAccessors and changes the subclass field
+ * to a forwarder instead of manipulating references. This is more modular.
+ *
+ * (3) Adds protected accessors if the access to the protected member happens
+ * in a class which is not a subclass of the member's owner.
+ *
+ * (4) Mangles the names of class-members which are
+ * private up to an enclosing non-package class, in order to avoid overriding conflicts.
+ * This is dubious, and it would be better to deprecate class-qualified privates.
+ *
+ * (5) This phase also sets SPECIALIZED flag on type parameters with
* `@specialized` annotation. We put this logic here because the
* flag must be set before pickling.
*
- * @author Martin Odersky
- * @version 1.0
+ * It also checks that:
+ *
+ * (1) Symbols accessed from super are not abstract, or are overridden by
+ * an abstract override.
+ *
+ * (2) If a symbol accessed from super is defined in a real class (not a trait),
+ * there are no abstract members which override this member in Java's rules
+ * (see SI-4989; such an access would lead to illegal bytecode)
+ *
+ * (3) Super calls do not go to some synthetic members of Any (see isDisallowed)
+ *
+ * (4) Super calls do not go to synthetic field accessors
+ *
+ * (5) A class and its companion object do not both define a class or module with the
+ * same name.
+ *
+ * TODO: Rename phase to "Accessors" because it handles more than just super accessors
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
import global._
@@ -56,11 +82,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz))
buf += typers(clazz) typed tree
}
- private def ensureAccessor(sel: Select) = {
+ private def ensureAccessor(sel: Select, mixName: TermName = nme.EMPTY) = {
val Select(qual, name) = sel
val sym = sel.symbol
val clazz = qual.symbol
- val supername = nme.superName(name)
+ val supername = nme.superName(name, mixName)
val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
debuglog(s"add super acc ${sym.fullLocationString} to $clazz")
val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym
@@ -98,7 +124,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
if (other == NoSymbol)
other = linked.info.decl(sym.name.toTermName).filter(_.isModule)
if (other != NoSymbol)
- unit.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+
+ reporter.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+
"\nand its companion "+sym.owner.companionModule+" also defines "+
other)
}
@@ -113,19 +139,31 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val member = sym.overridingSymbol(clazz)
if (mix != tpnme.EMPTY || member == NoSymbol ||
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
- unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
+ reporter.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
"unless it is overridden by a member declared `abstract' and `override'")
} else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
// SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach {
absSym =>
- unit.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract")
+ reporter.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract")
}
}
- if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner))
- ensureAccessor(sel)
+ def mixIsTrait = sup.tpe match {
+ case SuperType(thisTpe, superTpe) => superTpe.typeSymbol.isTrait
+ }
+
+ val needAccessor = name.isTermName && {
+ mix.isEmpty && (clazz.isTrait || clazz != currentClass || !validCurrentOwner) ||
+ // SI-8803. If we access super[A] from an inner class (!= currentClass) or closure (validCurrentOwner),
+ // where A is the superclass we need an accessor. If A is a parent trait we don't: in this case mixin
+ // will re-route the super call directly to the impl class (it's statically known).
+ !mix.isEmpty && (clazz != currentClass || !validCurrentOwner) && !mixIsTrait
+ }
+
+ if (needAccessor)
+ ensureAccessor(sel, mix.toTermName)
else sel
}
@@ -221,12 +259,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// also exists in a superclass, because they may be surprised
// to find out that a constructor parameter will shadow a
// field. See SI-4762.
- if (settings.lint) {
+ if (settings.warnPrivateShadow) {
if (sym.isPrivateLocal && sym.paramss.isEmpty) {
qual.symbol.ancestors foreach { parent =>
parent.info.decls filterNot (x => x.isPrivate || x.isLocalToThis) foreach { m2 =>
if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) {
- unit.warning(sel.pos,
+ reporter.warning(sel.pos,
sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name
+ " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within "
+ sym.owner + " - you may want to give them distinct names.")
@@ -284,9 +322,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case Super(_, mix) =>
if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
if (!settings.overrideVars)
- unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf)
+ reporter.error(tree.pos, "super may not be used on " + sym.accessedOrSelf)
} else if (isDisallowed(sym)) {
- unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
+ reporter.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
}
transformSuperSelect(sel)
@@ -346,12 +384,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* performance hit for the compiler as a whole.
*/
override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
+ val savedValid = validCurrentOwner
if (owner.isClass) validCurrentOwner = true
val savedLocalTyper = localTyper
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
typers = typers updated (owner, localTyper)
val result = super.atOwner(tree, owner)(trans)
localTyper = savedLocalTyper
+ validCurrentOwner = savedValid
typers -= owner
result
}
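
A hypothetical example of the -Xlint private-shadow case (SI-4762) handled by warnPrivateShadow above:

    class Box { var label = "box" }
    class Crate(label: String) extends Box {
      // warning: private[this] value label in class Crate shadows mutable label
      // inherited from class Box ...
      def show: String = label
    }
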
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 9516f94135..966e8f1abe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -54,6 +54,9 @@ trait SyntheticMethods extends ast.TreeDSL {
/** Does not force the info of `caseclazz` */
final def caseAccessorName(caseclazz: Symbol, paramName: TermName) =
(renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName))
+ final def clearRenamedCaseAccessors(caseclazz: Symbol): Unit = {
+ renamedCaseAccessors -= caseclazz
+ }
/** Add the synthetic methods to case classes.
*/
@@ -95,7 +98,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// which they shouldn't.
val accessorLub = (
if (settings.Xexperimental) {
- global.weakLub(accessors map (_.tpe.finalResultType)) match {
+ global.lub(accessors map (_.tpe.finalResultType)) match {
case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
case tp => tp
}
@@ -168,7 +171,7 @@ trait SyntheticMethods extends ast.TreeDSL {
def thatCast(eqmeth: Symbol): Tree =
gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe)
- /* The equality method core for case classes and inline clases.
+ /* The equality method core for case classes and inline classes.
* 1+ args:
* (that.isInstanceOf[this.C]) && {
* val x$1 = that.asInstanceOf[this.C]
@@ -339,12 +342,11 @@ trait SyntheticMethods extends ast.TreeDSL {
!hasOverridingImplementation(m) || {
clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && {
// Without a means to suppress this warning, I've thought better of it.
- //
- // if (settings.lint) {
- // (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
- // currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
- // }
- // }
+ if (settings.warnValueOverrides) {
+ (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
+ typer.context.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
+ }
+ }
true
}
}
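
A sketch of the equality core described in the SyntheticMethods comment above, for a hypothetical case class:

    case class Point(x: Int, y: Int)
    // The synthesized equals is roughly:
    //   that.isInstanceOf[Point] && {
    //     val p = that.asInstanceOf[Point]
    //     this.x == p.x && this.y == p.y && p.canEqual(this)
    //   }
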
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index 90ec3a89b8..57dc74d2a0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -11,7 +11,6 @@ trait Tags {
self: Typer =>
private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
@@ -66,7 +65,7 @@ trait Tags {
// if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail
if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree
else {
- val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass
+ val tagSym = if (concrete) runDefinitions.TypeTagClass else runDefinitions.WeakTypeTagClass
val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
val taggedTp = appliedType(tagTp, List(tp))
resolveTag(pos, taggedTp, allowMaterialization)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index a2f52e1905..a7d48ceb89 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -17,7 +17,7 @@ abstract class TreeCheckers extends Analyzer {
override protected def onTreeCheckerError(pos: Position, msg: String) {
if (settings.fatalWarnings)
- currentUnit.warning(pos, "\n** Error during internal checking:\n" + msg)
+ reporter.warning(pos, "\n** Error during internal checking:\n" + msg)
}
case class DiffResult[T](lost: List[T], gained: List[T]) {
@@ -170,7 +170,7 @@ abstract class TreeCheckers extends Analyzer {
)
- def errorFn(pos: Position, msg: Any): Unit = currentUnit.warning(pos, "[check: %s] %s".format(phase.prev, msg))
+ def errorFn(pos: Position, msg: Any): Unit = reporter.warning(pos, "[check: %s] %s".format(phase.prev, msg))
def errorFn(msg: Any): Unit = errorFn(NoPosition, msg)
def informFn(msg: Any) {
@@ -208,8 +208,7 @@ abstract class TreeCheckers extends Analyzer {
}
def check(unit: CompilationUnit) {
informProgress("checking "+unit)
- val context = rootContext(unit)
- context.checking = true
+ val context = rootContext(unit, checking = true)
tpeOfTree.clear()
SymbolTracker.check(phase, unit)
val checker = new TreeChecker(context)
@@ -267,7 +266,6 @@ abstract class TreeCheckers extends Analyzer {
if (tree ne typed)
treesDiffer(tree, typed)
-
tree
}
@@ -302,8 +300,8 @@ abstract class TreeCheckers extends Analyzer {
checkSym(tree)
/* XXX: lots of syms show up here with accessed == NoSymbol. */
if (accessed != NoSymbol) {
- val agetter = accessed.getter(sym.owner)
- val asetter = accessed.setter(sym.owner)
+ val agetter = accessed.getterIn(sym.owner)
+ val asetter = accessed.setterIn(sym.owner)
assertFn(agetter == sym || asetter == sym,
sym + " is getter or setter, but accessed sym " + accessed + " shows " + agetter + " and " + asetter
@@ -313,7 +311,7 @@ abstract class TreeCheckers extends Analyzer {
}
case ValDef(_, _, _, _) =>
if (sym.hasGetter && !sym.isOuterField && !sym.isOuterAccessor) {
- assertFn(sym.getter(sym.owner) != NoSymbol, ownerstr(sym) + " has getter but cannot be found. " + sym.ownerChain)
+ assertFn(sym.getterIn(sym.owner) != NoSymbol, ownerstr(sym) + " has getter but cannot be found. " + sym.ownerChain)
}
case Apply(fn, args) =>
if (args exists (_ == EmptyTree))
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 60346e7be1..059981aa37 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -41,9 +41,9 @@ trait TypeDiagnostics {
* indicate that the restriction may be lifted in the future.
*/
def restrictionWarning(pos: Position, unit: CompilationUnit, msg: String): Unit =
- unit.warning(pos, "Implementation restriction: " + msg)
+ reporter.warning(pos, "Implementation restriction: " + msg)
def restrictionError(pos: Position, unit: CompilationUnit, msg: String): Unit =
- unit.error(pos, "Implementation restriction: " + msg)
+ reporter.error(pos, "Implementation restriction: " + msg)
/** A map of Positions to addendums - if an error involves a position in
* the map, the addendum should also be printed.
@@ -141,8 +141,8 @@ trait TypeDiagnostics {
if (!member.hasAccessorFlag) member
else if (!member.isDeferred) member.accessed
else {
- val getter = if (member.isSetter) member.getter(member.owner) else member
- val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED.toLong | MUTABLE else DEFERRED
+ val getter = if (member.isSetter) member.getterIn(member.owner) else member
+ val flags = if (getter.setterIn(member.owner) != NoSymbol) DEFERRED.toLong | MUTABLE else DEFERRED
getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
}
@@ -435,15 +435,11 @@ trait TypeDiagnostics {
trait TyperDiagnostics {
self: Typer =>
- private def contextError(context0: Analyzer#Context, pos: Position, msg: String) = context0.error(pos, msg)
- private def contextError(context0: Analyzer#Context, pos: Position, err: Throwable) = context0.error(pos, err)
- private def contextWarning(pos: Position, msg: String) = context.unit.warning(pos, msg)
-
def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
- contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
+ context.warning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
object checkUnused {
- val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace")
+ val ignoreNames: Set[TermName] = Set(TermName("readResolve"), TermName("readObject"), TermName("writeObject"), TermName("writeReplace"))
class UnusedPrivates extends Traverser {
val defnTrees = ListBuffer[MemberDef]()
@@ -536,21 +532,21 @@ trait TypeDiagnostics {
if (sym.isDefaultGetter) "default argument"
else if (sym.isConstructor) "constructor"
else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var"
- else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val"
+ else if (sym.isVal || sym.isGetter && sym.accessed.isVal || sym.isLazy) "val"
else if (sym.isSetter) "setter"
else if (sym.isMethod) "method"
else if (sym.isModule) "object"
else "term"
)
- unit.warning(pos, s"$why $what in ${sym.owner} is never used")
+ reporter.warning(pos, s"$why $what in ${sym.owner} is never used")
}
p.unsetVars foreach { v =>
- unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
+ reporter.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
}
p.unusedTypes foreach { t =>
val sym = t.symbol
val why = if (sym.isPrivate) "private" else "local"
- unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
+ reporter.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
}
}
}
@@ -576,11 +572,11 @@ trait TypeDiagnostics {
} else f
}
def apply(tree: Tree): Tree = {
- // Error suppression will squash some of these warnings unless we circumvent it.
+ // Error suppression (in context.warning) would squash some of these warnings.
// It is presumed if you are using a -Y option you would really like to hear
- // the warnings you've requested.
+ // the warnings you've requested; thus, use reporter.warning.
if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK)
- context.warning(tree.pos, "dead code following this construct", force = true)
+ reporter.warning(tree.pos, "dead code following this construct")
tree
}
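
A hypothetical snippet for the -Ywarn-dead-code warning now emitted via reporter.warning above:

    object DeadCodeExample {
      def fail(): Int = {
        throw new IllegalStateException("boom")  // warning: dead code following this construct
        42
      }
    }
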
@@ -604,6 +600,23 @@ trait TypeDiagnostics {
)
}
+ // warn about class/method/type-members' type parameters that shadow types already in scope
+ def warnTypeParameterShadow(tparams: List[TypeDef], sym: Symbol): Unit =
+ if (settings.warnTypeParameterShadow && !isPastTyper && !sym.isSynthetic) {
+ def enclClassOrMethodOrTypeMember(c: Context): Context =
+ if (!c.owner.exists || c.owner.isClass || c.owner.isMethod || (c.owner.isType && !c.owner.isParameter)) c
+ else enclClassOrMethodOrTypeMember(c.outer)
+
+ tparams.filter(_.name != typeNames.WILDCARD).foreach { tp =>
+ // we don't care about type params shadowing other type params in the same declaration
+ enclClassOrMethodOrTypeMember(context).outer.lookupSymbol(tp.name, s => s != tp.symbol && s.hasRawInfo && reallyExists(s)) match {
+ case LookupSucceeded(_, sym2) => context.warning(tp.pos,
+ s"type parameter ${tp.name} defined in $sym shadows $sym2 defined in ${sym2.owner}. You may want to rename your type parameter, or possibly remove it.")
+ case _ =>
+ }
+ }
+ }
+
/** Report a type error.
*
* @param pos The position where to report the error
@@ -627,13 +640,13 @@ trait TypeDiagnostics {
case Import(expr, _) => expr.pos
case _ => ex.pos
}
- contextError(context0, pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+ context0.error(pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
if (sym == ObjectClass)
throw new FatalError("cannot redefine root "+sym)
}
case _ =>
- contextError(context0, ex.pos, ex)
+ context0.error(ex.pos, ex.msg)
}
}
}
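
A hypothetical class hitting the new warnTypeParameterShadow lint added in the TypeDiagnostics hunk above:

    class Container[T] {
      // warning: type parameter T defined in method pick shadows type T defined in class Container.
      // You may want to rename your type parameter, or possibly remove it.
      def pick[T](x: T): T = x
    }
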
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 9f557f4aa5..27a574a449 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -14,7 +14,7 @@ package tools.nsc
package typechecker
import scala.collection.{mutable, immutable}
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance, ListOfNil }
import mutable.ListBuffer
import symtab.Flags._
import Mode._
@@ -76,7 +76,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case s : SilentTypeError => f(s.reportableErrors)
}
}
- class SilentTypeError private(val errors: List[AbsTypeError]) extends SilentResult[Nothing] {
+ class SilentTypeError private(val errors: List[AbsTypeError], val warnings: List[(Position, String)]) extends SilentResult[Nothing] {
override def isEmpty = true
def err: AbsTypeError = errors.head
def reportableErrors = errors match {
@@ -87,10 +87,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
object SilentTypeError {
- def apply(errors: AbsTypeError*): SilentTypeError = new SilentTypeError(errors.toList)
+ def apply(errors: AbsTypeError*): SilentTypeError = apply(errors.toList, Nil)
+ def apply(errors: List[AbsTypeError], warnings: List[(Position, String)]): SilentTypeError = new SilentTypeError(errors, warnings)
+ // todo: this extracts only one error, should be a separate extractor.
def unapply(error: SilentTypeError): Option[AbsTypeError] = error.errors.headOption
}
+ // todo: should include reporter warnings in SilentResultValue.
+ // e.g. tryTypedApply could print warnings on arguments when the typing succeeds.
case class SilentResultValue[+T](value: T) extends SilentResult[T] { override def isEmpty = false }
def newTyper(context: Context): Typer = new NormalTyper(context)
@@ -147,7 +151,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
+ val res = if (paramFailed || (paramTp.isErroneous && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
argResultsBuff += res
if (res.isSuccess) {
@@ -155,21 +159,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
} else {
mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
if (!param.hasDefault && !paramFailed) {
- context.reportBuffer.errors.collectFirst {
- case dte: DivergentImplicitTypeError => dte
- } match {
- case Some(divergent) =>
- // DivergentImplicit error has higher priority than "no implicit found"
- // no need to issue the problem again if we are still in silent mode
- if (context.reportErrors) {
- context.issue(divergent.withPt(paramTp))
- context.reportBuffer.clearErrors {
- case dte: DivergentImplicitTypeError => true
- }
- }
- case _ =>
- NoImplicitFoundError(fun, param)
- }
+ context.reporter.reportFirstDivergentError(fun, param, paramTp)(context)
paramFailed = true
}
/* else {
@@ -197,7 +187,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
!from.isError
&& !to.isError
&& context.implicitsEnabled
- && (inferView(EmptyTree, from, to, reportAmbiguous = false) != EmptyTree)
+ && (inferView(context.tree, from, to, reportAmbiguous = false, saveErrors = true) != EmptyTree)
+ // SI-8230 / SI-8463 We'd like to change this to `saveErrors = false`, but can't.
+ // For now, we can at least pass in `context.tree` rather than `EmptyTree` so as
+ // to avoid unpositioned type errors.
)
def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
@@ -252,7 +245,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case TypeRef(_, sym, _) if sym.isAliasType =>
val tp0 = tp.dealias
if (tp eq tp0) {
- debugwarn(s"dropExistential did not progress dealiasing $tp, see SI-7126")
+ devWarning(s"dropExistential did not progress dealiasing $tp, see SI-7126")
tp
} else {
val tp1 = dropExistential(tp0)
@@ -475,20 +468,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (cond) typerWithLocalContext(c)(f) else f(this)
@inline
- final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = {
- val res = f(newTyper(c))
- if (c.hasErrors)
- context.updateBuffer(c.flushAndReturnBuffer())
- res
- }
-
- @inline
- final def withSavedContext[T](c: Context)(f: => T) = {
- val savedErrors = c.flushAndReturnBuffer()
- val res = f
- c.updateBuffer(savedErrors)
- res
- }
+ final def typerWithLocalContext[T](c: Context)(f: Typer => T): T =
+ c.reporter.propagatingErrorsTo(context.reporter)(f(newTyper(c)))
/** The typer for a label definition. If this is part of a template we
* first have to enter the label definition.
@@ -681,6 +662,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (Statistics.canEnable) Statistics.stopCounter(subtypeFailed, subtypeStart)
if (Statistics.canEnable) Statistics.stopTimer(failedSilentNanos, failedSilentStart)
}
+ @inline def wrapResult(reporter: ContextReporter, result: T) =
+ if (reporter.hasErrors) {
+ stopStats()
+ SilentTypeError(reporter.errors.toList, reporter.warnings.toList)
+ } else SilentResultValue(result)
+
try {
if (context.reportErrors ||
reportAmbiguousErrors != context.ambiguousErrors ||
@@ -694,20 +681,17 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- if (context1.hasErrors) {
- stopStats()
- SilentTypeError(context1.errors: _*)
- } else {
- // If we have a successful result, emit any warnings it created.
- context1.flushAndIssueWarnings()
- SilentResultValue(result)
- }
+
+ // If we have a successful result, emit any warnings it created.
+ if (!context1.reporter.hasErrors)
+ context1.reporter.emitWarnings()
+
+ wrapResult(context1.reporter, result)
} else {
assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
- withSavedContext(context){
- val res = op(this)
- val errorsToReport = context.flushAndReturnBuffer()
- if (errorsToReport.isEmpty) SilentResultValue(res) else SilentTypeError(errorsToReport.head)
+
+ context.reporter.withFreshErrorBuffer {
+ wrapResult(context.reporter, op(this))
}
}
} catch {
@@ -738,26 +722,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
def action(): Boolean = {
def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess
- def hasOption = settings.language.value exists (s => s == featureName || s == "_")
+ def hasOption = settings.language contains featureName
val OK = hasImport || hasOption
if (!OK) {
val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
featureTrait getAnnotation LanguageFeatureAnnot
- val req = if (required) "needs to" else "should"
- val fqname = "scala.language." + featureName
- val explain = (
- if (currentRun.reportedFeature contains featureTrait) "" else
- s"""|
- |This can be achieved by adding the import clause 'import $fqname'
- |or by setting the compiler option -language:$featureName.
- |See the Scala docs for value $fqname for a discussion
- |why the feature $req be explicitly enabled.""".stripMargin
- )
- currentRun.reportedFeature += featureTrait
-
- val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
- if (required) unit.error(pos, msg)
- else currentRun.featureWarnings.warn(pos, msg)
+ context.featureWarning(pos, featureName, featureDesc, featureTrait, construct, required)
}
OK
}
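// Illustrative only: the two ways the OK check above is satisfied, either by importing
// the feature evidence (hasImport) or by compiling with the matching option (hasOption).
import scala.language.higherKinds          // brings the implicit feature evidence into scope
// equivalently: pass -language:higherKinds on the scalac command line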
@@ -775,6 +745,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ =>
}
+ /**
+ * Convert a SAM type to the corresponding FunctionType,
+ * extrapolating BoundedWildcardTypes in the process
+ * (no type precision is lost by the extrapolation,
+ * but this facilitates dealing with the types arising from Java's use-site variance).
+ */
+ def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = {
+ val samSym = sam orElse samOf(tp)
+
+ def correspondingFunctionSymbol = {
+ val numVparams = samSym.info.params.length
+ if (numVparams > definitions.MaxFunctionArity) NoSymbol
+ else FunctionClass(numVparams)
+ }
+
+ if (samSym.exists && samSym.owner != correspondingFunctionSymbol) // don't treat Functions as SAMs
+ wildcardExtrapolation(normalize(tp memberInfo samSym))
+ else NoType
+ }
+
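// Illustrative only (the trait name here is made up): given a SAM type such as
trait Consumer { def accept(s: String): Unit }
// samToFunctionType is expected to map typeOf[Consumer] to the function type
// String => Unit, i.e. the SAM's memberInfo normalized against the FunctionN class
// of matching arity, and to return NoType when the type is itself a FunctionN.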
/** Perform the following adaptations of expression, pattern or type `tree` wrt to
* given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
@@ -827,18 +817,28 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
// avoid throwing spurious DivergentImplicit errors
- if (context.hasErrors)
+ if (context.reporter.hasErrors)
setError(tree)
else
withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
if (original != EmptyTree && pt != WildcardType) (
typer1 silent { tpr =>
val withImplicitArgs = tpr.applyImplicitArgs(tree)
- if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+ if (tpr.context.reporter.hasErrors) tree // silent will wrap it in SilentTypeError anyway
else tpr.typed(withImplicitArgs, mode, pt)
}
orElse { _ =>
val resetTree = resetAttrs(original)
+ resetTree match {
+ case treeInfo.Applied(fun, targs, args) =>
+ if (fun.symbol != null && fun.symbol.isError)
+ // SI-9041 Without this, we leak error symbols past the typer!
+ // because the fallback typechecking notices the error-symbol,
+ // refuses to re-attempt typechecking, and presumes that someone
+ // else was responsible for issuing the related type error!
+ fun.setSymbol(NoSymbol)
+ case _ =>
+ }
debuglog(s"fallback on implicits: ${tree}/$resetTree")
val tree1 = typed(resetTree, mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
@@ -858,7 +858,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
+ if (!meth.isConstructor && (isFunctionType(pt) || samOf(pt).exists)) { // (4.2)
debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
@@ -884,13 +884,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptType(): Tree = {
// @M When not typing a type constructor (!context.inTypeConstructorAllowed)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // or raw type, types must be of kind *,
// and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
def properTypeRequired = (
tree.hasSymbolField
&& !context.inTypeConstructorAllowed
- && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ && !context.unit.isJava
)
// @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
// (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
@@ -952,16 +952,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
def adaptConstant(value: Constant): Tree = {
val sym = tree.symbol
- if (sym != null && sym.isDeprecated) {
- val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
- unit.deprecationWarning(tree.pos, msg)
- }
+ if (sym != null && sym.isDeprecated)
+ context.deprecationWarning(tree.pos, sym)
+
treeCopy.Literal(tree, value)
}
// Ignore type errors raised in later phases that are due to mismatching types with existential skolems
// We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
+ // Here's my hypothesis why this happens. The pattern matcher defines a variable of type
//
// val x: T = expr
//
@@ -1040,12 +1039,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// to non-continuation types.
if (tree.tpe <:< AnyTpe) pt.dealias match {
case TypeRef(_, UnitClass, _) => // (12)
- if (settings.warnValueDiscard)
- context.unit.warning(tree.pos, "discarded non-Unit value")
+ if (!isPastTyper && settings.warnValueDiscard)
+ context.warning(tree.pos, "discarded non-Unit value")
return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(()))))
case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) =>
- if (settings.warnNumericWiden)
- context.unit.warning(tree.pos, "implicit numeric widening")
+ if (!isPastTyper && settings.warnNumericWiden)
+ context.warning(tree.pos, "implicit numeric widening")
return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name))
case _ =>
}
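// Illustrative triggers for the two warnings guarded above (now skipped past typer);
// the flag spellings -Ywarn-value-discard / -Ywarn-numeric-widen are the 2.11 names.
def g(): Unit = List(1, 2, 3).map(_ + 1)   // discarded non-Unit value
val i = 1; val d: Double = i               // implicit numeric widening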
@@ -1063,13 +1062,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case coercion =>
def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
if (settings.logImplicitConv)
- unit.echo(tree.pos, msg)
+ context.echo(tree.pos, msg)
debuglog(msg)
val silentContext = context.makeImplicit(context.ambiguousErrors)
val res = newTyper(silentContext).typed(
new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
- silentContext.firstError match {
+ silentContext.reporter.firstError match {
case Some(err) => context.issue(err)
case None => return res
}
@@ -1135,7 +1134,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
adaptConstant(value)
case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
inferExprAlternative(tree, pt)
- adapt(tree, mode, pt, original)
+ adaptAfterOverloadResolution(tree, mode, pt, original)
case NullaryMethodType(restpe) => // (2)
adapt(tree setType restpe, mode, pt, original)
case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
@@ -1168,6 +1167,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
+ // This just exists to help keep track of the spots where we have to adapt a tree after
+ // overload resolution. These proved hard to find during the fix for SI-8267.
+ def adaptAfterOverloadResolution(tree: Tree, mode: Mode, pt: Type = WildcardType, original: Tree = EmptyTree): Tree = {
+ adapt(tree, mode, pt, original)
+ }
+
def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
inferExprInstance(tree, context.extractUndetparams(), pt)
adapt(tree, mode, pt)
@@ -1186,7 +1191,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
- if (mode.typingExprNotFun && pt.typeSymbol == UnitClass)
+ if (mode.typingExprNotFun && pt.typeSymbol == UnitClass && !tree.tpe.isInstanceOf[MethodType])
instantiateExpectingUnit(tree, mode)
else
instantiate(tree, mode, pt)
@@ -1223,7 +1228,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case EmptyTree => qual
case coercion =>
if (settings.logImplicitConv)
- unit.echo(qual.pos,
+ context.echo(qual.pos,
"applied implicit conversion from %s to %s = %s".format(
qual.tpe, searchTemplate, coercion.symbol.defString))
@@ -1288,7 +1293,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
- unit.error(clazz.pos, (
+ context.error(clazz.pos, (
"case %s has case ancestor %s, but case-to-case inheritance is prohibited."+
" To overcome this limitation, use extractors to pattern match on non-leaf nodes."
).format(clazz, ancestor.fullName))
@@ -1305,7 +1310,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val isValueClass = !clazz.isTrait
def where = if (isValueClass) "value class" else "universal trait extending from class Any"
def implRestriction(tree: Tree, what: String) =
- unit.error(tree.pos, s"implementation restriction: $what is not allowed in $where" +
+ context.error(tree.pos, s"implementation restriction: $what is not allowed in $where" +
"\nThis restriction is planned to be removed in subsequent releases.")
/**
* Deeply traverses the tree in search of constructs that are not allowed
@@ -1334,7 +1339,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
for (stat <- body) {
- def notAllowed(what: String) = unit.error(stat.pos, s"$what is not allowed in $where")
+ def notAllowed(what: String) = context.error(stat.pos, s"$what is not allowed in $where")
stat match {
// see https://issues.scala-lang.org/browse/SI-6444
// see https://issues.scala-lang.org/browse/SI-6463
@@ -1362,9 +1367,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
private def validateDerivedValueClass(clazz: Symbol, body: List[Tree]) = {
if (clazz.isTrait)
- unit.error(clazz.pos, "only classes (not traits) are allowed to extend AnyVal")
+ context.error(clazz.pos, "only classes (not traits) are allowed to extend AnyVal")
if (!clazz.isStatic)
- unit.error(clazz.pos, "value class may not be a "+
+ context.error(clazz.pos, "value class may not be a "+
(if (clazz.owner.isTerm) "local class" else "member of another class"))
if (!clazz.isPrimitiveValueClass) {
clazz.primaryConstructor.paramss match {
@@ -1372,26 +1377,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val decls = clazz.info.decls
val paramAccessor = clazz.constrParamAccessors.head
if (paramAccessor.isMutable)
- unit.error(paramAccessor.pos, "value class parameter must not be a var")
+ context.error(paramAccessor.pos, "value class parameter must not be a var")
val accessor = decls.toList.find(x => x.isMethod && x.accessedOrSelf == paramAccessor)
accessor match {
case None =>
- unit.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]")
+ context.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]")
case Some(acc) if acc.isProtectedLocal =>
- unit.error(paramAccessor.pos, "value class parameter must not be protected[this]")
+ context.error(paramAccessor.pos, "value class parameter must not be protected[this]")
case Some(acc) =>
if (acc.tpe.typeSymbol.isDerivedValueClass)
- unit.error(acc.pos, "value class may not wrap another user-defined value class")
+ context.error(acc.pos, "value class may not wrap another user-defined value class")
checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor))
}
case _ =>
- unit.error(clazz.pos, "value class needs to have exactly one val parameter")
+ context.error(clazz.pos, "value class needs to have exactly one val parameter")
}
}
for (tparam <- clazz.typeParams)
if (tparam hasAnnotation definitions.SpecializedClass)
- unit.error(tparam.pos, "type parameter of value class may not be specialized")
+ context.error(tparam.pos, "type parameter of value class may not be specialized")
}
/** Typechecks a parent type reference.
@@ -1545,7 +1550,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+ // SI-9086 The position of this symbol is material: implicit search will avoid triggering
+ // cyclic errors in an implicit search in argument to the super constructor call on
+ // account of the "ignore symbols without complete info that succeed the implicit search"
+ // in this source file. See `ImplicitSearch#isValid` and `ImplicitInfo#isCyclicOrErroneous`.
+ val dummy = context.outer.owner.newLocalDummy(context.owner.pos)
+ val cscope = context.outer.makeNewScope(ctor, dummy)
+ if (dummy.isTopLevel) currentRun.symSource(dummy) = currentUnit.source.file
val cbody2 = { // called both during completion AND typing.
val typer1 = newTyper(cscope)
// XXX: see about using the class's symbol....
@@ -1684,10 +1695,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val sameSourceFile = context.unit.source.file == psym.sourceFile
- if (psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
+ if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation &&
+ !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
- unit.deprecationWarning(parent.pos, msg)
+ context.deprecationWarning(parent.pos, psym, msg)
}
if (psym.isSealed && !phase.erasedTypes)
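// Illustrative only: the situation the refined deprecated-inheritance check covers.
@deprecatedInheritance("extend the replacement instead", "2.11.0")
class Legacy
class Mine extends Legacy   // warns, unless declared in the same source file or inside
                            // an enclosing definition that is itself deprecated (or a bridge)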
@@ -1754,13 +1766,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
if (!clazz.owner.isPackageClass)
- unit.error(clazz.pos, "inner classes cannot be classfile annotations")
- else restrictionWarning(cdef.pos, unit,
+ context.error(clazz.pos, "inner classes cannot be classfile annotations")
+ // Ignore @SerialVersionUID, because it is special-cased and handled completely differently.
+ // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement
+ // of constant argument values "for free". Related to SI-7041.
+ else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit,
"""|subclassing Classfile does not
|make your annotation visible at runtime. If that is what
|you want, you must write the annotation class in Java.""".stripMargin)
}
+ warnTypeParameterShadow(tparams1, clazz)
+
if (!isPastTyper) {
for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
val m = companionSymbolOf(clazz, context)
@@ -1809,7 +1826,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
private def ensurePredefParentsAreInSameSourceFile(template: Template) = {
val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass)
if (parentSyms exists (_.associatedFile != PredefModule.associatedFile))
- unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
+ context.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
}
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
* all the time, it is exposed here the module/class typing methods go through it.
@@ -1880,7 +1897,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
ConstrArgsInParentOfTraitError(parents1.head, clazz)
if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
- unit.error(clazz.pos, "inner classes cannot be classfile annotations")
+ context.error(clazz.pos, "inner classes cannot be classfile annotations")
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
@@ -1908,7 +1925,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (clazz.isTrait) {
for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
- unit.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.")
+ context.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.")
}
}
@@ -2027,7 +2044,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (mexists(vparamss)(_.symbol == superArg.symbol)) {
val alias = (
superAcc.initialize.alias
- orElse (superAcc getter superAcc.owner)
+ orElse (superAcc getterIn superAcc.owner)
filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
)
if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
@@ -2036,7 +2053,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case acc => acc
}
ownAcc match {
- case acc: TermSymbol if !acc.isVariable =>
+ case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) =>
debuglog(s"$acc has alias ${alias.fullLocationString}")
acc setAlias alias
case _ =>
@@ -2096,7 +2113,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
}
def fail(pos: Position, msg: String): Boolean = {
- unit.error(pos, msg)
+ context.error(pos, msg)
false
}
/* Have to examine all parameters in all lists.
@@ -2163,6 +2180,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
+ warnTypeParameterShadow(tparams1, meth)
+
meth.annotations.map(_.completeInfo())
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
@@ -2239,6 +2258,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val typedMods = typedModifiers(tdef.mods)
tdef.symbol.annotations.map(_.completeInfo())
+ warnTypeParameterShadow(tparams1, tdef.symbol)
+
// @specialized should not be pickled when compiling with -no-specialize
if (settings.nospecialization && currentRun.compiles(tdef.symbol)) {
tdef.symbol.removeAnnotation(definitions.SpecializedClass)
@@ -2479,7 +2500,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
* however, note that pattern matching codegen is designed to run *before* uncurry
*/
- def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = {
+ def synthesizePartialFunction(paramName: TermName, paramPos: Position, paramSynthetic: Boolean,
+ tree: Tree, mode: Mode, pt: Type): Tree = {
assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
val targs = pt.dealiasWiden.typeArgs
@@ -2507,7 +2529,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef])
// must generate a new tree every time
- def selector: Tree = gen.mkUnchecked(
+ def selector(paramSym: Symbol): Tree = gen.mkUnchecked(
if (sel != EmptyTree) sel.duplicate
else atPos(tree.pos.focusStart)(
// SI-6925: subsume type of the selector to `argTp`
@@ -2518,7 +2540,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// hence the cast, which will be erased in posterasure
// (the cast originally caused extremely weird types to show up
// in test/scaladoc/run/SI-5933.scala because `variantToSkolem` was missing `tpSym.initialize`)
- gen.mkCastPreservingAnnotations(Ident(paramName), argTp)
+ gen.mkCastPreservingAnnotations(Ident(paramSym), argTp)
))
def mkParam(methodSym: Symbol, tp: Type = argTp) =
@@ -2543,17 +2565,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
val paramSyms = List(x, default)
- methodSym setInfo polyType(List(A1, B1), MethodType(paramSyms, B1.tpe))
+ methodSym setInfo genPolyType(List(A1, B1), MethodType(paramSyms, B1.tpe))
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
- // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
- paramSyms foreach (methodBodyTyper.context.scope enter _)
+ if (!paramSynthetic) methodBodyTyper.context.scope enter x
// First, type without the default case; only the cases provided
// by the user are typed. The LUB of these becomes `B`, the lower
// bound of `B1`, which in turn is the result type of the default
// case
- val match0 = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ val match0 = methodBodyTyper.typedMatch(selector(x), cases, mode, resTp)
val matchResTp = match0.tpe
B1 setInfo TypeBounds.lower(matchResTp) // patch info
@@ -2627,11 +2648,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val paramSym = mkParam(methodSym)
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
- methodBodyTyper.context.scope enter paramSym
+ if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym
methodSym setInfo MethodType(List(paramSym), BooleanTpe)
val defaultCase = mkDefaultCase(FALSE)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanTpe)
+ val match_ = methodBodyTyper.typedMatch(selector(paramSym), casesTrue :+ defaultCase, mode, BooleanTpe)
DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe))
}
@@ -2645,10 +2666,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
methodSym setInfo MethodType(List(paramSym), AnyTpe)
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
- // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
- methodBodyTyper.context.scope enter paramSym
+ if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym
- val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ val match_ = methodBodyTyper.typedMatch(selector(paramSym), cases, mode, resTp)
val matchResTp = match_.tpe
methodSym setInfo MethodType(List(paramSym), matchResTp) // patch info
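// Illustrative only: the surface syntax that synthesizePartialFunction expands here.
val pf: PartialFunction[Int, String] = { case 1 => "one" }
// The user-written cases become the bodies of the synthesized applyOrElse and
// isDefinedAt methods of an anonymous PartialFunction subclass.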
@@ -2699,7 +2719,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* `{
* def apply$body(p1: T1, ..., pN: TN): T = body
* new S {
- * def apply(p1: T1, ..., pN: TN): T = apply$body(p1,..., pN)
+ * def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN)
* }
* }`
*
@@ -2709,6 +2729,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
*
* The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
* and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
+ * If it is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters.
+ *
+ * The types T1' ... TN' and T' are derived from the method signature of the sam method,
+ * as seen from the fully defined `samClassTpFullyDefined`.
*
* The function's body is put in a method outside of the class definition to enforce scoping.
* S's members should not be in scope in `body`.
@@ -2720,6 +2744,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* However T must be fully defined before we type the instantiation, as it'll end up as a parent type,
* which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code,
* and have the instantiation of the first occurrence propagate to the rest of the block.
+ *
+ * TODO: by-name params
+ * scala> trait LazySink { def accept(a: => Any): Unit }
+ * defined trait LazySink
+ *
+ * scala> val f: LazySink = (a) => (a, a)
+ * f: LazySink = $anonfun$1@1fb26910
+ *
+ * scala> f(println("!"))
+ * <console>:10: error: LazySink does not take parameters
+ * f(println("!"))
+ * ^
+ *
+ * scala> f.accept(println("!"))
+ * !
+ * !
*/
def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = {
// assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info
@@ -2800,14 +2840,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
samClassTp
}
- // `final override def ${sam.name}($p1: $T1, ..., $pN: $TN): $resPt = ${sam.name}\$body'($p1, ..., $pN)`
+ // what's the signature of the method that we should actually be overriding?
+ val samMethTp = samClassTpFullyDefined memberInfo sam
+ // Before the mutation, `tp <:< vpar.tpt.tpe` should hold.
+ // TODO: error message when this is not the case, as the expansion won't type check
+ // - Ti' <:< Ti and T <: T' must hold for the samDef body to type check
+ val funArgTps = foreach2(samMethTp.paramTypes, fun.vparams)((tp, vpar) => vpar.tpt setType tp)
+
+ // `final override def ${sam.name}($p1: $T1', ..., $pN: $TN'): ${samMethTp.finalResultType} = ${sam.name}\$body'($p1, ..., $pN)`
val samDef =
DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC),
sam.name.toTermName,
Nil,
List(fun.vparams),
- TypeTree(samBodyDef.tpt.tpe) setPos sampos.focus,
- Apply(Ident(bodyName), fun.vparams map (p => Ident(p.name)))
+ TypeTree(samMethTp.finalResultType) setPos sampos.focus,
+ Apply(Ident(bodyName), fun.vparams map gen.paramToArg)
)
val serializableParentAddendum =
@@ -2818,7 +2865,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
ClassDef(Modifiers(FINAL), tpnme.ANON_FUN_NAME, tparams = Nil,
gen.mkTemplate(
parents = TypeTree(samClassTpFullyDefined) :: serializableParentAddendum,
- self = emptyValDef,
+ self = noSelfType,
constrMods = NoMods,
vparamss = ListOfNil,
body = List(samDef),
@@ -2837,6 +2884,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
)
}
+ // TODO: improve error reporting -- when we're in silent mode (from `silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError`)
+ // the errors in the function don't get out...
+ if (block exists (_.isErroneous))
+ context.error(fun.pos, s"Could not derive subclass of $samClassTp\n (with SAM `def $sam$samMethTp`)\n based on: $fun.")
+
classDef.symbol addAnnotation SerialVersionUIDAnnotation
block
}
@@ -2857,7 +2909,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* as `(a => a): Int => Int` should not (yet) get the sam treatment.
*/
val sam =
- if (!settings.Xexperimental || pt.typeSymbol == FunctionSymbol) NoSymbol
+ if (pt.typeSymbol == FunctionSymbol) NoSymbol
else samOf(pt)
/* The SAM case comes first so that this works:
@@ -2867,15 +2919,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* Note that the arity of the sam must correspond to the arity of the function.
*/
val samViable = sam.exists && sameLength(sam.info.params, fun.vparams)
+ val ptNorm = if (samViable) samToFunctionType(pt, sam) else pt
val (argpts, respt) =
- if (samViable) {
- val samInfo = pt memberInfo sam
- (samInfo.paramTypes, samInfo.resultType)
- } else {
- pt baseType FunctionSymbol match {
- case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
- case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
- }
+ ptNorm baseType FunctionSymbol match {
+ case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
+ case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
}
if (!FunctionSymbol.exists)
@@ -2886,7 +2934,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
var issuedMissingParameterTypeError = false
foreach2(fun.vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
- vparam.tpt.tpe =
+ val vparamType =
if (isFullyDefined(argpt)) argpt
else {
fun match {
@@ -2905,6 +2953,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
issuedMissingParameterTypeError = true
ErrorType
}
+ vparam.tpt.setType(vparamType)
if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
}
}
@@ -2920,7 +2969,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val p = fun.vparams.head
if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
- outerTyper.synthesizePartialFunction(p.name, p.pos, fun.body, mode, pt)
+ outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt)
// Use synthesizeSAMFunction to expand `(p1: T1, ..., pN: TN) => body`
// to an instance of the corresponding anonymous subclass of `pt`.
@@ -3003,7 +3052,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
ConstructorsOrderError(stat)
}
- if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
+ if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
"a pure expression does nothing in statement position; " +
"you may be omitting necessary parentheses"
)
@@ -3020,7 +3069,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
|| (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
)
- def checkNoDoubleDefs(stats: List[Tree]): Unit = {
+ def checkNoDoubleDefs: Unit = {
val scope = if (inBlock) context.scope else context.owner.info.decls
var e = scope.elems
while ((e ne null) && e.owner == scope) {
@@ -3055,8 +3104,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// the corresponding synthetics to the package class, only to the package object class.
def shouldAdd(sym: Symbol) =
inBlock || !context.isInPackageObject(sym, context.owner)
- for (sym <- scope if shouldAdd(sym))
- for (tree <- context.unit.synthetics get sym) {
+ for (sym <- scope)
+ for (tree <- context.unit.synthetics get sym if shouldAdd(sym)) { // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop
newStats += typedStat(tree) // might add even more synthetics to the scope
context.unit.synthetics -= sym
}
@@ -3102,7 +3151,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val stats1 = stats mapConserve typedStat
if (phase.erasedTypes) stats1
else {
- checkNoDoubleDefs(stats1)
+ // As packages are open, it doesn't make sense to check double definitions here. Furthermore,
+ // it is expensive if the package is large. Instead, such double definitions are checked in `Namers.enterInScope`
+ if (!context.owner.isPackageClass)
+ checkNoDoubleDefs
addSynthetics(stats1)
}
}
@@ -3159,7 +3211,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
// TODO_NMT: check the assumption that args nonEmpty
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
- def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+ def duplErrorTree(err: AbsTypeError) = { context.issue(err); duplErrTree }
def preSelectOverloaded(fun: Tree): Tree = {
if (fun.hasSymbolField && fun.symbol.isOverloaded) {
@@ -3198,7 +3250,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (sym1 != NoSymbol) sym = sym1
}
if (sym == NoSymbol) fun
- else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType)
+ else adaptAfterOverloadResolution(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode)
} else fun
}
@@ -3239,15 +3291,31 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
(arg1, arg1.tpe.deconst)
}.unzip
}
- if (context.hasErrors)
+ if (context.reporter.hasErrors)
setError(tree)
else {
inferMethodAlternative(fun, undetparams, argTpes, pt)
- doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
+ doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, mode, pt)
}
}
handleOverloaded
+ case _ if isPolymorphicSignature(fun.symbol) =>
+ // Mimics Java's treatment of polymorphic signatures as described in
+ // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3
+ //
+ // One can think of these methods as being infinitely overloaded. We create
+ // a ficticious new cloned method symbol for each call site that takes on a signature
+ // governed by a) the argument types and b) the expected type
+ val args1 = typedArgs(args, forArgMode(fun, mode))
+ val pts = args1.map(_.tpe.deconst)
+ val clone = fun.symbol.cloneSymbol
+ val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt))
+ val resultType = if (isFullyDefined(pt)) pt else ObjectTpe
+ clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType))
+ val fun1 = fun.setSymbol(clone).setType(clone.info)
+ doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType)
+
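// Illustrative only: the kind of polymorphic-signature call the new case is meant to
// support; how invokeExact is typed from Scala is exactly what this change governs.
import java.lang.invoke.{MethodHandles, MethodType}
val mh = MethodHandles.lookup.findVirtual(
  classOf[String], "length", MethodType.methodType(classOf[Int]))
val len = (mh.invokeExact("hello"): Int)   // signature derived from argument and expected types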
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
@@ -3258,25 +3326,25 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* to that. This is the last thing which is tried (after
* default arguments)
*/
- def tryTupleApply: Tree = (
+ def tryTupleApply: Tree = {
if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
// expected one argument, but got 0 or >1 ==> try applying to tuple
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
- // Depending on user options, may warn or error here if
- // a Unit or tuple was inserted.
- val keepTree = (
- !mode.typingExprNotFun
- || t.symbol == null
- || checkValidAdaptation(t, args)
- )
- if (keepTree) t else EmptyTree
+ // Depending on user options, may warn or error here if
+ // a Unit or tuple was inserted.
+ val keepTree = (
+ !mode.typingExprNotFun // why? introduced in 4e488a60, doc welcome
+ || t.symbol == null // ditto
+ || checkValidAdaptation(t, args)
+ )
+ if (keepTree) t else EmptyTree
} orElse { _ => context.undetparams = savedUndetparams ; EmptyTree }
}
else EmptyTree
- )
+ }
/* Treats an application which uses named or default arguments.
* Also works if names + a vararg used: when names are used, the vararg
@@ -3320,7 +3388,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// defaults are needed. they are added to the argument list in named style as
// calls to the default getters. Example:
// foo[Int](a)() ==> foo[Int](a)(b = foo$qual.foo$default$2[Int](a))
- checkNotMacro()
// SI-8111 transformNamedApplication eagerly shuffles around the application to preserve
// evaluation order. During this process, it calls `changeOwner` on symbols that
@@ -3367,8 +3434,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
duplErrTree
} else if (lencmp2 == 0) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
- val note = "Error occurred in an application involving default arguments."
- if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
+ checkNotMacro()
+ context.diagUsedDefaults = true
doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
} else {
rollbackNamesDefaultsOwnerChanges()
@@ -3675,7 +3742,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
- unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.")
+ context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.")
if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
else annInfo(typedAnn)
@@ -3744,8 +3811,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case TypeRef(pre, sym, args) =>
if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias)
else {
- if (pre.isVolatile)
- InferTypeWithVolatileTypeSelectionError(tree, pre)
+ if (pre.isVolatile) pre match {
+ case SingleType(_, sym) if sym.isSynthetic && isPastTyper =>
+ debuglog(s"ignoring volatility of prefix in pattern matcher generated inferred type: $tp") // See pos/t7459c.scala
+ case _ =>
+ InferTypeWithVolatileTypeSelectionError(tree, pre)
+ }
mapOver(tp)
}
case _ =>
@@ -3827,7 +3898,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
inferPolyAlternatives(fun, mapList(args)(treeTpe))
- val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
+
+ // SI-8267 `memberType` can introduce existentials *around* a PolyType/MethodType, see AsSeenFromMap#captureThis.
+ // If we had selected a non-overloaded symbol, `memberType` would have been called in `makeAccessible`
+ // and the resulting existential type would have been skolemized in `adapt` *before* we typechecked
+ // the enclosing type-/ value- application.
+ //
+ // However, if the selection is overloaded, we defer calling `memberType` until we can select a single
+ // alternative here. It is therefore necessary to skolemize the existential here.
+ //
+ val fun1 = adaptAfterOverloadResolution(fun, mode.forFunMode | TAPPmode)
+
+ val tparams = fun1.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
val args1 = if (sameLength(args, tparams)) {
//@M: in case TypeApply we can't check the kind-arities of the type arguments,
// as we don't know which alternative to choose... here we do
@@ -3841,7 +3923,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// ...actually this was looping anyway, see bug #278.
return TypedApplyWrongNumberOfTpeParametersError(fun, fun)
- typedTypeApply(tree, mode, fun, args1)
+ typedTypeApply(tree, mode, fun1, args1)
case SingleType(_, _) =>
typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
case PolyType(tparams, restpe) if tparams.nonEmpty =>
@@ -4207,7 +4289,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val cases = tree.cases
if (selector == EmptyTree) {
if (pt.typeSymbol == PartialFunctionClass)
- synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
+ synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt)
else {
val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
@@ -4219,7 +4301,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
// SI-8120 If we don't duplicate the cases, the original Match node will share trees with ones that
// receive symbols owned by this function. However if, after a silent mode session, we discard
- // this Function and try a different approach (e.g. applying a view to the reciever) we end up
+ // this Function and try a different approach (e.g. applying a view to the receiver) we end up
// with orphaned symbols which blows up far down the pipeline (or can be detected with -Ycheck:typer).
val body = treeCopy.Match(tree, selector1, (cases map duplicateAndKeepPositions).asInstanceOf[List[CaseDef]])
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
@@ -4249,7 +4331,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// it is non-Unit) so we have to retype it. Fortunately it won't come up much
// unless the warning is legitimate.
if (typed(expr).tpe.typeSymbol != UnitClass)
- unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
+ context.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe)
@@ -4284,7 +4366,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
case ValDef(mods, _, _, Apply(Select(`tree`, _), _)) if !mods.isMutable && sym != null && sym != NoSymbol =>
- val sym1 = if (sym.owner.isClass && sym.getter(sym.owner) != NoSymbol) sym.getter(sym.owner)
+ val sym1 = if (sym.owner.isClass && sym.getterIn(sym.owner) != NoSymbol) sym.getterIn(sym.owner)
else sym.lazyAccessorOrSelf
val pre = if (sym1.owner.isClass) sym1.owner.thisType else NoPrefix
intersectionType(List(tp, singleType(pre, sym1)))
@@ -4341,7 +4423,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
c.retyping = true
try {
val res = newTyper(c).typedArgs(args, mode)
- if (c.hasErrors) None else Some(res)
+ if (c.reporter.hasErrors) None else Some(res)
} catch {
case ex: CyclicReference =>
throw ex
@@ -4358,7 +4440,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
- def onError(typeErrors: Seq[AbsTypeError]): Tree = {
+ def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String)]): Tree = {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
@@ -4394,7 +4476,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (retry) {
val Select(qual, name) = fun
tryTypedArgs(args, forArgMode(fun, mode)) match {
- case Some(args1) =>
+ case Some(args1) if !args1.exists(arg => arg.exists(_.isErroneous)) =>
val qual1 =
if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true)
else qual
@@ -4405,11 +4487,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ => ()
}
}
- typeErrors foreach issue
+ typeErrors foreach context.issue
+ warnings foreach { case (p, m) => context.warning(p, m) }
setError(treeCopy.Apply(tree, fun, args))
}
- silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError
+ silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
+ case SilentResultValue(value) => value
+ case e: SilentTypeError => onError(e.errors, e.warnings)
+ }
}
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
@@ -4459,7 +4545,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
doTypedApply(tree, fun2, args, mode, pt)
case err: SilentTypeError =>
onError({
- err.reportableErrors foreach issue
+ err.reportableErrors foreach context.issue
+ err.warnings foreach { case (p, m) => context.warning(p, m) }
args foreach (arg => typed(arg, mode, ErrorType))
setError(tree)
})
@@ -4696,7 +4783,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else
// before failing due to access, try a dynamic call.
asDynamicCall getOrElse {
- issue(accessibleError.get)
+ context.issue(accessibleError.get)
setError(tree)
}
case _ =>
@@ -4707,10 +4794,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// temporarily use `filter` as an alternative for `withFilter`
def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = {
- def warn() = unit.deprecationWarning(tree.pos, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
+ def warn(sym: Symbol) = context.deprecationWarning(tree.pos, sym, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ =>
silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match {
- case SilentResultValue(res) => warn() ; res
+ case SilentResultValue(res) => warn(res.symbol) ; res
case SilentTypeError(err) => WithFilterError(tree, err)
}
}
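// Illustrative only: a for-comprehension guard over a type that defines filter but not
// withFilter takes the deprecated fallback above.
class Box[A](val xs: List[A]) {
  def map[B](f: A => B): Box[B] = new Box(xs map f)
  def filter(p: A => Boolean): Box[A] = new Box(xs filter p)
}
for (x <- new Box(List(1, 2, 3)) if x > 1) yield x   // deprecation: `filter' used instead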
@@ -4794,10 +4881,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
(// this -> Foo.this
if (sym.isThisSym)
typed1(This(sym.owner) setPos tree.pos, mode, pt)
- // Inferring classOf type parameter from expected type. Otherwise an
- // actual call to the stubbed classOf method is generated, returning null.
- else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
- typedClassOf(tree, TypeTree(pt.typeArgs.head))
+ else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) {
+ // Inferring classOf type parameter from expected type. Otherwise an
+ // actual call to the stubbed classOf method is generated, returning null.
+ typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus))
+ }
else {
val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
@@ -5109,18 +5197,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def isPlausible(m: Symbol) = m.alternatives exists (m => requiresNoArgs(m.info))
def maybeWarn(s: String): Unit = {
- def warn(message: String) = context.unit.warning(lit.pos, s"$message Did you forget the interpolator?")
+ def warn(message: String) = context.warning(lit.pos, s"possible missing interpolator: $message")
def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol
def suspiciousExpr = InterpolatorCodeRegex findFirstIn s
- def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(s drop 1))
+ def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(TermName(s drop 1)))
- // heuristics - no warning on e.g. a string with only "$asInstanceOf"
- if (s contains ' ') (
- if (suspiciousExpr.nonEmpty)
- warn("That looks like an interpolated expression!") // "${...}"
- else
- suspiciousIdents find isPlausible foreach (sym => warn(s"`$$${sym.name}` looks like an interpolated identifier!")) // "$id"
- )
+ if (suspiciousExpr.nonEmpty)
+ warn("detected an interpolated expression") // "${...}"
+ else
+ suspiciousIdents find isPlausible foreach (sym => warn(s"detected interpolated identifier `$$${sym.name}`")) // "$id"
}
lit match {
case Literal(Constant(s: String)) if !isRecognizablyNotForInterpolation => maybeWarn(s)
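// Illustrative only: literals the reworded warning now reports, given the lint
// setting checked in typedLiteral below.
val name = "world"
val s1 = "hello $name"     // possible missing interpolator: detected interpolated identifier `$name`
val s2 = "sum: ${1 + 1}"   // possible missing interpolator: detected an interpolated expression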
@@ -5129,7 +5214,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
def typedLiteral(tree: Literal) = {
- if (settings.lint) warnMissingInterpolator(tree)
+ if (settings.warnMissingInterpolator) warnMissingInterpolator(tree)
tree setType (if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value))
}
@@ -5140,16 +5225,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
}
- if (!refTyped.isErrorTyped)
+ if (refTyped.isErrorTyped) {
+ setError(tree)
+ } else {
tree setType refTyped.tpe.resultType
-
- if (treeInfo.admitsTypeSelection(refTyped)) tree
- else UnstableTreeError(refTyped)
+ if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(tree)
+ }
}
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
val qual1 = typedType(tree.qualifier, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name))
+ else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
else typedSelect(tree, qual1, tree.name)
}
@@ -5161,7 +5249,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def typedExistentialTypeTree(tree: ExistentialTypeTree) = {
val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
- _.typedExistentialTypeTree(tree, mode)
+ typer =>
+ if (context.inTypeConstructorAllowed)
+ typer.context.withinTypeConstructorAllowed(typer.typedExistentialTypeTree(tree, mode))
+ else
+ typer.typedExistentialTypeTree(tree, mode)
}
checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
tree1
@@ -5291,8 +5383,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
)
def runTyper(): Tree = {
if (retypingOk) {
- tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
+ tree.setType(null)
+ if (tree.hasSymbolField) tree.symbol = NoSymbol
}
val alreadyTyped = tree.tpe ne null
val shouldPrint = !alreadyTyped && !phase.erasedTypes
@@ -5487,11 +5579,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val commonMessage = "macro defs must have explicitly specified return types"
def reportFailure() = {
ddef.symbol.setFlag(IS_ERROR)
- unit.error(ddef.pos, commonMessage)
+ context.error(ddef.pos, commonMessage)
}
def reportWarning(inferredType: Type) = {
val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12"
- unit.deprecationWarning(ddef.pos, s"$commonMessage ($explanation)")
+ context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)")
}
computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match {
case ErrorType => ErrorType
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index cc2d9141ce..22fb0728e6 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -7,6 +7,7 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
+import scala.reflect.internal.util.ListOfNil
/*
* @author Martin Odersky
@@ -142,17 +143,30 @@ trait Unapplies extends ast.TreeDSL {
/** The unapply method corresponding to a case class
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
- val tparams = constrTparamsInvariant(cdef)
- val method = constrParamss(cdef) match {
+ val tparams = constrTparamsInvariant(cdef)
+ val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
}
- val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
- val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
+ val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
+ val resultType = if (!settings.isScala212) TypeTree() else { // fix for SI-6541 under -Xsource:2.12
+ def repeatedToSeq(tp: Tree) = tp match {
+ case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps)
+ case _ => tp
+ }
+ constrParamss(cdef) match {
+ case Nil | Nil :: _ =>
+ gen.rootScalaDot(tpnme.Boolean)
+ case params :: _ =>
+ val constrParamTypes = params.map(param => repeatedToSeq(param.tpt))
+ AppliedTypeTree(gen.rootScalaDot(tpnme.Option), List(treeBuilder.makeTupleType(constrParamTypes)))
+ }
+ }
+ val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
- DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
+ DefDef(caseMods, method, tparams, List(cparams), resultType, body)
)
}
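For orientation, here is a hedged sketch of the extractor signatures this change spells out under -Xsource:2.12; the case classes are illustrative and not part of the patch:

    case class Empty()                 // synthesizes  def unapply(x$0: Empty): Boolean
    case class Point(x: Int, y: Int)   // synthesizes  def unapply(x$0: Point): Option[(Int, Int)]
    case class Args(xs: String*)       // synthesizes  def unapplySeq(x$0: Args): Option[Seq[String]]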
diff --git a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
new file mode 100644
index 0000000000..4451651229
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.util
+
+import scala.tools.nsc.io.AbstractFile
+import java.net.URL
+
+/**
+ * Simple interface that allows us to abstract over how class file lookup is performed
+ * in different classpath representations.
+ */
+// TODO: once the old classpath representation is removed, this trait should no longer be generic;
+// T should simply become AbstractFile
+trait ClassFileLookup[T] {
+ def findClassFile(name: String): Option[AbstractFile]
+
+ /**
+ * Returns the class with the given name as a ClassRepresentation, whether it is backed by a
+ * class file or a source file; it is therefore not strictly tied to findClassFile.
+ */
+ def findClass(name: String): Option[ClassRepresentation[T]]
+
+ /**
+ * A sequence of URLs representing this classpath.
+ */
+ def asURLs: Seq[URL]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClassPathString: String
+
+ // for compatibility purposes
+ @deprecated("Use asClassPathString instead of this one", "2.11.5")
+ def asClasspathString: String = asClassPathString
+
+ /** The whole sourcepath in the form of one String.
+ */
+ def asSourcePathString: String
+}
+
+/**
+ * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
+ */
+// TODO: once the old classpath implementation is removed, this trait should no longer be generic;
+// T should simply become AbstractFile
+trait ClassRepresentation[T] {
+ def binary: Option[T]
+ def source: Option[AbstractFile]
+
+ def name: String
+}
+
+object ClassRepresentation {
+ def unapply[T](classRep: ClassRepresentation[T]): Option[(Option[T], Option[AbstractFile])] =
+ Some((classRep.binary, classRep.source))
+}
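To make the intent concrete, a minimal usage sketch of the new lookup abstraction; the describe helper and the way the lookup instance is obtained are assumptions, not part of the patch:

    import scala.tools.nsc.io.AbstractFile
    import scala.tools.nsc.util.{ ClassFileLookup, ClassRepresentation }

    // Report whether a class is known through its binary, its source, or not at all.
    def describe(lookup: ClassFileLookup[AbstractFile], name: String): String =
      lookup.findClass(name) match {
        case Some(ClassRepresentation(binary, source)) =>
          s"$name -> binary: ${binary.isDefined}, source: ${source.isDefined}"
        case None =>
          s"$name not found on ${lookup.asClassPathString}"
      }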
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index d2ba61cc0b..8d4d07759f 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -7,16 +7,18 @@
package scala.tools.nsc
package util
+import io.{ AbstractFile, Directory, File, Jar }
+import java.net.MalformedURLException
import java.net.URL
+import java.util.regex.PatternSyntaxException
import scala.collection.{ mutable, immutable }
-import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.reflect.internal.util.StringOps.splitWhere
-import Jar.isJarOrZip
+import scala.tools.nsc.classpath.FileUtils
+
import File.pathSeparator
-import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
-import java.net.MalformedURLException
-import java.util.regex.PatternSyntaxException
-import scala.reflect.runtime.ReflectionUtils
+import FileUtils.endsClass
+import FileUtils.endsScalaOrJava
+import Jar.isJarOrZip
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -89,7 +91,7 @@ object ClassPath {
/** A class modeling aspects of a ClassPath which should be
* propagated to any classpaths it creates.
*/
- abstract class ClassPathContext[T] {
+ abstract class ClassPathContext[T] extends classpath.ClassPathFactory[ClassPath[T]] {
/** A filter which can be used to exclude entities from the classpath
* based on their name.
*/
@@ -99,75 +101,47 @@ object ClassPath {
*/
def validClassFile(name: String) = endsClass(name) && isValidName(name)
def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+ def validSourceFile(name: String) = endsScalaOrJava(name)
/** From the representation to its identifier.
*/
def toBinaryName(rep: T): String
- /** Create a new classpath based on the abstract file.
- */
- def newClassPath(file: AbstractFile): ClassPath[T]
-
- /** Creators for sub classpaths which preserve this context.
- */
def sourcesInPath(path: String): List[ClassPath[T]] =
for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
new SourcePath[T](dir, this)
-
- def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
- for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
- newClassPath(entry)
-
- def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
- classesInPathImpl(path, expand = true).toIndexedSeq
-
- def classesInPath(path: String) = classesInPathImpl(path, expand = false)
-
- // Internal
- private def classesInPathImpl(path: String, expand: Boolean) =
- for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
- newClassPath(dir)
-
- def classesInManifest(used: Boolean) =
- if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
}
- def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+ def manifests: List[java.net.URL] = {
+ import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
+ Thread.currentThread().getContextClassLoader()
+ .getResources("META-INF/MANIFEST.MF")
+ .filter(_.getProtocol == "jar").toList
+ }
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
val name = rep.name
assert(endsClass(name), name)
- name.substring(0, name.length - 6)
+ FileUtils.stripClassExtension(name)
}
+
def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
object DefaultJavaContext extends JavaContext
- private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
- private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
- private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
-
/** From the source file to its identifier.
*/
- def toSourceName(f: AbstractFile): String = {
- val name = f.name
-
- if (endsScala(name)) name.substring(0, name.length - 6)
- else if (endsJava(name)) name.substring(0, name.length - 5)
- else throw new FatalError("Unexpected source file ending: " + name)
- }
+ def toSourceName(f: AbstractFile): String = FileUtils.stripSourceExtension(f.name)
}
+
import ClassPath._
/**
* Represents a package which contains classes and other packages
*/
-abstract class ClassPath[T] {
- type AnyClassRep = ClassPath[T]#ClassRep
-
+abstract class ClassPath[T] extends ClassFileLookup[T] {
/**
* The short name of the package (without prefix)
*/
@@ -179,28 +153,37 @@ abstract class ClassPath[T] {
*/
def origin: Option[String] = None
- /** A list of URLs representing this classpath.
- */
- def asURLs: List[URL]
-
- /** The whole classpath in the form of one String.
- */
- def asClasspathString: String
-
/** Info which should be propagated to any sub-classpaths.
*/
def context: ClassPathContext[T]
/** Lists of entities.
*/
- def classes: IndexedSeq[AnyClassRep]
+ def classes: IndexedSeq[ClassRepresentation[T]]
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
+ /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
+ * Subclasses such as `MergedClassPath` typically return lists with more elements.
+ */
+ def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
+
+ /** Merge the entries of this classpath and the given `urls` into one merged classpath */
+ def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
+ // Collect our new jars/directories and add them to the existing set of classpaths
+ val allEntries =
+ (entries ++
+ urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
+ ).distinct
+
+ // Combine all of our classpaths (old and new) into one merged classpath
+ new MergedClassPath(allEntries, context)
+ }
+
/**
* Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
- case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
+ case class ClassRep(binary: Option[T], source: Option[AbstractFile]) extends ClassRepresentation[T] {
def name: String = binary match {
case Some(x) => context.toBinaryName(x)
case _ =>
@@ -219,25 +202,27 @@ abstract class ClassPath[T] {
* Find a ClassRep given a class name of the form "package.subpackage.ClassName".
* Does not support nested classes on .NET
*/
- def findClass(name: String): Option[AnyClassRep] =
+ override def findClass(name: String): Option[ClassRepresentation[T]] =
splitWhere(name, _ == '.', doDropIndex = true) match {
case Some((pkg, rest)) =>
val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
rep map {
- case x: ClassRep => x
+ case x: ClassRepresentation[T] => x
case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
}
case _ =>
classes find (_.name == name)
}
- def findSourceFile(name: String): Option[AbstractFile] =
+ override def findClassFile(name: String): Option[AbstractFile] =
findClass(name) match {
- case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x)
+ case Some(ClassRepresentation(Some(x: AbstractFile), _)) => Some(x)
case _ => None
}
- def sortString = join(split(asClasspathString).sorted: _*)
+ override def asSourcePathString: String = sourcepaths.mkString(pathSeparator)
+
+ def sortString = join(split(asClassPathString).sorted: _*)
override def equals(that: Any) = that match {
case x: ClassPath[_] => this.sortString == x.sortString
case _ => false
@@ -249,10 +234,12 @@ abstract class ClassPath[T] {
* A Classpath containing source files
*/
class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
+ import FileUtils.AbstractFileOps
+
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
- def asClasspathString = dir.path
+ def asURLs = dir.toURLs()
+ def asClassPathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
private def traverse() = {
@@ -275,10 +262,12 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
* A directory (or a .jar file) containing classfiles and packages
*/
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
+ import FileUtils.AbstractFileOps
+
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
- def asClasspathString = dir.path
+ def asURLs = dir.toURLs(default = Seq(new URL(name)))
+ def asClassPathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
// calculates (packages, classes) in one traversal.
@@ -322,9 +311,10 @@ extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), o
* A classpath unifying multiple class- and sourcepath entries.
*/
class MergedClassPath[T](
- val entries: IndexedSeq[ClassPath[T]],
+ override val entries: IndexedSeq[ClassPath[T]],
val context: ClassPathContext[T])
extends ClassPath[T] {
+
def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
this(entries.toIndexedSeq, context)
@@ -333,12 +323,12 @@ extends ClassPath[T] {
lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
- override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
+ override def asClassPathString: String = join(entries map (_.asClassPathString) : _*)
- lazy val classes: IndexedSeq[AnyClassRep] = {
+ lazy val classes: IndexedSeq[ClassRepresentation[T]] = {
var count = 0
val indices = mutable.HashMap[String, Int]()
- val cls = new mutable.ArrayBuffer[AnyClassRep](1024)
+ val cls = new mutable.ArrayBuffer[ClassRepresentation[T]](1024)
for (e <- entries; c <- e.classes) {
val name = c.name
@@ -347,9 +337,9 @@ extends ClassPath[T] {
val existing = cls(idx)
if (existing.binary.isEmpty && c.binary.isDefined)
- cls(idx) = existing.copy(binary = c.binary)
+ cls(idx) = ClassRep(binary = c.binary, source = existing.source)
if (existing.source.isEmpty && c.source.isDefined)
- cls(idx) = existing.copy(source = c.source)
+ cls(idx) = ClassRep(binary = existing.binary, source = c.source)
}
else {
indices(name) = count
@@ -387,10 +377,12 @@ extends ClassPath[T] {
}
new MergedClassPath[T](newEntries, context)
}
+
def show() {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
- asClasspathString split ':' foreach (x => println(" " + x))
+ asClassPathString split ':' foreach (x => println(" " + x))
}
+
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
}
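A hedged usage sketch of the new mergeUrlsIntoClassPath helper; the withExtraJar wrapper and its inputs are illustrative:

    import java.net.URL
    import scala.tools.nsc.io.AbstractFile
    import scala.tools.nsc.util.{ ClassPath, MergedClassPath }

    // Extend an existing classpath with one extra jar; duplicate entries are dropped.
    def withExtraJar(base: ClassPath[AbstractFile], jar: URL): MergedClassPath[AbstractFile] =
      base.mergeUrlsIntoClassPath(jar)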
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index ba44126df2..352816803f 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -8,7 +8,7 @@ package util
import scala.reflect.internal.Chars._
-/** Utilitity methods for doc comment strings
+/** Utility methods for doc comment strings
*/
object DocStrings {
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
index 225f6ca68e..be245347a8 100644
--- a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -14,12 +14,10 @@ abstract class StatisticsInfo {
import global._
import scala.reflect.internal.TreesStats.nodeByType
- val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
-
val retainedCount = Statistics.newCounter("#retained tree nodes")
val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter(""))
- def print(phase: Phase) = if (phasesShown contains phase.name) {
+ def print(phase: Phase) = if (settings.Ystatistics contains phase.name) {
inform("*** Cumulative statistics at phase " + phase)
retainedCount.value = 0
for (c <- retainedByType.keys)
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index 8630ecf69e..8fed53c89f 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -5,19 +5,23 @@ import scala.reflect.reify.Taggers
import scala.tools.nsc.typechecker.{ Analyzer, Macros }
import scala.reflect.runtime.Macros.currentMirror
import scala.reflect.api.Universe
-import scala.tools.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
+import scala.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
*/
-trait FastTrack {
- self: Macros with Analyzer =>
+class FastTrack[MacrosAndAnalyzer <: Macros with Analyzer](val macros: MacrosAndAnalyzer) {
+ import macros._
import global._
import definitions._
import scala.language.implicitConversions
import treeInfo.Applied
+ def contains(symbol: Symbol): Boolean = fastTrackCache().contains(symbol)
+ def apply(symbol: Symbol): FastTrackEntry = fastTrackCache().apply(symbol)
+ def get(symbol: Symbol): Option[FastTrackEntry] = fastTrackCache().get(symbol)
+
private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
new { val c: c0.type = c0 } with Taggers
private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } =
@@ -39,7 +43,6 @@ trait FastTrack {
}
/** A map from a set of pre-established macro symbols to their implementations. */
- def fastTrack: Map[Symbol, FastTrackEntry] = fastTrackCache()
private val fastTrackCache = perRunCaches.newGeneric[Map[Symbol, FastTrackEntry]] {
val runDefinitions = currentRun.runDefinitions
import runDefinitions._
diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
index e0f9bb6044..b445f1e2bb 100644
--- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala
+++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala
@@ -117,7 +117,7 @@ abstract class FormatInterpolator {
c.error(errPoint, msg("unsupported"))
s0
} else {
- c.enclosingUnit.deprecationWarning(errPoint, msg("deprecated"))
+ currentRun.reporting.deprecationWarning(errPoint, msg("deprecated"))
try StringContext.treatEscapes(s0) catch escapeHatch
}
}
@@ -182,13 +182,23 @@ abstract class FormatInterpolator {
case (part, n) => copyPart(part, n)
}
- //q"{..$evals; ${fstring.toString}.format(..$ids)}"
- locally {
+ //q"{..$evals; new StringOps(${fstring.toString}).format(..$ids)}"
+ val format = fstring.toString
+ if (ids.isEmpty && !format.contains("%")) Literal(Constant(format))
+ else {
+ val scalaPackage = Select(Ident(nme.ROOTPKG), TermName("scala"))
+ val newStringOps = Select(
+ New(Select(Select(Select(scalaPackage,
+ TermName("collection")), TermName("immutable")), TypeName("StringOps"))),
+ termNames.CONSTRUCTOR
+ )
val expr =
Apply(
Select(
- Literal(Constant(fstring.toString)),
- newTermName("format")),
+ Apply(
+ newStringOps,
+ List(Literal(Constant(format)))),
+ TermName("format")),
ids.toList
)
val p = c.macroApplication.pos
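For orientation, a rough sketch of what the rewritten interpolator now emits (an approximation of the expansion, not the literal tree the compiler builds):

    val x = 42
    // f"x = $x%d" now expands, approximately, to:
    new scala.collection.immutable.StringOps("x = %d").format(x)
    // An f-interpolation with no arguments and no '%' directives collapses to a plain string literal.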
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 3ae21b6b98..8d8418945a 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -1,17 +1,17 @@
package scala.tools
package reflect
+import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.PathResolver
+import scala.tools.util.PathResolverFactory
object ReflectMain extends Driver {
private def classloaderFromSettings(settings: Settings) = {
- val classpath = new PathResolver(settings).result
- ScalaClassLoader.fromURLs(classpath.asURLs, getClass.getClassLoader)
+ val classPathURLs = PathResolverFactory.create(settings).resultAsURLs
+ ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader)
}
override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 3b12086cc7..47c88f2c00 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -7,7 +7,7 @@ import scala.tools.nsc.Global
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
import scala.tools.nsc.io.{AbstractFile, VirtualDirectory}
-import scala.tools.nsc.util.AbstractFileClassLoader
+import scala.reflect.internal.util.AbstractFileClassLoader
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
import java.lang.{Class => jClass}
@@ -141,7 +141,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val run = new Run
run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
- currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
+ globalPhase = run.typerPhase // amazing... looks like phase and globalPhase are different things, so we need to set them separately
+ currentTyper.context.initRootContext() // need to manually set context mode, otherwise typer.silent will throw exceptions
reporter.reset()
val expr3 = withContext(transform(currentTyper, expr2))
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
deleted file mode 100644
index 3cfc1eb2a1..0000000000
--- a/src/compiler/scala/tools/util/Javap.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import scala.tools.nsc.util.ScalaClassLoader
-import java.io.PrintWriter
-
-trait JpResult {
- def isError: Boolean
- def value: Any
- def show(): Unit
-}
-
-trait Javap {
- def loader: ScalaClassLoader
- def printWriter: PrintWriter
- def apply(args: Seq[String]): List[JpResult]
- def tryFile(path: String): Option[Array[Byte]]
- def tryClass(path: String): Array[Byte]
-}
-
-object NoJavap extends Javap {
- def loader: ScalaClassLoader = getClass.getClassLoader
- def printWriter: PrintWriter = new PrintWriter(System.err, true)
- def apply(args: Seq[String]): List[JpResult] = Nil
- def tryFile(path: String): Option[Array[Byte]] = None
- def tryClass(path: String): Array[Byte] = Array()
-}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 5526660509..8e5b1e0a5c 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -7,14 +7,17 @@ package scala
package tools
package util
+import java.net.URL
import scala.tools.reflect.WrappedProperties.AccessControl
-import scala.tools.nsc.{ Settings }
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.tools.nsc.Settings
+import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, JavaClassPath }
import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import scala.reflect.runtime.ReflectionUtils
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
+import scala.tools.nsc.classpath.{ AggregateFlatClassPath, ClassPathFactory, FlatClassPath, FlatClassPathFactory }
+import scala.tools.nsc.settings.ClassPathRepresentationType
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SIW/Classpath
@@ -48,9 +51,8 @@ object PathResolver {
/** Values found solely by inspecting environment or property variables.
*/
object Environment {
- private def searchForBootClasspath = (
+ private def searchForBootClasspath =
systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
- )
/** Environment variables which java pays attention to so it
* seems we do as well.
@@ -104,7 +106,7 @@ object PathResolver {
else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
else ""
- // XXX It must be time for someone to figure out what all these things
+ // TODO It must be time for someone to figure out what all these things
// are intended to do. This is disabled here because it was causing all
// the scala jars to end up on the classpath twice: one on the boot
// classpath as set up by the runner (or regular classpath under -nobootcp)
@@ -170,39 +172,48 @@ object PathResolver {
!ReflectionUtils.scalacShouldntLoadClassfile(name)
}
- // called from scalap
+ @deprecated("This method is no longer used be scalap and will be deleted", "2.11.5")
def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
val s = new Settings()
s.classpath.value = path
- new PathResolver(s, context) result
+ new PathResolver(s, context).result
}
/** With no arguments, show the interesting values in Environment and Defaults.
* If there are arguments, show those in Calculated as if those options had been
* given to a scala runner.
*/
- def main(args: Array[String]): Unit = {
+ def main(args: Array[String]): Unit =
if (args.isEmpty) {
println(Environment)
println(Defaults)
- }
- else {
+ } else {
val settings = new Settings()
val rest = settings.processArguments(args.toList, processAll = false)._2
- val pr = new PathResolver(settings)
- println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
+ val pr = PathResolverFactory.create(settings)
+ println("COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show()
+
+ pr.result match {
+ case cp: JavaClassPath =>
+ cp.show()
+ case cp: AggregateFlatClassPath =>
+ println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}")
+ }
}
- }
}
-class PathResolver(settings: Settings, context: JavaContext) {
- import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+trait PathResolverResult {
+ def result: ClassFileLookup[AbstractFile]
- def this(settings: Settings) = this(settings,
- if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
- else DefaultJavaContext)
+ def resultAsURLs: Seq[URL] = result.asURLs
+}
+
+abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFile], ResultClassPathType <: BaseClassPathType]
+(settings: Settings, classPathFactory: ClassPathFactory[BaseClassPathType])
+ extends PathResolverResult {
+
+ import PathResolver.{ AsLines, Defaults, ppcp }
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
@@ -232,6 +243,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+
/** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
* [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
* [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
@@ -250,16 +262,14 @@ class PathResolver(settings: Settings, context: JavaContext) {
* - Otherwise, if CLASSPATH is set, it is that
* - If neither of those, then "." is used.
*/
- def userClassPath = (
- if (!settings.classpath.isDefault)
- settings.classpath.value
+ def userClassPath =
+ if (!settings.classpath.isDefault) settings.classpath.value
else sys.env.getOrElse("CLASSPATH", ".")
- )
- import context._
+ import classPathFactory._
// Assemble the elements!
- def basis = List[Traversable[ClassPath[AbstractFile]]](
+ def basis = List[Traversable[BaseClassPathType]](
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -278,7 +288,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
| javaBootClassPath = ${ppcp(javaBootClassPath)}
| javaExtDirs = ${ppcp(javaExtDirs)}
| javaUserClassPath = ${ppcp(javaUserClassPath)}
- | useJavaClassPath = $useJavaClassPath
+ | useJavaClassPath = $useJavaClassPath
| scalaBootClassPath = ${ppcp(scalaBootClassPath)}
| scalaExtDirs = ${ppcp(scalaExtDirs)}
| userClassPath = ${ppcp(userClassPath)}
@@ -288,8 +298,10 @@ class PathResolver(settings: Settings, context: JavaContext) {
def containers = Calculated.containers
- lazy val result = {
- val cp = new JavaClassPath(containers.toIndexedSeq, context)
+ import PathResolver.MkLines
+
+ def result: ResultClassPathType = {
+ val cp = computeResult()
if (settings.Ylogcp) {
Console print f"Classpath built from ${settings.toConciseString} %n"
Console print s"Defaults: ${PathResolver.Defaults}"
@@ -301,5 +313,37 @@ class PathResolver(settings: Settings, context: JavaContext) {
cp
}
- def asURLs = result.asURLs
+ @deprecated("Use resultAsURLs instead of this one", "2.11.5")
+ def asURLs: List[URL] = resultAsURLs.toList
+
+ protected def computeResult(): ResultClassPathType
+}
+
+class PathResolver(settings: Settings, context: JavaContext)
+ extends PathResolverBase[ClassPath[AbstractFile], JavaClassPath](settings, context) {
+
+ def this(settings: Settings) =
+ this(settings,
+ if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
+ else DefaultJavaContext)
+
+ override protected def computeResult(): JavaClassPath =
+ new JavaClassPath(containers.toIndexedSeq, context)
+}
+
+class FlatClassPathResolver(settings: Settings, flatClassPathFactory: ClassPathFactory[FlatClassPath])
+ extends PathResolverBase[FlatClassPath, AggregateFlatClassPath](settings, flatClassPathFactory) {
+
+ def this(settings: Settings) = this(settings, new FlatClassPathFactory(settings))
+
+ override protected def computeResult(): AggregateFlatClassPath = AggregateFlatClassPath(containers.toIndexedSeq)
+}
+
+object PathResolverFactory {
+
+ def create(settings: Settings): PathResolverResult =
+ settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new FlatClassPathResolver(settings)
+ case ClassPathRepresentationType.Recursive => new PathResolver(settings)
+ }
}
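A hedged sketch of resolving a classpath through the new factory; the Settings values below are illustrative:

    import scala.tools.nsc.Settings
    import scala.tools.util.PathResolverFactory

    val settings = new Settings()
    settings.classpath.value = "lib/foo.jar"                      // illustrative entry
    // Picks FlatClassPathResolver or the old PathResolver depending on the classpath implementation setting:
    val urls = PathResolverFactory.create(settings).resultAsURLs  // Seq[java.net.URL]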
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1d39a59cf4..7858bf0658 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -28,12 +28,12 @@ trait CompileOutputCommon {
* @author Martin Odersky
* @version 1.0
*/
-abstract class SocketServer extends CompileOutputCommon {
+abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon {
def shutdown: Boolean
def session(): Unit
def timeout(): Unit = () // called after a timeout is detected for subclasses to cleanup
// a hook for subclasses
- protected def createServerSocket(): ServerSocket = new ServerSocket(0)
+ protected def createServerSocket(): ServerSocket = new ServerSocket(fixPort)
var in: BufferedReader = _
var out: PrintWriter = _
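A minimal sketch of a subclass taking advantage of the new fixPort parameter; the echo server below is hypothetical, and it assumes verbose is the remaining abstract member inherited from CompileOutputCommon:

    import scala.tools.util.SocketServer

    class FixedPortEchoServer(port: Int) extends SocketServer(port) {
      def verbose = false                                // assumed abstract in CompileOutputCommon
      def shutdown = false                               // keep serving
      def session(): Unit = out.println(in.readLine())   // echo one line per connection
    }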
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 5311651db5..03c7403b04 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -1,28 +1,9 @@
Eclipse project files
=====================
-The following points describe how to get Scala to run in Eclipse:
+The following points describe how to get Scala to run in Eclipse. Please also take a look at the [excellent tutorial on scala-ide.org](http://scala-ide.org/docs/tutorials/scalac-trunk/index.html).
-0. To get Scala to work inside of Eclipse Kepler it is necessary to build the Scala IDE by your own
-because for the moment there is no update site provided for the newest development version
-of Scala. To do so enter the following commands one after the other:
-
- git clone https://github.com/scala-ide/scala-ide.git
- cd scala-ide
- ./build-all.sh clean install -Pscala-2.11.x -Peclipse-kepler -DskipTests
-
- After that you have an update site in `scala-ide/org.scala-ide.sdt.update-site/target/site`, which needs to be
-installed in Eclipse.
-
-0. The second thing that needs to be done is building Scala in order to get all necessary
-dependencies. To do that simply enter
-
- ant
-
- and wait until it is completed. To verify that everything has been built successfully, execute the REPL that can be found
-at `scala/build/pack/bin/scala`.
-
-0. Import all projects inside of Eclipse by choosing `File/Import Existing Projects`
+0. Import all projects into a [very recent version of Scala IDE for Eclipse](http://scala-ide.org/download/nightly.html) by choosing `File/Import Existing Projects`
and navigate to `scala/src/eclipse`. Check all projects and click ok.
0. You need to define a `path variable` inside Eclipse. Define `SCALA_BASEDIR` in
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index c2aab19f18..7f28868d95 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -4,11 +4,11 @@
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-sbt/test-interface/1.0/test-interface-1.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M7/1.0.0-RC8/scala-partest_2.11.0-M7-1.0.0-RC8.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="output" path="build-quick-partest-extras"/>
</classpath>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
index 601a231aeb..8ff9aabfbf 100644
--- a/src/eclipse/repl/.classpath
+++ b/src/eclipse/repl/.classpath
@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" path="repl"/>
- <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
- <classpathentry kind="var" path="M2_REPO/jline/jline/2.11/jline-2.11.jar"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.11.jar"/>
- <classpathentry kind="output" path="build-quick-repl"/>
+ <classpathentry kind="src" path="repl"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="var" path="M2_REPO/jline/jline/2.12/jline-2.12.jar"/>
+ <!-- <classpathentry kind="var" path="SCALA_BASEDIR/build/deps/repl/jline-2.12.jar"/> -->
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-repl"/>
</classpath>
diff --git a/src/intellij/README b/src/intellij/README
index ade87749cd..4ecab5561f 100644
--- a/src/intellij/README
+++ b/src/intellij/README
@@ -1,8 +1,12 @@
Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
+Compilation within IDEA is performed in "-Dlocker.skip=1" mode: the sources are built
+directly using the STARR compiler.
+
The following steps are required to use IntelliJ IDEA on Scala trunk
- - compile "locker" using "ant locker.done". This will also download some JARs from
- Maven to ./build/deps, which are included in IntelliJ's classpath.
+ - Run "ant init". This will download some JARs from to ./build/deps, which are
+ included in IntelliJ's classpath.
- Run src/intellij/setup.sh
- - Open ./src/intellij/scala-lang.ipr in IntelliJ
- - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the java 1.6 SDK
+ - Open ./src/intellij/scala.ipr in IntelliJ
+ - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the
+ Java 1.6 SDK
diff --git a/src/intellij/actors.iml.SAMPLE b/src/intellij/actors.iml.SAMPLE
index 896c4966ff..dfdf396c46 100644
--- a/src/intellij/actors.iml.SAMPLE
+++ b/src/intellij/actors.iml.SAMPLE
@@ -1,15 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../actors">
@@ -19,6 +9,6 @@
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="library" name="starr" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/asm.iml.SAMPLE b/src/intellij/asm.iml.SAMPLE
index ba9e7e899f..9886154bdf 100644
--- a/src/intellij/asm.iml.SAMPLE
+++ b/src/intellij/asm.iml.SAMPLE
@@ -8,5 +8,4 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE
index 9fb9cd55eb..00f732e255 100644
--- a/src/intellij/compiler.iml.SAMPLE
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -1,26 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../compiler">
+ <sourceFolder url="file://$MODULE_DIR$/../compiler/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/../compiler" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="asm" exported="" />
- <orderEntry type="library" exported="" name="ant" level="project" />
+ <orderEntry type="library" name="starr" level="project" />
+ <orderEntry type="library" name="ant" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/forkjoin.iml.SAMPLE b/src/intellij/forkjoin.iml.SAMPLE
index be807cc019..42507b2911 100644
--- a/src/intellij/forkjoin.iml.SAMPLE
+++ b/src/intellij/forkjoin.iml.SAMPLE
@@ -8,5 +8,4 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE
index c6c8ebb606..047b5c9069 100644
--- a/src/intellij/interactive.iml.SAMPLE
+++ b/src/intellij/interactive.iml.SAMPLE
@@ -1,15 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../interactive">
@@ -17,9 +7,9 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="compiler" />
- <orderEntry type="module" module-name="scaladoc" />
+ <orderEntry type="library" name="starr" level="project" />
</component>
-</module>
+</module>
\ No newline at end of file
diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE
index cac53dff15..b03fef9414 100644
--- a/src/intellij/library.iml.SAMPLE
+++ b/src/intellij/library.iml.SAMPLE
@@ -1,24 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="compilerOptions" value="-sourcepath $BASE_DIR$/src/library" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../library">
+ <sourceFolder url="file://$MODULE_DIR$/../library/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/../library" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="library" name="starr" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/manual.iml.SAMPLE b/src/intellij/manual.iml.SAMPLE
index 3295a4a877..97bfb5940a 100644
--- a/src/intellij/manual.iml.SAMPLE
+++ b/src/intellij/manual.iml.SAMPLE
@@ -1,15 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../manual">
@@ -18,8 +8,8 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="xml" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
<orderEntry type="library" name="ant" level="project" />
+ <orderEntry type="library" name="starr" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/partest-extras.iml.SAMPLE b/src/intellij/partest-extras.iml.SAMPLE
new file mode 100644
index 0000000000..d352f9ebc3
--- /dev/null
+++ b/src/intellij/partest-extras.iml.SAMPLE
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../partest-extras">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-extras" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="library" name="partest" level="project" />
+ <orderEntry type="library" name="starr" level="project" />
+ </component>
+</module>
\ No newline at end of file
diff --git a/src/intellij/partest-javaagent.iml.SAMPLE b/src/intellij/partest-javaagent.iml.SAMPLE
new file mode 100644
index 0000000000..c6081a2a4b
--- /dev/null
+++ b/src/intellij/partest-javaagent.iml.SAMPLE
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../partest-javaagent">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-javaagent" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
+ </component>
+</module>
\ No newline at end of file
diff --git a/src/intellij/reflect.iml.SAMPLE b/src/intellij/reflect.iml.SAMPLE
index 7d10522826..c9b7130aef 100644
--- a/src/intellij/reflect.iml.SAMPLE
+++ b/src/intellij/reflect.iml.SAMPLE
@@ -1,16 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="compilerOptions" value="-sourcepath $BASE_DIR$/src/reflect" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../reflect">
@@ -19,7 +8,6 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="library" name="jline" level="project" />
+ <orderEntry type="library" name="starr" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/repl.iml.SAMPLE b/src/intellij/repl.iml.SAMPLE
index fc78ffe8c2..896ec1dd5c 100644
--- a/src/intellij/repl.iml.SAMPLE
+++ b/src/intellij/repl.iml.SAMPLE
@@ -1,15 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../repl">
@@ -17,9 +7,11 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="library" name="starr" level="project" />
<orderEntry type="library" name="repl-deps" level="project" />
</component>
-</module>
+</module>
\ No newline at end of file
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
deleted file mode 100644
index a0765b3e99..0000000000
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ /dev/null
@@ -1,278 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="AntConfiguration">
- <defaultAnt bundledAnt="true" />
- </component>
- <component name="CompilerConfiguration">
- <option name="DEFAULT_COMPILER" value="Javac" />
- <resourceExtensions>
- <entry name=".+\.(properties|xml|html|dtd|tld)" />
- <entry name=".+\.(gif|png|jpeg|jpg)" />
- </resourceExtensions>
- <wildcardResourcePatterns>
- <entry name="?*.properties" />
- <entry name="?*.xml" />
- <entry name="?*.gif" />
- <entry name="?*.png" />
- <entry name="?*.jpeg" />
- <entry name="?*.jpg" />
- <entry name="?*.html" />
- <entry name="?*.dtd" />
- <entry name="?*.tld" />
- <entry name="?*.ftl" />
- </wildcardResourcePatterns>
- <annotationProcessing enabled="false" useClasspath="true" />
- </component>
- <component name="CopyrightManager" default="">
- <module2copyright />
- </component>
- <component name="DependencyValidationManager">
- <option name="SKIP_IMPORT_STATEMENTS" value="false" />
- </component>
- <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
- <component name="EntryPointsManager">
- <entry_points version="2.0" />
- </component>
- <component name="HighlightingAdvisor">
- <option name="SUGGEST_TYPE_AWARE_HIGHLIGHTING" value="false" />
- </component>
- <component name="InspectionProjectProfileManager">
- <profiles>
- <profile version="1.0" is_locked="false">
- <option name="myName" value="Project Default" />
- <option name="myLocal" value="false" />
- <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
- <option name="processCode" value="true" />
- <option name="processLiterals" value="true" />
- <option name="processComments" value="true" />
- </inspection_tool>
- </profile>
- </profiles>
- <option name="PROJECT_PROFILE" value="Project Default" />
- <option name="USE_PROJECT_PROFILE" value="true" />
- <version value="1.0" />
- </component>
- <component name="JavacSettings">
- <option name="ADDITIONAL_OPTIONS_STRING" value="-source 1.5 -target 1.5" />
- </component>
- <component name="JavadocGenerationManager">
- <option name="OUTPUT_DIRECTORY" />
- <option name="OPTION_SCOPE" value="protected" />
- <option name="OPTION_HIERARCHY" value="true" />
- <option name="OPTION_NAVIGATOR" value="true" />
- <option name="OPTION_INDEX" value="true" />
- <option name="OPTION_SEPARATE_INDEX" value="true" />
- <option name="OPTION_DOCUMENT_TAG_USE" value="false" />
- <option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
- <option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
- <option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
- <option name="OPTION_DEPRECATED_LIST" value="true" />
- <option name="OTHER_OPTIONS" value="" />
- <option name="HEAP_SIZE" />
- <option name="LOCALE" />
- <option name="OPEN_IN_BROWSER" value="true" />
- </component>
- <component name="Palette2">
- <group name="Swing">
- <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
- </item>
- <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
- </item>
- <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
- <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
- <initial-values>
- <property name="text" value="Button" />
- </initial-values>
- </item>
- <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="RadioButton" />
- </initial-values>
- </item>
- <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="CheckBox" />
- </initial-values>
- </item>
- <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="Label" />
- </initial-values>
- </item>
- <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
- <preferred-size width="200" height="200" />
- </default-constraints>
- </item>
- <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
- <preferred-size width="200" height="200" />
- </default-constraints>
- </item>
- <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
- </item>
- <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
- <preferred-size width="-1" height="20" />
- </default-constraints>
- </item>
- <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
- </item>
- <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
- </item>
- </group>
- </component>
- <component name="ProjectDictionaryState">
- <dictionary name="luc" />
- </component>
- <component name="ProjectModuleManager">
- <modules>
- <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
- <module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
- <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
- <module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
- <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
- <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
- <module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
- <module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
- <module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
- <module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
- <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
- <module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
- </modules>
- </component>
- <component name="ProjectResources">
- <default-html-doctype>http://www.w3.org/1999/xhtml</default-html-doctype>
- </component>
- <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
- <output url="file://$PROJECT_DIR$/../../out" />
- </component>
- <component name="ScalacSettings">
- <option name="COMPILER_LIBRARY_NAME" value="compiler-locker" />
- <option name="COMPILER_LIBRARY_LEVEL" value="Project" />
- </component>
- <component name="VcsDirectoryMappings">
- <mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
- </component>
- <component name="libraryTable">
- <library name="ant">
- <CLASSES>
- <root url="jar://$PROJECT_DIR$/../../lib/ant/ant.jar!/" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- <library name="compiler-locker">
- <CLASSES>
- <root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
- <root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
- <root url="file://$PROJECT_DIR$/../../build/locker/classes/reflect" />
- <root url="file://$PROJECT_DIR$/../../build/asm/classes" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- <library name="junit">
- <CLASSES>
- <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
- </CLASSES>
- <JAVADOC />
- <SOURCES>
- <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
- </SOURCES>
- <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" />
- <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" type="SOURCES" />
- </library>
- <library name="partest-deps">
- <CLASSES>
- <root url="file://$PROJECT_DIR$/../../build/deps/partest" />
- </CLASSES>
- <JAVADOC />
- <SOURCES>
- <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
- </SOURCES>
- <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/partest" recursive="false" />
- <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" type="SOURCES" />
- </library>
- <library name="repl-deps">
- <CLASSES>
- <root url="file://$PROJECT_DIR$/../../build/deps/repl" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/repl" recursive="false" />
- </library>
- </component>
-</project>
diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE
index a4d863800b..9e8718dd45 100644
--- a/src/intellij/scala.iml.SAMPLE
+++ b/src/intellij/scala.iml.SAMPLE
@@ -8,5 +8,4 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE
new file mode 100644
index 0000000000..07f366a302
--- /dev/null
+++ b/src/intellij/scala.ipr.SAMPLE
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+ <component name="CompilerConfiguration">
+ <option name="DEFAULT_COMPILER" value="Javac" />
+ <resourceExtensions />
+ <wildcardResourcePatterns>
+ <entry name="!?*.java" />
+ <entry name="!?*.form" />
+ <entry name="!?*.class" />
+ <entry name="!?*.groovy" />
+ <entry name="!?*.scala" />
+ <entry name="!?*.flex" />
+ <entry name="!?*.kt" />
+ <entry name="!?*.clj" />
+ </wildcardResourcePatterns>
+ <annotationProcessing>
+ <profile default="true" name="Default" enabled="false">
+ <processorPath useClasspath="true" />
+ </profile>
+ </annotationProcessing>
+ </component>
+ <component name="CopyrightManager" default="" />
+ <component name="EntryPointsManager">
+ <entry_points version="2.0" />
+ </component>
+ <component name="ProjectLevelVcsManager" settingsEditedManually="false">
+ <OptionsSetting value="true" id="Add" />
+ <OptionsSetting value="true" id="Remove" />
+ <OptionsSetting value="true" id="Checkout" />
+ <OptionsSetting value="true" id="Update" />
+ <OptionsSetting value="true" id="Status" />
+ <OptionsSetting value="true" id="Edit" />
+ <ConfirmationsSetting value="0" id="Add" />
+ <ConfirmationsSetting value="0" id="Remove" />
+ </component>
+ <component name="ProjectModuleManager">
+ <modules>
+ <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
+ <module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
+ <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
+ <module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
+ <module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
+ <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
+ <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
+ <module fileurl="file://$PROJECT_DIR$/partest-extras.iml" filepath="$PROJECT_DIR$/partest-extras.iml" />
+ <module fileurl="file://$PROJECT_DIR$/partest-javaagent.iml" filepath="$PROJECT_DIR$/partest-javaagent.iml" />
+ <module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
+ <module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test-junit.iml" filepath="$PROJECT_DIR$/test-junit.iml" />
+ </modules>
+ </component>
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" default="true" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
+ <output url="file://$PROJECT_DIR$/../../out" />
+ </component>
+ <component name="ScalaCompilerConfiguration">
+ <parameters>
+ <parameter value="-sourcepath" />
+ <parameter value="$PROJECT_DIR$/../library" />
+ </parameters>
+ </component>
+ <component name="VcsDirectoryMappings">
+ <mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
+ </component>
+ <component name="libraryTable">
+ <library name="ant">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="junit">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" />
+ </library>
+ <library name="partest">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/partest" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/partest" recursive="false" />
+ </library>
+ <library name="repl-deps">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/repl" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/repl" recursive="false" />
+ </library>
+ <library name="scaladoc-deps">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/scaladoc" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/scaladoc" recursive="false" />
+ </library>
+ <library name="starr" type="Scala">
+ <properties>
+ <compiler-classpath>
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-#starr-version#.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-#starr-version#.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-#starr-version#.jar" />
+ </compiler-classpath>
+ </properties>
+ <CLASSES />
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ </component>
+</project>
diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE
index 07bea5bf5d..4ba0a848c6 100644
--- a/src/intellij/scaladoc.iml.SAMPLE
+++ b/src/intellij/scaladoc.iml.SAMPLE
@@ -1,15 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../scaladoc">
@@ -17,11 +7,12 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="compiler" />
- <orderEntry type="module" module-name="xml" />
- <orderEntry type="module" module-name="parser-combinators" />
- <orderEntry type="module" module-name="partest" />
+ <orderEntry type="library" name="starr" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="ant" level="project" />
+ <orderEntry type="library" name="partest" level="project" />
</component>
-</module>
+</module>
\ No newline at end of file
diff --git a/src/intellij/scalap.iml.SAMPLE b/src/intellij/scalap.iml.SAMPLE
index 77eea7c38f..665aac07f8 100644
--- a/src/intellij/scalap.iml.SAMPLE
+++ b/src/intellij/scalap.iml.SAMPLE
@@ -1,15 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/../scalap">
@@ -18,8 +8,8 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="library" name="starr" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/setup.sh b/src/intellij/setup.sh
index bd324ba5bd..251f717829 100755
--- a/src/intellij/setup.sh
+++ b/src/intellij/setup.sh
@@ -5,19 +5,13 @@
set -e
export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
-export BASE="$( cd "$( dirname "$0" )"/../.. && pwd )"
echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel."
read
-(rm -f *.ipr *.iml 2>/dev/null)
-for f in $(ls "$SCRIPT_DIR"/*.SAMPLE); do
- NEW_FILE=`echo $f | perl -pe 's/.SAMPLE//'`;
-
- cp $f $NEW_FILE
-
- # IntelliJ doesn't process the "compilerOptions" setting for variable
- # replacement. If it did, we would just use "$PROJECT_DIR$". Instead,
- # we do this replacement ourselves.
- perl -pi -e 's/\$BASE_DIR\$/$ENV{"BASE"}/g' $NEW_FILE
- echo "Created $NEW_FILE"
+for f in "$SCRIPT_DIR"/*.SAMPLE; do
+ g=${f%.SAMPLE}
+ cp $f $g
done
+
+STARR_VERSION="`cat $SCRIPT_DIR/../../versions.properties | grep 'starr.version' | awk '{split($0,a,"="); print a[2]}'`"
+sed "s/#starr-version#/$STARR_VERSION/g" $SCRIPT_DIR/scala.ipr.SAMPLE > $SCRIPT_DIR/scala.ipr
diff --git a/src/intellij/test-junit.iml.SAMPLE b/src/intellij/test-junit.iml.SAMPLE
new file mode 100644
index 0000000000..fe98fce60c
--- /dev/null
+++ b/src/intellij/test-junit.iml.SAMPLE
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test/junit">
+ <sourceFolder url="file://$MODULE_DIR$/../../test/junit" isTestSource="true" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="partest-extras" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="module" module-name="scaladoc" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="junit" level="project" />
+ <orderEntry type="library" name="starr" level="project" />
+ </component>
+</module>
\ No newline at end of file
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
index 423be2062c..175a920771 100644
--- a/src/intellij/test.iml.SAMPLE
+++ b/src/intellij/test.iml.SAMPLE
@@ -2,20 +2,21 @@
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../../test" />
+ <content url="file://$MODULE_DIR$/../../test">
+ <excludeFolder url="file://$MODULE_DIR$/../../test/junit" />
+ </content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="xml" />
- <orderEntry type="module" module-name="parser-combinators" />
- <orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="actors" />
- <orderEntry type="module" module-name="swing" />
- <orderEntry type="module" module-name="partest" />
<orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="forkjoin" />
- <orderEntry type="library" name="junit" level="project" />
+ <orderEntry type="module" module-name="interactive" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="partest-extras" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="library" name="partest" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
</component>
-</module>
-
+</module>
\ No newline at end of file
diff --git a/src/intellij/update.sh b/src/intellij/update.sh
new file mode 100755
index 0000000000..eb6fea782f
--- /dev/null
+++ b/src/intellij/update.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+#
+# Updates the .SAMPLE files with the current project files.
+#
+
+set -e
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+
+echo "About to create overwrite the .ipr.SAMPLE and .iml.SAMPLE files with the current project files. Press enter to continue or CTRL-C to cancel."
+read
+
+for f in "$SCRIPT_DIR"/*.{iml,ipr}; do
+ cp $f $f.SAMPLE
+done
+
+for f in "$SCRIPT_DIR"/*.SAMPLE; do
+ g=${f%.SAMPLE}
+ if [[ ! -f $g ]]; then
+ echo "Stale sample file, deleting $f"
+ rm $f
+ fi
+done
diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index 2e4f6b08e9..9caebb711d 100644
--- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -302,7 +302,7 @@ trait CompilerControl { self: Global =>
abstract class WorkItem extends (() => Unit) {
val onCompilerThread = self.onCompilerThread
- /** Raise a MissingReponse, if the work item carries a response. */
+ /** Raise a MissingResponse, if the work item carries a response. */
def raiseMissing(): Unit
}
diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
index bf718c27cc..a4cb3efa4f 100644
--- a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -55,7 +55,11 @@ trait ContextTrees { self: Global =>
context
}
}
- locateContextTree(contexts, pos) map locateFinestContextTree map (_.context)
+ def sanitizeContext(c: Context): Context = {
+ c.retyping = false
+ c
+ }
+ locateContextTree(contexts, pos) map locateFinestContextTree map (ct => sanitizeContext(ct.context))
}
/** Returns the ContextTree containing `pos`, or the ContextTree positioned just before `pos`,
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 95027a26b1..727bfdd510 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -64,7 +64,9 @@ trait InteractiveAnalyzer extends Analyzer {
// that case the definitions that were already attributed as
// well as any default parameters of such methods need to be
// re-entered in the current scope.
- override def enterExistingSym(sym: Symbol): Context = {
+ //
+ // Tested in test/files/presentation/t8941b
+ override def enterExistingSym(sym: Symbol, tree: Tree): Context = {
if (sym != null && sym.owner.isTerm) {
enterIfNotThere(sym)
if (sym.isLazy)
@@ -72,8 +74,17 @@ trait InteractiveAnalyzer extends Analyzer {
for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
defAtt.defaultGetters foreach enterIfNotThere
+ } else if (sym != null && sym.isClass && sym.isImplicit) {
+ val owningInfo = sym.owner.info
+ val existingDerivedSym = owningInfo.decl(sym.name.toTermName).filter(sym => sym.isSynthetic && sym.isMethod)
+ existingDerivedSym.alternatives foreach (owningInfo.decls.unlink)
+ val defTree = tree match {
+ case dd: DocDef => dd.definition // See SI-9011, Scala IDE's presentation compiler incorporates ScalaDocGlobal with InteractiveGlobal, so we have to unwrap DocDefs.
+ case _ => tree
+ }
+ enterImplicitWrapper(defTree.asInstanceOf[ClassDef])
}
- super.enterExistingSym(sym)
+ super.enterExistingSym(sym, tree)
}
override def enterIfNotThere(sym: Symbol) {
val scope = context.scope
@@ -121,8 +132,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
else NullLogger
import log.logreplay
- debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
- debugLog("classpath: "+classPath)
+ debugLog(s"logger: ${log.getClass} writing to ${(new java.io.File(logName)).getAbsolutePath}")
+ debugLog(s"classpath: $classPath")
private var curTime = System.nanoTime
private def timeStep = {
@@ -141,9 +152,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
// don't keep the original owner in presentation compiler runs
// (the map will grow indefinitely, and the only use case is the backend)
- override protected def saveOriginalOwner(sym: Symbol) { }
- override protected def originalEnclosingMethod(sym: Symbol) =
- abort("originalOwner is not kept in presentation compiler runs.")
+ override def defineOriginalOwner(sym: Symbol, owner: Symbol): Unit = { }
override def forInteractive = true
override protected def synchronizeNames = true
@@ -306,7 +315,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
private val NoResponse: Response[_] = new Response[Any]
/** The response that is currently pending, i.e. the compiler
- * is working on providing an asnwer for it.
+ * is working on providing an answer for it.
*/
private var pendingResponse: Response[_] = NoResponse
@@ -517,7 +526,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
/** The current presentation compiler runner */
@volatile private[interactive] var compileRunner: Thread = newRunnerThread()
- /** Check that the currenyly executing thread is the presentation compiler thread.
+ /** Check that the currently executing thread is the presentation compiler thread.
*
* Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
*/
@@ -734,7 +743,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
- private def reloadSource(source: SourceFile) {
+ private[interactive] def reloadSource(source: SourceFile) {
val unit = new RichCompilationUnit(source)
unitOfFile(source.file) = unit
toBeRemoved -= source.file
@@ -783,7 +792,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
/** A fully attributed tree located at position `pos` */
- private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
+ private[interactive] def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
case None =>
reloadSources(List(pos.source))
try typedTreeAt(pos)
@@ -1075,7 +1084,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
case t => t
}
val context = doLocateContext(pos)
-
val shouldTypeQualifier = tree0.tpe match {
case null => true
case mt: MethodType => mt.isImplicit
@@ -1130,7 +1138,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
for (view <- applicableViews) {
val vtree = viewApply(view)
val vpre = stabilizedType(vtree)
- for (sym <- vtree.tpe.members) {
+ for (sym <- vtree.tpe.members if sym.isTerm) {
addTypeMember(sym, vpre, inherited = false, view.tree.symbol)
}
}
@@ -1183,7 +1191,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
- /** Parses and enters given source file, stroring parse tree in response */
+ /** Parses and enters given source file, storing parse tree in response */
private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
respond(response) {
onUnitOf(source) { unit =>
diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
index 82e8de3f3d..7daf24c204 100644
--- a/src/interactive/scala/tools/nsc/interactive/Lexer.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
@@ -195,7 +195,7 @@ class Lexer(rd: Reader) {
case 'n' => putAcceptString("null"); token = NullLit
case '"' => getString()
case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => getNumber()
- case _ => error("unrecoginezed start of token: '"+ch+"'")
+ case _ => error("unrecognized start of token: '"+ch+"'")
}
//println("["+token+"]")
}
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala
index c838606f02..7796c65670 100644
--- a/src/interactive/scala/tools/nsc/interactive/Main.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Main.scala
@@ -12,7 +12,7 @@ package interactive
*/
object Main extends nsc.MainClass {
override def processSettingsHook(): Boolean = {
- if (this.settings.Yidedebug) {
+ def run(): Unit = {
this.settings.Xprintpos.value = true
this.settings.Yrangepos.value = true
val compiler = new interactive.Global(this.settings, this.reporter)
@@ -27,8 +27,9 @@ object Main extends nsc.MainClass {
case None => reporter.reset() // Causes other compiler errors to be ignored
}
askShutdown
- false
}
- else true
+ super.processSettingsHook() && (
+ if (this.settings.Yidedebug) { run() ; false } else true
+ )
}
}
diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
index 83f3fab925..ddc0c8a068 100644
--- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -6,7 +6,7 @@ import scala.language.implicitConversions
import scala.reflect.ClassTag
/** An abstract class for writing and reading Scala objects to and
- * from a legible representation. The presesentation follows the following grammar:
+ * from a legible representation. The representation follows the following grammar:
* {{{
* Pickled = `true` | `false` | `null` | NumericLit | StringLit |
* Labelled | Pickled `,` Pickled
@@ -85,7 +85,7 @@ abstract class Pickler[T] {
object Pickler {
/** A base class representing unpickler result. It has two subclasses:
- * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures,
+ * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
* where a value of the given type `T` could not be unpickled from input.
* @tparam T the type of unpickled values in case of success.
*/
@@ -154,7 +154,7 @@ object Pickler {
*/
def pkl[T: Pickler] = implicitly[Pickler[T]]
- /** A class represenenting `~`-pairs */
+ /** A class representing `~`-pairs */
case class ~[+S, +T](fst: S, snd: T)
/** A wrapper class to be able to use `~` s an infix method */
diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
index 681204172b..4962d80a8b 100644
--- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
@@ -4,7 +4,7 @@ import scala.tools.nsc.io.Path
/** Common settings for the test. */
private[tests] trait TestSettings {
- protected final val TIMEOUT = 10000 // timeout in milliseconds
+ protected final val TIMEOUT = 30000 // timeout in milliseconds
/** The root directory for this test suite, usually the test kind ("test/files/presentation"). */
protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse("."))
diff --git a/src/jline/LICENSE.txt b/src/jline/LICENSE.txt
deleted file mode 100644
index 1cdc44c211..0000000000
--- a/src/jline/LICENSE.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-Copyright (c) 2002-2006, Marc Prud'hommeaux <mwp1@cornell.edu>
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following
-conditions are met:
-
-Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-
-Redistributions in binary form must reproduce the above copyright
-notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with
-the distribution.
-
-Neither the name of JLine nor the names of its contributors
-may be used to endorse or promote products derived from this
-software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
-BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
-OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
-AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-OF THE POSSIBILITY OF SUCH DAMAGE.
-
diff --git a/src/jline/README.md b/src/jline/README.md
deleted file mode 100644
index 829476145d..0000000000
--- a/src/jline/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-Description
------------
-
-JLine 2.x
-
-License
--------
-
-BSD
-
-Building
---------
-
-### Requirements
-
-* SBT
-* Java 5+
-
-This is a fork with scala specific modifications.
-The original repository was: git://github.com/jdillon/jline2.git
-
-You can now build with sbt:
-
- sbt update proguard
diff --git a/src/jline/build.sbt b/src/jline/build.sbt
deleted file mode 100644
index 873f7574f1..0000000000
--- a/src/jline/build.sbt
+++ /dev/null
@@ -1,49 +0,0 @@
-seq(ProguardPlugin.proguardSettings :_*)
-
-name := "jline"
-
-organization := "org.scala-lang"
-
-version := "2.11.0-SNAPSHOT"
-
-scalaVersion := "2.10.1"
-
-// Only need these because of weird testing jline issues.
-retrieveManaged := true
-
-parallelExecution in Test := false
-
-libraryDependencies ++= Seq(
- "org.fusesource.jansi" % "jansi" % "1.10",
- "com.novocode" % "junit-interface" % "0.9" % "test->default"
-)
-
-javacOptions ++= Seq("-source", "1.5", "-target", "1.5")
-
-proguardOptions ++= Seq(
- "-dontshrink",
- "-keep class *",
- "-keepdirectories"
-)
-
-proguardInJars := Nil
-
-makeInJarFilter ~= { prevFilter =>
- val jansiFilter = List(
- "!META-INF/MANIFEST.MF",
- "org/fusesource/hawtjni/runtime",
- "org/fusesource/hawtjni/runtime/Callback.class",
- "org/fusesource/hawtjni/runtime/Library.class",
- "!org/fusesource/hawtjni/**",
- "!META-INF/maven/org.fusesource.hawtjni",
- "!META-INF/maven/org.fusesource.jansi",
- "!META-INF/maven/org.fusesource.hawtjni/**",
- "!META-INF/maven/org.fusesource.jansi/**"
- ).mkString(",")
- // In sbt 0.9.8 the scala-library.jar line was not necessary,
- // but in 0.9.9 it started showing up here. Who knows.
- file =>
- if (file startsWith "jansi-") jansiFilter
- else if (file == "scala-library.jar") "!**"
- else prevFilter(file)
-}
diff --git a/src/jline/manual-test.sh b/src/jline/manual-test.sh
deleted file mode 100755
index 744e1756e8..0000000000
--- a/src/jline/manual-test.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-#
-# Apparently the jline bundled with sbt interferes with testing some
-# changes: for instance after changing the keybindings I kept seeing
-# failures until I realized what was happening and bypassed sbt, like this.
-CP=lib_managed/jars/com.novocode/junit-interface/junit-interface-0.9.jar:lib_managed/jars/junit/junit-dep/junit-dep-4.8.2.jar:lib_managed/jars/org.fusesource.jansi/jansi/jansi-1.10.jar:lib_managed/jars/org.hamcrest/hamcrest-core/hamcrest-core-1.1.jar:lib_managed/jars/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.10/test-classes:target/scala-2.10/jline_2.10-2.11.0-SNAPSHOT.min.jar
-
-sbt proguard
-java -cp $CP org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
deleted file mode 100644
index 9b860e23c5..0000000000
--- a/src/jline/project/build.properties
+++ /dev/null
@@ -1 +0,0 @@
-sbt.version=0.12.3
diff --git a/src/jline/project/plugins.sbt b/src/jline/project/plugins.sbt
deleted file mode 100644
index 9c13de92d8..0000000000
--- a/src/jline/project/plugins.sbt
+++ /dev/null
@@ -1,3 +0,0 @@
-resolvers += Resolver.url("sbt-plugin-releases-scalasbt", url("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
-
-addSbtPlugin("org.scala-sbt" % "xsbt-proguard-plugin" % "0.1.3")
diff --git a/src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java b/src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java
deleted file mode 100644
index 94697137d3..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/AnsiWindowsTerminal.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2009 the original author(s).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * MODIFICATIONS: methods to deal with wrapping the output stream.
- */
-
-package scala.tools.jline;
-
-import org.fusesource.jansi.AnsiConsole;
-import org.fusesource.jansi.AnsiOutputStream;
-import org.fusesource.jansi.WindowsAnsiOutputStream;
-
-import java.io.ByteArrayOutputStream;
-import java.io.OutputStream;
-
-/**
- * ANSI-supported {@link WindowsTerminal}.
- *
- * @since 2.0
- */
-public class AnsiWindowsTerminal
- extends WindowsTerminal
-{
- private final boolean ansiSupported = detectAnsiSupport();
-
- @Override
- public OutputStream wrapOutIfNeeded(OutputStream out) {
- return wrapOutputStream(out);
- }
-
- /**
- * Returns an ansi output stream handler. We return whatever was
- * passed if we determine we cannot handle ansi based on Kernel32 calls.
- *
- * @return an @{link AltWindowAnsiOutputStream} instance or the passed
- * stream.
- */
- private static OutputStream wrapOutputStream(final OutputStream stream) {
- String os = System.getProperty("os.name");
- if( os.startsWith("Windows") ) {
- // On windows we know the console does not interpret ANSI codes..
- try {
- return new WindowsAnsiOutputStream(stream);
- } catch (Throwable ignore) {
- // this happens when JNA is not in the path.. or
- // this happens when the stdout is being redirected to a file.
- }
- // Use the ANSIOutputStream to strip out the ANSI escape sequences.
- return new AnsiOutputStream(stream);
- }
- return stream;
- }
-
- private static boolean detectAnsiSupport() {
- OutputStream out = AnsiConsole.wrapOutputStream(new ByteArrayOutputStream());
- try {
- out.close();
- }
- catch (Exception e) {
- // ignore;
- }
- return out instanceof WindowsAnsiOutputStream;
- }
-
- public AnsiWindowsTerminal() throws Exception {
- super();
- }
-
- @Override
- public boolean isAnsiSupported() {
- return ansiSupported;
- }
-
- @Override
- public boolean hasWeirdWrap() {
- return false;
- }
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java b/src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java
deleted file mode 100644
index ef7cf23c4a..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/NoInterruptUnixTerminal.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2009 the original author(s).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package scala.tools.jline;
-
-// Based on Apache Karaf impl
-
-/**
- * Non-interruptable (via CTRL-C) {@link UnixTerminal}.
- *
- * @since 2.0
- */
-public class NoInterruptUnixTerminal
- extends UnixTerminal
-{
- public NoInterruptUnixTerminal() throws Exception {
- super();
- }
-
- @Override
- public void init() throws Exception {
- super.init();
- getSettings().set("intr undef");
- }
-
- @Override
- public void restore() throws Exception {
- getSettings().set("intr ^C");
- super.restore();
- }
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/Terminal.java b/src/jline/src/main/java/scala/tools/jline/Terminal.java
deleted file mode 100644
index 79611c244d..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/Terminal.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * Representation of the input terminal for a platform.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public interface Terminal
-{
- void init() throws Exception;
-
- void restore() throws Exception;
-
- void reset() throws Exception;
-
- boolean isSupported();
-
- int getWidth();
-
- int getHeight();
-
- boolean isAnsiSupported();
-
- /**
- * When ANSI is not natively handled, the output will have to be wrapped.
- */
- OutputStream wrapOutIfNeeded(OutputStream out);
-
- /**
- * For terminals that don't wrap when character is written in last column,
- * only when the next character is written.
- * These are the ones that have 'am' and 'xn' termcap attributes (xterm and
- * rxvt flavors falls under that category)
- */
- boolean hasWeirdWrap();
-
- boolean isEchoEnabled();
-
- void setEchoEnabled(boolean enabled);
-
- int readCharacter(InputStream in) throws IOException;
-
- int readVirtualKey(InputStream in) throws IOException;
-
- InputStream getDefaultBindings();
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/TerminalFactory.java b/src/jline/src/main/java/scala/tools/jline/TerminalFactory.java
deleted file mode 100644
index 95b7c28bd5..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/TerminalFactory.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline;
-
-import scala.tools.jline.internal.Configuration;
-import scala.tools.jline.internal.Log;
-
-import java.text.MessageFormat;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Creates terminal instances.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public class TerminalFactory
-{
- public static final String JLINE_TERMINAL = "jline.terminal";
-
- public static final String AUTO = "auto";
-
- public static final String UNIX = "unix";
-
- public static final String WIN = "win";
-
- public static final String WINDOWS = "windows";
-
- public static final String NONE = "none";
-
- public static final String OFF = "off";
-
- public static final String FALSE = "false";
-
- private static final InheritableThreadLocal<Terminal> holder = new InheritableThreadLocal<Terminal>();
-
- public static synchronized Terminal create() {
- if (Log.TRACE) {
- //noinspection ThrowableInstanceNeverThrown
- Log.trace(new Throwable("CREATE MARKER"));
- }
-
- String type = Configuration.getString(JLINE_TERMINAL);
- if (type == null) {
- type = AUTO;
- }
-
- Log.debug("Creating terminal; type=", type);
-
- Terminal t;
- try {
- String tmp = type.toLowerCase();
-
- if (tmp.equals(UNIX)) {
- t = getFlavor(Flavor.UNIX);
- }
- else if (tmp.equals(WIN) | tmp.equals(WINDOWS)) {
- t = getFlavor(Flavor.WINDOWS);
- }
- else if (tmp.equals(NONE) || tmp.equals(OFF) || tmp.equals(FALSE)) {
- t = new UnsupportedTerminal();
- }
- else {
- if (tmp.equals(AUTO)) {
- String os = Configuration.getOsName();
- Flavor flavor = Flavor.UNIX;
- if (os.contains(WINDOWS)) {
- flavor = Flavor.WINDOWS;
- }
- t = getFlavor(flavor);
- }
- else {
- try {
- t = (Terminal) Thread.currentThread().getContextClassLoader().loadClass(type).newInstance();
- }
- catch (Exception e) {
- throw new IllegalArgumentException(MessageFormat.format("Invalid terminal type: {0}", type), e);
- }
- }
- }
- }
- catch (Exception e) {
- Log.error("Failed to construct terminal; falling back to unsupported", e);
- t = new UnsupportedTerminal();
- }
-
- Log.debug("Created Terminal: ", t);
-
- try {
- t.init();
- }
- catch (Exception e) {
- Log.error("Terminal initialization failed; falling back to unsupported", e);
- return new UnsupportedTerminal();
- }
-
- return t;
- }
-
- public static synchronized void reset() {
- holder.remove();
- }
-
- public static synchronized void resetIf(final Terminal t) {
- if (holder.get() == t) {
- reset();
- }
- }
-
- public static enum Type
- {
- AUTO,
- WINDOWS,
- UNIX,
- NONE
- }
-
- public static synchronized void configure(final String type) {
- assert type != null;
- System.setProperty(JLINE_TERMINAL, type);
- }
-
- public static synchronized void configure(final Type type) {
- assert type != null;
- configure(type.name().toLowerCase());
- }
-
- //
- // Flavor Support
- //
-
- public static enum Flavor
- {
- WINDOWS,
- UNIX
- }
-
- private static final Map<Flavor, Class<? extends Terminal>> FLAVORS = new HashMap<Flavor, Class<? extends Terminal>>();
-
- static {
- registerFlavor(Flavor.WINDOWS, AnsiWindowsTerminal.class);
- registerFlavor(Flavor.UNIX, UnixTerminal.class);
- }
-
- public static synchronized Terminal get() {
- Terminal t = holder.get();
- if (t == null) {
- t = create();
- holder.set(t);
- }
- return t;
- }
-
- public static Terminal getFlavor(final Flavor flavor) throws Exception {
- Class<? extends Terminal> type = FLAVORS.get(flavor);
- if (type != null) {
- return type.newInstance();
- }
-
- throw new InternalError();
- }
-
- public static void registerFlavor(final Flavor flavor, final Class<? extends Terminal> type) {
- FLAVORS.put(flavor, type);
- }
-
-}
\ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java b/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java
deleted file mode 100644
index 1ca12cb73f..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/TerminalSupport.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline;
-
-import scala.tools.jline.internal.Log;
-import scala.tools.jline.internal.Configuration;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * Provides support for {@link Terminal} instances.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public abstract class TerminalSupport
- implements Terminal
-{
- public static String DEFAULT_KEYBINDINGS_PROPERTIES = "keybindings.properties";
-
- public static final String JLINE_SHUTDOWNHOOK = "jline.shutdownhook";
-
- public static final int DEFAULT_WIDTH = 80;
-
- public static final int DEFAULT_HEIGHT = 24;
-
- private Thread shutdownHook;
-
- private boolean shutdownHookEnabled;
-
- private boolean supported;
-
- private boolean echoEnabled;
-
- private boolean ansiSupported;
-
- protected TerminalSupport(final boolean supported) {
- this.supported = supported;
- this.shutdownHookEnabled = Configuration.getBoolean(JLINE_SHUTDOWNHOOK, false);
- }
-
- public void init() throws Exception {
- installShutdownHook(new RestoreHook());
- }
-
- public void restore() throws Exception {
- TerminalFactory.resetIf(this);
- removeShutdownHook();
- }
-
- public void reset() throws Exception {
- restore();
- init();
- }
-
- // Shutdown hooks causes classloader leakage in sbt,
- // so they are only installed if -Djline.shutdownhook is true.
- protected void installShutdownHook(final Thread hook) {
- if (!shutdownHookEnabled) {
- Log.debug("Not install shutdown hook " + hook + " because they are disabled.");
- return;
- }
-
- assert hook != null;
-
- if (shutdownHook != null) {
- throw new IllegalStateException("Shutdown hook already installed");
- }
-
- try {
- Runtime.getRuntime().addShutdownHook(hook);
- shutdownHook = hook;
- }
- catch (AbstractMethodError e) {
- // JDK 1.3+ only method. Bummer.
- Log.trace("Failed to register shutdown hook: ", e);
- }
- }
-
- protected void removeShutdownHook() {
- if (!shutdownHookEnabled)
- return;
-
- if (shutdownHook != null) {
- try {
- Runtime.getRuntime().removeShutdownHook(shutdownHook);
- }
- catch (AbstractMethodError e) {
- // JDK 1.3+ only method. Bummer.
- Log.trace("Failed to remove shutdown hook: ", e);
- }
- catch (IllegalStateException e) {
- // The VM is shutting down, not a big deal; ignore
- }
- shutdownHook = null;
- }
- }
-
- public final boolean isSupported() {
- return supported;
- }
-
- public synchronized boolean isAnsiSupported() {
- return ansiSupported;
- }
-
- protected synchronized void setAnsiSupported(final boolean supported) {
- this.ansiSupported = supported;
- Log.debug("Ansi supported: ", supported);
- }
-
- /**
- * Subclass to change behavior if needed.
- * @return the passed out
- */
- public OutputStream wrapOutIfNeeded(OutputStream out) {
- return out;
- }
-
- /**
- * Defaults to true which was the behaviour before this method was added.
- */
- public boolean hasWeirdWrap() {
- return true;
- }
-
- public int getWidth() {
- return DEFAULT_WIDTH;
- }
-
- public int getHeight() {
- return DEFAULT_HEIGHT;
- }
-
- public synchronized boolean isEchoEnabled() {
- return echoEnabled;
- }
-
- public synchronized void setEchoEnabled(final boolean enabled) {
- this.echoEnabled = enabled;
- Log.debug("Echo enabled: ", enabled);
- }
-
- public int readCharacter(final InputStream in) throws IOException {
- return in.read();
- }
-
- public int readVirtualKey(final InputStream in) throws IOException {
- return readCharacter(in);
- }
-
- public InputStream getDefaultBindings() {
- return TerminalSupport.class.getResourceAsStream(DEFAULT_KEYBINDINGS_PROPERTIES);
- }
-
- //
- // RestoreHook
- //
-
- protected class RestoreHook
- extends Thread
- {
- public void start() {
- try {
- restore();
- }
- catch (Exception e) {
- Log.trace("Failed to restore: ", e);
- }
- }
- }
-}
\ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java b/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
deleted file mode 100644
index 94a1b98c0d..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/UnixTerminal.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline;
-
-import scala.tools.jline.console.Key;
-import scala.tools.jline.internal.Configuration;
-import scala.tools.jline.internal.Log;
-import scala.tools.jline.internal.ReplayPrefixOneCharInputStream;
-import scala.tools.jline.internal.TerminalLineSettings;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.HashMap;
-import java.util.Map;
-
-import static scala.tools.jline.UnixTerminal.UnixKey.*;
-import static scala.tools.jline.console.Key.*;
-
-/**
- * Terminal that is used for unix platforms. Terminal initialization
- * is handled by issuing the <em>stty</em> command against the
- * <em>/dev/tty</em> file to disable character echoing and enable
- * character input. All known unix systems (including
- * Linux and Macintosh OS X) support the <em>stty</em>), so this
- * implementation should work for an reasonable POSIX system.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:dwkemp@gmail.com">Dale Kemp</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @author <a href="mailto:jbonofre@apache.org">Jean-Baptiste Onofré</a>
- * @since 2.0
- */
-public class UnixTerminal
- extends TerminalSupport
-{
- private final TerminalLineSettings settings = new TerminalLineSettings();
-
- private final ReplayPrefixOneCharInputStream replayStream;
-
- private final InputStreamReader replayReader;
-
- public UnixTerminal() throws Exception {
- super(true);
-
- this.replayStream = new ReplayPrefixOneCharInputStream(Configuration.getInputEncoding());
- this.replayReader = new InputStreamReader(replayStream, replayStream.getEncoding());
- }
-
- protected TerminalLineSettings getSettings() {
- return settings;
- }
-
- /**
- * Remove line-buffered input by invoking "stty -icanon min 1"
- * against the current terminal.
- */
- @Override
- public void init() throws Exception {
- super.init();
-
- setAnsiSupported(true);
-
- // set the console to be character-buffered instead of line-buffered
- settings.set("-icanon min 1");
-
- setEchoEnabled(false);
- }
-
- /**
- * Restore the original terminal configuration, which can be used when
- * shutting down the console reader. The ConsoleReader cannot be
- * used after calling this method.
- */
- @Override
- public void restore() throws Exception {
- settings.restore();
- super.restore();
- // print a newline after the terminal exits.
- // this should probably be a configurable.
- System.out.println();
- }
-
- /**
- * Returns the value of <tt>stty columns</tt> param.
- */
- @Override
- public int getWidth() {
- int w = settings.getProperty("columns");
- return w < 1 ? DEFAULT_WIDTH : w;
- }
-
- /**
- * Returns the value of <tt>stty rows>/tt> param.
- */
- @Override
- public int getHeight() {
- int h = settings.getProperty("rows");
- return h < 1 ? DEFAULT_HEIGHT : h;
- }
-
- @Override
- public synchronized void setEchoEnabled(final boolean enabled) {
- try {
- if (enabled) {
- settings.set("echo");
- }
- else {
- settings.set("-echo");
- }
- super.setEchoEnabled(enabled);
- }
- catch (Exception e) {
- Log.error("Failed to ", (enabled ? "enable" : "disable"), " echo: ", e);
- }
- }
-
- @Override
- public int readVirtualKey(final InputStream in) throws IOException {
- int c = readCharacter(in);
-
- if (Key.valueOf(c) == DELETE && settings.getProperty("erase") == DELETE.code) {
- c = BACKSPACE.code;
- }
-
- UnixKey key = UnixKey.valueOf(c);
-
- // in Unix terminals, arrow keys are represented by a sequence of 3 characters. E.g., the up arrow key yields 27, 91, 68
- if (key == ARROW_START) {
- // also the escape key is 27 thats why we read until we have something different than 27
- // this is a bugfix, because otherwise pressing escape and than an arrow key was an undefined state
- while (key == ARROW_START) {
- c = readCharacter(in);
- key = UnixKey.valueOf(c);
- }
-
- if (key == ARROW_PREFIX || key == O_PREFIX) {
- c = readCharacter(in);
- key = UnixKey.valueOf(c);
-
- if (key == ARROW_UP) {
- return CTRL_P.code;
- }
- else if (key == ARROW_DOWN) {
- return CTRL_N.code;
- }
- else if (key == ARROW_LEFT) {
- return CTRL_B.code;
- }
- else if (key == ARROW_RIGHT) {
- return CTRL_F.code;
- }
- else if (key == HOME_CODE) {
- return CTRL_A.code;
- }
- else if (key == END_CODE) {
- return CTRL_E.code;
- }
- else if (key == DEL_THIRD) {
- readCharacter(in); // read 4th & ignore
- return DELETE.code;
- }
- }
- else if (c == 'b') { // alt-b: go back a word
- return CTRL_O.code; // PREV_WORD
- }
- else if (c == 'f') { // alt-f: go forward a word
- return CTRL_T.code; // NEXT_WORD
- }
- else if (key == DEL) { // alt-backspace: delete previous word
- return CTRL_W.code; // DELETE_PREV_WORD
- }
- else if (c == 'd') { // alt-d: delete next word
- return CTRL_X.code; // DELETE_NEXT_WORD
- }
-
- }
-
- // handle unicode characters, thanks for a patch from amyi@inf.ed.ac.uk
- if (c > 128) {
- // handle unicode characters longer than 2 bytes,
- // thanks to Marc.Herbert@continuent.com
- replayStream.setInput(c, in);
- // replayReader = new InputStreamReader(replayStream, encoding);
- c = replayReader.read();
- }
-
- return c;
- }
-
- /**
- * Unix keys.
- */
- public static enum UnixKey
- {
- ARROW_START(27),
-
- ARROW_PREFIX(91),
-
- ARROW_LEFT(68),
-
- ARROW_RIGHT(67),
-
- ARROW_UP(65),
-
- ARROW_DOWN(66),
-
- O_PREFIX(79),
-
- HOME_CODE(72),
-
- END_CODE(70),
-
- DEL_THIRD(51),
-
- DEL_SECOND(126),
-
- DEL(127);
-
-
- public final short code;
-
- UnixKey(final int code) {
- this.code = (short) code;
- }
-
- private static final Map<Short, UnixKey> codes;
-
- static {
- Map<Short, UnixKey> map = new HashMap<Short, UnixKey>();
-
- for (UnixKey key : UnixKey.values()) {
- map.put(key.code, key);
- }
-
- codes = map;
- }
-
- public static UnixKey valueOf(final int code) {
- return codes.get((short) code);
- }
- }
-} \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java b/src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java
deleted file mode 100644
index 04fe4f7f16..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/UnsupportedTerminal.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline;
-
-/**
- * An unsupported terminal.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public class UnsupportedTerminal
- extends TerminalSupport
-{
- public UnsupportedTerminal() {
- super(false);
- setAnsiSupported(false);
- setEchoEnabled(true);
- }
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java b/src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java
deleted file mode 100644
index 4c70155f59..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/WindowsTerminal.java
+++ /dev/null
@@ -1,468 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline;
-
-import java.io.FileDescriptor;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.HashMap;
-import java.util.Map;
-
-import scala.tools.jline.internal.Configuration;
-import org.fusesource.jansi.internal.WindowsSupport;
-
-import scala.tools.jline.internal.Log;
-import scala.tools.jline.internal.ReplayPrefixOneCharInputStream;
-
-import static scala.tools.jline.WindowsTerminal.ConsoleMode.*;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.*;
-import static scala.tools.jline.console.Key.*;
-
-/**
- * Terminal implementation for Microsoft Windows. Terminal initialization in
- * {@link #init} is accomplished by extracting the
- * <em>jline_<i>version</i>.dll</em>, saving it to the system temporary
- * directory (determined by the setting of the <em>java.io.tmpdir</em> System
- * property), loading the library, and then calling the Win32 APIs <a
- * href="http://msdn.microsoft.com/library/default.asp?
- * url=/library/en-us/dllproc/base/setconsolemode.asp">SetConsoleMode</a> and
- * <a href="http://msdn.microsoft.com/library/default.asp?
- * url=/library/en-us/dllproc/base/getconsolemode.asp">GetConsoleMode</a> to
- * disable character echoing.
- * <p/>
- * <p>
- * By default, the {@link #readCharacter} method will test whether
- * the specified {@link InputStream} is {@link System#in} or a wrapper around
- * {@link FileDescriptor#in}, and if so, will bypass the character reading to
- * directly invoke the readc() method in the JNI library. This is so the class
- * can read special keys (like arrow keys) which are otherwise inaccessible via
- * the {@link System#in} stream. Using JNI reading can be bypassed by setting
- * the <code>jline.WindowsTerminal.directConsole</code> system property
- * to <code>false</code>.
- * </p>
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public class WindowsTerminal
- extends TerminalSupport
-{
- public static final String JLINE_WINDOWS_TERMINAL_INPUT_ENCODING = "jline.WindowsTerminal.input.encoding";
-
- public static final String JLINE_WINDOWS_TERMINAL_OUTPUT_ENCODING = "jline.WindowsTerminal.output.encoding";
-
- public static final String JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE = "jline.WindowsTerminal.directConsole";
-
- public static final String WINDOWSBINDINGS_PROPERTIES = "windowsbindings.properties";
-
- public static final String ANSI = WindowsTerminal.class.getName() + ".ansi";
-
- private boolean directConsole;
-
- private int originalMode;
-
- private final ReplayPrefixOneCharInputStream replayStream;
-
- private final InputStreamReader replayReader;
-
- public WindowsTerminal() throws Exception {
- super(true);
-
- this.replayStream =
- new ReplayPrefixOneCharInputStream(Configuration.getString(JLINE_WINDOWS_TERMINAL_INPUT_ENCODING, Configuration.getFileEncoding()));
- this.replayReader = new InputStreamReader(replayStream, replayStream.getEncoding());
- }
-
- @Override
- public void init() throws Exception {
- super.init();
-
- setAnsiSupported(Boolean.getBoolean(ANSI));
-
- //
- // FIXME: Need a way to disable direct console and sysin detection muck
- //
-
- setDirectConsole(Boolean.getBoolean(JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE));
-
- this.originalMode = getConsoleMode();
- setConsoleMode(originalMode & ~ENABLE_ECHO_INPUT.code);
- setEchoEnabled(false);
- }
-
- /**
- * Restore the original terminal configuration, which can be used when
- * shutting down the console reader. The ConsoleReader cannot be
- * used after calling this method.
- */
- @Override
- public void restore() throws Exception {
- // restore the old console mode
- setConsoleMode(originalMode);
- super.restore();
- }
-
- @Override
- public int getWidth() {
- int w = getWindowsTerminalWidth();
- return w < 1 ? DEFAULT_WIDTH : w;
- }
-
- @Override
- public int getHeight() {
- int h = getWindowsTerminalHeight();
- return h < 1 ? DEFAULT_HEIGHT : h;
- }
-
- @Override
- public void setEchoEnabled(final boolean enabled) {
- // Must set these four modes at the same time to make it work fine.
- if (enabled) {
- setConsoleMode(getConsoleMode() |
- ENABLE_ECHO_INPUT.code |
- ENABLE_LINE_INPUT.code |
- ENABLE_PROCESSED_INPUT.code |
- ENABLE_WINDOW_INPUT.code);
- }
- else {
- setConsoleMode(getConsoleMode() &
- ~(ENABLE_LINE_INPUT.code |
- ENABLE_ECHO_INPUT.code |
- ENABLE_PROCESSED_INPUT.code |
- ENABLE_WINDOW_INPUT.code));
- }
- super.setEchoEnabled(enabled);
- }
-
- /**
- * Whether or not to allow the use of the JNI console interaction.
- */
- public void setDirectConsole(final boolean flag) {
- this.directConsole = flag;
- Log.debug("Direct console: ", flag);
- }
-
- /**
- * Whether or not to allow the use of the JNI console interaction.
- */
- public Boolean getDirectConsole() {
- return directConsole;
- }
-
-
- @Override
- public int readCharacter(final InputStream in) throws IOException {
- // if we can detect that we are directly wrapping the system
- // input, then bypass the input stream and read directly (which
- // allows us to access otherwise unreadable strokes, such as
- // the arrow keys)
-
- if (directConsole || isSystemIn(in)) {
- return readByte();
- }
- else {
- return super.readCharacter(in);
- }
- }
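Note that init() reads the directConsole flag with Boolean.getBoolean, so the system property only changes behaviour when it is explicitly set to "true"; in that case readCharacter above always takes the JNI readByte() path, even for streams that are not System.in. A usage sketch with the property name from JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE:

    // Sketch: force direct JNI console reads for every input stream.
    // Must be set before the terminal's init() runs.
    System.setProperty("jline.WindowsTerminal.directConsole", "true");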
-
- private boolean isSystemIn(final InputStream in) throws IOException {
- assert in != null;
-
- if (in == System.in) {
- return true;
- }
- else if (in instanceof FileInputStream && ((FileInputStream) in).getFD() == FileDescriptor.in) {
- return true;
- }
-
- return false;
- }
-
- @Override
- public int readVirtualKey(final InputStream in) throws IOException {
- int indicator = readCharacter(in);
-
- // in Windows terminals, arrow keys are represented by
- // a sequence of 2 characters. E.g., the up arrow
- // key yields 224, 72
- if (indicator == SPECIAL_KEY_INDICATOR.code || indicator == NUMPAD_KEY_INDICATOR.code) {
- int c = readCharacter(in);
- WindowsKey key = WindowsKey.valueOf(c);
- if (key == null)
- return 0;
-
- switch (key) {
- case UP_ARROW_KEY:
- return CTRL_P.code; // translate UP -> CTRL-P
-
- case LEFT_ARROW_KEY:
- return CTRL_B.code; // translate LEFT -> CTRL-B
-
- case RIGHT_ARROW_KEY:
- return CTRL_F.code; // translate RIGHT -> CTRL-F
-
- case DOWN_ARROW_KEY:
- return CTRL_N.code; // translate DOWN -> CTRL-N
-
- case DELETE_KEY:
- return CTRL_QM.code; // translate DELETE -> CTRL-?
-
- case HOME_KEY:
- return CTRL_A.code;
-
- case END_KEY:
- return CTRL_E.code;
-
- case PAGE_UP_KEY:
- return CTRL_K.code;
-
- case PAGE_DOWN_KEY:
- return CTRL_L.code;
-
- case ESCAPE_KEY:
- return CTRL_OB.code; // translate ESCAPE -> CTRL-[
-
- case INSERT_KEY:
- return CTRL_C.code;
-
- default:
- return 0;
- }
- }
- else if (indicator > 128) {
- // handle unicode characters longer than 2 bytes,
- // thanks to Marc.Herbert@continuent.com
- replayStream.setInput(indicator, in);
- // replayReader = new InputStreamReader(replayStream, encoding);
- indicator = replayReader.read();
-
- }
-
- return indicator;
- }
-
- @Override
- public InputStream getDefaultBindings() {
- return WindowsTerminal.class.getResourceAsStream(WINDOWSBINDINGS_PROPERTIES);
- }
-
- //
- // Native Bits
- //
- private int getConsoleMode() {
- return WindowsSupport.getConsoleMode();
- }
-
- private void setConsoleMode(int mode) {
- WindowsSupport.setConsoleMode(mode);
- }
-
- private int readByte() {
- return WindowsSupport.readByte();
- }
-
- private int getWindowsTerminalWidth() {
- return WindowsSupport.getWindowsTerminalWidth();
- }
-
- private int getWindowsTerminalHeight() {
- return WindowsSupport.getWindowsTerminalHeight();
- }
-
- /**
- * Console mode
- * <p/>
- * Constants copied from <tt>wincon.h</tt>.
- */
- public static enum ConsoleMode
- {
- /**
- * The ReadFile or ReadConsole function returns only when a carriage return
- * character is read. If this mode is disabled, the functions return when one
- * or more characters are available.
- */
- ENABLE_LINE_INPUT(2),
-
- /**
- * Characters read by the ReadFile or ReadConsole function are written to
- * the active screen buffer as they are read. This mode can be used only if
- * the ENABLE_LINE_INPUT mode is also enabled.
- */
- ENABLE_ECHO_INPUT(4),
-
- /**
- * CTRL+C is processed by the system and is not placed in the input buffer.
- * If the input buffer is being read by ReadFile or ReadConsole, other
- * control keys are processed by the system and are not returned in the
- * ReadFile or ReadConsole buffer. If the ENABLE_LINE_INPUT mode is also
- * enabled, backspace, carriage return, and linefeed characters are handled
- * by the system.
- */
- ENABLE_PROCESSED_INPUT(1),
-
- /**
- * User interactions that change the size of the console screen buffer are
- * reported in the console's input buffer. Information about these events
- * can be read from the input buffer by applications using
- * the ReadConsoleInput function, but not by those using ReadFile
- * or ReadConsole.
- */
- ENABLE_WINDOW_INPUT(8),
-
- /**
- * If the mouse pointer is within the borders of the console window and the
- * window has the keyboard focus, mouse events generated by mouse movement
- * and button presses are placed in the input buffer. These events are
- * discarded by ReadFile or ReadConsole, even when this mode is enabled.
- */
- ENABLE_MOUSE_INPUT(16),
-
- /**
- * When writing with WriteFile or WriteConsole, characters are parsed for
- * ASCII control sequences and the correct action is performed: backspace,
- * tab, bell, carriage return, and line feed characters are processed.
- */
- ENABLE_PROCESSED_OUTPUT(1),
-
- /**
- * When writing with WriteFile or WriteConsole, the cursor moves to the
- * beginning of the next row when it reaches the end of the current row,
- * so output wraps instead of overwriting the last column.
- */
- ENABLE_WRAP_AT_EOL_OUTPUT(2),;
-
- public final int code;
-
- ConsoleMode(final int code) {
- this.code = code;
- }
- }
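These constants are bit flags combined into a single mode word, which is why setEchoEnabled above ORs flags in and masks them out around SetConsoleMode. A minimal sketch of the same bit manipulation, using the values from this enum:

    // Sketch: toggling echo bits on a console-mode word (values from ConsoleMode above).
    int mode    = getConsoleMode();                                           // current mode word
    int echoOn  = mode | ENABLE_ECHO_INPUT.code | ENABLE_LINE_INPUT.code;    // set the bits
    int echoOff = mode & ~(ENABLE_ECHO_INPUT.code | ENABLE_LINE_INPUT.code); // clear the bits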
-
- /**
- * Windows keys.
- * <p/>
- * Constants copied from <tt>wincon.h</tt>.
- */
- public static enum WindowsKey
- {
- /**
- * On windows terminals, this character indicates that a 'special' key has
- * been pressed. This means that a key such as an arrow key, or delete, or
- * home, etc. will be indicated by the next character.
- */
- SPECIAL_KEY_INDICATOR(224),
-
- /**
- * On windows terminals, this character indicates that a special key on the
- * number pad has been pressed.
- */
- NUMPAD_KEY_INDICATOR(0),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR,
- * this character indicates a left arrow key press.
- */
- LEFT_ARROW_KEY(75),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates a right arrow key press.
- */
- RIGHT_ARROW_KEY(77),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates an up
- * arrow key press.
- */
- UP_ARROW_KEY(72),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates a down arrow key press.
- */
- DOWN_ARROW_KEY(80),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates that
- * the delete key was pressed.
- */
- DELETE_KEY(83),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates that
- * the home key was pressed.
- */
- HOME_KEY(71),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates that
- * the end key was pressed.
- */
- END_KEY(79),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates that
- * the page up key was pressed.
- */
- PAGE_UP_KEY(73),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates that
- * the page down key was pressed.
- */
- PAGE_DOWN_KEY(81),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR
- * this character indicates that
- * the insert key was pressed.
- */
- INSERT_KEY(82),
-
- /**
- * When following the SPECIAL_KEY_INDICATOR or NUMPAD_KEY_INDICATOR,
- * this character indicates that the escape key was pressed.
- */
- ESCAPE_KEY(0),;
-
- public final int code;
-
- WindowsKey(final int code) {
- this.code = code;
- }
-
- private static final Map<Integer, WindowsKey> codes;
-
- static {
- Map<Integer, WindowsKey> map = new HashMap<Integer, WindowsKey>();
-
- for (WindowsKey key : WindowsKey.values()) {
- map.put(key.code, key);
- }
-
- codes = map;
- }
-
- public static WindowsKey valueOf(final int code) {
- return codes.get(code);
- }
- }
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
deleted file mode 100644
index a375b84a5c..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ /dev/null
@@ -1,2185 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console;
-
-import scala.tools.jline.Terminal;
-import scala.tools.jline.TerminalFactory;
-import scala.tools.jline.console.completer.CandidateListCompletionHandler;
-import scala.tools.jline.console.completer.Completer;
-import scala.tools.jline.console.completer.CompletionHandler;
-import scala.tools.jline.console.history.History;
-import scala.tools.jline.console.history.MemoryHistory;
-import scala.tools.jline.internal.Configuration;
-import scala.tools.jline.internal.Log;
-import org.fusesource.jansi.AnsiOutputStream;
-
-import java.awt.Toolkit;
-import java.awt.datatransfer.Clipboard;
-import java.awt.datatransfer.DataFlavor;
-import java.awt.datatransfer.Transferable;
-import java.awt.datatransfer.UnsupportedFlavorException;
-import java.awt.event.ActionListener;
-import java.io.*;
-import java.util.*;
-
-/**
- * A reader for console applications. It supports custom tab-completion,
- * saveable command history, and command line editing. On some platforms,
- * platform-specific commands will need to be issued before the reader will
- * function properly. See {@link jline.Terminal#init} for convenience
- * methods for issuing platform-specific setup commands.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- */
-public class ConsoleReader
-{
- public static final String JLINE_NOBELL = "jline.nobell";
-
- public static final String JLINE_EXPANDEVENTS = "jline.expandevents";
-
- public static final char BACKSPACE = '\b';
-
- public static final char RESET_LINE = '\r';
-
- public static final char KEYBOARD_BELL = '\07';
-
- public static final char NULL_MASK = 0;
-
- public static final int TAB_WIDTH = 4;
-
- private static final ResourceBundle
- resources = ResourceBundle.getBundle(CandidateListCompletionHandler.class.getName());
-
- private final Terminal terminal;
-
- private InputStream in;
-
- private final Writer out;
-
- private final CursorBuffer buf = new CursorBuffer();
-
- private String prompt;
-
- private boolean bellEnabled = true;
-
- private boolean expandEvents = false;
-
- private Character mask;
-
- private Character echoCharacter;
-
- private StringBuffer searchTerm = null;
-
- private String previousSearchTerm = "";
-
- private int searchIndex = -1;
-
- public ConsoleReader(final InputStream in, final OutputStream out, final InputStream bindings, final Terminal term) throws
- IOException
- {
- this.in = in;
- this.terminal = term != null ? term : TerminalFactory.get();
- this.out = new PrintWriter(getTerminal().wrapOutIfNeeded(out));
- this.keyBindings = loadKeyBindings(bindings);
-
- setBellEnabled(!Configuration.getBoolean(JLINE_NOBELL, false));
- setExpandEvents(Configuration.getBoolean(JLINE_EXPANDEVENTS, false));
- }
-
- /**
- * @deprecated use {@link #ConsoleReader(InputStream, OutputStream, InputStream, Terminal)}
- * to let the terminal wrap the output stream if needed.
- */
- public ConsoleReader(final InputStream in, final Writer out, final InputStream bindings, final Terminal term) throws
- IOException
- {
- this.in = in;
- this.out = out;
- this.terminal = term != null ? term : TerminalFactory.get();
- this.keyBindings = loadKeyBindings(bindings);
-
- setBellEnabled(!Configuration.getBoolean(JLINE_NOBELL, false));
- }
-
- /**
- * @deprecated use {@link #ConsoleReader(InputStream, OutputStream, InputStream, Terminal)}
- * to let the terminal wrap the output stream if needed.
- */
- public ConsoleReader(final InputStream in, final Writer out, final Terminal term) throws IOException {
- this(in, out, null, term);
- }
-
- /**
- * @deprecated use {@link #ConsoleReader(InputStream, OutputStream, InputStream, Terminal)}
- * to let the terminal wrap the output stream if needed.
- */
- public ConsoleReader(final InputStream in, final Writer out) throws IOException
- {
- this(in, out, null, null);
- }
-
- /**
- * Create a new reader using {@link FileDescriptor#in} for input and
- * {@link System#out} for output.
- * <p/>
- * {@link FileDescriptor#in} is used because it has a better chance of not being buffered.
- */
- public ConsoleReader() throws IOException {
- this(new FileInputStream(FileDescriptor.in), System.out, null, null );
- }
-
- // FIXME: Only used for tests
-
- void setInput(final InputStream in) {
- this.in = in;
- }
-
- public InputStream getInput() {
- return in;
- }
-
- public Writer getOutput() {
- return out;
- }
-
- public Terminal getTerminal() {
- return terminal;
- }
-
- public CursorBuffer getCursorBuffer() {
- return buf;
- }
-
- public void setBellEnabled(final boolean enabled) {
- this.bellEnabled = enabled;
- }
-
- public boolean isBellEnabled() {
- return bellEnabled;
- }
-
- public void setExpandEvents(final boolean expand) {
- this.expandEvents = expand;
- }
-
- public boolean getExpandEvents() {
- return expandEvents;
- }
-
- public void setPrompt(final String prompt) {
- this.prompt = prompt;
- }
-
- public String getPrompt() {
- return prompt;
- }
-
- /**
- * Set the echo character. For example, to have "*" echoed when a password is typed:
- * <p/>
- * <pre>
- * myConsoleReader.setEchoCharacter(new Character('*'));
- * </pre>
- * <p/>
- * Setting the character to
- * <p/>
- * <pre>
- * null
- * </pre>
- * <p/>
- * will restore normal character echoing. Setting the character to
- * <p/>
- * <pre>
- * new Character(0)
- * </pre>
- * <p/>
- * will cause nothing to be echoed.
- *
- * @param c the character to echo to the console in place of the typed character.
- */
- public void setEchoCharacter(final Character c) {
- this.echoCharacter = c;
- }
-
- /**
- * Returns the echo character.
- */
- public Character getEchoCharacter() {
- return echoCharacter;
- }
-
- /**
- * Erase the current line.
- *
- * @return false if we failed (e.g., the buffer was empty)
- */
- protected final boolean resetLine() throws IOException {
- if (buf.cursor == 0) {
- return false;
- }
-
- backspaceAll();
-
- return true;
- }
-
- int getCursorPosition() {
- // FIXME: does not handle anything but a line with a prompt absolute position
- String prompt = getPrompt();
- return ((prompt == null) ? 0 : stripAnsi(lastLine(prompt)).length()) + buf.cursor;
- }
-
- /**
- * Returns the text after the last '\n'.
- * str is returned if no '\n' characters are present.
- * An empty string is returned if str is null.
- */
- private String lastLine(String str) {
- if (str == null) return "";
- int last = str.lastIndexOf("\n");
-
- if (last >= 0) {
- return str.substring(last + 1, str.length());
- }
-
- return str;
- }
-
- private String stripAnsi(String str) {
- if (str == null) return "";
- try {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- AnsiOutputStream aos = new AnsiOutputStream(baos);
- aos.write(str.getBytes());
- aos.flush();
- return baos.toString();
- } catch (IOException e) {
- return str;
- }
- }
-
- /**
- * Move the cursor position to the specified absolute index.
- */
- public final boolean setCursorPosition(final int position) throws IOException {
- return moveCursor(position - buf.cursor) != 0;
- }
-
- /**
- * Set the current buffer's content to the specified {@link String}. The
- * visual console will be modified to show the current buffer.
- *
- * @param buffer the new contents of the buffer.
- */
- private void setBuffer(final String buffer) throws IOException {
- // don't bother modifying it if it is unchanged
- if (buffer.equals(buf.buffer.toString())) {
- return;
- }
-
- // obtain the difference between the current buffer and the new one
- int sameIndex = 0;
-
- for (int i = 0, l1 = buffer.length(), l2 = buf.buffer.length(); (i < l1)
- && (i < l2); i++) {
- if (buffer.charAt(i) == buf.buffer.charAt(i)) {
- sameIndex++;
- }
- else {
- break;
- }
- }
-
- int diff = buf.cursor - sameIndex;
- if (diff < 0) { // we can't backspace here so try from the end of the buffer
- moveToEnd();
- diff = buf.buffer.length() - sameIndex;
- }
-
- backspace(diff); // go back for the differences
- killLine(); // clear to the end of the line
- buf.buffer.setLength(sameIndex); // the new length
- putString(buffer.substring(sameIndex)); // append the differences
- }
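setBuffer avoids repainting the whole line by finding the common prefix of the old and new contents and rewriting only the differing tail. A worked example of the steps above, assuming the old buffer is "hello world" (cursor at the end) and the new one is "hello there":

    // Worked example of the incremental redraw in setBuffer (illustrative only):
    //   sameIndex = 6                  "hello " is the common prefix
    //   diff = 11 - 6 = 5              the cursor backs up over "world"
    //   backspace(5); killLine();      erase the differing tail on screen
    //   buf.buffer.setLength(6);       keep only "hello " in the buffer
    //   putString("there");            write the new tail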
-
- private void setBuffer(final CharSequence buffer) throws IOException {
- setBuffer(String.valueOf(buffer));
- }
-
- /**
- * Output the prompt + the current buffer
- */
- public final void drawLine() throws IOException {
- String prompt = getPrompt();
- if (prompt != null) {
- print(prompt);
- }
-
- print(buf.buffer.toString());
-
- if (buf.length() != buf.cursor) { // not at end of line
- back(buf.length() - buf.cursor - 1);
- }
- // force drawBuffer to check for weird wrap (after clear screen)
- drawBuffer();
- }
-
- /**
- * Clear the line and redraw it.
- */
- public final void redrawLine() throws IOException {
- print(RESET_LINE);
-// flush();
- drawLine();
- }
-
- /**
- * Clear the buffer and add its contents to the history.
- *
- * @return the former contents of the buffer.
- */
- final String finishBuffer() throws IOException { // FIXME: Package protected because used by tests
- String str = buf.buffer.toString();
-
- if (expandEvents) {
- str = expandEvents(str);
- }
-
- // we only add it to the history if the buffer is not empty
- // and if mask is null, since having a mask typically means
- // the string was a password. We clear the mask after this call
- if (str.length() > 0) {
- if (mask == null && isHistoryEnabled()) {
- history.add(str);
- }
- else {
- mask = null;
- }
- }
-
- history.moveToEnd();
-
- buf.buffer.setLength(0);
- buf.cursor = 0;
-
- return str;
- }
-
- /**
- * Expand event designators such as !!, !#, !3, etc.
- * See http://www.gnu.org/software/bash/manual/html_node/Event-Designators.html
- *
- * @param str the line whose event designators should be expanded
- * @return the line with any event designators expanded
- */
- protected String expandEvents(String str) throws IOException {
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < str.length(); i++) {
- char c = str.charAt(i);
- switch (c) {
- case '!':
- if (i + 1 < str.length()) {
- c = str.charAt(++i);
- boolean neg = false;
- String rep = null;
- int i1, idx;
- switch (c) {
- case '!':
- if (history.size() == 0) {
- throw new IllegalArgumentException("!!: event not found");
- }
- rep = history.get(history.index() - 1).toString();
- break;
- case '#':
- sb.append(sb.toString());
- break;
- case '?':
- i1 = str.indexOf('?', i + 1);
- if (i1 < 0) {
- i1 = str.length();
- }
- String sc = str.substring(i + 1, i1);
- i = i1;
- idx = searchBackwards(sc);
- if (idx < 0) {
- throw new IllegalArgumentException("!?" + sc + ": event not found");
- } else {
- rep = history.get(idx).toString();
- }
- break;
- case ' ':
- case '\t':
- sb.append('!');
- sb.append(c);
- break;
- case '-':
- neg = true;
- i++;
- // fall through
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- i1 = i;
- for (; i < str.length(); i++) {
- c = str.charAt(i);
- if (c < '0' || c > '9') {
- break;
- }
- }
- idx = 0;
- try {
- idx = Integer.parseInt(str.substring(i1, i));
- } catch (NumberFormatException e) {
- throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
- }
- if (neg) {
- if (idx < history.size()) {
- rep = (history.get(history.index() - idx)).toString();
- } else {
- throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
- }
- } else {
- if (idx >= history.index() - history.size() && idx < history.index()) {
- rep = (history.get(idx)).toString();
- } else {
- throw new IllegalArgumentException((neg ? "!-" : "!") + str.substring(i1, i) + ": event not found");
- }
- }
- break;
- default:
- String ss = str.substring(i);
- i = str.length();
- idx = searchBackwards(ss, history.index(), true);
- if (idx < 0) {
- throw new IllegalArgumentException("!" + ss + ": event not found");
- } else {
- rep = history.get(idx).toString();
- }
- break;
- }
- if (rep != null) {
- sb.append(rep);
- }
- } else {
- sb.append(c);
- }
- break;
- case '^':
- if (i == 0) {
- int i1 = str.indexOf('^', i + 1);
- int i2 = str.indexOf('^', i1 + 1);
- if (i2 < 0) {
- i2 = str.length();
- }
- if (i1 > 0 && i2 > 0) {
- String s1 = str.substring(i + 1, i1);
- String s2 = str.substring(i1 + 1, i2);
- String s = history.get(history.index() - 1).toString().replace(s1, s2);
- sb.append(s);
- i = i2 + 1;
- break;
- }
- }
- sb.append(c);
- break;
- default:
- sb.append(c);
- break;
- }
- }
- String result = sb.toString();
- if (!str.equals(result)) {
- print(result);
- println();
- flush();
- }
- return result;
-
- }
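expandEvents implements a subset of bash's event designators (see the link in the javadoc above). A few illustrative expansions, assuming the most recent history entry is "ls -la"; the behaviour is inferred from the branches above, not an exhaustive specification:

    // "!!"        -> "ls -la"   the previous command
    // "!-1"       -> "ls -la"   one command back
    // "!ls"       -> "ls -la"   the most recent command starting with "ls"
    // "!?la"      -> "ls -la"   the most recent command containing "la"
    // "^-la^-lh^" -> "ls -lh"   substitute "-la" with "-lh" in the previous command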
-
- /**
- * Write out the specified string to the buffer and the output stream.
- */
- public final void putString(final CharSequence str) throws IOException {
- buf.write(str);
- print(str);
- drawBuffer();
- }
-
- /**
- * Output the specified character, both to the buffer and the output stream.
- */
- private void putChar(final int c, final boolean print) throws IOException {
- buf.write((char) c);
-
- if (print) {
- if (mask == null) {
- // no masking
- print(c);
- }
- else if (mask == NULL_MASK) {
- // Don't print anything
- }
- else {
- print(mask);
- }
-
- drawBuffer();
- }
- }
-
- /**
- * Redraw the rest of the buffer from the cursor onwards. This is necessary
- * for inserting text into the buffer.
- *
- * @param clear the number of characters to clear after the end of the buffer
- */
- private void drawBuffer(final int clear) throws IOException {
- // debug ("drawBuffer: " + clear);
- if (buf.cursor == buf.length() && clear == 0) {
- } else {
- char[] chars = buf.buffer.substring(buf.cursor).toCharArray();
- if (mask != null) {
- Arrays.fill(chars, mask);
- }
- if (getTerminal().hasWeirdWrap()) {
- // need to determine if wrapping will occur:
- int width = getTerminal().getWidth();
- int pos = getCursorPosition();
- for (int i = 0; i < chars.length; i++) {
- print(chars[i]);
- if ((pos + i + 1) % width == 0) {
- print(32); // move cursor to next line by printing dummy space
- print(13); // CR / not newline.
- }
- }
- } else {
- print(chars);
- }
- clearAhead(clear, chars.length);
- if (getTerminal().isAnsiSupported()) {
- if (chars.length > 0) {
- back(chars.length);
- }
- } else {
- back(chars.length);
- }
- }
- if (getTerminal().hasWeirdWrap()) {
- int width = getTerminal().getWidth();
- // best guess on whether the cursor is in that weird location...
- // Need to do this without calling ansi cursor location methods
- // otherwise it breaks paste of wrapped lines in xterm.
- if (getCursorPosition() > 0 && (getCursorPosition() % width == 0)
- && buf.cursor == buf.length() && clear == 0) {
- // the following workaround is reverse-engineered from looking
- // at what bash sent to the terminal in the same situation
- print(32); // move cursor to next line by printing dummy space
- print(13); // CR / not newline.
- }
- }
- }
-
- /**
- * Redraw the rest of the buffer from the cursor onwards. This is necessary
- * for inserting text into the buffer.
- */
- private void drawBuffer() throws IOException {
- drawBuffer(0);
- }
-
- /**
- * Clear ahead the specified number of characters without moving the cursor.
- *
- * @param num the number of characters to clear
- * @param delta the difference between the internal cursor and the screen
- * cursor - if > 0, assume some stuff was printed and weird wrap has to be
- * checked
- */
- private void clearAhead(final int num, int delta) throws IOException {
- if (num == 0) {
- return;
- }
-
- if (getTerminal().isAnsiSupported()) {
- int width = getTerminal().getWidth();
- int screenCursorCol = getCursorPosition() + delta;
- // clear current line
- printAnsiSequence("K");
- // if cursor+num wraps, then we need to clear the line(s) below too
- int curCol = screenCursorCol % width;
- int endCol = (screenCursorCol + num - 1) % width;
- int lines = num / width;
- if (endCol < curCol) lines++;
- for (int i = 0; i < lines; i++) {
- printAnsiSequence("B");
- printAnsiSequence("2K");
- }
- for (int i = 0; i < lines; i++) {
- printAnsiSequence("A");
- }
- return;
- }
-
- // print blank extra characters
- print(' ', num);
-
- // we need to flush here so a "clever" console doesn't just ignore the redundancy
- // of a space followed by a backspace.
-// flush();
-
- // reset the visual cursor
- back(num);
-
-// flush();
- }
-
- /**
- * Move the visual cursor backwards without modifying the buffer cursor.
- */
- protected void back(final int num) throws IOException {
- if (num == 0) return;
- if (getTerminal().isAnsiSupported()) {
- int width = getTerminal().getWidth();
- int cursor = getCursorPosition();
- int realCursor = cursor + num;
- int realCol = realCursor % width;
- int newCol = cursor % width;
- int moveup = num / width;
- int delta = realCol - newCol;
- if (delta < 0) moveup++;
- if (moveup > 0) {
- printAnsiSequence(moveup + "A");
- }
- printAnsiSequence((1 + newCol) + "G");
- return;
- }
- print(BACKSPACE, num);
-// flush();
- }
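Both clearAhead and back drive the cursor with raw ANSI control sequences through printAnsiSequence (defined near the end of this class), which prefixes its argument with ESC '['. For reference, the sequences used in these two methods mean roughly:

    // ANSI CSI sequences used above (CSI = ESC '['):
    //   CSI K    erase from the cursor to the end of the line
    //   CSI 2K   erase the entire line
    //   CSI nA   move the cursor up n lines
    //   CSI nB   move the cursor down n lines
    //   CSI nG   move the cursor to (1-based) column n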
-
- /**
- * Flush the console output stream. This is important when printing out single characters (like a backspace or
- * a keyboard bell) that we want the console to handle immediately.
- */
- public void flush() throws IOException {
- out.flush();
- }
-
- private int backspaceAll() throws IOException {
- return backspace(Integer.MAX_VALUE);
- }
-
- /**
- * Issue <em>num</em> backspaces.
- *
- * @return the number of characters backed up
- */
- private int backspace(final int num) throws IOException {
- if (buf.cursor == 0) {
- return 0;
- }
-
- int count = 0;
-
- int termwidth = getTerminal().getWidth();
- int lines = getCursorPosition() / termwidth;
- count = moveCursor(-1 * num) * -1;
- buf.buffer.delete(buf.cursor, buf.cursor + count);
- if (getCursorPosition() / termwidth != lines) {
- if (getTerminal().isAnsiSupported()) {
- // debug("doing backspace redraw: " + getCursorPosition() + " on " + termwidth + ": " + lines);
- printAnsiSequence("K");
- // if cursor+num wraps, then we need to clear the line(s) below too
- // last char printed is one pos less than cursor so we subtract
- // one
-/*
- // TODO: fixme (does not work - test with reverse search with wrapping line and CTRL-E)
- int endCol = (getCursorPosition() + num - 1) % termwidth;
- int curCol = getCursorPosition() % termwidth;
- if (endCol < curCol) lines++;
- for (int i = 1; i < lines; i++) {
- printAnsiSequence("B");
- printAnsiSequence("2K");
- }
- for (int i = 1; i < lines; i++) {
- printAnsiSequence("A");
- }
- return count;
-*/
- }
- }
- drawBuffer(count);
-
- return count;
- }
-
- /**
- * Issue a backspace.
- *
- * @return true if successful
- */
- public boolean backspace() throws IOException {
- return backspace(1) == 1;
- }
-
- protected boolean moveToEnd() throws IOException {
- return moveCursor(buf.length() - buf.cursor) > 0;
- }
-
- /**
- * Delete the character at the current position and redraw the remainder of the buffer.
- */
- private boolean deleteCurrentCharacter() throws IOException {
- if (buf.length() == 0 || buf.cursor == buf.length()) {
- return false;
- }
-
- buf.buffer.deleteCharAt(buf.cursor);
- drawBuffer(1);
- return true;
- }
-
- private boolean previousWord() throws IOException {
- while (isDelimiter(buf.charLeftOfCursor()) && (moveCursor(-1) != 0)) {
- // nothing
- }
-
- while (!isDelimiter(buf.charLeftOfCursor()) && (moveCursor(-1) != 0)) {
- // nothing
- }
-
- return true;
- }
-
- private boolean nextWord() throws IOException {
- while (isDelimiter(buf.charAtCursor()) && (moveCursor(1) != 0)) {
- // nothing
- }
-
- while (!isDelimiter(buf.charAtCursor()) && (moveCursor(1) != 0)) {
- // nothing
- }
-
- return true;
- }
-
- private boolean deletePreviousWord() throws IOException {
- while (isDelimiter(buf.charLeftOfCursor()) && backspace()) {
- // nothing
- }
-
- while (!isDelimiter(buf.charLeftOfCursor()) && backspace()) {
- // nothing
- }
-
- return true;
- }
-
- private boolean deleteNextWord() throws IOException {
- while (isDelimiter(buf.charAtCursor()) && deleteCurrentCharacter()) {
- // nothing
- }
-
- while (!isDelimiter(buf.charAtCursor()) && deleteCurrentCharacter()) {
- // nothing
- }
-
- return true;
- }
-
- /**
- * Move the cursor <i>num</i> characters.
- *
- * @param num If less than 0, move abs(<i>num</i>) characters to the left, otherwise move <i>num</i> characters to the right.
- * @return The number of spaces we moved
- */
- public int moveCursor(final int num) throws IOException {
- int where = num;
-
- if ((buf.cursor == 0) && (where <= 0)) {
- return 0;
- }
-
- if ((buf.cursor == buf.buffer.length()) && (where >= 0)) {
- return 0;
- }
-
- if ((buf.cursor + where) < 0) {
- where = -buf.cursor;
- }
- else if ((buf.cursor + where) > buf.buffer.length()) {
- where = buf.buffer.length() - buf.cursor;
- }
-
- moveInternal(where);
-
- return where;
- }
-
- /**
- * Move the cursor <i>where</i> characters, without checking the current buffer.
- *
- * @param where the number of characters to move to the right or left.
- */
- private void moveInternal(final int where) throws IOException {
- // debug ("move cursor " + where + " ("
- // + buf.cursor + " => " + (buf.cursor + where) + ")");
- buf.cursor += where;
-
- if (getTerminal().isAnsiSupported()) {
- if (where < 0) {
- back(Math.abs(where));
- } else {
- int width = getTerminal().getWidth();
- int cursor = getCursorPosition();
- int oldLine = (cursor - where) / width;
- int newLine = cursor / width;
- if (newLine > oldLine) {
- if (getTerminal().hasWeirdWrap()) {
- // scroll up if at bottom
- // note:
- // on rxvt/cygwin getTerminal().getHeight() is incorrect
- // Mac OS X xterm does not seem to support scrolling
- if (getCurrentAnsiRow() == getTerminal().getHeight()) {
- printAnsiSequence((newLine - oldLine) + "S");
- }
- }
- printAnsiSequence((newLine - oldLine) + "B");
- }
- printAnsiSequence(1 +(cursor % width) + "G");
- }
-// flush();
- return;
- }
-
- char c;
-
- if (where < 0) {
- int len = 0;
- for (int i = buf.cursor; i < buf.cursor - where; i++) {
- if (buf.buffer.charAt(i) == '\t') {
- len += TAB_WIDTH;
- }
- else {
- len++;
- }
- }
-
- char chars[] = new char[len];
- Arrays.fill(chars, BACKSPACE);
- out.write(chars);
-
- return;
- }
- else if (buf.cursor == 0) {
- return;
- }
- else if (mask != null) {
- c = mask;
- }
- else {
- print(buf.buffer.substring(buf.cursor - where, buf.cursor).toCharArray());
- return;
- }
-
- // null character mask: don't output anything
- if (mask == NULL_MASK) {
- return;
- }
-
- print(c, Math.abs(where));
- }
-
- // FIXME: replace() is not used
-
- public final boolean replace(final int num, final String replacement) {
- buf.buffer.replace(buf.cursor - num, buf.cursor, replacement);
- try {
- moveCursor(-num);
- drawBuffer(Math.max(0, num - replacement.length()));
- moveCursor(replacement.length());
- }
- catch (IOException e) {
- e.printStackTrace();
- return false;
- }
- return true;
- }
-
- //
- // Key reading
- //
-
- /**
- * Read a character from the console.
- *
- * @return the character, or -1 if an EOF is received.
- */
- public final int readVirtualKey() throws IOException {
- int c = getTerminal().readVirtualKey(in);
-
- Log.trace("Keystroke: ", c);
-
- // clear any echo characters
- clearEcho(c);
-
- return c;
- }
-
- /**
- * Clear the echoed characters for the specified character code.
- */
- private int clearEcho(final int c) throws IOException {
- // if the terminal is not echoing, then ignore
- if (!getTerminal().isEchoEnabled()) {
- return 0;
- }
-
- // otherwise, clear
- int num = countEchoCharacters((char) c);
- back(num);
- drawBuffer(num);
-
- return num;
- }
-
- private int countEchoCharacters(final char c) {
- // tabs are special: we need to determine the number of spaces
- // to cancel based on what our current cursor position is
- if (c == 9) {
- int tabStop = 8; // will this ever be different?
- int position = getCursorPosition();
-
- return tabStop - (position % tabStop);
- }
-
- return getPrintableCharacters(c).length();
- }
-
- /**
- * Return the number of characters that will be printed when the specified
- * character is echoed to the screen
- *
- * Adapted from cat by Torbjorn Granlund, as repeated in stty by David MacKenzie.
- */
- private StringBuilder getPrintableCharacters(final char ch) {
- StringBuilder sbuff = new StringBuilder();
-
- if (ch >= 32) {
- if (ch < 127) {
- sbuff.append(ch);
- }
- else if (ch == 127) {
- sbuff.append('^');
- sbuff.append('?');
- }
- else {
- sbuff.append('M');
- sbuff.append('-');
-
- if (ch >= (128 + 32)) {
- if (ch < (128 + 127)) {
- sbuff.append((char) (ch - 128));
- }
- else {
- sbuff.append('^');
- sbuff.append('?');
- }
- }
- else {
- sbuff.append('^');
- sbuff.append((char) (ch - 128 + 64));
- }
- }
- }
- else {
- sbuff.append('^');
- sbuff.append((char) (ch + 64));
- }
-
- return sbuff;
- }
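The branches above reproduce the classic cat -v / stty caret notation for non-printable characters. A few concrete mappings, derived from the code:

    // 0x01 (CTRL-A) -> "^A"
    // 0x7F (DEL)    -> "^?"
    // 0x81          -> "M-^A"   (meta plus a control character)
    // 0xE9          -> "M-i"    (0xE9 - 128 = 0x69 = 'i')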
-
- public final int readCharacter(final char... allowed) throws IOException {
- // if we restrict to a limited set and the current character is not in the set, then try again.
- char c;
-
- Arrays.sort(allowed); // always need to sort before binarySearch
-
- while (Arrays.binarySearch(allowed, c = (char) readVirtualKey()) < 0) {
- // nothing
- }
-
- return c;
- }
-
- //
- // Key Bindings
- //
-
- public static final String JLINE_COMPLETION_THRESHOLD = "jline.completion.threshold";
-
- public static final String JLINE_KEYBINDINGS = "jline.keybindings";
-
- public static final String JLINEBINDINGS_PROPERTIES = ".jlinebindings.properties";
-
- /**
- * The map for logical operations.
- */
- private final short[] keyBindings;
-
- private short[] loadKeyBindings(InputStream input) throws IOException {
- if (input == null) {
- try {
- File file = new File(Configuration.getUserHome(), JLINEBINDINGS_PROPERTIES);
-
- String path = Configuration.getString(JLINE_KEYBINDINGS);
- if (path != null) {
- file = new File(path);
- }
-
- if (file.isFile()) {
- Log.debug("Loading user bindings from: ", file);
- input = new FileInputStream(file);
- }
- }
- catch (Exception e) {
- Log.error("Failed to load user bindings", e);
- }
- }
-
- if (input == null) {
- Log.debug("Using default bindings");
- input = getTerminal().getDefaultBindings();
- }
-
- short[] keyBindings = new short[Character.MAX_VALUE * 2];
-
- Arrays.fill(keyBindings, Operation.UNKNOWN.code);
-
- // Loads the key bindings. Bindings file is in the format:
- //
- // keycode: operation name
-
- if (input != null) {
- input = new BufferedInputStream(input);
- Properties p = new Properties();
- p.load(input);
- input.close();
-
- for (Object key : p.keySet()) {
- String val = (String) key;
-
- try {
- short code = Short.parseShort(val);
- String name = p.getProperty(val);
- Operation op = Operation.valueOf(name);
- keyBindings[code] = op.code;
- }
- catch (NumberFormatException e) {
- Log.error("Failed to convert binding code: ", val, e);
- }
- }
-
- // hardwired arrow key bindings
- // keybindings[VK_UP] = PREV_HISTORY;
- // keybindings[VK_DOWN] = NEXT_HISTORY;
- // keybindings[VK_LEFT] = PREV_CHAR;
- // keybindings[VK_RIGHT] = NEXT_CHAR;
- }
-
- return keyBindings;
- }
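The bindings file is an ordinary java.util.Properties file whose keys are numeric key codes and whose values are Operation names (the Operation enum is defined elsewhere in this package). A hypothetical ~/.jlinebindings.properties overriding a few bindings might look like the lines below; the entries are illustrative, not the shipped defaults:

    // Hypothetical contents of ~/.jlinebindings.properties:
    //   # keycode: operation name
    //   2: PREV_CHAR
    //   6: NEXT_CHAR
    //   16: PREV_HISTORY
    //   14: NEXT_HISTORY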
-
- int getKeyForAction(final short logicalAction) {
- for (int i = 0; i < keyBindings.length; i++) {
- if (keyBindings[i] == logicalAction) {
- return i;
- }
- }
-
- return -1;
- }
-
- int getKeyForAction(final Operation op) {
- assert op != null;
- return getKeyForAction(op.code);
- }
-
- public void printBindings() {
- System.out.println("printBindings(): keyBindings.length = " + keyBindings.length);
- for (int i = 0; i < keyBindings.length; i++) {
- if (keyBindings[i] != Operation.UNKNOWN.code) {
- System.out.println("keyBindings[" + i + "] = " + keyBindings[i]);
- }
- }
- }
-
- /**
- * Reads the console input and returns an array of the form [raw, key binding].
- */
- private int[] readBinding() throws IOException {
- int c = readVirtualKey();
-
- if (c == -1) {
- return null;
- }
-
- // extract the appropriate key binding
- short code = keyBindings[c];
-
- Log.trace("Translated: ", c, " -> ", code);
-
- return new int[]{c, code};
- }
-
- //
- // Line Reading
- //
-
- /**
- * Read the next line and return the contents of the buffer.
- */
- public String readLine() throws IOException {
- return readLine((String) null);
- }
-
- /**
- * Read the next line with the specified character mask. If null, then
- * characters will be echoed. If 0, then no characters will be echoed.
- */
- public String readLine(final Character mask) throws IOException {
- return readLine(null, mask);
- }
-
- public String readLine(final String prompt) throws IOException {
- return readLine(prompt, null);
- }
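Together these overloads cover plain, prompted, and masked input. A short usage sketch (the prompts are just examples, and the IOException is propagated for brevity):

    // Usage sketch for the readLine overloads above.
    static void demo() throws IOException {
        ConsoleReader reader = new ConsoleReader();
        String line     = reader.readLine("scala> ");          // echoed normally
        String password = reader.readLine("password> ", '*');  // each keystroke echoed as '*'
        String hidden   = reader.readLine((Character) '\0');   // nothing echoed at all
    }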
-
- /**
- * Read a line from the <i>in</i> {@link InputStream}, and return the line
- * (without any trailing newlines).
- *
- * @param prompt The prompt to issue to the console, may be null.
- * @return A line that is read from the terminal, or null if there was null input (e.g., <i>CTRL-D</i>
- * was pressed).
- */
- public String readLine(String prompt, final Character mask) throws IOException {
- // prompt may be null
- // mask may be null
-
- // FIXME: This blows, each call to readLine will reset the console's state which doesn't seem very nice.
- this.mask = mask;
- if (prompt != null) {
- setPrompt(prompt);
- }
- else {
- prompt = getPrompt();
- }
-
- try {
- if (!getTerminal().isSupported()) {
- beforeReadLine(prompt, mask);
- }
-
- if (prompt != null && prompt.length() > 0) {
- out.write(prompt);
- out.flush();
- }
-
- // if the terminal is unsupported, just use plain-java reading
- if (!getTerminal().isSupported()) {
- return readLine(in);
- }
-
- String originalPrompt = this.prompt;
-
- final int NORMAL = 1;
- final int SEARCH = 2;
- int state = NORMAL;
-
- boolean success = true;
-
- while (true) {
- int[] next = readBinding();
-
- if (next == null) {
- return null;
- }
-
- int c = next[0];
- // int code = next[1];
- Operation code = Operation.valueOf(next[1]);
-
- if (c == -1) {
- return null;
- }
-
- // Search mode.
- //
- // Note that we have to do this first, because if there is a command
- // not linked to a search command, we leave the search mode and fall
- // through to the normal state.
- if (state == SEARCH) {
- int cursorDest = -1;
-
- switch (code) {
- // This doesn't work right now, it seems CTRL-G is not passed
- // down correctly. :(
- case ABORT:
- state = NORMAL;
- break;
-
- case SEARCH_PREV:
- if (searchTerm.length() == 0) {
- searchTerm.append(previousSearchTerm);
- }
-
- if (searchIndex == -1) {
- searchIndex = searchBackwards(searchTerm.toString());
- } else {
- searchIndex = searchBackwards(searchTerm.toString(), searchIndex);
- }
- break;
-
- case DELETE_PREV_CHAR:
- if (searchTerm.length() > 0) {
- searchTerm.deleteCharAt(searchTerm.length() - 1);
- searchIndex = searchBackwards(searchTerm.toString());
- }
- break;
-
- case UNKNOWN:
- searchTerm.appendCodePoint(c);
- searchIndex = searchBackwards(searchTerm.toString());
- break;
-
- default:
- // Set buffer and cursor position to the found string.
- if (searchIndex != -1) {
- history.moveTo(searchIndex);
- // set cursor position to the found string
- cursorDest = history.current().toString().indexOf(searchTerm.toString());
- }
- state = NORMAL;
- break;
- }
-
- // if we're still in search mode, print the search status
- if (state == SEARCH) {
- if (searchTerm.length() == 0) {
- printSearchStatus("", "");
- searchIndex = -1;
- } else {
- if (searchIndex == -1) {
- beep();
- } else {
- printSearchStatus(searchTerm.toString(), history.get(searchIndex).toString());
- }
- }
- }
- // otherwise, restore the line
- else {
- restoreLine(originalPrompt, cursorDest);
- }
- }
-
- if (state == NORMAL) {
- switch (code) {
- case EXIT: // ctrl-d
- if (buf.buffer.length() == 0) {
- return null;
- } else {
- success = deleteCurrentCharacter();
- }
- break;
-
- case COMPLETE: // tab
- success = complete();
- break;
-
- case MOVE_TO_BEG:
- success = setCursorPosition(0);
- break;
-
- case KILL_LINE: // CTRL-K
- success = killLine();
- break;
-
- case CLEAR_SCREEN: // CTRL-L
- success = clearScreen();
- break;
-
- case KILL_LINE_PREV: // CTRL-U
- success = resetLine();
- break;
-
- case NEWLINE: // enter
- moveToEnd();
- println(); // output newline
- flush();
- return finishBuffer();
-
- case DELETE_PREV_CHAR: // backspace
- success = backspace();
- break;
-
- case DELETE_NEXT_CHAR: // delete
- success = deleteCurrentCharacter();
- break;
-
- case MOVE_TO_END:
- success = moveToEnd();
- break;
-
- case PREV_CHAR:
- success = moveCursor(-1) != 0;
- break;
-
- case NEXT_CHAR:
- success = moveCursor(1) != 0;
- break;
-
- case NEXT_HISTORY:
- success = moveHistory(true);
- break;
-
- case PREV_HISTORY:
- success = moveHistory(false);
- break;
-
- case ABORT:
- case REDISPLAY:
- break;
-
- case PASTE:
- success = paste();
- break;
-
- case DELETE_PREV_WORD:
- success = deletePreviousWord();
- break;
-
- case DELETE_NEXT_WORD:
- success = deleteNextWord();
- break;
-
- case PREV_WORD:
- success = previousWord();
- break;
-
- case NEXT_WORD:
- success = nextWord();
- break;
-
- case START_OF_HISTORY:
- success = history.moveToFirst();
- if (success) {
- setBuffer(history.current());
- }
- break;
-
- case END_OF_HISTORY:
- success = history.moveToLast();
- if (success) {
- setBuffer(history.current());
- }
- break;
-
- case CLEAR_LINE:
- moveInternal(-(buf.cursor));
- killLine();
- break;
-
- case INSERT:
- buf.setOverTyping(!buf.isOverTyping());
- break;
-
- case SEARCH_PREV: // CTRL-R
- if (searchTerm != null) {
- previousSearchTerm = searchTerm.toString();
- }
- searchTerm = new StringBuffer(buf.buffer);
- state = SEARCH;
- if (searchTerm.length() > 0) {
- searchIndex = searchBackwards(searchTerm.toString());
- if (searchIndex == -1) {
- beep();
- }
- printSearchStatus(searchTerm.toString(),
- searchIndex > -1 ? history.get(searchIndex).toString() : "");
- } else {
- searchIndex = -1;
- printSearchStatus("", "");
- }
- break;
-
- case UNKNOWN:
- default:
- if (c != 0) { // ignore null chars
- ActionListener action = triggeredActions.get((char) c);
- if (action != null) {
- action.actionPerformed(null);
- }
- else {
- putChar(c, true);
- }
- }
- else {
- success = false;
- }
- }
-
- if (!success) {
- beep();
- }
-
- flush();
- }
- }
- }
- finally {
- if (!getTerminal().isSupported()) {
- afterReadLine();
- }
- }
- }
-
- /**
- * Read a line for unsupported terminals.
- */
- private String readLine(final InputStream in) throws IOException {
- StringBuilder buff = new StringBuilder();
-
- while (true) {
- int i = in.read();
-
- if (i == -1 || i == '\n' || i == '\r') {
- return buff.toString();
- }
-
- buff.append((char) i);
- }
-
- // return new BufferedReader (new InputStreamReader (in)).readLine ();
- }
-
- //
- // Completion
- //
-
- private final List<Completer> completers = new LinkedList<Completer>();
-
- private CompletionHandler completionHandler = new CandidateListCompletionHandler();
-
- /**
- * Add the specified {@link jline.console.completer.Completer} to the list of handlers for tab-completion.
- *
- * @param completer the {@link jline.console.completer.Completer} to add
- * @return true if it was successfully added
- */
- public boolean addCompleter(final Completer completer) {
- return completers.add(completer);
- }
-
- /**
- * Remove the specified {@link jline.console.completer.Completer} from the list of handlers for tab-completion.
- *
- * @param completer The {@link Completer} to remove
- * @return True if it was successfully removed
- */
- public boolean removeCompleter(final Completer completer) {
- return completers.remove(completer);
- }
-
- /**
- * Returns an unmodifiable list of all the completers.
- */
- public Collection<Completer> getCompleters() {
- return Collections.unmodifiableList(completers);
- }
-
- public void setCompletionHandler(final CompletionHandler handler) {
- assert handler != null;
- this.completionHandler = handler;
- }
-
- public CompletionHandler getCompletionHandler() {
- return this.completionHandler;
- }
-
- /**
- * Use the completers to modify the buffer with the appropriate completions.
- *
- * @return true if successful
- */
- protected boolean complete() throws IOException {
- // debug ("tab for (" + buf + ")");
- if (completers.size() == 0) {
- return false;
- }
-
- List<CharSequence> candidates = new LinkedList<CharSequence>();
- String bufstr = buf.buffer.toString();
- int cursor = buf.cursor;
-
- int position = -1;
-
- for (Completer comp : completers) {
- if ((position = comp.complete(bufstr, cursor, candidates)) != -1) {
- break;
- }
- }
-
- return candidates.size() != 0 && getCompletionHandler().complete(this, candidates, position);
- }
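Completion is delegated entirely to the registered Completer instances: the first one returning a non-negative position wins, and the CompletionHandler then applies the candidates. A minimal sketch of a completer offering a fixed command set, with reader being an existing ConsoleReader (the Completer signature is inferred from its use in complete() above):

    // Sketch: a trivial completer for a fixed set of commands.
    reader.addCompleter(new Completer() {
        public int complete(String buffer, int cursor, List<CharSequence> candidates) {
            String prefix = buffer == null ? "" : buffer.substring(0, cursor);
            for (String cmd : new String[] { "help", "history", "quit" }) {
                if (cmd.startsWith(prefix)) candidates.add(cmd);
            }
            return candidates.isEmpty() ? -1 : 0; // candidates replace the buffer from offset 0
        }
    });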
-
- /**
- * The number of tab-completion candidates above which a warning will be
- * prompted before showing all the candidates.
- */
- private int autoprintThreshold = Integer.getInteger(JLINE_COMPLETION_THRESHOLD, 100); // same default as bash
-
- /**
- * @param threshold the number of candidates to print without issuing a warning.
- */
- public void setAutoprintThreshold(final int threshold) {
- this.autoprintThreshold = threshold;
- }
-
- /**
- * @return the number of candidates to print without issuing a warning.
- */
- public int getAutoprintThreshold() {
- return autoprintThreshold;
- }
-
- private boolean paginationEnabled;
-
- /**
- * Whether to use pagination when the number of rows of candidates exceeds the height of the terminal.
- */
- public void setPaginationEnabled(final boolean enabled) {
- this.paginationEnabled = enabled;
- }
-
- /**
- * Whether to use pagination when the number of rows of candidates exceeds the height of the terminal.
- */
- public boolean isPaginationEnabled() {
- return paginationEnabled;
- }
-
- //
- // History
- //
-
- private History history = new MemoryHistory();
-
- public void setHistory(final History history) {
- this.history = history;
- }
-
- public History getHistory() {
- return history;
- }
-
- private boolean historyEnabled = true;
-
- /**
- * Whether or not to add new commands to the history buffer.
- */
- public void setHistoryEnabled(final boolean enabled) {
- this.historyEnabled = enabled;
- }
-
- /**
- * Whether or not to add new commands to the history buffer.
- */
- public boolean isHistoryEnabled() {
- return historyEnabled;
- }
-
- /**
- * Move up or down the history tree.
- */
- private boolean moveHistory(final boolean next) throws IOException {
- if (next && !history.next()) {
- return false;
- }
- else if (!next && !history.previous()) {
- return false;
- }
-
- setBuffer(history.current());
-
- return true;
- }
-
- //
- // Printing
- //
-
- public static final String CR = System.getProperty("line.separator");
-
- /**
- * Output the specified character to the output stream without manipulating the current buffer.
- */
- private void print(final int c) throws IOException {
- if (c == '\t') {
- char chars[] = new char[TAB_WIDTH];
- Arrays.fill(chars, ' ');
- out.write(chars);
- return;
- }
-
- out.write(c);
- }
-
- /**
- * Output the specified characters to the output stream without manipulating the current buffer.
- */
- private void print(final char... buff) throws IOException {
- int len = 0;
- for (char c : buff) {
- if (c == '\t') {
- len += TAB_WIDTH;
- }
- else {
- len++;
- }
- }
-
- char chars[];
- if (len == buff.length) {
- chars = buff;
- }
- else {
- chars = new char[len];
- int pos = 0;
- for (char c : buff) {
- if (c == '\t') {
- Arrays.fill(chars, pos, pos + TAB_WIDTH, ' ');
- pos += TAB_WIDTH;
- }
- else {
- chars[pos] = c;
- pos++;
- }
- }
- }
-
- out.write(chars);
- }
-
- private void print(final char c, final int num) throws IOException {
- if (num == 1) {
- print(c);
- }
- else {
- char[] chars = new char[num];
- Arrays.fill(chars, c);
- print(chars);
- }
- }
-
- /**
- * Output the specified string to the output stream (but not the buffer).
- */
- public final void print(final CharSequence s) throws IOException {
- assert s != null;
- print(s.toString().toCharArray());
- }
-
- public final void println(final CharSequence s) throws IOException {
- assert s != null;
- print(s.toString().toCharArray());
- println();
- }
-
- /**
- * Output a platform-dependent newline.
- */
- public final void println() throws IOException {
- print(CR);
-// flush();
- }
-
- //
- // Actions
- //
-
- /**
- * Issue a delete.
- *
- * @return true if successful
- */
- public final boolean delete() throws IOException {
- return delete(1) == 1;
- }
-
- // FIXME: delete(int) only used by above + the return is always 1 and num is ignored
-
- /**
- * Issue <em>num</em> deletes.
- *
- * @return the number of characters backed up
- */
- private int delete(final int num) throws IOException {
- // TODO: Try to use jansi for this
-
- /* Commented out because of DWA-2949:
- if (buf.cursor == 0) {
- return 0;
- }
- */
-
- buf.buffer.delete(buf.cursor, buf.cursor + 1);
- drawBuffer(1);
-
- return 1;
- }
-
- /**
- * Kill the buffer ahead of the current cursor position.
- *
- * @return true if successful
- */
- public boolean killLine() throws IOException {
- int cp = buf.cursor;
- int len = buf.buffer.length();
-
- if (cp >= len) {
- return false;
- }
-
- int num = buf.buffer.length() - cp;
- clearAhead(num, 0);
-
- for (int i = 0; i < num; i++) {
- buf.buffer.deleteCharAt(len - i - 1);
- }
-
- return true;
- }
-
- /**
- * Clear the screen by issuing the ANSI "clear screen" code.
- */
- public boolean clearScreen() throws IOException {
- if (!getTerminal().isAnsiSupported()) {
- return false;
- }
-
- // send the ANSI code to clear the screen
- printAnsiSequence("2J");
-
- // then send the ANSI code to go to position 1,1
- printAnsiSequence("1;1H");
-
- redrawLine();
-
- return true;
- }
-
- /**
- * Issue an audible keyboard bell, if {@link #isBellEnabled} return true.
- */
- public void beep() throws IOException {
- if (isBellEnabled()) {
- print(KEYBOARD_BELL);
- // need to flush so the console actually beeps
- flush();
- }
- }
-
- /**
- * Paste the contents of the clipboard into the console buffer
- *
- * @return true if clipboard contents pasted
- */
- public boolean paste() throws IOException {
- Clipboard clipboard;
- try { // May throw ugly exception on system without X
- clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
- }
- catch (Exception e) {
- return false;
- }
-
- if (clipboard == null) {
- return false;
- }
-
- Transferable transferable = clipboard.getContents(null);
-
- if (transferable == null) {
- return false;
- }
-
- try {
- Object content = transferable.getTransferData(DataFlavor.plainTextFlavor);
-
- // This fix was suggested in bug #1060649 at
- // http://sourceforge.net/tracker/index.php?func=detail&aid=1060649&group_id=64033&atid=506056
- // to get around the deprecated DataFlavor.plainTextFlavor, but it
- // raises an UnsupportedFlavorException on Mac OS X
-
- if (content == null) {
- try {
- content = new DataFlavor().getReaderForText(transferable);
- }
- catch (Exception e) {
- // ignore
- }
- }
-
- if (content == null) {
- return false;
- }
-
- String value;
-
- if (content instanceof Reader) {
- // TODO: we might want instead connect to the input stream
- // so we can interpret individual lines
- value = "";
- String line;
-
- BufferedReader read = new BufferedReader((Reader) content);
- while ((line = read.readLine()) != null) {
- if (value.length() > 0) {
- value += "\n";
- }
-
- value += line;
- }
- }
- else {
- value = content.toString();
- }
-
- if (value == null) {
- return true;
- }
-
- putString(value);
-
- return true;
- }
- catch (UnsupportedFlavorException e) {
- Log.error("Paste failed: ", e);
-
- return false;
- }
- }
-
- //
- // Triggered Actions
- //
-
- private final Map<Character, ActionListener> triggeredActions = new HashMap<Character, ActionListener>();
-
- /**
- * Adding a triggered action allows giving another course of action if a character passes the pre-processing.
- * <p/>
- * Say you want to close the application when the user enters 'q':
- * addTriggeredAction('q', listener), with a listener that calls System.exit(0), would do the trick.
- */
- public void addTriggeredAction(final char c, final ActionListener listener) {
- triggeredActions.put(c, listener);
- }
-
- //
- // Formatted Output
- //
-
- /**
- * Output the specified {@link Collection} in proper columns.
- */
- public void printColumns(final Collection<? extends CharSequence> items) throws IOException {
- if (items == null || items.isEmpty()) {
- return;
- }
-
- int width = getTerminal().getWidth();
- int height = getTerminal().getHeight();
-
- int maxWidth = 0;
- for (CharSequence item : items) {
- maxWidth = Math.max(maxWidth, item.length());
- }
- Log.debug("Max width: ", maxWidth);
-
- int showLines;
- if (isPaginationEnabled()) {
- showLines = height - 1; // page limit
- }
- else {
- showLines = Integer.MAX_VALUE;
- }
-
- StringBuilder buff = new StringBuilder();
- for (CharSequence item : items) {
- if ((buff.length() + maxWidth) > width) {
- println(buff);
- buff.setLength(0);
-
- if (--showLines == 0) {
- // Overflow
- print(resources.getString("display-more"));
- flush();
- int c = readVirtualKey();
- if (c == '\r' || c == '\n') {
- // one step forward
- showLines = 1;
- }
- else if (c != 'q') {
- // page forward
- showLines = height - 1;
- }
-
- back(resources.getString("display-more").length());
- if (c == 'q') {
- // cancel
- break;
- }
- }
- }
-
- // NOTE: toString() is important here; appending the AnsiString object directly does not behave correctly
- buff.append(item.toString());
- for (int i = 0; i < (maxWidth + 3 - item.length()); i++) {
- buff.append(' ');
- }
- }
-
- if (buff.length() > 0) {
- println(buff);
- }
- }
-
- //
- // Non-supported Terminal Support
- //
-
- private Thread maskThread;
-
- private void beforeReadLine(final String prompt, final Character mask) {
- if (mask != null && maskThread == null) {
- final String fullPrompt = "\r" + prompt
- + " "
- + " "
- + " "
- + "\r" + prompt;
-
- maskThread = new Thread()
- {
- public void run() {
- while (!interrupted()) {
- try {
- Writer out = getOutput();
- out.write(fullPrompt);
- out.flush();
- sleep(3);
- }
- catch (IOException e) {
- return;
- }
- catch (InterruptedException e) {
- return;
- }
- }
- }
- };
-
- maskThread.setPriority(Thread.MAX_PRIORITY);
- maskThread.setDaemon(true);
- maskThread.start();
- }
- }
-
- private void afterReadLine() {
- if (maskThread != null && maskThread.isAlive()) {
- maskThread.interrupt();
- }
-
- maskThread = null;
- }
-
- /**
- * Erases the current line with the existing prompt, then redraws the line
- * with the provided prompt and buffer
- * @param prompt
- * the new prompt
- * @param buffer
- * the buffer to be drawn
- * @param cursorDest
- * where you want the cursor set when the line has been drawn.
- * -1 for end of line.
- */
- public void resetPromptLine(String prompt, String buffer, int cursorDest) throws IOException {
- // move cursor to end of line
- moveToEnd();
-
- // backspace all text, including prompt
- buf.buffer.append(this.prompt);
- buf.cursor += this.prompt.length();
- this.prompt = "";
- backspaceAll();
-
- this.prompt = prompt;
- redrawLine();
- setBuffer(buffer);
-
- // move cursor to destination (-1 will move to end of line)
- if (cursorDest < 0) cursorDest = buffer.length();
- setCursorPosition(cursorDest);
-
- flush();
- }
-
- public void printSearchStatus(String searchTerm, String match) throws IOException {
- String prompt = "(reverse-i-search)`" + searchTerm + "': ";
- String buffer = match;
- int cursorDest = match.indexOf(searchTerm);
- resetPromptLine(prompt, buffer, cursorDest);
- }
-
- public void restoreLine(String originalPrompt, int cursorDest) throws IOException {
- // TODO move cursor to matched string
- String prompt = lastLine(originalPrompt);
- String buffer = buf.buffer.toString();
- resetPromptLine(prompt, buffer, cursorDest);
- }
-
- //
- // History search
- //
- /**
- * Search backward in history from a given position.
- *
- * @param searchTerm substring to search for.
- * @param startIndex the index from which to start searching
- * @return index where this substring has been found, or -1 otherwise.
- */
- public int searchBackwards(String searchTerm, int startIndex) {
- return searchBackwards(searchTerm, startIndex, false);
- }
-
- /**
- * Search backwards in history from the current position.
- *
- * @param searchTerm substring to search for.
- * @return index where the substring has been found, or -1 otherwise.
- */
- public int searchBackwards(String searchTerm) {
- return searchBackwards(searchTerm, history.index());
- }
-
-
- public int searchBackwards(String searchTerm, int startIndex, boolean startsWith) {
- ListIterator<History.Entry> it = history.entries(startIndex);
- while (it.hasPrevious()) {
- History.Entry e = it.previous();
- if (startsWith) {
- if (e.value().toString().startsWith(searchTerm)) {
- return e.index();
- }
- } else {
- if (e.value().toString().contains(searchTerm)) {
- return e.index();
- }
- }
- }
- return -1;
- }
-
- //
- // Helpers
- //
-
- /**
- * Checks to see if the specified character is a delimiter. We consider a
- * character a delimiter if it is anything but a letter or digit.
- *
- * @param c The character to test
- * @return True if it is a delimiter
- */
- private boolean isDelimiter(final char c) {
- return !Character.isLetterOrDigit(c);
- }
-
- private void printAnsiSequence(String sequence) throws IOException {
- print(27);
- print('[');
- print(sequence);
- flush(); // helps with step debugging
- }
-
- // return column position, reported by the terminal
- private int getCurrentPosition() {
- // check for ByteArrayInputStream to disable for unit tests
- if (getTerminal().isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
- try {
- printAnsiSequence("6n");
- flush();
- StringBuffer b = new StringBuffer(8);
- // position is sent as <ESC>[{ROW};{COLUMN}R
- int r;
- while((r = in.read()) > -1 && r != 'R') {
- if (r != 27 && r != '[') {
- b.append((char) r);
- }
- }
- String[] pos = b.toString().split(";");
- return Integer.parseInt(pos[1]);
- } catch (Exception x) {
- // no luck
- }
- }
-
- return -1; // TODO: throw exception instead?
- }
-
- // return row position, reported by the terminal
- // needed to know whether to scroll up on cursor move in last col for weird
- // wrapping terminals - not tested for anything else
- private int getCurrentAnsiRow() {
- // check for ByteArrayInputStream to disable for unit tests
- if (getTerminal().isAnsiSupported() && !(in instanceof ByteArrayInputStream)) {
- try {
- printAnsiSequence("6n");
- flush();
- StringBuffer b = new StringBuffer(8);
- // position is sent as <ESC>[{ROW};{COLUMN}R
- int r;
- while((r = in.read()) > -1 && r != 'R') {
- if (r != 27 && r != '[') {
- b.append((char) r);
- }
- }
- String[] pos = b.toString().split(";");
- return Integer.parseInt(pos[0]);
- } catch (Exception x) {
- // no luck
- }
- }
-
- return -1; // TODO: throw exception instead?
- }
-}
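For reference, the triggered-action hook documented in the deleted ConsoleReader above can be wired up roughly as in the following sketch. This is illustrative only: it assumes a ConsoleReader instance obtained elsewhere (its constructors are not part of this hunk) and uses java.awt.event.ActionListener, as the deleted signature does.

    import java.awt.event.ActionEvent;
    import java.awt.event.ActionListener;
    import scala.tools.jline.console.ConsoleReader;

    class TriggeredActionSketch {
        // Exit the application when the user types 'q', as the Javadoc example intends.
        static void install(ConsoleReader reader) {
            reader.addTriggeredAction('q', new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    System.exit(0);
                }
            });
        }
    }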
diff --git a/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java b/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java
deleted file mode 100644
index 7993def002..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/CursorBuffer.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console;
-
-/**
- * A holder for a {@link StringBuilder} that also contains the current cursor position.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public class CursorBuffer
-{
- private boolean overTyping = false;
-
- public int cursor = 0;
-
- public final StringBuilder buffer = new StringBuilder();
-
- public boolean isOverTyping() {
- return overTyping;
- }
-
- public void setOverTyping(final boolean b) {
- overTyping = b;
- }
-
- public int length() {
- return buffer.length();
- }
-
- /**
- * Gets the character to the left of the cursor.
- */
- public char charLeftOfCursor() {
- if (cursor <= 0) {
- return 0;
- }
-
- return buffer.charAt(cursor - 1);
- }
-
- /**
- * Gets the character at the cursor.
- */
- public char charAtCursor() {
- if (cursor < 0 || cursor >= buffer.length()) {
- return 0;
- }
- return buffer.charAt(cursor);
- }
-
- /**
- * Write the specified character into the buffer, setting the cursor position
- * ahead one. The text may overwrite or insert based on the current setting
- * of {@link #isOverTyping}.
- *
- * @param c the character to insert
- */
- public void write(final char c) {
- buffer.insert(cursor++, c);
- if (isOverTyping() && cursor < buffer.length()) {
- buffer.deleteCharAt(cursor);
- }
- }
-
- /**
- * Insert the specified chars into the buffer, setting the cursor to the end of the insertion point.
- */
- public void write(final CharSequence str) {
- assert str != null;
-
- if (buffer.length() == 0) {
- buffer.append(str);
- }
- else {
- buffer.insert(cursor, str);
- }
-
- cursor += str.length();
-
- if (isOverTyping() && cursor < buffer.length()) {
- buffer.delete(cursor, (cursor + str.length()));
- }
- }
-
- public boolean clear() {
- if (buffer.length() == 0) {
- return false;
- }
-
- buffer.delete(0, buffer.length());
- cursor = 0;
- return true;
- }
-
- @Override
- public String toString() {
- return buffer.toString();
- }
-}
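A minimal sketch of the insert/overtype behaviour described in the CursorBuffer Javadoc above (illustrative only; it relies solely on the public fields and methods shown in the deleted file):

    import scala.tools.jline.console.CursorBuffer;

    class CursorBufferSketch {
        public static void main(String[] args) {
            CursorBuffer buf = new CursorBuffer();
            buf.write("abc");          // buffer = "abc", cursor = 3
            buf.cursor = 1;            // position the cursor between 'a' and 'b'
            buf.write('X');            // insert mode: buffer becomes "aXbc"
            buf.setOverTyping(true);
            buf.write('Y');            // overtype mode: 'b' is replaced, buffer becomes "aXYc"
            System.out.println(buf);   // prints aXYc
        }
    }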
diff --git a/src/jline/src/main/java/scala/tools/jline/console/Key.java b/src/jline/src/main/java/scala/tools/jline/console/Key.java
deleted file mode 100644
index 2e713a7da2..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/Key.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Map from key name to key codes.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @see java.awt.event.KeyEvent
- * @since 2.0
- */
-public enum Key
-{
- CTRL_A(1),
-
- CTRL_B(2),
-
- CTRL_C(3),
-
- CTRL_D(4),
-
- CTRL_E(5),
-
- CTRL_F(6),
-
- CTRL_G(7),
-
- CTRL_K(11),
-
- CTRL_L(12),
-
- CTRL_N(14),
-
- CTRL_O(15),
-
- CTRL_P(16),
-
- CTRL_T(20),
-
- CTRL_W(23),
-
- CTRL_X(24),
-
- CTRL_OB(27),
-
- CTRL_QM(127),
-
- BACKSPACE('\b'),
-
- DELETE(127),;
-
- public final short code;
-
- Key(final int code) {
- this.code = (short) code;
- }
-
- private static final Map<Short, Key> codes;
-
- static {
- Map<Short, Key> map = new HashMap<Short, Key>();
-
- for (Key op : Key.values()) {
- map.put(op.code, op);
- }
-
- codes = map;
- }
-
- public static Key valueOf(final int code) {
- return codes.get((short) code);
- }
-} \ No newline at end of file
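As a quick illustration of the reverse lookup provided by the static codes map above (sketch only, based on the constants in the deleted enum):

    import scala.tools.jline.console.Key;

    class KeyLookupSketch {
        public static void main(String[] args) {
            System.out.println(Key.valueOf(4));     // CTRL_D
            System.out.println(Key.valueOf('\b'));  // BACKSPACE (code 8)
        }
    }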
diff --git a/src/jline/src/main/java/scala/tools/jline/console/Operation.java b/src/jline/src/main/java/scala/tools/jline/console/Operation.java
deleted file mode 100644
index 59ee878d45..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/Operation.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Map for console operation to virtual key bindings.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @see java.awt.event.KeyEvent
- * @since 2.0
- */
-public enum Operation
-{
- /**
- * Unknown operation.
- */
- UNKNOWN(-99),
-
- /**
- * Operation that moves to the beginning of the buffer.
- */
- MOVE_TO_BEG(-1),
-
- /**
- * Operation that moves to the end of the buffer.
- */
- MOVE_TO_END(-3),
-
- /**
- * Operation that moves to the previous character in the buffer.
- */
- PREV_CHAR(-4),
-
- /**
- * Operation that issues a newline.
- */
- NEWLINE(-6),
-
- /**
- * Operation that deletes the buffer from the current character to the end.
- */
- KILL_LINE(-7),
-
- /**
- * Operation that clears the screen.
- */
- CLEAR_SCREEN(-8),
-
- /**
- * Operation that sets the buffer to the next history item.
- */
- NEXT_HISTORY(-9),
-
- /**
- * Operation that sets the buffer to the previous history item.
- */
- PREV_HISTORY(-11),
-
- /**
- * Operation that redisplays the current buffer.
- */
- REDISPLAY(-13),
-
- /**
- * Operation that deletes the buffer from the cursor to the beginning.
- */
- KILL_LINE_PREV(-15),
-
- /**
- * Operation that deletes the previous word in the buffer.
- */
- DELETE_PREV_WORD(-16),
-
- /**
- * Operation that moves to the next character in the buffer.
- */
- NEXT_CHAR(-19),
-
- /**
- * Operation that moves to the previous character in the buffer.
- */
- REPEAT_PREV_CHAR(-20),
-
- /**
- * Operation that searches backwards in the command history.
- */
- SEARCH_PREV(-21),
-
- /**
- * Operation that repeats the character.
- */
- REPEAT_NEXT_CHAR(-24),
-
- /**
- * Operation that searches forward in the command history.
- */
- SEARCH_NEXT(-25),
-
- /**
- * Operation that moves to the previous whitespace.
- */
- PREV_SPACE_WORD(-27),
-
- /**
- * Operation that moves to the end of the current word.
- */
- TO_END_WORD(-29),
-
- /**
- * Operation that
- */
- REPEAT_SEARCH_PREV(-34),
-
- /**
- * Operation that
- */
- PASTE_PREV(-36),
-
- /**
- * Operation that enters replace (overtype) mode.
- */
- REPLACE_MODE(-37),
-
- /**
- * Operation that substitutes the entire line.
- */
- SUBSTITUTE_LINE(-38),
-
- /**
- * Operation that
- */
- TO_PREV_CHAR(-39),
-
- /**
- * Operation that moves to the next whitespace-delimited word.
- */
- NEXT_SPACE_WORD(-40),
-
- /**
- * Operation that deletes the previous character.
- */
- DELETE_PREV_CHAR(-41),
-
- /**
- * Operation that
- */
- ADD(-42),
-
- /**
- * Operation that moves to the previous word.
- */
- PREV_WORD(-43),
-
- /**
- * Operation that
- */
- CHANGE_META(-44),
-
- /**
- * Operation that
- */
- DELETE_META(-45),
-
- /**
- * Operation that
- */
- END_WORD(-46),
-
- /**
- * Operation that toggles insert/overtype
- */
- INSERT(-48),
-
- /**
- * Operation that
- */
- REPEAT_SEARCH_NEXT(-49),
-
- /**
- * Operation that
- */
- PASTE_NEXT(-50),
-
- /**
- * Operation that replaces the character under the cursor.
- */
- REPLACE_CHAR(-51),
-
- /**
- * Operation that substitutes the character under the cursor.
- */
- SUBSTITUTE_CHAR(-52),
-
- /**
- * Operation that
- */
- TO_NEXT_CHAR(-53),
-
- /**
- * Operation that undoes the previous operation.
- */
- UNDO(-54),
-
- /**
- * Operation that moves to the next word.
- */
- NEXT_WORD(-55),
-
- /**
- * Operation that deletes the next character.
- */
- DELETE_NEXT_CHAR(-56),
-
- /**
- * Operation that toggles between uppercase and lowercase.
- */
- CHANGE_CASE(-57),
-
- /**
- * Operation that performs completion operation on the current word.
- */
- COMPLETE(-58),
-
- /**
- * Operation that exits the command prompt.
- */
- EXIT(-59),
-
- /**
- * Operation that pastes the contents of the clipboard into the line
- */
- PASTE(-60),
-
- /**
- * Operation that moves the current History to the beginning.
- */
- START_OF_HISTORY(-61),
-
- /**
- * Operation that moves the current History to the end.
- */
- END_OF_HISTORY(-62),
-
- /**
- * Operation that clears whatever text is on the current line.
- */
- CLEAR_LINE(-63),
-
- /**
- * Cancel search
- */
- ABORT(-64),
-
- /**
- * Delete next word
- */
- DELETE_NEXT_WORD(-65),
-
- ;
-
- public final short code;
-
- Operation(final int code) {
- this.code = (short) code;
- }
-
- private static final Map<Short, Operation> codes;
-
- static {
- Map<Short, Operation> map = new HashMap<Short, Operation>();
-
- for (Operation op : Operation.values()) {
- map.put(op.code, op);
- }
-
- codes = map;
- }
-
- public static Operation valueOf(final int code) {
- return codes.get((short) code);
- }
-} \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java
deleted file mode 100644
index 3170bd1c68..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/AggregateCompleter.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package scala.tools.jline.console.completer;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Completer which contains multiple completers and aggregates them together.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class AggregateCompleter
- implements Completer
-{
- private final List<Completer> completers = new ArrayList<Completer>();
-
- public AggregateCompleter() {
- // empty
- }
-
- public AggregateCompleter(final Collection<Completer> completers) {
- assert completers != null;
- this.completers.addAll(completers);
- }
-
- public AggregateCompleter(final Completer... completers) {
- this(Arrays.asList(completers));
- }
-
- public Collection<Completer> getCompleters() {
- return completers;
- }
-
- public int complete(final String buffer, final int cursor, final List<CharSequence> candidates) {
- // buffer could be null
- assert candidates != null;
-
- List<Completion> completions = new ArrayList<Completion>(completers.size());
-
- // Run each completer, saving its completion results
- int max = -1;
- for (Completer completer : completers) {
- Completion completion = new Completion(candidates);
- completion.complete(completer, buffer, cursor);
-
- // Compute the max cursor position
- max = Math.max(max, completion.cursor);
-
- completions.add(completion);
- }
-
- // Append candidates from completions which have the same cursor position as max
- for (Completion completion : completions) {
- if (completion.cursor == max) {
- candidates.addAll(completion.candidates);
- }
- }
-
- return max;
- }
-
- @Override
- public String toString() {
- return getClass().getSimpleName() + "{" +
- "completers=" + completers +
- '}';
- }
-
- private class Completion
- {
- public final List<CharSequence> candidates;
-
- public int cursor;
-
- public Completion(final List<CharSequence> candidates) {
- assert candidates != null;
- this.candidates = new LinkedList<CharSequence>(candidates);
- }
-
- public void complete(final Completer completer, final String buffer, final int cursor) {
- assert completer != null;
-
- this.cursor = completer.complete(buffer, cursor, candidates);
- }
- }
-}
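A usage sketch for the aggregation behaviour described above: each child completer runs, and only the candidates that share the highest returned cursor position are kept. Illustrative only; it combines two StringsCompleter instances from the same deleted package.

    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.AggregateCompleter;
    import scala.tools.jline.console.completer.StringsCompleter;

    class AggregateCompleterSketch {
        public static void main(String[] args) {
            AggregateCompleter completer = new AggregateCompleter(
                new StringsCompleter("start", "stop"),
                new StringsCompleter("status"));

            List<CharSequence> candidates = new ArrayList<CharSequence>();
            int pos = completer.complete("st", 2, candidates);
            // Prints the buffer offset and the merged candidate list from both children.
            System.out.println(pos + " -> " + candidates);
        }
    }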
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java
deleted file mode 100644
index 6f60029a1d..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/ArgumentCompleter.java
+++ /dev/null
@@ -1,398 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.internal.Log;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * A {@link Completer} implementation that invokes a child completer using the appropriate <i>separator</i> argument.
- * This can be used instead of the individual completers having to know about argument parsing semantics.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class ArgumentCompleter
- implements Completer
-{
- private final ArgumentDelimiter delimiter;
-
- private final List<Completer> completers = new ArrayList<Completer>();
-
- private boolean strict = true;
-
- /**
- * Create a new completer with the specified argument delimiter.
- *
- * @param delimiter The delimiter for parsing arguments
- * @param completers The embedded completers
- */
- public ArgumentCompleter(final ArgumentDelimiter delimiter, final Collection<Completer> completers) {
- assert delimiter != null;
- this.delimiter = delimiter;
- assert completers != null;
- this.completers.addAll(completers);
- }
-
- /**
- * Create a new completer with the specified argument delimiter.
- *
- * @param delimiter The delimiter for parsing arguments
- * @param completers The embedded completers
- */
- public ArgumentCompleter(final ArgumentDelimiter delimiter, final Completer... completers) {
- this(delimiter, Arrays.asList(completers));
- }
-
- /**
- * Create a new completer with the default {@link WhitespaceArgumentDelimiter}.
- *
- * @param completers The embedded completers
- */
- public ArgumentCompleter(final Completer... completers) {
- this(new WhitespaceArgumentDelimiter(), completers);
- }
-
- /**
- * Create a new completer with the default {@link WhitespaceArgumentDelimiter}.
- *
- * @param completers The embedded completers
- */
- public ArgumentCompleter(final List<Completer> completers) {
- this(new WhitespaceArgumentDelimiter(), completers);
- }
-
- /**
- * If true, a completion at argument index N will only succeed
- * if all the completions from 0-(N-1) also succeed.
- */
- public void setStrict(final boolean strict) {
- this.strict = strict;
- }
-
- /**
- * Returns whether a completion at argument index N will only succeed
- * if all the completions from arguments 0-(N-1) also succeed.
- *
- * @return True if strict.
- * @since 2.3
- */
- public boolean isStrict() {
- return this.strict;
- }
-
- /**
- * @since 2.3
- */
- public ArgumentDelimiter getDelimiter() {
- return delimiter;
- }
-
- /**
- * @since 2.3
- */
- public List<Completer> getCompleters() {
- return completers;
- }
-
- public int complete(final String buffer, final int cursor, final List<CharSequence> candidates) {
- // buffer can be null
- assert candidates != null;
-
- ArgumentDelimiter delim = getDelimiter();
- ArgumentList list = delim.delimit(buffer, cursor);
- int argpos = list.getArgumentPosition();
- int argIndex = list.getCursorArgumentIndex();
-
- if (argIndex < 0) {
- return -1;
- }
-
- List<Completer> completers = getCompleters();
- Completer completer;
-
- // if we are beyond the end of the completers, just use the last one
- if (argIndex >= completers.size()) {
- completer = completers.get(completers.size() - 1);
- }
- else {
- completer = completers.get(argIndex);
- }
-
- // ensure that all the previous completers are successful before allowing this completer to pass (only if strict).
- for (int i = 0; isStrict() && (i < argIndex); i++) {
- Completer sub = completers.get(i >= completers.size() ? (completers.size() - 1) : i);
- String[] args = list.getArguments();
- String arg = (args == null || i >= args.length) ? "" : args[i];
-
- List<CharSequence> subCandidates = new LinkedList<CharSequence>();
-
- if (sub.complete(arg, arg.length(), subCandidates) == -1) {
- return -1;
- }
-
- if (subCandidates.size() == 0) {
- return -1;
- }
- }
-
- int ret = completer.complete(list.getCursorArgument(), argpos, candidates);
-
- if (ret == -1) {
- return -1;
- }
-
- int pos = ret + list.getBufferPosition() - argpos;
-
- // Special case: when completing in the middle of a line, and the area under the cursor is a delimiter,
- // then trim any delimiters from the candidates, since we do not need to have an extra delimiter.
- //
- // E.g., if we have a completion for "foo", and we enter "f bar" into the buffer, and move to after the "f"
- // and hit TAB, we want "foo bar" instead of "foo  bar" (with a doubled delimiter).
-
- if ((cursor != buffer.length()) && delim.isDelimiter(buffer, cursor)) {
- for (int i = 0; i < candidates.size(); i++) {
- CharSequence val = candidates.get(i);
-
- while (val.length() > 0 && delim.isDelimiter(val, val.length() - 1)) {
- val = val.subSequence(0, val.length() - 1);
- }
-
- candidates.set(i, val);
- }
- }
-
- Log.trace("Completing ", buffer, " (pos=", cursor, ") with: ", candidates, ": offset=", pos);
-
- return pos;
- }
-
- /**
- * The {@link ArgumentCompleter.ArgumentDelimiter} allows custom breaking up of a {@link String} into individual
- * arguments in order to dispatch the arguments to the nested {@link Completer}.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
- public static interface ArgumentDelimiter
- {
- /**
- * Break the specified buffer into individual tokens that can be completed on their own.
- *
- * @param buffer The buffer to split
- * @param pos The current position of the cursor in the buffer
- * @return The tokens
- */
- ArgumentList delimit(CharSequence buffer, int pos);
-
- /**
- * Returns true if the character at the specified position is a delimiter.
- *
- * @param buffer The complete command buffer
- * @param pos The index of the character in the buffer
- * @return True if the character should be a delimiter
- */
- boolean isDelimiter(CharSequence buffer, int pos);
- }
-
- /**
- * Abstract implementation of a delimiter that uses the {@link #isDelimiterChar} method to determine if a particular
- * character should be used as a delimiter.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
- public abstract static class AbstractArgumentDelimiter
- implements ArgumentDelimiter
- {
- // TODO: handle argument quoting and escape characters
-
- private char[] quoteChars = {'\'', '"'};
-
- private char[] escapeChars = {'\\'};
-
- public void setQuoteChars(final char[] chars) {
- this.quoteChars = chars;
- }
-
- public char[] getQuoteChars() {
- return this.quoteChars;
- }
-
- public void setEscapeChars(final char[] chars) {
- this.escapeChars = chars;
- }
-
- public char[] getEscapeChars() {
- return this.escapeChars;
- }
-
- public ArgumentList delimit(final CharSequence buffer, final int cursor) {
- List<String> args = new LinkedList<String>();
- StringBuilder arg = new StringBuilder();
- int argpos = -1;
- int bindex = -1;
-
- for (int i = 0; (buffer != null) && (i <= buffer.length()); i++) {
- // once we reach the cursor, set the
- // position of the selected index
- if (i == cursor) {
- bindex = args.size();
- // the position in the current argument is just the
- // length of the current argument
- argpos = arg.length();
- }
-
- if ((i == buffer.length()) || isDelimiter(buffer, i)) {
- if (arg.length() > 0) {
- args.add(arg.toString());
- arg.setLength(0); // reset the arg
- }
- }
- else {
- arg.append(buffer.charAt(i));
- }
- }
-
- return new ArgumentList(args.toArray(new String[args.size()]), bindex, argpos, cursor);
- }
-
- /**
- * Returns true if the character at the specified position is a delimiter. Checks that the character is not
- * quoted by any of {@link #getQuoteChars}, is not escaped by any of the {@link #getEscapeChars}, and
- * that {@link #isDelimiterChar} returns true.
- *
- * @param buffer The complete command buffer
- * @param pos The index of the character in the buffer
- * @return True if the character should be a delimiter
- */
- public boolean isDelimiter(final CharSequence buffer, final int pos) {
- return !isQuoted(buffer, pos) && !isEscaped(buffer, pos) && isDelimiterChar(buffer, pos);
- }
-
- public boolean isQuoted(final CharSequence buffer, final int pos) {
- return false;
- }
-
- public boolean isEscaped(final CharSequence buffer, final int pos) {
- if (pos <= 0) {
- return false;
- }
-
- for (int i = 0; (escapeChars != null) && (i < escapeChars.length);
- i++) {
- if (buffer.charAt(pos) == escapeChars[i]) {
- return !isEscaped(buffer, pos - 1); // escape escape
- }
- }
-
- return false;
- }
-
- /**
- * Returns true if the character at the specified position is a delimiter. This method will only be called if
- * the character is not enclosed in any of the {@link #getQuoteChars}, and is not escaped by any of the
- * {@link #getEscapeChars}. To perform escaping manually, override {@link #isDelimiter} instead.
- */
- public abstract boolean isDelimiterChar(CharSequence buffer, int pos);
- }
-
- /**
- * {@link ArgumentCompleter.ArgumentDelimiter} implementation that counts all whitespace (as reported by
- * {@link Character#isWhitespace}) as being a delimiter.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
- public static class WhitespaceArgumentDelimiter
- extends AbstractArgumentDelimiter
- {
- /**
- * The character is a delimiter if it is whitespace, and the
- * preceding character is not an escape character.
- */
- @Override
- public boolean isDelimiterChar(final CharSequence buffer, final int pos) {
- return Character.isWhitespace(buffer.charAt(pos));
- }
- }
-
- /**
- * The result of a delimited buffer.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
- public static class ArgumentList
- {
- private String[] arguments;
-
- private int cursorArgumentIndex;
-
- private int argumentPosition;
-
- private int bufferPosition;
-
- /**
- * @param arguments The array of tokens
- * @param cursorArgumentIndex The token index of the cursor
- * @param argumentPosition The position of the cursor in the current token
- * @param bufferPosition The position of the cursor in the whole buffer
- */
- public ArgumentList(final String[] arguments, final int cursorArgumentIndex, final int argumentPosition, final int bufferPosition) {
- assert arguments != null;
-
- this.arguments = arguments;
- this.cursorArgumentIndex = cursorArgumentIndex;
- this.argumentPosition = argumentPosition;
- this.bufferPosition = bufferPosition;
- }
-
- public void setCursorArgumentIndex(final int i) {
- this.cursorArgumentIndex = i;
- }
-
- public int getCursorArgumentIndex() {
- return this.cursorArgumentIndex;
- }
-
- public String getCursorArgument() {
- if ((cursorArgumentIndex < 0) || (cursorArgumentIndex >= arguments.length)) {
- return null;
- }
-
- return arguments[cursorArgumentIndex];
- }
-
- public void setArgumentPosition(final int pos) {
- this.argumentPosition = pos;
- }
-
- public int getArgumentPosition() {
- return this.argumentPosition;
- }
-
- public void setArguments(final String[] arguments) {
- this.arguments = arguments;
- }
-
- public String[] getArguments() {
- return this.arguments;
- }
-
- public void setBufferPosition(final int pos) {
- this.bufferPosition = pos;
- }
-
- public int getBufferPosition() {
- return this.bufferPosition;
- }
- }
-}
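A sketch of the per-argument dispatch described in the class Javadoc above: the first token completes against command names, later tokens against file names. Illustrative only, using the completers from the same deleted package.

    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.ArgumentCompleter;
    import scala.tools.jline.console.completer.FileNameCompleter;
    import scala.tools.jline.console.completer.StringsCompleter;

    class ArgumentCompleterSketch {
        public static void main(String[] args) {
            ArgumentCompleter completer = new ArgumentCompleter(
                new StringsCompleter("open", "save"),   // argument 0: command names
                new FileNameCompleter());               // remaining arguments: file names

            List<CharSequence> candidates = new ArrayList<CharSequence>();
            int pos = completer.complete("op", 2, candidates);
            System.out.println(pos + " -> " + candidates);  // 0 -> [open ]
        }
    }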
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java b/src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java
deleted file mode 100644
index fa5bfd2777..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/CandidateListCompletionHandler.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.console.ConsoleReader;
-import scala.tools.jline.console.CursorBuffer;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.ResourceBundle;
-import java.util.Set;
-
-/**
- * A {@link CompletionHandler} that deals with multiple distinct completions
- * by outputting the complete list of possibilities to the console. This
- * mimics the behavior of the
- * <a href="http://www.gnu.org/directory/readline.html">readline</a> library.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class CandidateListCompletionHandler
- implements CompletionHandler
-{
- // TODO: handle quotes and escaped quotes && enable automatic escaping of whitespace
-
- public boolean complete(final ConsoleReader reader, final List<CharSequence> candidates, final int pos) throws
- IOException
- {
- CursorBuffer buf = reader.getCursorBuffer();
-
- // if there is only one completion, then fill in the buffer
- if (candidates.size() == 1) {
- CharSequence value = candidates.get(0);
-
- // fail if the only candidate is the same as the current buffer
- if (value.equals(buf.toString())) {
- return false;
- }
-
- setBuffer(reader, value, pos);
-
- return true;
- }
- else if (candidates.size() > 1) {
- String value = getUnambiguousCompletions(candidates);
- setBuffer(reader, value, pos);
- }
-
- printCandidates(reader, candidates);
-
- // redraw the current console buffer
- reader.drawLine();
-
- return true;
- }
-
- public static void setBuffer(final ConsoleReader reader, final CharSequence value, final int offset) throws
- IOException
- {
- while ((reader.getCursorBuffer().cursor > offset) && reader.backspace()) {
- // empty
- }
-
- reader.putString(value);
- reader.setCursorPosition(offset + value.length());
- }
-
- /**
- * Print out the candidates. If the number of candidates is greater than the
- * {@link ConsoleReader#getAutoprintThreshold}, the user is first asked whether to display them all.
- *
- * @param candidates the list of candidates to print
- */
- public static void printCandidates(final ConsoleReader reader, Collection<CharSequence> candidates) throws
- IOException
- {
- Set<CharSequence> distinct = new HashSet<CharSequence>(candidates);
-
- if (distinct.size() > reader.getAutoprintThreshold()) {
- //noinspection StringConcatenation
- reader.print(Messages.DISPLAY_CANDIDATES.format(candidates.size()));
- reader.flush();
-
- int c;
-
- String noOpt = Messages.DISPLAY_CANDIDATES_NO.format();
- String yesOpt = Messages.DISPLAY_CANDIDATES_YES.format();
- char[] allowed = {yesOpt.charAt(0), noOpt.charAt(0)};
-
- while ((c = reader.readCharacter(allowed)) != -1) {
- String tmp = new String(new char[]{(char) c});
-
- if (noOpt.startsWith(tmp)) {
- reader.println();
- return;
- }
- else if (yesOpt.startsWith(tmp)) {
- break;
- }
- else {
- reader.beep();
- }
- }
- }
-
- // copy the values and make them distinct, without otherwise affecting the ordering. Only do it if the sizes differ.
- if (distinct.size() != candidates.size()) {
- Collection<CharSequence> copy = new ArrayList<CharSequence>();
-
- for (CharSequence next : candidates) {
- if (!copy.contains(next)) {
- copy.add(next);
- }
- }
-
- candidates = copy;
- }
-
- reader.println();
- reader.printColumns(candidates);
- }
-
- /**
- * Returns a root that matches all the {@link String} elements of the specified {@link List},
- * or null if there are no commonalities. For example, if the list contains
- * <i>foobar</i>, <i>foobaz</i>, <i>foobuz</i>, the method will return <i>foob</i>.
- */
- private String getUnambiguousCompletions(final List<CharSequence> candidates) {
- if (candidates == null || candidates.isEmpty()) {
- return null;
- }
-
- // convert to an array for speed
- String[] strings = candidates.toArray(new String[candidates.size()]);
-
- String first = strings[0];
- StringBuilder candidate = new StringBuilder();
-
- for (int i = 0; i < first.length(); i++) {
- if (startsWith(first.substring(0, i + 1), strings)) {
- candidate.append(first.charAt(i));
- }
- else {
- break;
- }
- }
-
- return candidate.toString();
- }
-
- /**
- * @return true if all the elements of <i>candidates</i> start with <i>starts</i>
- */
- private boolean startsWith(final String starts, final String[] candidates) {
- for (String candidate : candidates) {
- if (!candidate.startsWith(starts)) {
- return false;
- }
- }
-
- return true;
- }
-
- private static enum Messages
- {
- DISPLAY_CANDIDATES,
- DISPLAY_CANDIDATES_YES,
- DISPLAY_CANDIDATES_NO,;
-
- private static final
- ResourceBundle
- bundle =
- ResourceBundle.getBundle(CandidateListCompletionHandler.class.getName(), Locale.getDefault());
-
- public String format(final Object... args) {
- if (bundle == null)
- return "";
- else
- return String.format(bundle.getString(name()), args);
- }
- }
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java b/src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java
deleted file mode 100644
index 52d33847f2..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/Completer.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.completer;
-
-import java.util.List;
-
-/**
- * A completer is the mechanism by which tab-completion candidates will be resolved.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public interface Completer
-{
- //
- // FIXME: Check if we can use CharSequence for buffer?
- //
-
- /**
- * Populates <i>candidates</i> with a list of possible completions for the <i>buffer</i>.
- *
- * The <i>candidates</i> list will not be sorted before being displayed to the user: thus, the
- * complete method should sort the {@link List} before returning.
- *
- * @param buffer The buffer
- * @param cursor The current position of the cursor in the <i>buffer</i>
- * @param candidates The {@link List} of candidates to populate
- * @return The index of the <i>buffer</i> for which the completion will be relative
- */
- int complete(String buffer, int cursor, List<CharSequence> candidates);
-}
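A minimal custom implementation of the contract above (sketch only): it offers a single fixed word and reports that the completion is relative to the start of the buffer.

    import java.util.List;
    import scala.tools.jline.console.completer.Completer;

    class HelpCompleter implements Completer {
        public int complete(String buffer, int cursor, List<CharSequence> candidates) {
            String word = "help";
            if (buffer == null || word.startsWith(buffer)) {
                candidates.add(word);
                return 0;   // candidates replace the buffer starting at index 0
            }
            return -1;      // no candidates
        }
    }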
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java b/src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java
deleted file mode 100644
index 030dc84205..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/CompletionHandler.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.console.ConsoleReader;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * Handler for dealing with candidates for tab-completion.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public interface CompletionHandler
-{
- boolean complete(ConsoleReader reader, List<CharSequence> candidates, int position) throws IOException;
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java
deleted file mode 100644
index 5ad049b857..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/EnumCompleter.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2009 the original author(s).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package scala.tools.jline.console.completer;
-
-/**
- * {@link Completer} for {@link Enum} names.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class EnumCompleter
- extends StringsCompleter
-{
- public EnumCompleter(Class<? extends Enum> source) {
- assert source != null;
-
- for (Enum<?> n : source.getEnumConstants()) {
- this.getStrings().add(n.name().toLowerCase());
- }
- }
-} \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java
deleted file mode 100644
index 6556138769..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/FileNameCompleter.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.internal.Configuration;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * A file name completer takes the buffer and issues a list of
- * potential completions.
- * <p/>
- * This completer tries to behave as similarly as possible to
- * <i>bash</i>'s file name completion (using GNU readline)
- * with the following exceptions:
- * <p/>
- * <ul>
- * <li>Candidates that are directories will end with "/"</li>
- * <li>Wildcard regular expressions are not evaluated or replaced</li>
- * <li>The "~" character can be used to represent the user's home,
- * but it cannot complete to other users' homes, since Java does
- * not provide any way of determining that easily</li>
- * </ul>
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class FileNameCompleter
- implements Completer
-{
- // TODO: Handle files with spaces in them
-
- private static final boolean OS_IS_WINDOWS;
-
- static {
- String os = Configuration.getOsName();
- OS_IS_WINDOWS = os.contains("windows");
- }
-
- public int complete(String buffer, final int cursor, final List<CharSequence> candidates) {
- // buffer can be null
- assert candidates != null;
-
- if (buffer == null) {
- buffer = "";
- }
-
- if (OS_IS_WINDOWS) {
- buffer = buffer.replace('/', '\\');
- }
-
- String translated = buffer;
-
- File homeDir = getUserHome();
-
- // Special character: ~ maps to the user's home directory
- if (translated.startsWith("~" + separator())) {
- translated = homeDir.getPath() + translated.substring(1);
- }
- else if (translated.startsWith("~")) {
- translated = homeDir.getParentFile().getAbsolutePath();
- }
- else if (!(translated.startsWith(separator()))) {
- String cwd = getUserDir().getAbsolutePath();
- translated = cwd + separator() + translated;
- }
-
- File file = new File(translated);
- final File dir;
-
- if (translated.endsWith(separator())) {
- dir = file;
- }
- else {
- dir = file.getParentFile();
- }
-
- File[] entries = dir == null ? new File[0] : dir.listFiles();
-
- return matchFiles(buffer, translated, entries, candidates);
- }
-
- protected String separator() {
- return File.separator;
- }
-
- protected File getUserHome() {
- return Configuration.getUserHome();
- }
-
- protected File getUserDir() {
- return new File(".");
- }
-
- protected int matchFiles(final String buffer, final String translated, final File[] files, final List<CharSequence> candidates) {
- if (files == null) {
- return -1;
- }
-
- int matches = 0;
-
- // first pass: just count the matches
- for (File file : files) {
- if (file.getAbsolutePath().startsWith(translated)) {
- matches++;
- }
- }
- for (File file : files) {
- if (file.getAbsolutePath().startsWith(translated)) {
- CharSequence name = file.getName() + (matches == 1 && file.isDirectory() ? separator() : " ");
- candidates.add(render(file, name).toString());
- }
- }
-
- final int index = buffer.lastIndexOf(separator());
-
- return index + separator().length();
- }
-
- protected CharSequence render(final File file, final CharSequence name) {
- assert file != null;
- assert name != null;
-
- return name;
- }
-}
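A usage sketch for the bash-like completion described above, including the "~" expansion. Illustrative only; the output depends on the platform and on what actually exists under the user's home directory.

    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.FileNameCompleter;

    class FileNameCompleterSketch {
        public static void main(String[] args) {
            FileNameCompleter completer = new FileNameCompleter();
            List<CharSequence> candidates = new ArrayList<CharSequence>();
            // "~/" is translated to the user's home directory before matching.
            int pos = completer.complete("~/", 2, candidates);
            System.out.println(pos + " -> " + candidates);
        }
    }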
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java
deleted file mode 100644
index 93cf563bcd..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/NullCompleter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package scala.tools.jline.console.completer;
-
-import java.util.List;
-
-/**
- * Null completer.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public final class NullCompleter
- implements Completer
-{
- public static final NullCompleter INSTANCE = new NullCompleter();
-
- public int complete(final String buffer, final int cursor, final List<CharSequence> candidates) {
- return -1;
- }
-} \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java b/src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java
deleted file mode 100644
index 2abfdd0340..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/StringsCompleter.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package scala.tools.jline.console.completer;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * Completer for a set of strings.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class StringsCompleter
- implements Completer
-{
- private final SortedSet<String> strings = new TreeSet<String>();
-
- public StringsCompleter() {
- // empty
- }
-
- public StringsCompleter(final Collection<String> strings) {
- assert strings != null;
- getStrings().addAll(strings);
- }
-
- public StringsCompleter(final String... strings) {
- this(Arrays.asList(strings));
- }
-
- public Collection<String> getStrings() {
- return strings;
- }
-
- public int complete(final String buffer, final int cursor, final List<CharSequence> candidates) {
- // buffer could be null
- assert candidates != null;
-
- if (buffer == null) {
- candidates.addAll(strings);
- }
- else {
- for (String match : strings.tailSet(buffer)) {
- if (!match.startsWith(buffer)) {
- break;
- }
-
- candidates.add(match);
- }
- }
-
- if (candidates.size() == 1) {
- candidates.set(0, candidates.get(0) + " ");
- }
-
- return candidates.isEmpty() ? -1 : 0;
- }
-} \ No newline at end of file
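A short sketch of the sorted prefix matching implemented above (illustrative only):

    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.jline.console.completer.StringsCompleter;

    class StringsCompleterSketch {
        public static void main(String[] args) {
            StringsCompleter completer = new StringsCompleter("quit", "query", "help");
            List<CharSequence> candidates = new ArrayList<CharSequence>();
            int pos = completer.complete("qu", 2, candidates);
            System.out.println(pos + " -> " + candidates);  // 0 -> [query, quit]
        }
    }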
diff --git a/src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java b/src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java
deleted file mode 100644
index 8150710cfc..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/completer/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2010 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Console completer support.
- *
- * @since 2.3
- */
-package scala.tools.jline.console.completer; \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java b/src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java
deleted file mode 100644
index 5eccba3ce5..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/history/FileHistory.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.history;
-
-import scala.tools.jline.internal.Log;
-
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.Flushable;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintStream;
-import java.io.Reader;
-import java.util.ListIterator;
-
-/**
- * {@link History} using a file for persistent backing.
- * <p/>
- * Implementers should install a shutdown hook to call {@link FileHistory#flush}
- * to save history to disk.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public class FileHistory
- extends MemoryHistory
- implements PersistentHistory, Flushable
-{
- private final File file;
-
- public FileHistory(final File file) throws IOException {
- assert file != null;
- this.file = file;
- load(file);
- }
-
- public File getFile() {
- return file;
- }
-
- public void load(final File file) throws IOException {
- assert file != null;
- if (file.exists()) {
- Log.trace("Loading history from: ", file);
- load(new FileReader(file));
- }
- }
-
- public void load(final InputStream input) throws IOException {
- assert input != null;
- load(new InputStreamReader(input));
- }
-
- public void load(final Reader reader) throws IOException {
- assert reader != null;
- BufferedReader input = new BufferedReader(reader);
-
- String item;
- while ((item = input.readLine()) != null) {
- add(item);
- }
- }
-
- public void flush() throws IOException {
- Log.trace("Flushing history");
-
- if (!file.exists()) {
- File dir = file.getParentFile();
- if (!dir.exists() && !dir.mkdirs()) {
- Log.warn("Failed to create directory: ", dir);
- }
- if (!file.createNewFile()) {
- Log.warn("Failed to create file: ", file);
- }
- }
-
- PrintStream out = new PrintStream(new BufferedOutputStream(new FileOutputStream(file)));
- try {
- for (Entry entry : this) {
- out.println(entry.value());
- }
- }
- finally {
- out.close();
- }
- }
-
- public void purge() throws IOException {
- Log.trace("Purging history");
-
- clear();
-
- if (!file.delete()) {
- Log.warn("Failed to delete history file: ", file);
- }
- }
-} \ No newline at end of file
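The Javadoc above recommends installing a shutdown hook that flushes the history; a minimal sketch follows (the history file path here is made up for illustration):

    import java.io.File;
    import java.io.IOException;
    import scala.tools.jline.console.history.FileHistory;

    class FileHistorySketch {
        public static void main(String[] args) throws IOException {
            // Hypothetical history file under the user's home directory.
            final FileHistory history = new FileHistory(
                new File(System.getProperty("user.home"), ".example_history"));

            // Persist the in-memory entries to disk when the JVM exits.
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    try {
                        history.flush();
                    }
                    catch (IOException e) {
                        // nothing useful can be done this late
                    }
                }
            });

            history.add("println(\"hello\")");
        }
    }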
diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/History.java b/src/jline/src/main/java/scala/tools/jline/console/history/History.java
deleted file mode 100644
index d8602f2150..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/history/History.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.history;
-
-import java.util.Iterator;
-import java.util.ListIterator;
-
-/**
- * Console history.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public interface History
- extends Iterable<History.Entry>
-{
- int size();
-
- boolean isEmpty();
-
- int index();
-
- void clear();
-
- CharSequence get(int index);
-
- void add(CharSequence line);
-
- void replace(CharSequence item);
-
- //
- // Entries
- //
-
- interface Entry
- {
- int index();
-
- CharSequence value();
- }
-
- ListIterator<Entry> entries(int index);
-
- ListIterator<Entry> entries();
-
- Iterator<Entry> iterator();
-
- //
- // Navigation
- //
-
- CharSequence current();
-
- boolean previous();
-
- boolean next();
-
- boolean moveToFirst();
-
- boolean moveToLast();
-
- boolean moveTo(int index);
-
- void moveToEnd();
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java b/src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java
deleted file mode 100644
index 3af936428a..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/history/MemoryHistory.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.history;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.ListIterator;
-import java.util.NoSuchElementException;
-
-/**
- * Non-persistent {@link History}.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public class MemoryHistory
- implements History
-{
- public static final int DEFAULT_MAX_SIZE = 500;
-
- private final LinkedList<CharSequence> items = new LinkedList<CharSequence>();
-
- private int maxSize = DEFAULT_MAX_SIZE;
-
- private boolean ignoreDuplicates = true;
-
- private boolean autoTrim = false;
-
- // NOTE: These are all ideas from looking at the Bash man page:
-
- // TODO: Add ignore space? (lines starting with a space are ignored)
-
- // TODO: Add ignore patterns?
-
- // TODO: Add history timestamp?
-
- // TODO: Add erase dups?
-
- private int offset = 0;
-
- private int index = 0;
-
- public void setMaxSize(final int maxSize) {
- this.maxSize = maxSize;
- maybeResize();
- }
-
- public int getMaxSize() {
- return maxSize;
- }
-
- public boolean isIgnoreDuplicates() {
- return ignoreDuplicates;
- }
-
- public void setIgnoreDuplicates(final boolean flag) {
- this.ignoreDuplicates = flag;
- }
-
- public boolean isAutoTrim() {
- return autoTrim;
- }
-
- public void setAutoTrim(final boolean flag) {
- this.autoTrim = flag;
- }
-
- public int size() {
- return items.size();
- }
-
- public boolean isEmpty() {
- return items.isEmpty();
- }
-
- public int index() {
- return offset + index;
- }
-
- public void clear() {
- items.clear();
- offset = 0;
- index = 0;
- }
-
- public CharSequence get(final int index) {
- return items.get(index - offset);
- }
-
- public void add(CharSequence item) {
- assert item != null;
-
- if (isAutoTrim()) {
- item = String.valueOf(item).trim();
- }
-
- if (isIgnoreDuplicates()) {
- if (!items.isEmpty() && item.equals(items.getLast())) {
- return;
- }
- }
-
- items.add(item);
-
- maybeResize();
- }
-
- public void replace(final CharSequence item) {
- items.removeLast();
- add(item);
- }
-
- private void maybeResize() {
- while (size() > getMaxSize()) {
- items.removeFirst();
- offset++;
- }
-
- index = size();
- }
-
- public ListIterator<Entry> entries(final int index) {
- return new EntriesIterator(index - offset);
- }
-
- public ListIterator<Entry> entries() {
- return entries(offset);
- }
-
- public Iterator<Entry> iterator() {
- return entries();
- }
-
- private static class EntryImpl
- implements Entry
- {
- private final int index;
-
- private final CharSequence value;
-
- public EntryImpl(int index, CharSequence value) {
- this.index = index;
- this.value = value;
- }
-
- public int index() {
- return index;
- }
-
- public CharSequence value() {
- return value;
- }
-
- @Override
- public String toString() {
- return String.format("%d: %s", index, value);
- }
- }
-
- private class EntriesIterator
- implements ListIterator<Entry>
- {
- private final ListIterator<CharSequence> source;
-
- private EntriesIterator(final int index) {
- source = items.listIterator(index);
- }
-
- public Entry next() {
- if (!source.hasNext()) {
- throw new NoSuchElementException();
- }
- return new EntryImpl(offset + source.nextIndex(), source.next());
- }
-
- public Entry previous() {
- if (!source.hasPrevious()) {
- throw new NoSuchElementException();
- }
- return new EntryImpl(offset + source.previousIndex(), source.previous());
- }
-
- public int nextIndex() {
- return offset + source.nextIndex();
- }
-
- public int previousIndex() {
- return offset + source.previousIndex();
- }
-
- public boolean hasNext() {
- return source.hasNext();
- }
-
- public boolean hasPrevious() {
- return source.hasPrevious();
- }
-
- public void remove() {
- throw new UnsupportedOperationException();
- }
-
- public void set(final Entry entry) {
- throw new UnsupportedOperationException();
- }
-
- public void add(final Entry entry) {
- throw new UnsupportedOperationException();
- }
- }
-
- //
- // Navigation
- //
-
- /**
- * This moves the history to the last entry. This entry is one position
- * before the moveToEnd() position.
- *
- * @return Returns false if there were no history entries or the history
- * index was already at the last entry.
- */
- public boolean moveToLast() {
- int lastEntry = size() - 1;
- if (lastEntry >= 0 && lastEntry != index) {
- index = size() - 1;
- return true;
- }
-
- return false;
- }
-
- /**
-     * Move to the specified index in the history.
-     *
-     * @param index the absolute index (offset included) to move to
-     * @return true if the index was in range and the move succeeded
- */
- public boolean moveTo(int index) {
- index -= offset;
- if (index >= 0 && index < size() ) {
- this.index = index;
- return true;
- }
- return false;
- }
-
- /**
- * Moves the history index to the first entry.
- *
- * @return Return false if there are no entries in the history or if the
- * history is already at the beginning.
- */
- public boolean moveToFirst() {
- if (size() > 0 && index != 0) {
- index = 0;
- return true;
- }
-
- return false;
- }
-
- /**
- * Move to the end of the history buffer. This will be a blank entry, after
- * all of the other entries.
- */
- public void moveToEnd() {
- index = size();
- }
-
- /**
- * Return the content of the current buffer.
- */
- public CharSequence current() {
- if (index >= size()) {
- return "";
- }
-
- return items.get(index);
- }
-
- /**
- * Move the pointer to the previous element in the buffer.
- *
- * @return true if we successfully went to the previous element
- */
- public boolean previous() {
- if (index <= 0) {
- return false;
- }
-
- index--;
-
- return true;
- }
-
- /**
- * Move the pointer to the next element in the buffer.
- *
- * @return true if we successfully went to the next element
- */
- public boolean next() {
- if (index >= size()) {
- return false;
- }
-
- index++;
-
- return true;
- }
-
-
-} \ No newline at end of file
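When more entries are added than maxSize allows, the oldest entries are evicted and the offset field keeps the externally visible indices stable. A short sketch of that behaviour; the values in the comments follow from the code above:

import scala.tools.jline.console.history.MemoryHistory;

public class MemoryHistoryEviction {
    public static void main(String[] args) {
        MemoryHistory history = new MemoryHistory();
        history.setMaxSize(2);

        history.add("a");
        history.add("b");
        history.add("c");   // "a" is evicted; offset becomes 1

        System.out.println(history.size());   // 2
        System.out.println(history.index());  // 3 (offset + in-memory index)
        System.out.println(history.get(1));   // "b" -- indices stay absolute
        System.out.println(history.get(2));   // "c"
    }
}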
diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java b/src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java
deleted file mode 100644
index 916532e7fc..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/history/PersistentHistory.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.console.history;
-
-import java.io.IOException;
-
-/**
- * Persistent {@link History}.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.3
- */
-public interface PersistentHistory
- extends History
-{
- /**
- * Flush all items to persistent storage.
- *
- * @throws IOException Flush failed
- */
- void flush() throws IOException;
-
- /**
- * Purge persistent storage and {@link #clear}.
- *
- * @throws IOException Purge failed
- */
- void purge() throws IOException;
-} \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/console/history/package-info.java b/src/jline/src/main/java/scala/tools/jline/console/history/package-info.java
deleted file mode 100644
index 4635752898..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/history/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2009 the original author(s).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Console history support.
- *
- * @since 2.0
- */
-package scala.tools.jline.console.history; \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/console/package-info.java b/src/jline/src/main/java/scala/tools/jline/console/package-info.java
deleted file mode 100644
index 9f284e9c05..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/console/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2009 the original author(s).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Console support.
- *
- * @since 2.0
- */
-package scala.tools.jline.console; \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/internal/Configuration.java b/src/jline/src/main/java/scala/tools/jline/internal/Configuration.java
deleted file mode 100644
index 5350d6c19e..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/internal/Configuration.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package scala.tools.jline.internal;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-/**
- * Provides access to configuration values.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.4
- */
-public final class Configuration
-{
- public static final String JLINE_RC = ".jline.rc";
-
- private static final Properties userprops;
-
- static {
- Properties props = new Properties();
-
- File file = new File(getUserHome(), JLINE_RC);
- if (file.exists() && file.canRead()) {
- try {
- InputStream input = new BufferedInputStream(new FileInputStream(file));
- try {
- props.load(input);
- Log.debug("Loaded user configuration: ", file);
- }
- finally {
- input.close();
- }
- }
- catch (IOException e) {
- Log.warn("Unable to read user configuration: ", file, e);
- }
- }
- else {
- Log.trace("User configuration file missing or unreadable: ", file);
- }
-
- userprops = props;
- }
-
- private static boolean isEmpty(final String value) {
- return value == null || value.trim().length() == 0;
- }
-
- public static String getString(final String name, final String defaultValue) {
- assert name != null;
-
- String value;
-
-        // Check system properties first; they always win
- value = System.getProperty(name);
-
- if (isEmpty(value)) {
- // Next try userprops
- value = userprops.getProperty(name);
-
- if (isEmpty(value)) {
- // else use the default
- value = defaultValue;
- }
- }
-
- return value;
- }
-
- public static String getString(final String name) {
- return getString(name, null);
- }
-
- public static Boolean getBoolean(final String name, final Boolean defaultValue) {
- String value = getString(name);
- if (isEmpty(value)) {
- return defaultValue;
- }
- return Boolean.valueOf(value);
- }
-
- public static Boolean getBoolean(final String name) {
- return getBoolean(name, null);
- }
-
- //
- // System property helpers
- //
-
- public static File getUserHome() {
- return new File(System.getProperty("user.home"));
- }
-
- public static String getOsName() {
- return System.getProperty("os.name").toLowerCase();
- }
-
- public static String getFileEncoding() {
- return System.getProperty("file.encoding");
- }
-
- public static String getInputEncoding() {
- return System.getProperty("input.encoding", "UTF-8");
- }
-} \ No newline at end of file
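The lookup order implemented above is: system property first, then the user's ~/.jline.rc, then the supplied default. A hedged usage sketch; the property names here are made up purely for illustration:

import scala.tools.jline.internal.Configuration;

public class ConfigurationExample {
    public static void main(String[] args) {
        // "example.color" is a hypothetical key used only for this demo.
        System.setProperty("example.color", "always");

        // System properties win over ~/.jline.rc, which wins over the default.
        String color = Configuration.getString("example.color", "never");
        Boolean flag = Configuration.getBoolean("example.flag", Boolean.FALSE);

        System.out.println(color);  // "always"
        System.out.println(flag);   // false unless configured elsewhere
    }
}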
diff --git a/src/jline/src/main/java/scala/tools/jline/internal/Log.java b/src/jline/src/main/java/scala/tools/jline/internal/Log.java
deleted file mode 100644
index b226a10532..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/internal/Log.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package scala.tools.jline.internal;
-
-import java.io.PrintStream;
-
-/**
- * Internal logger.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public final class Log
-{
- ///CLOVER:OFF
-
- public static enum Level
- {
- TRACE,
- DEBUG,
- INFO,
- WARN,
- ERROR
- }
-
- @SuppressWarnings({"StringConcatenation"})
- public static final boolean DEBUG = Boolean.getBoolean(Log.class.getName() + ".debug");
-
- @SuppressWarnings({"StringConcatenation"})
- public static final boolean TRACE = Boolean.getBoolean(Log.class.getName() + ".trace");
-
- private static PrintStream output = System.err;
-
- public static PrintStream getOutput() {
- return output;
- }
-
- public static void setOutput(final PrintStream out) {
- assert out != null;
- output = out;
- }
-
- private static void print(final Object message) {
- if (message instanceof Throwable) {
- ((Throwable) message).printStackTrace();
- }
- else if (message.getClass().isArray()) {
- Object[] array = (Object[]) message;
-
- for (int i = 0; i < array.length; i++) {
- output.print(array[i]);
- if (i + 1 < array.length) {
- output.print(",");
- }
- }
- }
- else {
- output.print(message);
- }
- }
-
- private static void log(final Level level, final Object[] messages) {
- //noinspection SynchronizeOnNonFinalField
- synchronized (output) {
- output.format("[%s] ", level);
-
- for (Object message : messages) {
- print(message);
- }
-
- output.println();
- output.flush();
- }
- }
-
- public static void trace(final Object... messages) {
- if (TRACE) {
- log(Level.TRACE, messages);
- }
- }
-
- public static void debug(final Object... messages) {
- if (TRACE || DEBUG) {
- log(Level.DEBUG, messages);
- }
- }
-
- public static void warn(final Object... messages) {
- log(Level.WARN, messages);
- }
-
- public static void error(final Object... messages) {
- log(Level.ERROR, messages);
- }
-} \ No newline at end of file
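warn and error always print, while debug and trace are gated by flags that are read once from system properties named after the class. A small usage sketch, assuming the JVM was started with the corresponding -D flag where noted:

import scala.tools.jline.internal.Log;

public class LogExample {
    public static void main(String[] args) {
        // warn/error always print to the configured stream (System.err by default).
        Log.warn("history file missing: ", "~/.example_history");

        // debug/trace only print when the JVM was started with
        //   -Dscala.tools.jline.internal.Log.debug=true
        // (the flags are read once, when the Log class is initialized).
        Log.debug("loaded ", 3, " entries");
    }
}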
diff --git a/src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java b/src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java
deleted file mode 100644
index 2adabdd2ab..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/internal/ReplayPrefixOneCharInputStream.java
+++ /dev/null
@@ -1,95 +0,0 @@
-package scala.tools.jline.internal;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.MessageFormat;
-
-/**
- * This is awkward and inefficient, but probably the minimal way to add UTF-8 support to JLine
- *
- * @author <a href="mailto:Marc.Herbert@continuent.com">Marc Herbert</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @since 2.0
- */
-public final class ReplayPrefixOneCharInputStream
- extends InputStream
-{
- private byte firstByte;
-
- private int byteLength;
-
- private InputStream wrappedStream;
-
- private int byteRead;
-
- private final String encoding;
-
- public ReplayPrefixOneCharInputStream(final String encoding) {
- assert encoding != null;
- this.encoding = encoding;
- }
-
- public String getEncoding() {
- return encoding;
- }
-
- public void setInput(final int recorded, final InputStream wrapped) throws IOException {
- this.byteRead = 0;
- this.firstByte = (byte) recorded;
- this.wrappedStream = wrapped;
-
- byteLength = 1;
- if (encoding.equalsIgnoreCase("UTF-8")) {
- setInputUTF8(recorded, wrapped);
- }
- else if (encoding.equalsIgnoreCase("UTF-16")) {
- byteLength = 2;
- }
- else if (encoding.equalsIgnoreCase("UTF-32")) {
- byteLength = 4;
- }
- }
-
-
- public void setInputUTF8(final int recorded, final InputStream wrapped) throws IOException {
- // 110yyyyy 10zzzzzz
- if ((firstByte & (byte) 0xE0) == (byte) 0xC0) {
- this.byteLength = 2;
- }
- // 1110xxxx 10yyyyyy 10zzzzzz
- else if ((firstByte & (byte) 0xF0) == (byte) 0xE0) {
- this.byteLength = 3;
- }
- // 11110www 10xxxxxx 10yyyyyy 10zzzzzz
- else if ((firstByte & (byte) 0xF8) == (byte) 0xF0) {
- this.byteLength = 4;
- }
- else {
- throw new IOException(MessageFormat.format("Invalid UTF-8 first byte: {0}", firstByte));
- }
- }
-
- public int read() throws IOException {
- if (available() == 0) {
- return -1;
- }
-
- byteRead++;
-
- if (byteRead == 1) {
- return firstByte;
- }
-
- return wrappedStream.read();
- }
-
- /**
- * InputStreamReader is greedy and will try to read bytes in advance. We
- * do NOT want this to happen since we use a temporary/"losing bytes"
-     * InputStreamReader above; that's why we hide the real
- * wrappedStream.available() here.
- */
- public int available() {
- return byteLength - byteRead;
- }
-} \ No newline at end of file
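setInputUTF8 infers the length of the multi-byte sequence from the lead byte using the bit patterns spelled out in the comments above. A tiny standalone sketch of the same classification (not part of JLine, shown only to make the masks concrete):

public class Utf8LeadByte {
    // Mirrors the masks used in setInputUTF8; assumes a valid UTF-8 lead byte.
    static int sequenceLength(byte first) {
        if ((first & 0x80) == 0x00) return 1;      // 0xxxxxxx
        if ((first & 0xE0) == 0xC0) return 2;      // 110yyyyy
        if ((first & 0xF0) == 0xE0) return 3;      // 1110xxxx
        if ((first & 0xF8) == 0xF0) return 4;      // 11110www
        throw new IllegalArgumentException("not a UTF-8 lead byte");
    }

    public static void main(String[] args) {
        System.out.println(sequenceLength((byte) 'A'));   // 1
        System.out.println(sequenceLength((byte) 0xC3));  // 2, e.g. 'é'
        System.out.println(sequenceLength((byte) 0xE2));  // 3, e.g. '€'
        System.out.println(sequenceLength((byte) 0xF0));  // 4, e.g. a supplementary character
    }
}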
diff --git a/src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java b/src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java
deleted file mode 100644
index 151862c14d..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/internal/TerminalLineSettings.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-
-package scala.tools.jline.internal;
-
-import java.io.ByteArrayOutputStream;
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.text.MessageFormat;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Provides access to terminal line settings via <tt>stty</tt>.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- * @author <a href="mailto:dwkemp@gmail.com">Dale Kemp</a>
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- * @author <a href="mailto:jbonofre@apache.org">Jean-Baptiste Onofré</a>
- * @since 2.0
- */
-public final class TerminalLineSettings
-{
- public static final String JLINE_STTY = "jline.stty";
-
- public static final String DEFAULT_STTY = "stty";
-
- public static final String JLINE_SH = "jline.sh";
-
- public static final String DEFAULT_SH = "sh";
-
- private static String sttyCommand = Configuration.getString(JLINE_STTY, DEFAULT_STTY);
-
- private static String shCommand = Configuration.getString(JLINE_SH, DEFAULT_SH);
-
- private String config;
-
- private long configLastFetched;
-
- public TerminalLineSettings() throws IOException, InterruptedException {
- config = get("-a");
- configLastFetched = System.currentTimeMillis();
-
- Log.debug("Config: ", config);
-
- // sanity check
- if (config.length() == 0) {
- throw new IOException(MessageFormat.format("Unrecognized stty code: {0}", config));
- }
- }
-
- public String getConfig() {
- return config;
- }
-
- public void restore() throws IOException, InterruptedException {
- set("sane");
- }
-
- public String get(final String args) throws IOException, InterruptedException {
- return stty(args);
- }
-
- public void set(final String args) throws IOException, InterruptedException {
- stty(args);
- }
-
- /**
- * <p>
-     * Get the value of an stty property, reusing a cached copy of the stty output when it is recent enough.
- * </p>
- *
- * @param name the stty property.
- * @return the stty property value.
- */
- public int getProperty(String name) {
- assert name != null;
- try {
-            // tty properties are cached so we don't have to worry too much about getting the terminal width/height
- if (config == null || System.currentTimeMillis() - configLastFetched > 1000 ) {
- config = get("-a");
- configLastFetched = System.currentTimeMillis();
- }
- return this.getProperty(name, config);
- } catch (Exception e) {
- Log.warn("Failed to query stty ", name, e);
- return -1;
- }
- }
-
- /**
- * <p>
-     * Parses stty output (as produced by stty -a) and returns the value of a given property.
- * </p>
- *
- * @param name property name.
-     * @param stty string resulting from stty -a execution.
- * @return value of the given property.
- */
- protected int getProperty(String name, String stty) {
- // try the first kind of regex
- Pattern pattern = Pattern.compile(name + "\\s+=\\s+([^;]*)[;\\n\\r]");
- Matcher matcher = pattern.matcher(stty);
- if (!matcher.find()) {
- // try a second kind of regex
- pattern = Pattern.compile(name + "\\s+([^;]*)[;\\n\\r]");
- matcher = pattern.matcher(stty);
- if (!matcher.find()) {
-                // try a third kind of regex
- pattern = Pattern.compile("(\\S*)\\s+" + name);
- matcher = pattern.matcher(stty);
- if (!matcher.find()) {
- return -1;
- }
- }
- }
- return parseControlChar(matcher.group(1));
- }
-
- private int parseControlChar(String str) {
-        // undefined
- if ("<undef>".equals(str)) {
- return -1;
- }
- // octal
- if (str.charAt(0) == '0') {
- return Integer.parseInt(str, 8);
- }
- // decimal
- if (str.charAt(0) >= '1' && str.charAt(0) <= '9') {
- return Integer.parseInt(str, 10);
- }
- // control char
- if (str.charAt(0) == '^') {
- if (str.charAt(1) == '?') {
- return 127;
- } else {
- return str.charAt(1) - 64;
- }
- } else if (str.charAt(0) == 'M' && str.charAt(1) == '-') {
- if (str.charAt(2) == '^') {
- if (str.charAt(3) == '?') {
- return 127 + 128;
- } else {
- return str.charAt(3) - 64 + 128;
- }
- } else {
- return str.charAt(2) + 128;
- }
- } else {
- return str.charAt(0);
- }
- }
-
- private static String stty(final String args) throws IOException, InterruptedException {
- assert args != null;
- return exec(String.format("%s %s < /dev/tty", sttyCommand, args));
- }
-
- private static String exec(final String cmd) throws IOException, InterruptedException {
- assert cmd != null;
- return exec(shCommand, "-c", cmd);
- }
-
- private static String exec(final String... cmd) throws IOException, InterruptedException {
- assert cmd != null;
-
- ByteArrayOutputStream bout = new ByteArrayOutputStream();
-
- Log.trace("Running: ", cmd);
-
- Process p = Runtime.getRuntime().exec(cmd);
-
- InputStream in = null;
- InputStream err = null;
- OutputStream out = null;
- try {
- int c;
- in = p.getInputStream();
- while ((c = in.read()) != -1) {
- bout.write(c);
- }
- err = p.getErrorStream();
- while ((c = err.read()) != -1) {
- bout.write(c);
- }
- out = p.getOutputStream();
- p.waitFor();
- }
- finally {
- close(in, out, err);
- }
-
- String result = bout.toString();
-
- Log.trace("Result: ", result);
-
- return result;
- }
-
- private static void close(final Closeable... closeables) {
- for (Closeable c : closeables) {
- try {
- c.close();
- }
- catch (Exception e) {
- // Ignore
- }
- }
- }
-} \ No newline at end of file
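The settings are obtained by shelling out to stty via sh -c with /dev/tty redirected in, so this only works on a Unix-like terminal where both binaries exist. A hedged usage sketch of the public API shown above:

import scala.tools.jline.internal.TerminalLineSettings;

public class SttyExample {
    public static void main(String[] args) throws Exception {
        // Requires a real /dev/tty and the stty binary, i.e. a Unix-like terminal.
        TerminalLineSettings settings = new TerminalLineSettings();

        int columns = settings.getProperty("columns");  // -1 if it cannot be parsed
        int rows = settings.getProperty("rows");
        System.out.println(columns + "x" + rows);

        settings.set("-echo");   // stop echoing typed characters
        settings.restore();      // "stty sane" puts the terminal back
    }
}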
diff --git a/src/jline/src/main/java/scala/tools/jline/internal/package-info.java b/src/jline/src/main/java/scala/tools/jline/internal/package-info.java
deleted file mode 100644
index d27444cfdf..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/internal/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2009 the original author(s).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Internal support.
- *
- * @since 2.0
- */
-package scala.tools.jline.internal; \ No newline at end of file
diff --git a/src/jline/src/main/java/scala/tools/jline/package-info.java b/src/jline/src/main/java/scala/tools/jline/package-info.java
deleted file mode 100644
index fde16f98de..0000000000
--- a/src/jline/src/main/java/scala/tools/jline/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (C) 2009 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * JLine 2.
- *
- * @since 2.0
- */
-package scala.tools.jline; \ No newline at end of file
diff --git a/src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties b/src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties
deleted file mode 100644
index fd097efb8a..0000000000
--- a/src/jline/src/main/resources/scala/tools/jline/console/completer/CandidateListCompletionHandler.properties
+++ /dev/null
@@ -1,4 +0,0 @@
-DISPLAY_CANDIDATES=Display all %d possibilities? (y or n)
-DISPLAY_CANDIDATES_YES=y
-DISPLAY_CANDIDATES_NO=n
-DISPLAY_MORE=--More--
diff --git a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties b/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
deleted file mode 100644
index ad932d2a80..0000000000
--- a/src/jline/src/main/resources/scala/tools/jline/keybindings.properties
+++ /dev/null
@@ -1,71 +0,0 @@
-# Keybinding mapping for JLine. The format is:
-# [key code]=[logical operation]
-
-# CTRL-A: move to the beginning of the line
-1=MOVE_TO_BEG
-
-# CTRL-B: move to the previous character
-2=PREV_CHAR
-
-# CTRL-D: close out the input stream
-4=EXIT
-
-# CTRL-E: move the cursor to the end of the line
-5=MOVE_TO_END
-
-# CTRL-F: move to the next character
-6=NEXT_CHAR
-
-# CTRL-G: abort
-7=ABORT
-
-# BACKSPACE, CTRL-H: delete the previous character
-# 8 is the ASCII code for backspace and therefore
-# deletes the previous character
-8=DELETE_PREV_CHAR
-
-# TAB, CTRL-I: signal that console completion should be attempted
-9=COMPLETE
-
-# CTRL-J, CTRL-M: newline
-10=NEWLINE
-
-# CTRL-K: erase the current line
-11=KILL_LINE
-
-# CTRL-L: clear screen
-12=CLEAR_SCREEN
-
-# ENTER: newline
-13=NEWLINE
-
-# CTRL-N: scroll to the next element in the history buffer
-14=NEXT_HISTORY
-
-# CTRL-O: move to the previous word
-15=PREV_WORD
-
-# CTRL-P: scroll to the previous element in the history buffer
-16=PREV_HISTORY
-
-# CTRL-R: search history
-18=SEARCH_PREV
-
-# CTRL-T: move to next word
-20=NEXT_WORD
-
-# CTRL-U: delete all the characters before the cursor position
-21=KILL_LINE_PREV
-
-# CTRL-V: paste the contents of the clipboard (useful for Windows terminal)
-22=PASTE
-
-# CTRL-W: delete the word directly before the cursor
-23=DELETE_PREV_WORD
-
-# CTRL-X: delete the word directly after the cursor
-24=DELETE_NEXT_WORD
-
-# DELETE, CTRL-?: delete the next character
-# 127 is the ASCII code for delete
-127=DELETE_NEXT_CHAR
diff --git a/src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties b/src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties
deleted file mode 100644
index 340b5aa5b9..0000000000
--- a/src/jline/src/main/resources/scala/tools/jline/windowsbindings.properties
+++ /dev/null
@@ -1,71 +0,0 @@
-# Keybinding mapping for JLine. The format is:
-# [key code]=[logical operation]
-
-# CTRL-A: move to the beginning of the line
-1=MOVE_TO_BEG
-
-# CTRL-B: move to the previous character
-2=PREV_CHAR
-
-# CTRL-C: toggle overtype mode (frankly, I wasn't sure where to bind this)
-3=INSERT
-
-# CTRL-D: close out the input stream
-4=EXIT
-
-# CTRL-E: move the cursor to the end of the line
-5=MOVE_TO_END
-
-# CTRL-F: move to the next character
-6=NEXT_CHAR
-
-# CTRL-G: abort
-7=ABORT
-
-# CTRL-H: delete the previous character
-8=DELETE_PREV_CHAR
-
-# TAB, CTRL-I: signal that console completion should be attempted
-9=COMPLETE
-
-# CTRL-J, CTRL-M: newline
-10=NEWLINE
-
-# CTRL-K: erase the current line
-11=KILL_LINE
-
-# CTRL-L: clear screen
-12=CLEAR_SCREEN
-
-# ENTER: newline
-13=NEWLINE
-
-# CTRL-N: scroll to the next element in the history buffer
-14=NEXT_HISTORY
-
-# CTRL-O: move to the previous word
-15=PREV_WORD
-
-# CTRL-P: scroll to the previous element in the history buffer
-16=PREV_HISTORY
-
-# CTRL-R: search backwards in history
-18=SEARCH_PREV
-
-# CTRL-S: Move to the end of the history
-19=END_OF_HISTORY
-
-# CTRL-U: delete all the characters before the cursor position
-21=KILL_LINE_PREV
-
-# CTRL-V: paste the contents of the clipboard (useful for Windows terminal)
-22=PASTE
-
-# CTRL-W: delete the word directly before the cursor
-23=DELETE_PREV_WORD
-
-# CTRL-[: escape - clear the current line.
-27=CLEAR_LINE
-
-# DELETE, CTRL-?: delete the next character
-127=DELETE_NEXT_CHAR
diff --git a/src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java b/src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java
deleted file mode 100644
index c0c070bdfd..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/TerminalFactoryTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package scala.tools.jline;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-/**
- * Tests for the {@link TerminalFactory}.
- */
-public class TerminalFactoryTest
-{
- @Before
- public void setUp() throws Exception {
- TerminalFactory.reset();
- }
-
- @Test
- public void testConfigureNone() {
- TerminalFactory.configure(TerminalFactory.NONE);
- Terminal t = TerminalFactory.get();
- assertNotNull(t);
- assertEquals(UnsupportedTerminal.class.getName(), t.getClass().getName());
- }
-
- @Test
- public void testConfigureUnsupportedTerminal() {
- TerminalFactory.configure(UnsupportedTerminal.class.getName());
- Terminal t = TerminalFactory.get();
- assertNotNull(t);
- assertEquals(UnsupportedTerminal.class.getName(), t.getClass().getName());
- }
-} \ No newline at end of file
diff --git a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java b/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java
deleted file mode 100644
index 0e6cba15a0..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTest.java
+++ /dev/null
@@ -1,261 +0,0 @@
-package scala.tools.jline.console;
-
-import scala.tools.jline.TerminalFactory;
-import scala.tools.jline.WindowsTerminal;
-import scala.tools.jline.console.history.History;
-import scala.tools.jline.console.history.MemoryHistory;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.io.StringWriter;
-import java.io.Writer;
-
-import static scala.tools.jline.WindowsTerminal.WindowsKey.DELETE_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.END_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.ESCAPE_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.HOME_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.INSERT_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.LEFT_ARROW_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.NUMPAD_KEY_INDICATOR;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.PAGE_DOWN_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.PAGE_UP_KEY;
-import static scala.tools.jline.WindowsTerminal.WindowsKey.SPECIAL_KEY_INDICATOR;
-import static scala.tools.jline.console.Operation.DELETE_NEXT_CHAR;
-import static scala.tools.jline.console.Operation.DELETE_PREV_CHAR;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-/**
- * Tests for the {@link ConsoleReader}.
- */
-public class ConsoleReaderTest
-{
- @Before
- public void setUp() throws Exception {
- System.setProperty(WindowsTerminal.JLINE_WINDOWS_TERMINAL_DIRECT_CONSOLE, "false");
- }
-
- private void assertWindowsKeyBehavior(String expected, char[] input) throws Exception {
- StringBuilder buffer = new StringBuilder();
- buffer.append(input);
- ConsoleReader reader = createConsole(buffer.toString().getBytes());
- assertNotNull(reader);
- String line = reader.readLine();
- assertEquals(expected, line);
- }
-
- private ConsoleReader createConsole(byte[] bytes) throws Exception {
- InputStream in = new ByteArrayInputStream(bytes);
- Writer writer = new StringWriter();
- ConsoleReader reader = new ConsoleReader(in, writer);
- reader.setHistory(createSeededHistory());
- return reader;
- }
-
- private History createSeededHistory() {
- History history = new MemoryHistory();
- history.add("dir");
- history.add("cd c:\\");
- history.add("mkdir monkey");
- return history;
- }
-
- @Test
- public void testDeleteAndBackspaceKeymappings() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- ConsoleReader consoleReader = new ConsoleReader();
- assertNotNull(consoleReader);
- assertEquals(127, consoleReader.getKeyForAction(DELETE_NEXT_CHAR));
- assertEquals(8, consoleReader.getKeyForAction(DELETE_PREV_CHAR));
- }
-
- @Test
- public void testReadline() throws Exception {
- ConsoleReader consoleReader = createConsole("Sample String\r\n".getBytes());
- assertNotNull(consoleReader);
- String line = consoleReader.readLine();
- assertEquals("Sample String", line);
- }
-
- @Test
- public void testDeleteOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- 'S', 's',
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) LEFT_ARROW_KEY.code,
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) DELETE_KEY.code, '\r', 'n'
- };
- assertWindowsKeyBehavior("S", characters);
- }
-
- @Test
- public void testNumpadDeleteOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- 'S', 's',
- (char) NUMPAD_KEY_INDICATOR.code,
- (char) LEFT_ARROW_KEY.code,
- (char) NUMPAD_KEY_INDICATOR.code,
- (char) DELETE_KEY.code, '\r', 'n'
- };
- assertWindowsKeyBehavior("S", characters);
- }
-
- @Test
- public void testHomeKeyOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- 'S', 's',
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) HOME_KEY.code, 'x', '\r', '\n'
- };
- assertWindowsKeyBehavior("xSs", characters);
-
- }
-
- @Test
- public void testEndKeyOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- 'S', 's',
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) HOME_KEY.code, 'x',
- (char) SPECIAL_KEY_INDICATOR.code, (char) END_KEY.code,
- 'j', '\r', '\n'
- };
- assertWindowsKeyBehavior("xSsj", characters);
- }
-
- @Test
- public void testPageUpOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) PAGE_UP_KEY.code, '\r', '\n'
- };
- assertWindowsKeyBehavior("dir", characters);
- }
-
- @Test
- public void testPageDownOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) PAGE_DOWN_KEY.code, '\r', '\n'
- };
- assertWindowsKeyBehavior("mkdir monkey", characters);
- }
-
- @Test
- public void testEscapeOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- 's', 's', 's',
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) ESCAPE_KEY.code, '\r', '\n'
- };
- assertWindowsKeyBehavior("", characters);
- }
-
- @Test
- public void testInsertOnWindowsTerminal() throws Exception {
- // test only works on Windows
- if (!(TerminalFactory.get() instanceof WindowsTerminal)) {
- return;
- }
-
- char[] characters = new char[]{
- 'o', 'p', 's',
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) HOME_KEY.code,
- (char) SPECIAL_KEY_INDICATOR.code,
- (char) INSERT_KEY.code, 'o', 'o', 'p', 's', '\r', '\n'
- };
- assertWindowsKeyBehavior("oops", characters);
- }
-
- @Test
- public void testExpansion() throws Exception {
- ConsoleReader reader = new ConsoleReader();
- MemoryHistory history = new MemoryHistory();
- history.setMaxSize(3);
- history.add("foo");
- history.add("dir");
- history.add("cd c:\\");
- history.add("mkdir monkey");
- reader.setHistory(history);
-
- assertEquals("echo a!", reader.expandEvents("echo a!"));
- assertEquals("mkdir monkey ; echo a!", reader.expandEvents("!! ; echo a!"));
- assertEquals("echo ! a", reader.expandEvents("echo ! a"));
- assertEquals("echo !\ta", reader.expandEvents("echo !\ta"));
-
- assertEquals("mkdir barey", reader.expandEvents("^monk^bar^"));
- assertEquals("mkdir barey", reader.expandEvents("^monk^bar"));
- assertEquals("a^monk^bar", reader.expandEvents("a^monk^bar"));
-
- assertEquals("mkdir monkey", reader.expandEvents("!!"));
- assertEquals("echo echo a", reader.expandEvents("echo !#a"));
-
- assertEquals("mkdir monkey", reader.expandEvents("!mk"));
- try {
- reader.expandEvents("!mz");
- } catch (IllegalArgumentException e) {
- assertEquals("!mz: event not found", e.getMessage());
- }
-
- assertEquals("mkdir monkey", reader.expandEvents("!?mo"));
- assertEquals("mkdir monkey", reader.expandEvents("!?mo?"));
-
- assertEquals("mkdir monkey", reader.expandEvents("!-1"));
- assertEquals("cd c:\\", reader.expandEvents("!-2"));
- assertEquals("cd c:\\", reader.expandEvents("!2"));
- assertEquals("mkdir monkey", reader.expandEvents("!3"));
- try {
- reader.expandEvents("!20");
- } catch (IllegalArgumentException e) {
- assertEquals("!20: event not found", e.getMessage());
- }
- try {
- reader.expandEvents("!-20");
- } catch (IllegalArgumentException e) {
- assertEquals("!-20: event not found", e.getMessage());
- }
- }
-}
diff --git a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java b/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java
deleted file mode 100644
index c19099f0b2..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-package scala.tools.jline.console;
-
-import scala.tools.jline.UnixTerminal;
-import org.junit.Before;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.PrintWriter;
-
-import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_DOWN;
-import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_LEFT;
-import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_PREFIX;
-import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_RIGHT;
-import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_START;
-import static scala.tools.jline.UnixTerminal.UnixKey.ARROW_UP;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Provides support for console reader tests.
- */
-public abstract class ConsoleReaderTestSupport
-{
- protected ConsoleReader console;
-
- @Before
- public void setUp() throws Exception {
- console = new ConsoleReader(null, new PrintWriter(new OutputStreamWriter(new ByteArrayOutputStream())), new UnixTerminal());
- }
-
- protected void assertBuffer(final String expected, final Buffer buffer) throws IOException {
- assertBuffer(expected, buffer, true);
- }
-
- protected void assertBuffer(final String expected, final Buffer buffer, final boolean clear) throws IOException {
- // clear current buffer, if any
- if (clear) {
- console.finishBuffer();
- console.getHistory().clear();
- }
-
- console.setInput(new ByteArrayInputStream(buffer.getBytes()));
-
- // run it through the reader
- while (console.readLine((String) null) != null) {
- // ignore
- }
-
- assertEquals(expected, console.getCursorBuffer().toString());
- }
-
- private int getKeyForAction(final Operation key) {
- return getKeyForAction(key.code);
- }
-
- private int getKeyForAction(final short logicalAction) {
- int action = console.getKeyForAction(logicalAction);
-
- if (action == -1) {
- console.printBindings();
- fail("Keystroke for logical action " + logicalAction + " was not bound in the console");
- }
-
- return action;
- }
-
- protected class Buffer
- {
- private final ByteArrayOutputStream out = new ByteArrayOutputStream();
-
- public Buffer() {
- // nothing
- }
-
- public Buffer(final String str) {
- append(str);
- }
-
- public byte[] getBytes() {
- return out.toByteArray();
- }
-
- public Buffer op(final short operation) {
- return append(getKeyForAction(operation));
- }
-
- public Buffer op(final Operation op) {
- return op(op.code);
- }
-
- public Buffer ctrlA() {
- return append(getKeyForAction(Operation.MOVE_TO_BEG));
- }
-
- public Buffer ctrlU() {
- return append(getKeyForAction(Operation.KILL_LINE_PREV));
- }
-
- public Buffer tab() {
- return append(getKeyForAction(Operation.COMPLETE));
- }
-
- public Buffer back() {
- return append(getKeyForAction(Operation.DELETE_PREV_CHAR));
- }
-
- public Buffer left() {
- return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_LEFT.code);
- }
-
- public Buffer right() {
- return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_RIGHT.code);
- }
-
- public Buffer up() {
- return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_UP.code);
- }
-
- public Buffer down() {
- return append(ARROW_START.code).append(ARROW_PREFIX.code).append(ARROW_DOWN.code);
- }
-
- public Buffer append(final String str) {
- for (byte b : str.getBytes()) {
- append(b);
- }
- return this;
- }
-
- public Buffer append(final int i) {
- out.write((byte) i);
- return this;
- }
- }
-}
diff --git a/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java b/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java
deleted file mode 100644
index 6f5d46121e..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/EditLineTest.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-package scala.tools.jline.console;
-
-import org.junit.Test;
-
-import static scala.tools.jline.console.Operation.*;
-
-/**
- * Tests various features of editing lines.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
-public class EditLineTest
- extends ConsoleReaderTestSupport
-{
- @Test
- public void testDeletePreviousWord() throws Exception {
- Buffer b = new Buffer("This is a test");
-
- assertBuffer("This is a ", b = b.op(DELETE_PREV_WORD));
- assertBuffer("This is ", b = b.op(DELETE_PREV_WORD));
- assertBuffer("This ", b = b.op(DELETE_PREV_WORD));
- assertBuffer("", b = b.op(DELETE_PREV_WORD));
- assertBuffer("", b = b.op(DELETE_PREV_WORD));
- assertBuffer("", b.op(DELETE_PREV_WORD));
- }
-
- @Test
- public void testDeleteNextWord() throws Exception {
- Buffer b = new Buffer("This is a test ");
-
- assertBuffer(" is a test ", b = b.op(MOVE_TO_BEG).op(DELETE_NEXT_WORD));
- assertBuffer(" a test ", b = b.op(DELETE_NEXT_WORD));
- assertBuffer(" test ", b = b.op(DELETE_NEXT_WORD));
- assertBuffer(" ", b = b.op(DELETE_NEXT_WORD));
- assertBuffer("", b = b.op(DELETE_NEXT_WORD));
- assertBuffer("", b.op(DELETE_NEXT_WORD));
- }
-
- @Test
- public void testMoveToEnd() throws Exception {
- Buffer b = new Buffer("This is a test");
-
- assertBuffer("This is a XtestX",
- new Buffer("This is a test").op(PREV_WORD)
- .append('X')
- .op(MOVE_TO_END)
- .append('X'));
-
- assertBuffer("This is Xa testX",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .append('X')
- .op(MOVE_TO_END)
- .append('X'));
-
- assertBuffer("This Xis a testX",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .append('X')
- .op(MOVE_TO_END)
- .append('X'));
- }
-
- @Test
- public void testPreviousWord() throws Exception {
- assertBuffer("This is a Xtest",
- new Buffer("This is a test").op(PREV_WORD)
- .append('X'));
- assertBuffer("This is Xa test",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .append('X'));
- assertBuffer("This Xis a test",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .append('X'));
- assertBuffer("XThis is a test",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .append('X'));
- assertBuffer("XThis is a test",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .append('X'));
- assertBuffer("XThis is a test",
- new Buffer("This is a test").op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .op(PREV_WORD)
- .append('X'));
- }
-
- @Test
- public void testNextWord() throws Exception {
- assertBuffer("ThisX is a test",
- new Buffer("This is a test").op(MOVE_TO_BEG)
- .op(NEXT_WORD)
- .append('X'));
- assertBuffer("This isX a test",
- new Buffer("This is a test").op(MOVE_TO_BEG)
- .op(NEXT_WORD)
- .op(NEXT_WORD)
- .append('X'));
- assertBuffer("This is aX test",
- new Buffer("This is a test").op(MOVE_TO_BEG)
- .op(NEXT_WORD)
- .op(NEXT_WORD)
- .op(NEXT_WORD)
- .append('X'));
- assertBuffer("This is a testX ",
- new Buffer("This is a test ").op(MOVE_TO_BEG)
- .op(NEXT_WORD)
- .op(NEXT_WORD)
- .op(NEXT_WORD)
- .op(NEXT_WORD)
- .append('X'));
- }
-
- @Test
- public void testLineStart() throws Exception {
- assertBuffer("XThis is a test",
- new Buffer("This is a test").ctrlA().append('X'));
- assertBuffer("TXhis is a test",
- new Buffer("This is a test").ctrlA().right().append('X'));
- }
-
- @Test
- public void testClearLine() throws Exception {
- assertBuffer("", new Buffer("This is a test").ctrlU());
- assertBuffer("t", new Buffer("This is a test").left().ctrlU());
- assertBuffer("st", new Buffer("This is a test").left().left().ctrlU());
- }
-
- @Test
- public void testRight() throws Exception {
- Buffer b = new Buffer("This is a test");
- b = b.left().right().back();
- assertBuffer("This is a tes", b);
- b = b.left().left().left().right().left().back();
- assertBuffer("This is ates", b);
- b.append('X');
- assertBuffer("This is aXtes", b);
- }
-
- @Test
- public void testLeft() throws Exception {
- Buffer b = new Buffer("This is a test");
- b = b.left().left().left();
- assertBuffer("This is a est", b = b.back());
- assertBuffer("This is aest", b = b.back());
- assertBuffer("This is est", b = b.back());
- assertBuffer("This isest", b = b.back());
- assertBuffer("This iest", b = b.back());
- assertBuffer("This est", b = b.back());
- assertBuffer("Thisest", b = b.back());
- assertBuffer("Thiest", b = b.back());
- assertBuffer("Thest", b = b.back());
- assertBuffer("Test", b = b.back());
- assertBuffer("est", b = b.back());
- assertBuffer("est", b = b.back());
- assertBuffer("est", b = b.back());
- assertBuffer("est", b = b.back());
- assertBuffer("est", b.back());
- }
-
- @Test
- public void testBackspace() throws Exception {
- Buffer b = new Buffer("This is a test");
- assertBuffer("This is a tes", b = b.back());
- assertBuffer("This is a te", b = b.back());
- assertBuffer("This is a t", b = b.back());
- assertBuffer("This is a ", b = b.back());
- assertBuffer("This is a", b = b.back());
- assertBuffer("This is ", b = b.back());
- assertBuffer("This is", b = b.back());
- assertBuffer("This i", b = b.back());
- assertBuffer("This ", b = b.back());
- assertBuffer("This", b = b.back());
- assertBuffer("Thi", b = b.back());
- assertBuffer("Th", b = b.back());
- assertBuffer("T", b = b.back());
- assertBuffer("", b = b.back());
- assertBuffer("", b = b.back());
- assertBuffer("", b = b.back());
- assertBuffer("", b = b.back());
- assertBuffer("", b.back());
- }
-
- @Test
- public void testBuffer() throws Exception {
- assertBuffer("This is a test", new Buffer("This is a test"));
- }
-}
diff --git a/src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java b/src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java
deleted file mode 100644
index 9e2a2ab031..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/completer/ArgumentCompleterTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2010 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.console.ConsoleReaderTestSupport;
-import scala.tools.jline.console.completer.ArgumentCompleter;
-import scala.tools.jline.console.completer.StringsCompleter;
-import org.junit.Test;
-
-/**
- * Tests for {@link ArgumentCompleter}.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
-public class ArgumentCompleterTest
- extends ConsoleReaderTestSupport
-{
- @Test
- public void test1() throws Exception {
- console.addCompleter(new ArgumentCompleter(new StringsCompleter("foo", "bar", "baz")));
-
- assertBuffer("foo foo ", new Buffer("foo f").tab());
- assertBuffer("foo ba", new Buffer("foo b").tab());
- assertBuffer("foo ba", new Buffer("foo ba").tab());
- assertBuffer("foo baz ", new Buffer("foo baz").tab());
-
- // test completion in the mid range
- assertBuffer("foo baz", new Buffer("f baz").left().left().left().left().tab());
- assertBuffer("ba foo", new Buffer("b foo").left().left().left().left().tab());
- assertBuffer("foo ba baz", new Buffer("foo b baz").left().left().left().left().tab());
- assertBuffer("foo foo baz", new Buffer("foo f baz").left().left().left().left().tab());
- }
-} \ No newline at end of file
diff --git a/src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java b/src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java
deleted file mode 100644
index 70a4c3b554..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/completer/NullCompleterTest.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2010 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.console.ConsoleReaderTestSupport;
-import scala.tools.jline.console.completer.NullCompleter;
-import org.junit.Test;
-
-/**
- * Tests for {@link NullCompleter}.
- *
- * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
- */
-public class NullCompleterTest
- extends ConsoleReaderTestSupport
-{
- @Test
- public void test1() throws Exception {
- console.addCompleter(NullCompleter.INSTANCE);
-
- assertBuffer("f", new Buffer("f").tab());
- assertBuffer("ba", new Buffer("ba").tab());
- assertBuffer("baz", new Buffer("baz").tab());
- }
-} \ No newline at end of file
diff --git a/src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java b/src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java
deleted file mode 100644
index 518b88d031..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/completer/StringsCompleterTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2010 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package scala.tools.jline.console.completer;
-
-import scala.tools.jline.console.ConsoleReaderTestSupport;
-import scala.tools.jline.console.completer.StringsCompleter;
-import org.junit.Test;
-
-/**
- * Tests for {@link StringsCompleter}.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
-public class StringsCompleterTest
- extends ConsoleReaderTestSupport
-{
- @Test
- public void test1() throws Exception {
- console.addCompleter(new StringsCompleter("foo", "bar", "baz"));
-
- assertBuffer("foo ", new Buffer("f").tab());
- // single tab completes to unambiguous "ba"
- assertBuffer("ba", new Buffer("b").tab());
- assertBuffer("ba", new Buffer("ba").tab());
- assertBuffer("baz ", new Buffer("baz").tab());
- }
-} \ No newline at end of file
diff --git a/src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java b/src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java
deleted file mode 100644
index 0a987b2b26..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/history/HistoryTest.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2002-2007, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-package scala.tools.jline.console.history;
-
-import scala.tools.jline.console.ConsoleReaderTestSupport;
-import org.junit.Test;
-
-import static scala.tools.jline.console.Operation.MOVE_TO_BEG;
-import static scala.tools.jline.console.Operation.NEWLINE;
-import static scala.tools.jline.console.Operation.NEXT_HISTORY;
-import static scala.tools.jline.console.Operation.PREV_HISTORY;
-import static scala.tools.jline.console.Operation.PREV_CHAR;
-
-/**
- * Tests command history.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
-public class HistoryTest
- extends ConsoleReaderTestSupport
-{
- @Test
- public void testSingleHistory() throws Exception {
- Buffer b = new Buffer().
- append("test line 1").op(NEWLINE).
- append("test line 2").op(NEWLINE).
- append("test line 3").op(NEWLINE).
- append("test line 4").op(NEWLINE).
- append("test line 5").op(NEWLINE).
- append("");
-
- assertBuffer("", b);
-
- assertBuffer("test line 5", b = b.op(PREV_HISTORY));
- assertBuffer("test line 5", b = b.op(PREV_CHAR));
- assertBuffer("test line 4", b = b.op(PREV_HISTORY));
- assertBuffer("test line 5", b = b.op(NEXT_HISTORY));
- assertBuffer("test line 4", b = b.op(PREV_HISTORY));
- assertBuffer("test line 3", b = b.op(PREV_HISTORY));
- assertBuffer("test line 2", b = b.op(PREV_HISTORY));
- assertBuffer("test line 1", b = b.op(PREV_HISTORY));
-
- // beginning of history
- assertBuffer("test line 1", b = b.op(PREV_HISTORY));
- assertBuffer("test line 1", b = b.op(PREV_HISTORY));
- assertBuffer("test line 1", b = b.op(PREV_HISTORY));
- assertBuffer("test line 1", b = b.op(PREV_HISTORY));
-
- assertBuffer("test line 2", b = b.op(NEXT_HISTORY));
- assertBuffer("test line 3", b = b.op(NEXT_HISTORY));
- assertBuffer("test line 4", b = b.op(NEXT_HISTORY));
- assertBuffer("test line 5", b = b.op(NEXT_HISTORY));
-
- // end of history
- assertBuffer("", b = b.op(NEXT_HISTORY));
- assertBuffer("", b = b.op(NEXT_HISTORY));
- assertBuffer("", b = b.op(NEXT_HISTORY));
-
- assertBuffer("test line 5", b = b.op(PREV_HISTORY));
- assertBuffer("test line 4", b = b.op(PREV_HISTORY));
- b = b.op(MOVE_TO_BEG).append("XXX").op(NEWLINE);
- assertBuffer("XXXtest line 4", b = b.op(PREV_HISTORY));
- assertBuffer("test line 5", b = b.op(PREV_HISTORY));
- assertBuffer("test line 4", b = b.op(PREV_HISTORY));
- assertBuffer("test line 5", b = b.op(NEXT_HISTORY));
- assertBuffer("XXXtest line 4", b = b.op(NEXT_HISTORY));
- assertBuffer("", b = b.op(NEXT_HISTORY));
-
- assertBuffer("XXXtest line 4", b = b.op(PREV_HISTORY));
- assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY));
- assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY));
- assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY));
- assertBuffer("XXXtest line 4", b = b.op(NEWLINE).op(PREV_HISTORY));
- }
-}
diff --git a/src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java b/src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java
deleted file mode 100644
index 91b81548c8..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/console/history/MemoryHistoryTest.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (C) 2010 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package scala.tools.jline.console.history;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static junit.framework.Assert.*;
-
-/**
- * Tests for {@link MemoryHistory}.
- *
- * @author <a href="mailto:mwp1@cornell.edu">Marc Prud'hommeaux</a>
- */
-public class MemoryHistoryTest
-{
- private MemoryHistory history;
-
- @Before
- public void setUp() {
- history = new MemoryHistory();
- }
-
- @After
- public void tearDown() {
- history = null;
- }
-
- @Test
- public void testAdd() {
- assertEquals(0, history.size());
-
- history.add("test");
-
- assertEquals(1, history.size());
- assertEquals("test", history.get(0));
- assertEquals(1, history.index());
- }
-
- private void assertHistoryContains(final int offset, final String... items) {
- assertEquals(items.length, history.size());
- int i=0;
- for (History.Entry entry : history) {
- assertEquals(offset + i, entry.index());
- assertEquals(items[i++], entry.value());
- }
- }
-
- @Test
- public void testOffset() {
- history.setMaxSize(5);
-
- assertEquals(0, history.size());
- assertEquals(0, history.index());
-
- history.add("a");
- history.add("b");
- history.add("c");
- history.add("d");
- history.add("e");
-
- assertEquals(5, history.size());
- assertEquals(5, history.index());
- assertHistoryContains(0, "a", "b", "c", "d", "e");
-
- history.add("f");
-
- assertEquals(5, history.size());
- assertEquals(6, history.index());
-
- assertHistoryContains(1, "b", "c", "d", "e", "f");
- assertEquals("f", history.get(5));
- }
-
- @Test
- public void testReplace() {
- assertEquals(0, history.size());
-
- history.add("a");
- history.add("b");
- history.replace("c");
-
- assertHistoryContains(0, "a", "c");
- }
-}
\ No newline at end of file
diff --git a/src/jline/src/test/java/scala/tools/jline/example/Example.java b/src/jline/src/test/java/scala/tools/jline/example/Example.java
deleted file mode 100644
index a89a09c5c9..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/example/Example.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright (c) 2002-2006, Marc Prud'hommeaux. All rights reserved.
- *
- * This software is distributable under the BSD license. See the terms of the
- * BSD license in the documentation provided with this software.
- */
-package scala.tools.jline.example;
-
-import scala.tools.jline.console.completer.*;
-import scala.tools.jline.console.ConsoleReader;
-
-import java.io.*;
-import java.util.*;
-
-public class Example
-{
- public static void usage() {
- System.out.println("Usage: java " + Example.class.getName()
- + " [none/simple/files/dictionary [trigger mask]]");
- System.out.println(" none - no completors");
- System.out.println(" simple - a simple completor that comples "
- + "\"foo\", \"bar\", and \"baz\"");
- System.out
- .println(" files - a completor that comples " + "file names");
- System.out.println(" classes - a completor that comples "
- + "java class names");
- System.out
- .println(" trigger - a special word which causes it to assume "
- + "the next line is a password");
- System.out.println(" mask - is the character to print in place of "
- + "the actual password character");
- System.out.println(" color - colored prompt and feedback");
- System.out.println("\n E.g - java Example simple su '*'\n"
- + "will use the simple compleator with 'su' triggering\n"
- + "the use of '*' as a password mask.");
- }
-
- public static void main(String[] args) throws IOException {
- Character mask = null;
- String trigger = null;
- boolean color = false;
-
- ConsoleReader reader = new ConsoleReader();
-
- reader.setBellEnabled(false);
- reader.setPrompt("prompt> ");
-
- if ((args == null) || (args.length == 0)) {
- usage();
-
- return;
- }
-
- List<Completer> completors = new LinkedList<Completer>();
-
- if (args.length > 0) {
- if (args[0].equals("none")) {
- }
- else if (args[0].equals("files")) {
- completors.add(new FileNameCompleter());
- }
- else if (args[0].equals("simple")) {
- completors.add(new StringsCompleter("foo", "bar", "baz"));
- }
- else if (args[0].equals("color")) {
- color = true;
- reader.setPrompt("\u001B[1mfoo\u001B[0m@bar\u001B[32m@baz\u001B[0m> ");
- }
- else {
- usage();
-
- return;
- }
- }
-
- if (args.length == 3) {
- mask = args[2].charAt(0);
- trigger = args[1];
- }
-
- for (Completer c : completors) {
- reader.addCompleter(c);
- }
-
- String line;
- PrintWriter out = new PrintWriter(
- reader.getTerminal().wrapOutIfNeeded(System.out));
-
- while ((line = reader.readLine()) != null) {
- if (color){
- out.println("\u001B[33m======>\u001B[0m\"" + line + "\"");
- } else {
- out.println("======>\"" + line + "\"");
- }
- out.flush();
-
- // If we input the special word then we will mask
- // the next line.
- if ((trigger != null) && (line.compareTo(trigger) == 0)) {
- line = reader.readLine("password> ", mask);
- }
- if (line.equalsIgnoreCase("quit") || line.equalsIgnoreCase("exit")) {
- break;
- }
- }
- }
-}
diff --git a/src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java b/src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java
deleted file mode 100644
index 3af10887f1..0000000000
--- a/src/jline/src/test/java/scala/tools/jline/internal/TerminalLineSettingsTest.java
+++ /dev/null
@@ -1,146 +0,0 @@
-package scala.tools.jline.internal;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-/**
- * Tests for the {@link TerminalLineSettings}.
- *
- * @author <a href="mailto:jbonofre@apache.org">Jean-Baptiste Onofré</a>
- */
-public class TerminalLineSettingsTest
-{
- private TerminalLineSettings settings;
-
- private final String linuxSttySample = "speed 38400 baud; rows 85; columns 244; line = 0;\n" +
- "intr = ^C; quit = ^\\; erase = ^?; kill = ^U; eof = ^D; eol = M-^?; eol2 = M-^?; swtch = M-^?; start = ^Q; stop = ^S; susp = ^Z; rprnt = ^R; werase = ^W; lnext = ^V; flush = ^O; min = 1; time = 0;\n" +
- "-parenb -parodd cs8 hupcl -cstopb cread -clocal -crtscts\n" +
- "-ignbrk brkint -ignpar -parmrk -inpck -istrip -inlcr -igncr icrnl ixon -ixoff -iuclc ixany imaxbel iutf8\n" +
- "opost -olcuc -ocrnl onlcr -onocr -onlret -ofill -ofdel nl0 cr0 tab0 bs0 vt0 ff0\n" +
- "isig icanon iexten echo echoe echok -echonl -noflsh -xcase -tostop -echoprt echoctl echoke";
-
- private final String solarisSttySample = "speed 38400 baud; \n" +
- "rows = 85; columns = 244; ypixels = 0; xpixels = 0;\n" +
- "csdata ?\n" +
- "eucw 1:0:0:0, scrw 1:0:0:0\n" +
- "intr = ^c; quit = ^\\; erase = ^?; kill = ^u;\n" +
- "eof = ^d; eol = -^?; eol2 = -^?; swtch = <undef>;\n" +
- "start = ^q; stop = ^s; susp = ^z; dsusp = ^y;\n" +
- "rprnt = ^r; flush = ^o; werase = ^w; lnext = ^v;\n" +
- "-parenb -parodd cs8 -cstopb -hupcl cread -clocal -loblk -crtscts -crtsxoff -parext \n" +
- "-ignbrk brkint -ignpar -parmrk -inpck -istrip -inlcr -igncr icrnl -iuclc \n" +
- "ixon ixany -ixoff imaxbel \n" +
- "isig icanon -xcase echo echoe echok -echonl -noflsh \n" +
- "-tostop echoctl -echoprt echoke -defecho -flusho -pendin iexten \n" +
- "opost -olcuc onlcr -ocrnl -onocr -onlret -ofill -ofdel tab3";
-
- private final String aixSttySample = "speed 38400 baud; 85 rows; 244 columns;\n" +
- "eucw 1:1:0:0, scrw 1:1:0:0:\n" +
- "intr = ^C; quit = ^\\; erase = ^?; kill = ^U; eof = ^D; eol = <undef>\n" +
- "eol2 = <undef>; start = ^Q; stop = ^S; susp = ^Z; dsusp = ^Y; reprint = ^R\n" +
- "discard = ^O; werase = ^W; lnext = ^V\n" +
- "-parenb -parodd cs8 -cstopb -hupcl cread -clocal -parext \n" +
- "-ignbrk brkint -ignpar -parmrk -inpck -istrip -inlcr -igncr icrnl -iuclc \n" +
- "ixon ixany -ixoff imaxbel \n" +
- "isig icanon -xcase echo echoe echok -echonl -noflsh \n" +
- "-tostop echoctl -echoprt echoke -flusho -pending iexten \n" +
- "opost -olcuc onlcr -ocrnl -onocr -onlret -ofill -ofdel tab3";
-
- private final String macOsSttySample = "speed 9600 baud; 47 rows; 155 columns;\n" +
- "lflags: icanon isig iexten echo echoe -echok echoke -echonl echoctl\n" +
- "-echoprt -altwerase -noflsh -tostop -flusho pendin -nokerninfo\n" +
- "-extproc\n" +
- "iflags: -istrip icrnl -inlcr -igncr ixon -ixoff ixany imaxbel iutf8\n" +
- "-ignbrk brkint -inpck -ignpar -parmrk\n" +
- "oflags: opost onlcr -oxtabs -onocr -onlret\n" +
- "cflags: cread cs8 -parenb -parodd hupcl -clocal -cstopb -crtscts -dsrflow\n" +
- "-dtrflow -mdmbuf\n" +
- "cchars: discard = ^O; dsusp = ^Y; eof = ^D; eol = <undef>;\n" +
- "eol2 = <undef>; erase = ^?; intr = ^C; kill = ^U; lnext = ^V;\n" +
- "min = 1; quit = ^\\; reprint = ^R; start = ^Q; status = ^T;\n" +
- "stop = ^S; susp = ^Z; time = 0; werase = ^W;";
-
- private final String netBsdSttySample = "speed 38400 baud; 85 rows; 244 columns;\n" +
- "lflags: icanon isig iexten echo echoe echok echoke -echonl echoctl\n" +
- " -echoprt -altwerase -noflsh -tostop -flusho pendin -nokerninfo\n" +
- " -extproc\n" +
- "iflags: -istrip icrnl -inlcr -igncr ixon -ixoff ixany imaxbel -ignbrk\n" +
- " brkint -inpck -ignpar -parmrk\n" +
- "oflags: opost onlcr -ocrnl oxtabs onocr onlret\n" +
- "cflags: cread cs8 -parenb -parodd hupcl -clocal -cstopb -crtscts -mdmbuf\n" +
- " -cdtrcts\n" +
- "cchars: discard = ^O; dsusp = ^Y; eof = ^D; eol = <undef>;\n" +
- " eol2 = <undef>; erase = ^?; intr = ^C; kill = ^U; lnext = ^V;\n" +
- " min = 1; quit = ^\\; reprint = ^R; start = ^Q; status = ^T;\n" +
- " stop = ^S; susp = ^Z; time = 0; werase = ^W;";
-
- private final String freeBsdSttySample = "speed 9600 baud; 32 rows; 199 columns;\n" +
- "lflags: icanon isig iexten echo echoe echok echoke -echonl echoctl\n" +
- " -echoprt -altwerase -noflsh -tostop -flusho -pendin -nokerninfo\n" +
- " -extproc\n" +
- "iflags: -istrip icrnl -inlcr -igncr ixon -ixoff ixany imaxbel -ignbrk\n" +
- " brkint -inpck -ignpar -parmrk\n" +
- "oflags: opost onlcr -ocrnl tab0 -onocr -onlret\n" +
- "cflags: cread cs8 -parenb -parodd hupcl -clocal -cstopb -crtscts -dsrflow\n" +
- " -dtrflow -mdmbuf\n" +
- "cchars: discard = ^O; dsusp = ^Y; eof = ^D; eol = <undef>;\n" +
- " eol2 = <undef>; erase = ^?; erase2 = ^H; intr = ^C; kill = ^U;\n" +
- " lnext = ^V; min = 1; quit = ^\\; reprint = ^R; start = ^Q;\n" +
- " status = ^T; stop = ^S; susp = ^Z; time = 0; werase = ^W;";
-
- @Before
- public void setUp() throws Exception {
- settings = new TerminalLineSettings();
- }
-
- @Test
- public void testGetConfig() {
- String config = settings.getConfig();
- System.out.println(config);
- }
-
- @Test
- public void testLinuxSttyParsing() {
- assertEquals(0x7f, settings.getProperty("erase", linuxSttySample));
- assertEquals(244, settings.getProperty("columns", linuxSttySample));
- assertEquals(85, settings.getProperty("rows", linuxSttySample));
- }
-
- @Test
- public void testSolarisSttyParsing() {
- assertEquals(0x7f, settings.getProperty("erase", solarisSttySample));
- assertEquals(244, settings.getProperty("columns", solarisSttySample));
- assertEquals(85, settings.getProperty("rows", solarisSttySample));
- }
-
- @Test
- public void testAixSttyParsing() {
- assertEquals(0x7f, settings.getProperty("erase", aixSttySample));
- assertEquals(244, settings.getProperty("columns", aixSttySample));
- assertEquals(85, settings.getProperty("rows", aixSttySample));
- }
-
- @Test
- public void testMacOsSttyParsing() {
- assertEquals(0x7f, settings.getProperty("erase", macOsSttySample));
- assertEquals(155, settings.getProperty("columns", macOsSttySample));
- assertEquals(47, settings.getProperty("rows", macOsSttySample));
- }
-
- @Test
- public void testNetBsdSttyParsing() {
- assertEquals(0x7f, settings.getProperty("erase", netBsdSttySample));
- assertEquals(244, settings.getProperty("columns", netBsdSttySample));
- assertEquals(85, settings.getProperty("rows", netBsdSttySample));
- }
-
- @Test
- public void testFreeBsdSttyParsing() {
- assertEquals(0x7f, settings.getProperty("erase", freeBsdSttySample));
- assertEquals(199, settings.getProperty("columns", freeBsdSttySample));
- assertEquals(32, settings.getProperty("rows", freeBsdSttySample));
- }
-
-}
\ No newline at end of file
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
index 4795a47efe..e84942b8c4 100644
--- a/src/library/rootdoc.txt
+++ b/src/library/rootdoc.txt
@@ -12,7 +12,7 @@ Notable packages include:
- [[scala.collection.immutable `scala.collection.immutable`]] - Immutable, sequential data-structures such as
[[scala.collection.immutable.Vector `Vector`]], [[scala.collection.immutable.List `List`]],
[[scala.collection.immutable.Range `Range`]], [[scala.collection.immutable.HashMap `HashMap`]] or
- [[scala.collection.immutable.HashSet `HasSet`]]
+ [[scala.collection.immutable.HashSet `HashSet`]]
- [[scala.collection.mutable `scala.collection.mutable`]] - Mutable, sequential data-structures such as
[[scala.collection.mutable.ArrayBuffer `ArrayBuffer`]],
[[scala.collection.mutable.StringBuilder `StringBuilder`]],
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index d4b9c17eab..c4aa511cd7 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -121,7 +121,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
* @throws NoSuchElementException if no `Value` with a matching
* name is in this `Enumeration`
*/
- final def withName(s: String): Value = values.find(_.toString == s).get
+ final def withName(s: String): Value = values.find(_.toString == s).getOrElse(
+ throw new NoSuchElementException(s"No value found for '$s'"))
/** Creates a fresh value, part of this enumeration. */
protected final def Value: Value = Value(nextId)
@@ -239,6 +240,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
*
* @param nnIds The set of ids of values (adjusted so that the lowest value does
* not fall below zero), organized as a `BitSet`.
+ * @define Coll `collection.immutable.SortedSet`
*/
class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
extends AbstractSet[Value]
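
The `withName` change above only swaps `.get` for `getOrElse` with an explicit throw, so lookups still fail with `NoSuchElementException`, but the message now names the missing constant. A minimal sketch of the observable difference, using a made-up `Weekday` enumeration that is not part of the patch:

{{{
object Weekday extends Enumeration {
  val Mon, Tue, Wed = Value
}

object WithNameDemo extends App {
  println(Weekday.withName("Tue"))   // prints: Tue
  try Weekday.withName("Sun")
  catch {
    // Previously this surfaced as the generic failure from Option.get;
    // after the change the message reads: No value found for 'Sun'
    case e: NoSuchElementException => println(e.getMessage)
  }
}
}}}
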
diff --git a/src/library/scala/Mutable.scala b/src/library/scala/Mutable.scala
index 8ef0424db6..43f98ee4df 100644
--- a/src/library/scala/Mutable.scala
+++ b/src/library/scala/Mutable.scala
@@ -11,7 +11,7 @@
package scala
/**
- * A marker trait for mutable datatructures such as mutable collections
+ * A marker trait for mutable data structures such as mutable collections
*
* @since 2.8
*/
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 905e925f57..f134f5ce3d 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -94,6 +94,7 @@ object Option {
* @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
* representation type `Repr` and the new element type `B`.
*/
+@SerialVersionUID(-114498752079829388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
sealed abstract class Option[+A] extends Product with Serializable {
self =>
@@ -107,7 +108,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
/** Returns the option's value.
* @note The option must be nonEmpty.
- * @throws Predef.NoSuchElementException if the option is empty.
+ * @throws java.util.NoSuchElementException if the option is empty.
*/
def get: A
@@ -124,8 +125,8 @@ sealed abstract class Option[+A] extends Product with Serializable {
* Although the use of null is discouraged, code written to use
* $option must often interface with code that expects and returns nulls.
* @example {{{
- * val initalText: Option[String] = getInitialText
- * val textField = new JComponent(initalText.orNull,20)
+ * val initialText: Option[String] = getInitialText
+ * val textField = new JComponent(initialText.orNull,20)
* }}}
*/
@inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null)
@@ -211,6 +212,17 @@ sealed abstract class Option[+A] extends Product with Serializable {
/** Tests whether the option contains a given value as an element.
*
+ * @example {{{
+ * // Returns true because Some instance contains string "something" which equals "something".
+ * Some("something") contains "something"
+ *
+ * // Returns false because "something" != "anything".
+ * Some("something") contains "anything"
+ *
+ * // Returns false when method called on None.
+ * None contains "anything"
+ * }}}
+ *
* @param elem the element to test.
* @return `true` if the option has an element that is equal (as
* determined by `==`) to `elem`, `false` otherwise.
@@ -251,6 +263,17 @@ sealed abstract class Option[+A] extends Product with Serializable {
* nonempty '''and''' `pf` is defined for that value.
* Returns $none otherwise.
*
+ * @example {{{
+ * // Returns Some(HTTP) because the partial function covers the case.
+ * Some("http") collect {case "http" => "HTTP"}
+ *
+ * // Returns None because the partial function doesn't cover the case.
+ * Some("ftp") collect {case "http" => "HTTP"}
+ *
+ * // Returns None because None is passed to the collect method.
+ * None collect {case value => value}
+ * }}}
+ *
* @param pf the partial function.
* @return the result of applying `pf` to this $option's
* value (if possible), or $none.
@@ -306,6 +329,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @author Martin Odersky
* @version 1.0, 16/07/2003
*/
+@SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
final case class Some[+A](x: A) extends Option[A] {
def isEmpty = false
def get = x
@@ -317,6 +341,7 @@ final case class Some[+A](x: A) extends Option[A] {
* @author Martin Odersky
* @version 1.0, 16/07/2003
*/
+@SerialVersionUID(5066590221178148012L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
case object None extends Option[Nothing] {
def isEmpty = true
def get = throw new NoSuchElementException("None.get")
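
The three `@SerialVersionUID` annotations pin the serial versions of `Option`, `Some`, and `None` to the values `serialver` reported for 2.11.2, so instances written before the annotations were added stay readable. A small round-trip sketch; the demo object is illustrative, not from the patch:

{{{
import java.io._

object OptionSerializationDemo extends App {
  val bytes = new ByteArrayOutputStream()
  val out   = new ObjectOutputStream(bytes)
  out.writeObject(Some("payload"))
  out.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
  println(in.readObject())   // prints: Some(payload)
}
}}}
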
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index 7f4a9dc45d..fba759eb32 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -20,6 +20,11 @@ package scala
* {{{
* val f: PartialFunction[Int, Any] = { case _ => 1/0 }
* }}}
+ *
+ * It is the responsibility of the caller to call `isDefinedAt` before
+ * calling `apply`, because if `isDefinedAt` is false, it is not guaranteed
+ * `apply` will throw an exception to indicate an error condition. If an
+ * exception is not thrown, evaluation may result in an arbitrary value.
*
* The main distinction between `PartialFunction` and [[scala.Function1]] is
* that the user of a `PartialFunction` may choose to do something different
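
The added paragraph warns that calling `apply` outside the defined domain is not guaranteed to throw. A sketch of the calling patterns that avoid relying on that, using an illustrative `reciprocal` function that is not from the patch:

{{{
object PartialFunctionDemo extends App {
  val reciprocal: PartialFunction[Int, Double] = { case n if n != 0 => 1.0 / n }

  // Check the domain first, or route through lift / applyOrElse instead of a bare apply.
  println(reciprocal.isDefinedAt(0))                          // false
  println(reciprocal.lift(0))                                 // None
  println(reciprocal.applyOrElse(0, (_: Int) => Double.NaN))  // NaN
}
}}}
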
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index faeb1dcbe2..4eed672794 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -36,8 +36,8 @@ import scala.io.StdIn
*
* A set of `assert` functions are provided for use as a way to document
* and dynamically check invariants in code. `assert` statements can be elided
- * at runtime by providing the command line argument `-Xdisable-assertions` to
- * the `scala` command.
+ * at compile time by providing the command line argument `-Xdisable-assertions` to
+ * the `scalac` command.
*
* Variants of `assert` intended for use with static analysis tools are also
* provided: `assume`, `require` and `ensuring`. `require` and `ensuring` are
@@ -58,7 +58,7 @@ import scala.io.StdIn
* condition fails, then the caller of the function is to blame rather than a
* logical error having been made within `addNaturals` itself. `ensures` is a
* form of `assert` that declares the guarantee the function is providing with
- * regards to it's return value.
+ * regards to its return value.
*
* === Implicit Conversions ===
* A number of commonly applied implicit conversions are also defined here, and
@@ -85,7 +85,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
type String = java.lang.String
type Class[T] = java.lang.Class[T]
- // miscelleaneous -----------------------------------------------------
+ // miscellaneous -----------------------------------------------------
scala.`package` // to force scala package object to be seen.
scala.collection.immutable.List // to force Nil, :: to be seen.
@@ -220,7 +220,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
}
/** `???` can be used for marking methods that remain to be implemented.
- * @throws A `NotImplementedError`
+ * @throws NotImplementedError
*/
def ??? : Nothing = throw new NotImplementedError
@@ -303,7 +303,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
@inline implicit def augmentString(x: String): StringOps = new StringOps(x)
@inline implicit def unaugmentString(x: StringOps): String = x.repr
- // printing and reading -----------------------------------------------
+ // printing -----------------------------------------------------------
def print(x: Any) = Console.print(x)
def println() = Console.println()
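
The corrected wording says assertions are elided when the calling code is compiled with `scalac -Xdisable-assertions`, not stripped at runtime by the `scala` launcher. A small sketch of what that flag affects; the object name is illustrative, and only `assert`/`assume` calls are elidable this way:

{{{
object AssertDemo extends App {
  def mean(xs: List[Double]): Double = {
    assert(xs.nonEmpty, "mean of empty list")   // removed when compiled with -Xdisable-assertions
    xs.sum / xs.length
  }
  println(mean(List(1.0, 2.0, 3.0)))   // 2.0
}
}}}
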
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 0798587772..9cd38ed148 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -22,7 +22,7 @@ trait Product extends Any with Equals {
* product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 < n < k`.
*
* @param n the index of the element to return
- * @throws `IndexOutOfBoundsException`
+ * @throws IndexOutOfBoundsException
* @return the element `n` elements after the first element
*/
def productElement(n: Int): Any
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index cd928a2b61..e60fa2f290 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -8,6 +8,9 @@
package scala
+import java.lang.{ StringBuilder => JLSBuilder }
+import scala.annotation.tailrec
+
/** This class provides the basic mechanism to do String Interpolation.
* String Interpolation allows users
* to embed variable references directly in *processed* string literals.
@@ -35,13 +38,13 @@ package scala
* To provide your own string interpolator, create an implicit class
* which adds a method to `StringContext`. Here's an example:
* {{{
- * implicit class JsonHelper(val sc: StringContext) extends AnyVal {
+ * implicit class JsonHelper(private val sc: StringContext) extends AnyVal {
* def json(args: Any*): JSONObject = ...
* }
* val x: JSONObject = json"{ a: $a }"
* }}}
*
- * Here the `JsonHelper` extenion class implicitly adds the `json` method to
+ * Here the `JsonHelper` extension class implicitly adds the `json` method to
* `StringContext` which can be used for `json` string literals.
*
* @since 2.10.0
@@ -55,7 +58,7 @@ case class StringContext(parts: String*) {
/** Checks that the length of the given argument `args` is one less than the number
* of `parts` supplied to the enclosing `StringContext`.
* @param `args` The arguments to be checked.
- * @throws An `IllegalArgumentException` if this is not the case.
+ * @throws IllegalArgumentException if this is not the case.
*/
def checkLengths(args: Seq[Any]): Unit =
if (parts.length != args.length + 1)
@@ -82,10 +85,11 @@ case class StringContext(parts: String*) {
* will print the string `1 + 1 = 2`.
*
* @param `args` The arguments to be inserted into the resulting string.
- * @throws An `IllegalArgumentException`
+ * @throws IllegalArgumentException
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * @throws StringContext.InvalidEscapeException
+ * if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
def s(args: Any*): String = standardInterpolator(treatEscapes, args)
@@ -101,16 +105,14 @@ case class StringContext(parts: String*) {
* ''Note:'' Even when using the raw interpolator, Scala will preprocess unicode escapes.
* For example:
* {{{
- * scala> raw"\u005cu0025"
+ * scala> raw"\u005cu0023"
* res0: String = #
* }}}
*
* @param `args` The arguments to be inserted into the resulting string.
- * @throws An `IllegalArgumentException`
+ * @throws IllegalArgumentException
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
- * that does not start a valid escape sequence.
*/
def raw(args: Any*): String = standardInterpolator(identity, args)
@@ -118,7 +120,7 @@ case class StringContext(parts: String*) {
checkLengths(args)
val pi = parts.iterator
val ai = args.iterator
- val bldr = new java.lang.StringBuilder(process(pi.next()))
+ val bldr = new JLSBuilder(process(pi.next()))
while (ai.hasNext) {
bldr append ai.next
bldr append process(pi.next())
@@ -143,10 +145,11 @@ case class StringContext(parts: String*) {
* }}}
*
* @param `args` The arguments to be inserted into the resulting string.
- * @throws An `IllegalArgumentException`
+ * @throws IllegalArgumentException
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * @throws StringContext.InvalidEscapeException
+ * if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*
* Note: The `f` method works by assembling a format string from all the `parts` strings and using
@@ -162,7 +165,7 @@ case class StringContext(parts: String*) {
*/
// The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
// Using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def f(args: Any*): String = macro ???
+ def f[A >: Any](args: A*): String = macro ???
}
object StringContext {
@@ -172,8 +175,13 @@ object StringContext {
* @param str The offending string
* @param idx The index of the offending backslash character in `str`.
*/
- class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int)
- extends IllegalArgumentException("invalid escape character at index "+index+" in \""+str+"\"")
+ class InvalidEscapeException(str: String, @deprecatedName('idx) val index: Int) extends IllegalArgumentException(
+ s"""invalid escape ${
+ require(index >= 0 && index < str.length)
+ val ok = """[\b, \t, \n, \f, \r, \\, \", \']"""
+ if (index == str.length - 1) "at terminal" else s"'\\${str(index + 1)}' not one of $ok at"
+ } index $index in "$str". Use \\\\ for literal \\."""
+ )
/** Expands standard Scala escape sequences in a string.
* Escape sequences are:
@@ -186,60 +194,60 @@ object StringContext {
*/
def treatEscapes(str: String): String = treatEscapes0(str, strict = false)
+ /** Treats escapes, but disallows octal escape sequences. */
def processEscapes(str: String): String = treatEscapes0(str, strict = true)
private def treatEscapes0(str: String, strict: Boolean): String = {
- lazy val bldr = new java.lang.StringBuilder
val len = str.length
- var start = 0
- var cur = 0
- var idx = 0
- def output(ch: Char) = {
- bldr.append(str, start, cur)
- bldr append ch
- start = idx
- }
- while (idx < len) {
- cur = idx
- if (str(idx) == '\\') {
- idx += 1
- if (idx >= len) throw new InvalidEscapeException(str, cur)
- if ('0' <= str(idx) && str(idx) <= '7') {
- if (strict) throw new InvalidEscapeException(str, cur)
- val leadch = str(idx)
- var oct = leadch - '0'
- idx += 1
- if (idx < len && '0' <= str(idx) && str(idx) <= '7') {
- oct = oct * 8 + str(idx) - '0'
- idx += 1
- if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') {
- oct = oct * 8 + str(idx) - '0'
+ // replace escapes with given first escape
+ def replace(first: Int): String = {
+ val b = new JLSBuilder
+ // append replacement starting at index `i`, with `next` backslash
+ @tailrec def loop(i: Int, next: Int): String = {
+ if (next >= 0) {
+ //require(str(next) == '\\')
+ if (next > i) b.append(str, i, next)
+ var idx = next + 1
+ if (idx >= len) throw new InvalidEscapeException(str, next)
+ val c = str(idx) match {
+ case 'b' => '\b'
+ case 't' => '\t'
+ case 'n' => '\n'
+ case 'f' => '\f'
+ case 'r' => '\r'
+ case '"' => '"'
+ case '\'' => '\''
+ case '\\' => '\\'
+ case o if '0' <= o && o <= '7' =>
+ if (strict) throw new InvalidEscapeException(str, next)
+ val leadch = str(idx)
+ var oct = leadch - '0'
idx += 1
- }
+ if (idx < len && '0' <= str(idx) && str(idx) <= '7') {
+ oct = oct * 8 + str(idx) - '0'
+ idx += 1
+ if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') {
+ oct = oct * 8 + str(idx) - '0'
+ idx += 1
+ }
+ }
+ idx -= 1 // retreat
+ oct.toChar
+ case _ => throw new InvalidEscapeException(str, next)
}
- output(oct.toChar)
+ idx += 1 // advance
+ b append c
+ loop(idx, str.indexOf('\\', idx))
} else {
- val ch = str(idx)
- idx += 1
- output {
- ch match {
- case 'b' => '\b'
- case 't' => '\t'
- case 'n' => '\n'
- case 'f' => '\f'
- case 'r' => '\r'
- case '\"' => '\"'
- case '\'' => '\''
- case '\\' => '\\'
- case _ => throw new InvalidEscapeException(str, cur)
- }
- }
+ if (i < len) b.append(str, i, len)
+ b.toString
}
- } else {
- idx += 1
}
+ loop(0, first)
+ }
+ str indexOf '\\' match {
+ case -1 => str
+ case i => replace(i)
}
- if (start == 0) str
- else bldr.append(str, start, idx).toString
}
}
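
The escape handling above is rewritten from an index-juggling while loop into a single tail-recursive pass over the backslash positions, and the `InvalidEscapeException` message now spells out the offending escape and its index. A quick sketch of the public behaviour this preserves; the demo object is illustrative:

{{{
object EscapeDemo extends App {
  // treatEscapes is the entry point rewritten above.
  println(StringContext.treatEscapes("""line1\nline2\t!"""))   // expands \n and \t

  // An unsupported escape such as \q is still rejected; the reworked message
  // names the bad character and its index.
  try StringContext.treatEscapes("""bad\q""")
  catch { case e: StringContext.InvalidEscapeException => println(e.getMessage) }
}
}}}
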
diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala
index 23e3923407..00124cf88b 100644
--- a/src/library/scala/annotation/switch.scala
+++ b/src/library/scala/annotation/switch.scala
@@ -22,6 +22,9 @@ package scala.annotation
}
}}}
*
+ * Note: for pattern matches with one or two cases, the compiler generates jump instructions.
+ * Annotating such a match with `@switch` does not issue any warning.
+ *
* @author Paul Phillips
* @since 2.8
*/
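
The added note says matches with only one or two cases compile to jump instructions anyway, so annotating such a match with `@switch` stays silent. A tiny illustrative example, not from the patch:

{{{
import scala.annotation.switch

object SwitchDemo extends App {
  def describe(c: Char): String = (c: @switch) match {
    case 'a' => "vowel a"
    case 'b' => "consonant b"
    case _   => "something else"
  }
  println(describe('a'))   // vowel a
}
}}}
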
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 4e7d359251..bce9740522 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -102,7 +102,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
*/
def mapValues[C](f: B => C): GenMap[A, C]
- /** Compares two maps structurally; i.e. checks if all mappings
+ /** Compares two maps structurally; i.e., checks if all mappings
* contained in this map are also contained in the other map,
* and vice versa.
*
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index c3bad60072..cf1de0c8e6 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -47,7 +47,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param idx The index to select.
* @return the element of this $coll at index `idx`, where `0` indicates the first element.
- * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`.
*/
def apply(idx: Int): A
@@ -397,7 +397,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @inheritdoc
*
* Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
* `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
*
* $willNotTerminateInf
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index ca098e57b9..8b9d3e7a17 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -63,7 +63,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Selects the first element of this $coll.
* $orderDependent
* @return the first element of this $coll.
- * @throws `NoSuchElementException` if the $coll is empty.
+ * @throws NoSuchElementException if the $coll is empty.
*/
def head: A
@@ -83,7 +83,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* $orderDependent
* @return a $coll consisting of all elements of this $coll
* except the first one.
- * @throws `UnsupportedOperationException` if the $coll is empty.
+ * @throws UnsupportedOperationException if the $coll is empty.
*/
def tail: Repr
@@ -105,7 +105,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* $orderDependent
* @return a $coll consisting of all elements of this $coll
* except the last one.
- * @throws `UnsupportedOperationException` if the $coll is empty.
+ * @throws UnsupportedOperationException if the $coll is empty.
*/
def init: Repr
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 0cd91409cf..f77462ce88 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -268,7 +268,7 @@ trait GenTraversableOnce[+A] extends Any {
* op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
+ * @throws UnsupportedOperationException if this $coll is empty.
*/
def reduceRight[B >: A](op: (A, B) => B): B
@@ -278,7 +278,7 @@ trait GenTraversableOnce[+A] extends Any {
*
* @param op the binary operator.
* @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceLeft(op)` is this $coll is nonempty,
+ * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty,
* `None` otherwise.
*/
def reduceLeftOption[B >: A](op: (B, A) => B): Option[B]
@@ -290,7 +290,7 @@ trait GenTraversableOnce[+A] extends Any {
*
* @param op the binary operator.
* @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceRight(op)` is this $coll is nonempty,
+ * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty,
* `None` otherwise.
*/
def reduceRightOption[B >: A](op: (A, B) => B): Option[B]
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index ade04e4de8..a7e06b4d1a 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -141,10 +141,10 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
def drop(n: Int): Repr = slice(n, length)
override /*IterableLike*/
- def takeRight(n: Int): Repr = slice(length - n, length)
+ def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length)
override /*IterableLike*/
- def dropRight(n: Int): Repr = slice(0, length - n)
+ def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0))
override /*TraversableLike*/
def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
@@ -206,7 +206,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
override /*SeqLike*/
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
- var i = end
+ var i = math.min(end, length - 1)
while (i >= 0 && !p(this(i))) i -= 1
i
}
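
The `math.max(n, 0)` clamps make a negative argument to `takeRight`/`dropRight` behave like 0, and `lastIndexWhere` now clamps `end` to the last valid index. A small sketch, assuming `ArrayBuffer` (which mixes in `IndexedSeqOptimized`) as the collection:

{{{
import scala.collection.mutable.ArrayBuffer

object ClampDemo extends App {
  val xs = ArrayBuffer(1, 2, 3)
  println(xs.takeRight(-1))               // ArrayBuffer()
  println(xs.dropRight(-1))               // ArrayBuffer(1, 2, 3)
  println(xs.lastIndexWhere(_ > 1, 10))   // 2: end is clamped to the last index
}
}}}
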
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index a5ab8efd5c..afbffd36c6 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -38,7 +38,7 @@ trait Iterable[+A] extends Traversable[A]
}
/** $factoryInfo
- * The current default implementation of a $Coll is a `Vector`.
+ * The current default implementation of a $Coll is a `List`.
* @define coll iterable collection
* @define Coll `Iterable`
*/
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 91ab1f6ac2..ecf64624e8 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -179,6 +179,7 @@ self =>
/** Groups elements in fixed size blocks by passing a "sliding window"
* over them (as opposed to partitioning them, as is done in grouped.)
+ * "Sliding window" step is 1 by default.
* @see [[scala.collection.Iterator]], method `sliding`
*
* @param size the number of elements per group
@@ -194,7 +195,7 @@ self =>
*
* @param size the number of elements per group
* @param step the distance between the first elements of successive
- * groups (defaults to 1)
+ * groups
* @return An iterator producing ${coll}s of size `size`, except the
* last and the only element will be truncated if there are
* fewer elements than size.
@@ -217,12 +218,12 @@ self =>
val b = newBuilder
b.sizeHintBounded(n, this)
val lead = this.iterator drop n
- var go = false
- for (x <- this) {
- if (lead.hasNext) lead.next()
- else go = true
- if (go) b += x
+ val it = this.iterator
+ while (lead.hasNext) {
+ lead.next()
+ it.next()
}
+ while (it.hasNext) b += it.next()
b.result()
}
@@ -282,7 +283,7 @@ self =>
var i = 0
for (x <- this) {
b += ((x, i))
- i +=1
+ i += 1
}
b.result()
}
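
The sliding documentation now states explicitly that the one-argument form uses a window step of 1, while the two-argument form takes the step as given. A short illustration, not part of the patch:

{{{
object SlidingDemo extends App {
  val xs = List(1, 2, 3, 4, 5)
  println(xs.sliding(3).toList)      // List(List(1, 2, 3), List(2, 3, 4), List(3, 4, 5))
  println(xs.sliding(3, 2).toList)   // List(List(1, 2, 3), List(3, 4, 5))
}
}}}
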
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index 3a0e2ab115..97aa830c5a 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -16,4 +16,5 @@ package collection
* @version 2.8
* @since 2.8
*/
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
trait IterableProxy[+A] extends Iterable[A] with IterableProxyLike[A, Iterable[A]]
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 668190f700..b84d90c51b 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -150,10 +150,10 @@ trait IterableViewLike[+A,
sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented.
override def dropRight(n: Int): This =
- take(thisSeq.length - n)
+ take(thisSeq.length - math.max(n, 0))
override def takeRight(n: Int): This =
- drop(thisSeq.length - n)
+ drop(thisSeq.length - math.max(n, 0))
override def stringPrefix = "IterableView"
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 1b496383a3..0783beac0f 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -320,7 +320,14 @@ trait Iterator[+A] extends TraversableOnce[A] {
* it omits the first `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def drop(n: Int): Iterator[A] = slice(n, Int.MaxValue)
+ def drop(n: Int): Iterator[A] = {
+ var j = 0
+ while (j < n && hasNext) {
+ next()
+ j += 1
+ }
+ this
+ }
/** Creates an iterator returning an interval of the values produced by this iterator.
*
@@ -472,7 +479,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
- /** Produces a collection containing cummulative results of applying the
+ /** Produces a collection containing cumulative results of applying the
* operator going left to right.
*
* $willNotTerminateInf
@@ -495,8 +502,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
} else Iterator.empty.next()
}
- /** Produces a collection containing cummulative results of applying the operator going right to left.
- * The head of the collection is the last cummulative result.
+ /** Produces a collection containing cumulative results of applying the operator going right to left.
+ * The head of the collection is the last cumulative result.
*
* $willNotTerminateInf
* $orderDependent
@@ -922,11 +929,16 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** For reasons which remain to be determined, calling
* self.take(n).toSeq cause an infinite loop, so we have
* a slight variation on take for local usage.
+ * NB: self.take.toSeq is slice.toStream, lazily built on self,
+ * so a subsequent self.hasNext would not test self after the
+ * group was consumed.
*/
private def takeDestructively(size: Int): Seq[A] = {
val buf = new ArrayBuffer[A]
var i = 0
- while (self.hasNext && i < size) {
+ // The order of terms in the following condition is important
+ // here as self.hasNext could be blocking
+ while (i < size && self.hasNext) {
buf += self.next
i += 1
}
@@ -943,12 +955,10 @@ trait Iterator[+A] extends TraversableOnce[A] {
// so the rest of the code can be oblivious
val xs: Seq[B] = {
val res = takeDestructively(count)
- // extra checks so we don't calculate length unless there's reason
- if (pad.isDefined && !self.hasNext) {
- val shortBy = count - res.length
- if (shortBy > 0) res ++ padding(shortBy) else res
- }
- else res
+ // was: extra checks so we don't calculate length unless there's reason
+ // but since we took the group eagerly, just use the fast length
+ val shortBy = count - res.length
+ if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res
}
lazy val len = xs.length
lazy val incomplete = len < count
@@ -1085,6 +1095,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
/** Returns this iterator with patched values.
+ * Patching at negative indices is the same as patching starting at 0.
+ * Patching at indices at or larger than the length of the original iterator appends the patch to the end.
+ * If more values are replaced than actually exist, the excess is ignored.
*
* @param from The start index from which to patch
* @param patchElems The iterator of patch values
@@ -1093,18 +1106,33 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] {
private var origElems = self
- private var i = 0
- def hasNext: Boolean =
- if (i < from) origElems.hasNext
- else patchElems.hasNext || origElems.hasNext
+ private var i = (if (from > 0) from else 0) // Counts down, switch to patch on 0, -1 means use patch first
+ def hasNext: Boolean = {
+ if (i == 0) {
+ origElems = origElems drop replaced
+ i = -1
+ }
+ origElems.hasNext || patchElems.hasNext
+ }
def next(): B = {
- // We have to do this *first* just in case from = 0.
- if (i == from) origElems = origElems drop replaced
- val result: B =
- if (i < from || !patchElems.hasNext) origElems.next()
- else patchElems.next()
- i += 1
- result
+ if (i == 0) {
+ origElems = origElems drop replaced
+ i = -1
+ }
+ if (i < 0) {
+ if (patchElems.hasNext) patchElems.next()
+ else origElems.next()
+ }
+ else {
+ if (origElems.hasNext) {
+ i -= 1
+ origElems.next()
+ }
+ else {
+ i = -1
+ patchElems.next()
+ }
+ }
}
}
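
The rewritten `patch` counts `i` down to the patch position instead of comparing against `from`, and the new doc lines pin down the edge cases: negative indices patch at 0, indices past the end append, and an oversized `replaced` is ignored. A sketch of those cases; the demo object and values are illustrative:

{{{
object PatchDemo extends App {
  def nums = Iterator(1, 2, 3, 4, 5)   // fresh iterator per call, since patch consumes it
  println(nums.patch(1, Iterator(20, 30), 2).toList)   // List(1, 20, 30, 4, 5)
  println(nums.patch(-1, Iterator(0), 1).toList)       // List(0, 2, 3, 4, 5): negative from patches at 0
  println(nums.patch(10, Iterator(99), 1).toList)      // List(1, 2, 3, 4, 5, 99): from past the end appends
}
}}}
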
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index a4fa58b13c..875f6e1c02 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -37,8 +37,8 @@ import convert._
* val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
* assert(sl eq sl2)
* }}}
- * The following conversions also are supported, but the
- * direction Scala to Java is done my a more specifically named method:
+ * The following conversions are also supported, but the
+ * direction from Scala to Java is done by the more specifically named methods:
* `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
*
* - `scala.collection.Iterable` <=> `java.util.Collection`
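
The corrected sentence distinguishes the symmetric `asScala`/`asJava` pairs from the conversions whose Scala-to-Java direction uses a more specific name. A minimal sketch of one such pair, with illustrative values:

{{{
import scala.collection.JavaConverters._

object ConvertersDemo extends App {
  val it: Iterable[Int] = List(1, 2, 3)
  val jc: java.util.Collection[Int] = it.asJavaCollection   // Scala-to-Java uses the specific name
  println(jc.asScala.toList)                                // Java-to-Scala: List(1, 2, 3)
}
}}}
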
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index 1e4975a0a7..5a7bb5891e 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -15,7 +15,14 @@ import generic._
import mutable.Builder
/** A base trait for linear sequences.
+ *
* $linearSeqInfo
+ *
+ * @define linearSeqInfo
+ * Linear sequences have reasonably efficient `head`, `tail`, and `isEmpty` methods.
+ * If these methods provide the fastest way to traverse the collection, a
+ * collection `Coll` that extends this trait should also extend
+ * `LinearSeqOptimized[A, Coll[A]]`.
*/
trait LinearSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
@@ -25,7 +32,7 @@ trait LinearSeq[+A] extends Seq[A]
}
/** $factoryInfo
- * The current default implementation of a $Coll is a `Vector`.
+ * The current default implementation of a $Coll is a `List`.
* @define coll linear sequence
* @define Coll `LinearSeq`
*/
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index ff7985bf0d..96e2135fd1 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -14,22 +14,10 @@ import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
- * $linearSeqInfo
- *
- * This trait just implements `iterator` in terms of `isEmpty, ``head`, and `tail`.
- * However, see `LinearSeqOptimized` for an implementation trait that overrides operations
+ * This trait just implements `iterator` and `corresponds` in terms of `isEmpty, ``head`, and `tail`.
+ * However, see `LinearSeqOptimized` for an implementation trait that overrides many more operations
* to make them run faster under the assumption of fast linear access with `head` and `tail`.
*
- * @define linearSeqInfo
- * Linear sequences are defined in terms of three abstract methods, which are assumed
- * to have efficient implementations. These are:
- * {{{
- * def isEmpty: Boolean
- * def head: A
- * def tail: Repr
- * }}}
- * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
- *
* Linear sequences do not add any new methods to `Seq`, but promise efficient implementations
* of linear access patterns.
* @author Martin Odersky
@@ -58,12 +46,18 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
val result = these.head; these = these.tail; result
} else Iterator.empty.next()
- /** Have to clear `these` so the iterator is exhausted like
- * it would be without the optimization.
- */
override def toList: List[A] = {
+ /* Have to clear `these` so the iterator is exhausted like
+ * it would be without the optimization.
+ *
+ * Calling "newBuilder.result()" in toList method
+ * prevents original seq from garbage collection,
+ * so we use these.take(0) here.
+ *
+ * Check SI-8924 for details
+ */
val xs = these.toList
- these = newBuilder.result()
+ these = these.take(0)
xs
}
}
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 8635b090b9..9c336e8e31 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -13,10 +13,24 @@ import mutable.ListBuffer
import immutable.List
import scala.annotation.tailrec
-/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
- * the implementation of several methods under the assumption of fast linear access.
+/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
+ * the implementation of various methods under the assumption of fast linear access.
+ *
+ * $linearSeqOptim
+ *
+ * @define linearSeqOptim
+  * Linear-optimized sequences implement most operations in terms of three methods,
+ * which are assumed to have efficient implementations. These are:
+ * {{{
+ * def isEmpty: Boolean
+ * def head: A
+ * def tail: Repr
+ * }}}
+ * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
+ * Note that default implementations are provided via inheritance, but these
+ * should be overridden for performance.
+ *
*
- * $linearSeqInfo
*/
trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr =>
@@ -30,7 +44,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
*
* $willNotTerminateInf
*
- * Note: the execution of `length` may take time proportial to the length of the sequence.
+ * Note: the execution of `length` may take time proportional to the length of the sequence.
*/
def length: Int = {
var these = self
@@ -43,8 +57,8 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
/** Selects an element by its index in the $coll.
- * Note: the execution of `apply` may take time proportial to the index value.
- * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ * Note: the execution of `apply` may take time proportional to the index value.
+ * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`.
*/
def apply(n: Int): A = {
val rest = drop(n)
@@ -235,13 +249,16 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
override /*IterableLike*/
def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that1: LinearSeq[_] =>
- var these = this
- var those = that1
- while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
- these = these.tail
- those = those.tail
+ // Probably immutable, so check reference identity first (it's quick anyway)
+ (this eq that1) || {
+ var these = this
+ var those = that1
+ while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
+ these = these.tail
+ those = those.tail
+ }
+ these.isEmpty && those.isEmpty
}
- these.isEmpty && those.isEmpty
case _ =>
super.sameElements(that)
}
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 5ec7d5c615..d133400570 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -222,7 +222,7 @@ self =>
* but it might be overridden in subclasses.
*
* @param key the given key value for which a binding is missing.
- * @throws `NoSuchElementException`
+ * @throws NoSuchElementException
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index 941c1f5a4a..26a7c710ee 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -17,4 +17,5 @@ package collection
* @version 1.0, 21/07/2003
* @since 1
*/
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala
index fec4bbf502..b68124b3f8 100644
--- a/src/library/scala/collection/Searching.scala
+++ b/src/library/scala/collection/Searching.scala
@@ -54,7 +54,7 @@ object Searching {
*/
final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult =
coll match {
- case _: IndexedSeq[A] => binarySearch(elem, -1, coll.length)(ord)
+ case _: IndexedSeq[A] => binarySearch(elem, 0, coll.length)(ord)
case _ => linearSearch(coll.view, elem, 0)(ord)
}
@@ -81,18 +81,18 @@ object Searching {
final def search[B >: A](elem: B, from: Int, to: Int)
(implicit ord: Ordering[B]): SearchResult =
coll match {
- case _: IndexedSeq[A] => binarySearch(elem, from-1, to)(ord)
+ case _: IndexedSeq[A] => binarySearch(elem, from, to)(ord)
case _ => linearSearch(coll.view(from, to), elem, from)(ord)
}
@tailrec
private def binarySearch[B >: A](elem: B, from: Int, to: Int)
(implicit ord: Ordering[B]): SearchResult = {
- if ((to-from) == 1) InsertionPoint(from) else {
- val idx = from+(to-from)/2
+ if (to == from) InsertionPoint(from) else {
+ val idx = from+(to-from-1)/2
math.signum(ord.compare(elem, coll(idx))) match {
case -1 => binarySearch(elem, from, idx)(ord)
- case 1 => binarySearch(elem, idx, to)(ord)
+ case 1 => binarySearch(elem, idx + 1, to)(ord)
case _ => Found(idx)
}
}
@@ -105,7 +105,7 @@ object Searching {
while (it.hasNext) {
val cur = it.next()
if (ord.equiv(elem, cur)) return Found(idx)
- else if (ord.lt(elem, cur)) return InsertionPoint(idx-1)
+ else if (ord.lt(elem, cur)) return InsertionPoint(idx)
idx += 1
}
InsertionPoint(idx)
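
The binary search now starts `from` at the real lower bound and recurses on `idx + 1` for the upper half, so `InsertionPoint` reports the index at which the element would keep the sequence sorted rather than one position short. A small sketch of the corrected results, with illustrative values:

{{{
import scala.collection.Searching._

object SearchDemo extends App {
  val xs = Vector(1, 3, 5, 7)
  println(xs.search(5))   // Found(2)
  println(xs.search(4))   // InsertionPoint(2)
  println(xs.search(0))   // InsertionPoint(0)
  println(xs.search(9))   // InsertionPoint(4)
}
}}}
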
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index fdfb1f2efc..b775480532 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -140,7 +140,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (isEmpty) Iterator(repr)
else new PermutationsItr
- /** Iterates over combinations.
+ /** Iterates over combinations. A _combination_ of length `n` is a subsequence of
+ * the original sequence, with the elements taken in order. Thus, `"xy"` and `"yy"`
+ * are both length-2 combinations of `"xyy"`, but `"yx"` is not. If there is
+ * more than one way to generate the same subsequence, only one will be returned.
+ *
+ * For example, `"xyyy"` has three different ways to generate `"xy"` depending on
+ * whether the first, second, or third `"y"` is selected. However, since all are
+ * identical, only one will be chosen. Which of the three will be taken is an
+ * implementation detail that is not defined.
*
* @return An Iterator which traverses the possible n-element combinations of this $coll.
* @example `"abbbc".combinations(2) = Iterator(ab, ac, bb, bc)`
@@ -405,7 +413,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* @inheritdoc
*
* Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
* `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
*
* $willNotTerminateInf
@@ -439,9 +447,11 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def diff[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
val b = newBuilder
- for (x <- this)
- if (occ(x) == 0) b += x
- else occ(x) -= 1
+ for (x <- this) {
+ val ox = occ(x) // Avoid multiple map lookups
+ if (ox == 0) b += x
+ else occ(x) = ox - 1
+ }
b.result()
}
@@ -468,11 +478,13 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def intersect[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
val b = newBuilder
- for (x <- this)
- if (occ(x) > 0) {
+ for (x <- this) {
+ val ox = occ(x) // Avoid multiple map lookups
+ if (ox > 0) {
b += x
- occ(x) -= 1
+ occ(x) = ox - 1
}
+ }
b.result()
}
@@ -501,22 +513,35 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- val (prefix, rest) = this.splitAt(from)
- b ++= toCollection(prefix)
+ var i = 0
+ val it = this.iterator
+ while (i < from && it.hasNext) {
+ b += it.next()
+ i += 1
+ }
b ++= patch.seq
- b ++= toCollection(rest).view drop replaced
+ i = replaced
+ while (i > 0 && it.hasNext) {
+ it.next()
+ i -= 1
+ }
+ while (it.hasNext) b += it.next()
b.result()
}
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
if (index < 0) throw new IndexOutOfBoundsException(index.toString)
val b = bf(repr)
- val (prefix, rest) = this.splitAt(index)
- val restColl = toCollection(rest)
- if (restColl.isEmpty) throw new IndexOutOfBoundsException(index.toString)
- b ++= toCollection(prefix)
+ var i = 0
+ val it = this.iterator
+ while (i < index && it.hasNext) {
+ b += it.next()
+ i += 1
+ }
+ if (!it.hasNext) throw new IndexOutOfBoundsException(index.toString)
b += elem
- b ++= restColl.view.tail
+ it.next()
+ while (it.hasNext) b += it.next()
b.result()
}
@@ -536,8 +561,9 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- b.sizeHint(length max len)
- var diff = len - length
+ val L = length
+ b.sizeHint(math.max(L, len))
+ var diff = len - L
b ++= thisCollection
while (diff > 0) {
b += elem
@@ -609,16 +635,23 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
*/
def sorted[B >: A](implicit ord: Ordering[B]): Repr = {
val len = this.length
- val arr = new ArraySeq[A](len)
- var i = 0
- for (x <- this) {
- arr(i) = x
- i += 1
- }
- java.util.Arrays.sort(arr.array, ord.asInstanceOf[Ordering[Object]])
val b = newBuilder
- b.sizeHint(len)
- for (x <- arr) b += x
+ if (len == 1) b ++= this
+ else if (len > 1) {
+ b.sizeHint(len)
+ val arr = new Array[AnyRef](len) // Previously used ArraySeq for more compact but slower code
+ var i = 0
+ for (x <- this) {
+ arr(i) = x.asInstanceOf[AnyRef]
+ i += 1
+ }
+ java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]])
+ i = 0
+ while (i < arr.length) {
+ b += arr(i).asInstanceOf[A]
+ i += 1
+ }
+ }
b.result()
}
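
The occurrence-count rewrite keeps the usual multiset semantics of diff and intersect, and the iterator-based patch keeps its documented behaviour; a quick REPL sketch of the expected values:

scala> List(1, 1, 2, 3) diff List(1, 2)         // List(1, 3): removes one occurrence per match
scala> List(1, 1, 2) intersect List(1, 2, 2)    // List(1, 2): occurrences are capped by the other sequence
scala> List(1, 2, 3, 4).patch(1, List(9, 9), 2) // List(1, 9, 9, 4)
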
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 5e31ac4a53..3473c8aff1 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -55,7 +55,7 @@ trait SeqViewLike[+A,
trait Sliced extends super.Sliced with Transformed[A] {
def length = iterator.size
def apply(idx: Int): A =
- if (idx + from < until) self.apply(idx + from)
+ if (idx >= 0 && idx + from < until) self.apply(idx + from)
else throw new IndexOutOfBoundsException(idx.toString)
override def foreach[U](f: A => U) = iterator foreach f
@@ -83,6 +83,7 @@ trait SeqViewLike[+A,
}
def length = index(self.length)
def apply(idx: Int) = {
+ if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
val row = findRow(idx, 0, self.length - 1)
mapping(self(row)).seq.toSeq(idx - index(row))
}
@@ -154,17 +155,27 @@ trait SeqViewLike[+A,
}
}
+ // Note--for this to work, must ensure 0 <= from and 0 <= replaced
+ // Must also take care to allow patching inside an infinite stream
+ // (forcing the whole stream while patching is not okay)
trait Patched[B >: A] extends Transformed[B] {
protected[this] val from: Int
protected[this] val patch: GenSeq[B]
protected[this] val replaced: Int
private lazy val plen = patch.length
override def iterator: Iterator[B] = self.iterator patch (from, patch.iterator, replaced)
- def length: Int = self.length + plen - replaced
- def apply(idx: Int): B =
- if (idx < from) self.apply(idx)
- else if (idx < from + plen) patch.apply(idx - from)
+ def length: Int = {
+ val len = self.length
+ val pre = math.min(from, len)
+ val post = math.max(0, len - pre - replaced)
+ pre + plen + post
+ }
+ def apply(idx: Int): B = {
+ val actualFrom = if (self.lengthCompare(from) < 0) self.length else from
+ if (idx < actualFrom) self.apply(idx)
+ else if (idx < actualFrom + plen) patch.apply(idx - actualFrom)
else self.apply(idx - plen + replaced)
+ }
final override protected[this] def viewIdentifier = "P"
}
@@ -210,7 +221,10 @@ trait SeqViewLike[+A,
override def reverse: This = newReversed.asInstanceOf[This]
override def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = {
- newPatched(from, patch, replaced).asInstanceOf[That]
+ // Be careful to not evaluate the entire sequence! Patch should work (slowly, perhaps) on infinite streams.
+ val nonNegFrom = math.max(0,from)
+ val nonNegRep = math.max(0,replaced)
+ newPatched(nonNegFrom, patch, nonNegRep).asInstanceOf[That]
// was: val b = bf(repr)
// if (b.isInstanceOf[NoBuilder[_]]) newPatched(from, patch, replaced).asInstanceOf[That]
// else super.patch[B, That](from, patch, replaced)(bf)
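
A minimal sketch of what the lazier, clamped patch on views is meant to allow; only the requested elements are realized (expected value, assuming standard Stream/view behaviour):

scala> Stream.from(0).view.patch(2, Seq(-1), 1).take(5).toList
// List(0, 1, -1, 3, 4) -- works even though the underlying stream is infinite
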
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 0c5c7e0b29..f8ac1d754d 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -17,6 +17,9 @@ import parallel.ParSet
/** A template trait for sets.
*
* $setNote
+ * '''Implementation note:'''
+ * This trait provides most of the operations of a `Set` independently of its representation.
+ * It is typically inherited by concrete implementations of sets.
* $setTags
* @since 2.8
*
@@ -24,10 +27,6 @@ import parallel.ParSet
*
* A set is a collection that contains no duplicate elements.
*
- * '''Implementation note:'''
- * This trait provides most of the operations of a `Set` independently of its representation.
- * It is typically inherited by concrete implementations of sets.
- *
* To implement a concrete set, you need to provide implementations of the
* following methods:
* {{{
@@ -108,22 +107,36 @@ self =>
*/
def + (elem: A): This
- /** Creates a new $coll with additional elements.
+ /** Creates a new $coll with additional elements, omitting duplicates.
+ *
+ * This method takes two or more elements to be added. Elements that already exist in the $coll will
+ * not be added. Another overloaded variant of this method handles the case where a single element is added.
*
- * This method takes two or more elements to be added. Another overloaded
- * variant of this method handles the case where a single element is added.
+ * Example:
+ * {{{
+ * scala> val a = Set(1, 3) + 2 + 3
+ * a: scala.collection.immutable.Set[Int] = Set(1, 3, 2)
+ * }}}
*
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
- * @return a new $coll with the given elements added.
+ * @return a new $coll with the given elements added, omitting duplicates.
*/
def + (elem1: A, elem2: A, elems: A*): This = this + elem1 + elem2 ++ elems
- /** Creates a new $coll by adding all elements contained in another collection to this $coll.
+ /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates.
+ *
+ * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll.
+ *
+ * Example:
+ * {{{
+ * scala> val a = Set(1, 2) ++ Set(2, "a")
+ * a: scala.collection.immutable.Set[Any] = Set(1, 2, a)
+ * }}}
*
- * @param elems the collection containing the added elements.
- * @return a new $coll with the given elements added.
+ * @param elems the collection containing the elements to add.
+ * @return a new $coll with the given elements added, omitting duplicates.
*/
def ++ (elems: GenTraversableOnce[A]): This = (repr /: elems.seq)(_ + _)
@@ -172,7 +185,7 @@ self =>
*
* @return the iterator.
*/
- def subsets: Iterator[This] = new AbstractIterator[This] {
+ def subsets(): Iterator[This] = new AbstractIterator[This] {
private val elms = self.toIndexedSeq
private var len = 0
private var itr: Iterator[This] = Iterator.empty
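
The signature change from `subsets` to `subsets()` only adds the empty parameter list; a quick REPL sketch of its behaviour:

scala> Set(1, 2).subsets().toList
// List(Set(), Set(1), Set(2), Set(1, 2)) -- enumerated by size, smallest first
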
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index f9f38f148a..e17fb215b9 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -17,4 +17,5 @@ package collection
* @author Martin Odersky
* @version 2.0, 01/01/2007
*/
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]]
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index b53724c568..a35750a35f 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -87,7 +87,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
}
/** $factoryInfo
- * The current default implementation of a $Coll is a `Vector`.
+ * The current default implementation of a $Coll is a `List`.
*/
object Traversable extends TraversableFactory[Traversable] { self =>
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index b60ea86ab0..96374ef653 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -54,7 +54,7 @@ import scala.language.higherKinds
* `HashMap` of objects. The traversal order for hash maps will
* depend on the hash codes of its elements, and these hash codes might
* differ from one run to the next. By contrast, a `LinkedHashMap`
- * is ordered because it's `foreach` method visits elements in the
+ * is ordered because its `foreach` method visits elements in the
* order they were inserted into the `HashMap`.
*
* @author Martin Odersky
@@ -345,8 +345,8 @@ trait TraversableLike[+A, +Repr] extends Any
* $mayNotTerminateInf
*
* @param p the predicate used to test elements.
- * @return `true` if the given predicate `p` holds for all elements
- * of this $coll, otherwise `false`.
+ * @return `true` if this $coll is empty, otherwise `true` if the given predicate `p`
+ * holds for all elements of this $coll, otherwise `false`.
*/
def forall(p: A => Boolean): Boolean = {
var result = true
@@ -362,8 +362,8 @@ trait TraversableLike[+A, +Repr] extends Any
* $mayNotTerminateInf
*
* @param p the predicate used to test elements.
- * @return `true` if the given predicate `p` holds for some of the
- * elements of this $coll, otherwise `false`.
+ * @return `false` if this $coll is empty, otherwise `true` if the given predicate `p`
+ * holds for some of the elements of this $coll, otherwise `false`.
*/
def exists(p: A => Boolean): Boolean = {
var result = false
@@ -419,7 +419,7 @@ trait TraversableLike[+A, +Repr] extends Any
/** Selects the first element of this $coll.
* $orderDependent
* @return the first element of this $coll.
- * @throws `NoSuchElementException` if the $coll is empty.
+ * @throws NoSuchElementException if the $coll is empty.
*/
def head: A = {
var result: () => A = () => throw new NoSuchElementException
@@ -473,7 +473,7 @@ trait TraversableLike[+A, +Repr] extends Any
* $orderDependent
* @return a $coll consisting of all elements of this $coll
* except the last one.
- * @throws `UnsupportedOperationException` if the $coll is empty.
+ * @throws UnsupportedOperationException if the $coll is empty.
*/
def init: Repr = {
if (isEmpty) throw new UnsupportedOperationException("empty.init")
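
The empty-collection corner cases that the reworded forall/exists docs spell out, checked in the REPL:

scala> List.empty[Int].forall(_ > 0)   // true: vacuously satisfied on an empty collection
scala> List.empty[Int].exists(_ > 0)   // false: no element can satisfy the predicate
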
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 072fd3da44..c5b0d0f085 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -75,7 +75,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
// at least indirectly. Currently, these are `ArrayOps` and `StringOps`.
// It is also implemented in `TraversableOnce[A]`.
/** A version of this collection with all
- * of the operations implemented sequentially (i.e. in a single-threaded manner).
+ * of the operations implemented sequentially (i.e., in a single-threaded manner).
*
* This method returns a reference to this collection. In parallel collections,
* it is redefined to return a sequential implementation of this collection. In
@@ -85,10 +85,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
*/
def seq: TraversableOnce[A]
- /** Presently these are abstract because the Traversable versions use
- * breakable/break, and I wasn't sure enough of how that's supposed to
- * function to consolidate them with the Iterator versions.
- */
+ // Presently these are abstract because the Traversable versions use
+ // breakable/break, and I wasn't sure enough of how that's supposed to
+ // function to consolidate them with the Iterator versions.
def forall(p: A => Boolean): Boolean
def exists(p: A => Boolean): Boolean
def find(p: A => Boolean): Option[A]
@@ -129,8 +128,21 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
*/
def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
- // make sure to use an iterator or `seq`
- self.toIterator.foreach(pf.runWith(b => return Some(b)))
+ // TODO 2.12 -- move out alternate implementations into child classes
+ val i: Iterator[A] = self match {
+ case it: Iterator[A] => it
+ case _: GenIterable[_] => self.toIterator // If it might be parallel, be sure to .seq or use iterator!
+ case _ => // Not parallel, not iterable--just traverse
+ self.foreach(pf.runWith(b => return Some(b)))
+ return None
+ }
+ // Presumably the fastest way to get in and out of a partial function is for a sentinel function to return itself
+ // (Tested to be lower-overhead than runWith. Would be better yet to not need to (formally) allocate it--change in 2.12.)
+ val sentinel: Function1[A, Any] = new scala.runtime.AbstractFunction1[A, Any]{ def apply(a: A) = this }
+ while (i.hasNext) {
+ val x = pf.applyOrElse(i.next, sentinel)
+ if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B])
+ }
None
}
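
The self-returning sentinel idiom used above, pulled out as a standalone sketch (`firstDefined` and its signature are illustrative only, not part of the library):

// Finds the first value on which `pf` is defined without allocating an Option per element:
// applyOrElse falls back to a sentinel function that simply returns itself.
def firstDefined[A, B](it: Iterator[A], pf: PartialFunction[A, B]): Option[B] = {
  val sentinel: A => Any = new scala.runtime.AbstractFunction1[A, Any] { def apply(a: A) = this }
  while (it.hasNext) {
    val x = pf.applyOrElse(it.next(), sentinel)
    if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B])
  }
  None
}

// e.g. firstDefined(List("a", 1, 5L).iterator, { case i: Int => i * 10 }) == Some(10)
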
@@ -160,7 +172,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty. */
+ * @throws UnsupportedOperationException if this $coll is empty. */
def reduceLeft[B >: A](op: (B, A) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceLeft")
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 65936da0e4..9eec685d10 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -21,4 +21,5 @@ package collection
* @version 2.8
* @since 2.8
*/
+@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3")
trait TraversableProxy[+A] extends Traversable[A] with TraversableProxyLike[A, Traversable[A]]
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 02e5dd01f5..cfb567abe9 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -20,7 +20,7 @@ package collection.concurrent
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll `ConcurrentMap`
+ * @define Coll `concurrent.Map`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
@@ -49,7 +49,7 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
def putIfAbsent(k: A, v: B): Option[B]
/**
- * Removes the entry for the specified key if its currently mapped to the
+ * Removes the entry for the specified key if it's currently mapped to the
* specified value.
*
* $atomicop
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index fccc1d81b9..bcfea7a463 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -873,6 +873,44 @@ extends scala.collection.concurrent.Map[K, V]
insertifhc(k, hc, v, INode.KEY_ABSENT)
}
+ // TODO once computeIfAbsent is added to concurrent.Map,
+ // move the comment there and tweak the 'at most once' part
+ /** If the specified key is not already in the map, computes its value using
+ * the given thunk `op` and enters it into the map.
+ *
+ * Since concurrent maps cannot contain `null` for keys or values,
+ * a `NullPointerException` is thrown if the thunk `op`
+ * returns `null`.
+ *
+ * If the specified mapping function throws an exception,
+ * that exception is rethrown.
+ *
+ * Note: This method will invoke op at most once.
+ * However, `op` may be invoked without the result being added to the map if
+ * a concurrent process is also trying to add a value corresponding to the
+ * same key `k`.
+ *
+ * @param k the key to modify
+ * @param op the expression that computes the value
+ * @return the newly added value
+ */
+ override def getOrElseUpdate(k: K, op: =>V): V = {
+ val oldv = lookup(k)
+ if (oldv != null) oldv.asInstanceOf[V]
+ else {
+ val v = op
+ if (v == null) {
+ throw new NullPointerException("Concurrent TrieMap values cannot be null.")
+ } else {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_ABSENT) match {
+ case Some(oldv) => oldv
+ case None => v
+ }
+ }
+ }
+ }
+
def remove(k: K, v: V): Boolean = {
val hc = computeHash(k)
removehc(k, v, hc).nonEmpty
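
A REPL sketch of the new getOrElseUpdate (key and values here are illustrative):

scala> import scala.collection.concurrent.TrieMap
scala> val cache = TrieMap.empty[String, Int]
scala> cache.getOrElseUpdate("answer", { println("computing"); 42 })  // prints "computing", stores and returns 42
scala> cache.getOrElseUpdate("answer", { println("computing"); -1 })  // returns 42; the thunk is not evaluated
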
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index c724831c54..5448f5f91c 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -135,6 +135,12 @@ trait DecorateAsScala {
* If the Java `Map` was previously obtained from an implicit or explicit
* call of `asMap(scala.collection.mutable.Map)` then the original
* Scala `Map` will be returned.
+ *
+ * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
+ * it is your responsibility to wrap all
+ * non-atomic operations with `underlying.synchronized`.
+ * This includes `get`, as `java.util.Map`'s API does not allow for an
+ * atomic `get` when `null` values may be present.
*
* @param m The `Map` to be converted.
* @return An object with an `asScala` method that returns a Scala mutable
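
A sketch of the locking discipline the added note asks for; Collections.synchronizedMap is standard JDK, the remaining names are illustrative:

import scala.collection.JavaConverters._
import java.{util => ju}

val underlying: ju.Map[String, String] =
  ju.Collections.synchronizedMap(new ju.HashMap[String, String]())
val wrapped = underlying.asScala

// get is not atomic on a synchronized java.util.Map when null values may be present,
// so guard it -- like any other non-atomic operation -- with the underlying monitor.
val v: Option[String] = underlying.synchronized { wrapped.get("some-key") }
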
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index d4ab451b0d..ab151a6778 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -133,7 +133,13 @@ trait WrapAsScala {
* If the Java `Map` was previously obtained from an implicit or
* explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
* the original Scala Map will be returned.
- *
+ *
+ * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`),
+ * it is your responsibility to wrap all
+ * non-atomic operations with `underlying.synchronized`.
+ * This includes `get`, as `java.util.Map`'s API does not allow for an
+ * atomic `get` when `null` values may be present.
+ *
* @param m The Map to be converted.
* @return A Scala mutable Map view of the argument.
*/
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 14ae57c43a..9f9732c62f 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -194,7 +194,7 @@ private[collection] trait Wrappers {
def getKey = k
def getValue = v
def setValue(v1 : B) = self.put(k, v1)
- override def hashCode = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
+ override def hashCode = byteswap32(k.##) + (byteswap32(v.##) << 16)
override def equals(other: Any) = other match {
case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
case _ => false
@@ -288,6 +288,13 @@ private[collection] trait Wrappers {
override def empty: Repr = null.asInstanceOf[Repr]
}
+ /** Wraps a Java map as a Scala one. If the map is to support concurrent access,
+ * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized
+ * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility
+ * to wrap all non-atomic operations with `underlying.synchronized`.
+ * This includes `get`, as `java.util.Map`'s API does not allow for an
+ * atomic `get` when `null` values may be present.
+ */
case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
override def empty = JMapWrapper(new ju.HashMap[A, B])
}
@@ -314,6 +321,10 @@ private[collection] trait Wrappers {
def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
}
+ /** Wraps a concurrent Java map as a Scala one. Single-element concurrent
+ * access is supported; multi-element operations such as maps and filters
+ * are not guaranteed to be atomic.
+ */
case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
override def get(k: A) = {
val v = underlying get k
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index cd48cd23f4..bdd91ba7a4 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -25,7 +25,7 @@ import scala.language.higherKinds
* @author Martin Odersky
* @since 2.8
* @define coll collection
- * @define Coll CC
+ * @define Coll Traversable
*/
trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
@@ -45,7 +45,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Selects the first element of this $coll.
*
* @return the first element of this $coll.
- * @throws `NoSuchElementException` if the $coll is empty.
+ * @throws NoSuchElementException if the $coll is empty.
*/
def head: A
@@ -202,7 +202,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* element type of this $coll is a `Traversable`.
* @return a two-dimensional $coll of ${coll}s which has as ''n''th row
* the ''n''th column of this $coll.
- * @throws `IllegalArgumentException` if all collections in this $coll
+ * @throws IllegalArgumentException if all collections in this $coll
* are not of the same size.
*/
@migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0")
@@ -216,7 +216,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
val bs: IndexedSeq[Builder[B, CC[B]]] = IndexedSeq.fill(headSize)(genericBuilder[B])
for (xs <- sequential) {
var i = 0
- for (x <- asTraversable(xs)) {
+ for (x <- asTraversable(xs).seq) {
if (i >= headSize) fail
bs(i) += x
i += 1
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 726937efd9..f548eac88d 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -162,6 +162,13 @@ class HashSet[A] extends AbstractSet[A]
def - (e: A): HashSet[A] =
nullToEmpty(removed0(e, computeHash(e), 0))
+ /** Returns this $coll as an immutable set.
+ *
+ * A new set will not be built; lazy collections will stay lazy.
+ */
+ @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+
override def filter(p: A => Boolean) = {
val buffer = new Array[HashSet[A]](bufferSize(size))
nullToEmpty(filter0(p, false, 0, buffer, 0))
@@ -399,7 +406,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// create a new HashSet1 with the hash we already know
new HashSet1(ks1.head, hash)
case _ =>
- // create a new HashSetCollison with the hash we already know and the new keys
+ // create a new HashSetCollision with the hash we already know and the new keys
new HashSetCollision1(hash, ks1)
}
}
@@ -419,7 +426,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// create a new HashSet1 with the hash we already know
new HashSet1(ks1.head, hash)
case _ =>
- // create a new HashSetCollison with the hash we already know and the new keys
+ // create a new HashSetCollision with the hash we already know and the new keys
new HashSetCollision1(hash, ks1)
}
}
@@ -438,7 +445,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// Should only have HSC1 if size > 1
this
case _ =>
- // create a new HashSetCollison with the hash we already know and the new keys
+ // create a new HashSetCollision with the hash we already know and the new keys
new HashSetCollision1(hash, ks1)
}
} else this
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index 6e4eb1e45f..df322396d0 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -35,6 +35,7 @@ trait Iterable[+A] extends Traversable[A]
}
/** $factoryInfo
+ * The current default implementation of a $Coll is a `List`.
* @define Coll `immutable.Iterable`
* @define coll immutable iterable collection
*/
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 930e13a9d3..254f14f13c 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -80,6 +80,7 @@ import java.io._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
+@SerialVersionUID(-6084104484083858598L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
sealed abstract class List[+A] extends AbstractSeq[A]
with LinearSeq[A]
with Product
@@ -190,11 +191,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
// Overridden methods from IterableLike and SeqLike or overloaded variants of such methods
- override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
- val b = bf(this)
- if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.seq.toList).asInstanceOf[That]
+ override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That =
+ if (bf eq List.ReusableCBF) (this ::: that.seq.toList).asInstanceOf[That]
else super.++(that)
- }
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match {
case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That]
@@ -292,7 +291,6 @@ sealed abstract class List[+A] extends AbstractSeq[A]
if (this eq Nil) Nil.asInstanceOf[That] else {
var rest = this
var h: ::[B] = null
- var x: A = null.asInstanceOf[A]
// Special case for first element
do {
val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied)
@@ -326,7 +324,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
var h: ::[B] = null
var t: ::[B] = null
while (rest ne Nil) {
- f(rest.head).foreach{ b =>
+ f(rest.head).seq.foreach{ b =>
if (!found) {
h = new ::(b, Nil)
t = h
@@ -430,13 +428,14 @@ case object Nil extends List[Nothing] {
}
/** A non empty list characterized by a head and a tail.
- * @param hd the first element of the list
+ * @param head the first element of the list
* @param tl the list containing the remaining elements of this list after the first one.
* @tparam B the type of the list elements.
* @author Martin Odersky
* @version 1.0, 15/07/2003
* @since 2.8
*/
+@SerialVersionUID(509929039250432923L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] {
override def tail : List[B] = tl
override def isEmpty: Boolean = false
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 1bb07eb02d..a6e6fba0a5 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -111,7 +111,7 @@ class ListSet[A] extends AbstractSet[A]
/** Creates a new iterator over all elements contained in this set.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the new iterator
*/
def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -127,17 +127,24 @@ class ListSet[A] extends AbstractSet[A]
}
/**
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
*/
override def head: A = throw new NoSuchElementException("Set has no elements")
/**
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
*/
override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
override def stringPrefix = "ListSet"
+ /** Returns this $coll as an immutable set.
+ *
+ * A new set will not be built; lazy collections will stay lazy.
+ */
+ @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+
/** Represents an entry in the `ListSet`.
*/
protected class Node(override val head: A) extends ListSet[A] with Serializable {
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 3a64820be6..f11217d26a 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -158,7 +158,7 @@ extends scala.collection.AbstractSeq[T]
* @note Calling this method will force the entire sequence to be read.
*/
def length: Int = {
- while (!latest.isLast) addMore()
+ while (!latest.isLast && latest.end < end) addMore()
(latest.end min end) - start
}
@@ -175,7 +175,8 @@ extends scala.collection.AbstractSeq[T]
*/
override def isDefinedAt(index: Int) =
index >= 0 && index < end - start && {
- val p = page(index + start); index + start < p.end
+ val absidx = index + start
+ absidx >= 0 && absidx < page(absidx).end
}
/** The subsequence from index `start` up to `end -1` if `end`
@@ -192,6 +193,9 @@ extends scala.collection.AbstractSeq[T]
if (f.next eq null) f.addMore(more)
f = f.next
}
+ // Warning -- not refining `more` means that slices can freely request and obtain
+ // data outside of their slice. This is part of the design of PagedSeq
+ // (to read pages!) but can be surprising.
new PagedSeq(more, f, s, e)
}
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 264304db68..98266716cc 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -53,7 +53,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
*
* @param n index of the element to return
* @return the element at position `n` in this queue.
- * @throws Predef.NoSuchElementException if the queue is too short.
+ * @throws java.util.NoSuchElementException if the queue is too short.
*/
override def apply(n: Int): A = {
val len = out.length
@@ -120,7 +120,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
/** Returns a tuple with the first element in the queue,
* and a new queue with this element removed.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the first element of the queue.
*/
def dequeue: (A, Queue[A]) = out match {
@@ -139,7 +139,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
/** Returns the first element in the queue, or throws an error if there
* is no element contained in the queue.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the first element.
*/
def front: A = head
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 26ccd09803..3ae8a2c342 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -33,7 +33,13 @@ import scala.collection.parallel.immutable.ParRange
* `init`) are also permitted on overfull ranges.
*
* @param start the start of this range.
- * @param end the exclusive end of the range.
+ * @param end the end of the range. For exclusive ranges, e.g.
+ * `Range(0,3)` or `(0 until 3)`, this is one
+ * step past the last one in the range. For inclusive
+ * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`,
+ * it may be in the range if it is not skipped by the step size.
+ * To find the last element inside a non-empty range,
+ * use `last` instead.
* @param step the step for the range.
*
* @author Martin Odersky
@@ -364,15 +370,16 @@ extends scala.collection.AbstractSeq[Int]
override def equals(other: Any) = other match {
case x: Range =>
// Note: this must succeed for overfull ranges (length > Int.MaxValue)
- (x canEqual this) && (
- isEmpty || // all empty sequences are equal
- (start == x.start && { // Otherwise, must have same start
- val l0 = last
- (l0 == x.last && ( // And same end
- start == l0 || step == x.step // And either the same step, or not take any steps
- ))
- })
- )
+ (x canEqual this) && {
+ if (isEmpty) x.isEmpty // empty sequences are equal
+ else // this is non-empty...
+ x.nonEmpty && start == x.start && { // ...so other must contain something and have same start
+ val l0 = last
+ (l0 == x.last && ( // And same end
+ start == l0 || step == x.step // And either the same step, or not take any steps
+ ))
+ }
+ }
case _ =>
super.equals(other)
}
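
Expected behaviour of the restructured equals, sketched in the REPL (the first case is the one the rewrite fixes):

scala> (0 until 0) == (0 to 2)           // false: an empty range no longer equals a non-empty one
scala> (0 until 0) == (5 until 5)        // true: all empty ranges are equal
scala> (0 to 10 by 3) == (0 to 11 by 3)  // true: same start, same last element (9), same step
scala> (5 to 5) == (5 to 5 by 2)         // true: a single shared element makes the step irrelevant
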
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 0fbf7942d4..7725ad9ee3 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -35,12 +35,7 @@ trait Set[A] extends Iterable[A]
override def companion: GenericCompanion[Set] = Set
- /** Returns this $coll as an immutable map.
- *
- * A new map will not be built; lazy collections will stay lazy.
- */
- @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
- override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
+ override def toSet[B >: A]: Set[B] = to[({type l[a] = immutable.Set[B]})#l] // for bincompat; remove in dev
override def seq: Set[A] = this
protected override def parCombiner = ParSet.newCombiner[A] // if `immutable.SetLike` gets introduced, please move this there!
@@ -62,6 +57,7 @@ object Set extends ImmutableSetFactory[Set] {
def - (elem: Any): Set[Any] = this
def iterator: Iterator[Any] = Iterator.empty
override def foreach[U](f: Any => U): Unit = {}
+ override def toSet[B >: Any]: Set[B] = this.asInstanceOf[Set[B]]
}
private[collection] def emptyInstance: Set[Any] = EmptySet
@@ -92,6 +88,8 @@ object Set extends ImmutableSetFactory[Set] {
if (f(elem1)) Some(elem1)
else None
}
+ @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
}
/** An optimized representation for immutable sets of size 2 */
@@ -123,6 +121,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (f(elem2)) Some(elem2)
else None
}
+ @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
}
/** An optimized representation for immutable sets of size 3 */
@@ -156,6 +156,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (f(elem3)) Some(elem3)
else None
}
+ @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
}
/** An optimized representation for immutable sets of size 4 */
@@ -191,6 +193,8 @@ object Set extends ImmutableSetFactory[Set] {
else if (f(elem4)) Some(elem4)
else None
}
+ @deprecatedOverriding("Immutable sets should do nothing on toSet but return themselves cast as a Set.", "2.11.0")
+ override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]]
}
}
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index b77b16f23f..1c28093b2c 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -95,7 +95,7 @@ class Stack[+A] protected (protected val elems: List[A])
/** Returns the top element of the stack. An error is signaled if
* there is no element on the stack.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the top element.
*/
def top: A =
@@ -105,7 +105,7 @@ class Stack[+A] protected (protected val elems: List[A])
/** Removes the top element from the stack.
* Note: should return `(A, Stack[A])` as for queues (mics)
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the new stack without the former top element.
*/
def pop: Stack[A] =
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 60de147477..cf7b7e272a 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -97,6 +97,14 @@ import scala.language.implicitConversions
* If, on the other hand, there is nothing holding on to the head (e.g. we used
* `def` to define the `Stream`) then once it is no longer being used directly,
* it disappears.
+ *
+ * - Note that some operations, including [[drop]], [[dropWhile]],
+ * [[flatMap]] or [[collect]] may process a large number of intermediate
+ * elements before returning. These necessarily hold onto the head, since
+ * they are methods on `Stream`, and a stream holds its own head. For
+ * computations of this sort where memoization is not desired, use
+ * `Iterator` when possible.
+ *
* {{{
* // For example, let's build the natural numbers and do some silly iteration
* // over them.
@@ -168,6 +176,12 @@ import scala.language.implicitConversions
* loop(1, 1)
* }
* }}}
+ *
+ * Note that `mkString` forces evaluation of a `Stream`, but `addString` does
+ * not. In both cases, a `Stream` that is or ends in a cycle
+ * (e.g. `lazy val s: Stream[Int] = 0 #:: s`) will convert additional trips
+ * through the cycle to `...`. Additionally, `addString` will display an
+ * un-memoized tail as `?`.
*
* @tparam A the type of the elements contained in this stream.
*
@@ -211,7 +225,7 @@ self =>
* }}}
*
* @return The first element of the `Stream`.
- * @throws Predef.NoSuchElementException if the stream is empty.
+ * @throws java.util.NoSuchElementException if the stream is empty.
*/
def head: A
@@ -222,7 +236,7 @@ self =>
* returns the lazy result.
*
* @return The tail of the `Stream`.
- * @throws Predef.UnsupportedOperationException if the stream is empty.
+ * @throws UnsupportedOperationException if the stream is empty.
*/
def tail: Stream[A]
@@ -245,12 +259,22 @@ self =>
* @note Often we use `Stream`s to represent an infinite set or series. If
* that's the case for your particular `Stream` then this function will never
* return and will probably crash the VM with an `OutOfMemory` exception.
+ * This function will not hang on a finite cycle, however.
*
* @return The fully realized `Stream`.
*/
def force: Stream[A] = {
- var these = this
- while (!these.isEmpty) these = these.tail
+ // Use standard 2x 1x iterator trick for cycle detection ("those" is the slow one)
+ var these, those = this
+ if (!these.isEmpty) these = these.tail
+ while (those ne these) {
+ if (these.isEmpty) return this
+ these = these.tail
+ if (these.isEmpty) return this
+ these = these.tail
+ if (these eq those) return this
+ those = those.tail
+ }
this
}
@@ -301,9 +325,24 @@ self =>
override def toStream: Stream[A] = this
- override def hasDefiniteSize = {
- def loop(s: Stream[A]): Boolean = s.isEmpty || s.tailDefined && loop(s.tail)
- loop(this)
+ override def hasDefiniteSize: Boolean = isEmpty || {
+ if (!tailDefined) false
+ else {
+ // Two-iterator trick (2x & 1x speed) for cycle detection.
+ var those = this
+ var these = tail
+ while (those ne these) {
+ if (these.isEmpty) return true
+ if (!these.tailDefined) return false
+ these = these.tail
+ if (these.isEmpty) return true
+ if (!these.tailDefined) return false
+ these = these.tail
+ if (those eq these) return false
+ those = those.tail
+ }
+ false // Cycle detected
+ }
}
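
The 2x/1x runner trick used by force and hasDefiniteSize, isolated into a generic sketch (`hasCycle` and the `Node` type are illustrative, not library code):

// Returns true if repeatedly following `next` from `start` revisits a node (by reference).
def hasCycle[A <: AnyRef](start: A, next: A => Option[A]): Boolean = {
  @annotation.tailrec
  def loop(slow: A, fast: A): Boolean = next(fast) match {
    case None     => false                    // the fast runner fell off the end: no cycle
    case Some(f1) => next(f1) match {
      case None     => false
      case Some(f2) =>
        val s1 = next(slow).get               // slow always has a successor while fast keeps moving
        (f2 eq s1) || loop(s1, f2)            // the runners met: there is a cycle
    }
  }
  loop(start, start)
}

// e.g. with a tiny mutable node type:
final class Node(var next: Node = null)
val a, b = new Node; a.next = b; b.next = a
hasCycle[Node](a, n => Option(n.next))        // true
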
/** Create a new stream which contains all elements of this stream followed by
@@ -321,7 +360,7 @@ self =>
* `List(BigInt(12)) ++ fibs`.
*
* @tparam B The element type of the returned collection.'''That'''
- * @param that The [[scala.collection.GenTraversableOnce]] the be contatenated
+ * @param that The [[scala.collection.GenTraversableOnce]] to be concatenated
* to this `Stream`.
* @return A new collection containing the result of concatenating `this` with
* `that`.
@@ -461,7 +500,7 @@ self =>
else super.flatMap(f)(bf)
/** Returns all the elements of this `Stream` that satisfy the predicate `p`
- * in a new `Stream` - i.e. it is still a lazy data structure. The order of
+ * in a new `Stream` - i.e., it is still a lazy data structure. The order of
* the elements is preserved
*
* @param p the predicate used to filter the stream.
@@ -680,7 +719,8 @@ self =>
* `end`. Inside, the string representations of defined elements (w.r.t.
* the method `toString()`) are separated by the string `sep`. The method will
* not force evaluation of undefined elements. A tail of such elements will be
- * represented by a `"?"` instead.
+ * represented by a `"?"` instead. A cyclic stream is represented by a `"..."`
+ * at the point where the cycle repeats.
*
* @param b The [[collection.mutable.StringBuilder]] factory to which we need
* to add the string elements.
@@ -691,16 +731,86 @@ self =>
* resulting string.
*/
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
- def loop(pre: String, these: Stream[A]) {
- if (these.isEmpty) b append end
- else {
- b append pre append these.head
- if (these.tailDefined) loop(sep, these.tail)
- else b append sep append "?" append end
+ b append start
+ if (!isEmpty) {
+ b append head
+ var cursor = this
+ var n = 1
+ if (cursor.tailDefined) { // If tailDefined, also !isEmpty
+ var scout = tail
+ if (scout.isEmpty) {
+ // Single element. Bail out early.
+ b append end
+ return b
+ }
+ if (cursor ne scout) {
+ cursor = scout
+ if (scout.tailDefined) {
+ scout = scout.tail
+ // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
+ while ((cursor ne scout) && scout.tailDefined) {
+ b append sep append cursor.head
+ n += 1
+ cursor = cursor.tail
+ scout = scout.tail
+ if (scout.tailDefined) scout = scout.tail
+ }
+ }
+ }
+ if (!scout.tailDefined) { // Not a cycle, scout hit an end
+ while (cursor ne scout) {
+ b append sep append cursor.head
+ n += 1
+ cursor = cursor.tail
+ }
+ if (cursor.nonEmpty) {
+ b append sep append cursor.head
+ }
+ }
+ else {
+ // Cycle.
+ // If we have a prefix of length P followed by a cycle of length C,
+ // the scout will be at position (P%C) in the cycle when the cursor
+ // enters it at P. They'll then collide when the scout advances another
+ // C - (P%C) ahead of the cursor.
+ // If we run the scout P farther, then it will be at the start of
+ // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
+ // starts at the beginning of the prefix, they'll collide exactly at
+ // the start of the loop.
+ var runner = this
+ var k = 0
+ while (runner ne scout) {
+ runner = runner.tail
+ scout = scout.tail
+ k += 1
+ }
+ // Now runner and scout are at the beginning of the cycle. Advance
+ // cursor, adding to string, until it hits; then we'll have covered
+ // everything once. If cursor is already at beginning, we'd better
+ // advance one first unless runner didn't go anywhere (in which case
+ // we've already looped once).
+ if ((cursor eq scout) && (k > 0)) {
+ b append sep append cursor.head
+ n += 1
+ cursor = cursor.tail
+ }
+ while (cursor ne scout) {
+ b append sep append cursor.head
+ n += 1
+ cursor = cursor.tail
+ }
+ // Subtract prefix length from total length for cycle reporting.
+ // (Not currently used, but probably a good idea for the future.)
+ n -= k
+ }
+ }
+ if (!cursor.isEmpty) {
+ // Either undefined or cyclic; we can check with tailDefined
+ if (!cursor.tailDefined) b append sep append "?"
+ else b append sep append "..."
}
}
- b append start
- loop("", this)
+ b append end
b
}
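
What the rewritten rendering is expected to produce for the two documented situations (values assumed from the comments above, REPL-style):

scala> lazy val s: Stream[Int] = 0 #:: 1 #:: s
scala> s.mkString("[", ", ", "]")
// "[0, 1, ...]" -- the cycle is printed once and then elided

scala> Stream.from(0).addString(new StringBuilder, "Stream(", ", ", ")").toString
// "Stream(0, ?)" -- addString leaves the unevaluated tail as "?"
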
@@ -771,7 +881,7 @@ self =>
* @return A new `Stream` containing everything but the last element. If your
* `Stream` represents an infinite series, this method will not return.
*
- * @throws `Predef.UnsupportedOperationException` if the stream is empty.
+ * @throws UnsupportedOperationException if the stream is empty.
*/
override def init: Stream[A] =
if (isEmpty) super.init
@@ -839,7 +949,7 @@ self =>
*
* @param p the test predicate.
* @return A new `Stream` representing the results of applying `p` to the
- * oringal `Stream`.
+ * original `Stream`.
*
* @example {{{
* // Assume we have a Stream that takes the first 20 natural numbers
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 8e1d950d00..1ead894faf 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -10,7 +10,7 @@ package scala
package collection
package immutable
-import mutable.Builder
+import mutable.{ ArrayBuilder, Builder }
import scala.util.matching.Regex
import scala.math.ScalaNumber
import scala.reflect.ClassTag
@@ -121,37 +121,43 @@ self =>
}
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e. apply `.stripLineEnd` to all lines
+ * end characters, i.e., apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
def lines: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
/** Return all lines in this string in an iterator, excluding trailing line
- * end characters, i.e. apply `.stripLineEnd` to all lines
+ * end characters, i.e., apply `.stripLineEnd` to all lines
* returned by `linesWithSeparators`.
*/
@deprecated("Use `lines` instead.","2.11.0")
def linesIterator: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
- /** Returns this string with first character converted to upper case */
+ /** Returns this string with first character converted to upper case.
+ * If the first character of the string is capitalized, it is returned unchanged.
+ */
def capitalize: String =
if (toString == null) null
else if (toString.length == 0) ""
+ else if (toString.charAt(0).isUpper) toString
else {
val chars = toString.toCharArray
chars(0) = chars(0).toUpper
new String(chars)
}
- /** Returns this string with the given `prefix` stripped. */
+ /** Returns this string with the given `prefix` stripped. If this string does not
+ * start with `prefix`, it is returned unchanged.
+ */
def stripPrefix(prefix: String) =
if (toString.startsWith(prefix)) toString.substring(prefix.length)
else toString
/** Returns this string with the given `suffix` stripped. If this string does not
- * end with `suffix`, it is returned unchanged. */
+ * end with `suffix`, it is returned unchanged.
+ */
def stripSuffix(suffix: String) =
if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length)
else toString
@@ -197,8 +203,33 @@ self =>
private def escape(ch: Char): String = "\\Q" + ch + "\\E"
- @throws(classOf[java.util.regex.PatternSyntaxException])
- def split(separator: Char): Array[String] = toString.split(escape(separator))
+ def split(separator: Char): Array[String] = {
+ val thisString = toString
+ var pos = thisString.indexOf(separator)
+
+ if (pos != -1) {
+ val res = new ArrayBuilder.ofRef[String]
+
+ var prev = 0
+ do {
+ res += thisString.substring(prev, pos)
+ prev = pos + 1
+ pos = thisString.indexOf(separator, prev)
+ } while (pos != -1)
+
+ if (prev != thisString.size)
+ res += thisString.substring(prev, thisString.size)
+
+ val initialResult = res.result()
+ pos = initialResult.length
+ while (pos > 0 && initialResult(pos - 1).isEmpty) pos = pos - 1
+ if (pos != initialResult.length) {
+ val trimmed = new Array[String](pos)
+ Array.copy(initialResult, 0, trimmed, 0, pos)
+ trimmed
+ } else initialResult
+ } else Array[String](thisString)
+ }
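
The intended behaviour matches java.lang.String#split with a single-character separator: trailing empty strings are dropped and a separator-free string comes back whole (REPL sketch):

scala> "a,b,,".split(',')        // Array(a, b): trailing empty strings are trimmed
scala> ",a,b".split(',')         // Array("", a, b): leading empty strings are kept
scala> "no separator".split(',') // Array(no separator): the whole string, as a single element
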
@throws(classOf[java.util.regex.PatternSyntaxException])
def split(separators: Array[Char]): Array[String] = {
@@ -224,31 +255,31 @@ self =>
def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
/**
- * @throws `java.lang.IllegalArgumentException` - If the string does not contain a parsable boolean.
+ * @throws java.lang.IllegalArgumentException - If the string does not contain a parsable boolean.
*/
def toBoolean: Boolean = parseBoolean(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable byte.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable byte.
*/
def toByte: Byte = java.lang.Byte.parseByte(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable short.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable short.
*/
def toShort: Short = java.lang.Short.parseShort(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable int.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable int.
*/
def toInt: Int = java.lang.Integer.parseInt(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable long.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable long.
*/
def toLong: Long = java.lang.Long.parseLong(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable float.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable float.
*/
def toFloat: Float = java.lang.Float.parseFloat(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable double.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable double.
*/
def toDouble: Double = java.lang.Double.parseDouble(toString)
@@ -281,7 +312,7 @@ self =>
* understands.
*
* @param args the arguments used to instantiating the pattern.
- * @throws `java.lang.IllegalArgumentException`
+ * @throws java.lang.IllegalArgumentException
*/
def format(args : Any*): String =
java.lang.String.format(toString, args map unwrapArg: _*)
@@ -298,7 +329,7 @@ self =>
*
* @param l an instance of `java.util.Locale`
* @param args the arguments used to instantiating the pattern.
- * @throws `java.lang.IllegalArgumentException`
+ * @throws java.lang.IllegalArgumentException
*/
def formatLocal(l: java.util.Locale, args: Any*): String =
java.lang.String.format(l, toString, args map unwrapArg: _*)
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 775d635fae..5fc0607a00 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -29,7 +29,7 @@ trait Traversable[+A] extends scala.collection.Traversable[A]
}
/** $factoryInfo
- * The current default implementation of a $Coll is a `Vector`.
+ * The current default implementation of a $Coll is a `List`.
* @define coll immutable traversable collection
* @define Coll `immutable.Traversable`
*/
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 8cc99a53e6..662075cd93 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -101,8 +101,8 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
else new TreeMap(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 681dbbd1a8..7378211db0 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -87,8 +87,8 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
else newSet(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: A => Boolean): Int = {
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index c7da447f72..47a623a616 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -215,7 +215,7 @@ override def companion: GenericCompanion[Vector] = Vector
import Vector.{Log2ConcatFaster, TinyAppendFaster}
if (that.isEmpty) this.asInstanceOf[That]
else {
- val again = if (!that.isTraversableAgain) that.toVector else that
+ val again = if (!that.isTraversableAgain) that.toVector else that.seq
again.size match {
// Often it's better to append small numbers of elements (or prepend if RHS is a vector)
case n if n <= TinyAppendFaster || n < (this.size >> Log2ConcatFaster) =>
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index de09bb2040..b63d0aae33 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -14,8 +14,8 @@ package mutable
* An immutable AVL Tree implementation formerly used by mutable.TreeSet
*
* @author Lucien Pereira
- * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.0")
*/
+@deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11.2")
private[mutable] sealed trait AVLTree[+A] extends Serializable {
def balance: Int
@@ -27,7 +27,7 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
/**
* Returns a new tree containing the given element.
- * Thows an IllegalArgumentException if element is already present.
+ * Throws an IllegalArgumentException if element is already present.
*
*/
def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
@@ -95,7 +95,7 @@ private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends
/**
* Returns a new tree containing the given element.
- * Thows an IllegalArgumentException if element is already present.
+ * Throws an IllegalArgumentException if element is already present.
*
*/
override def insert[B >: A](value: B, ordering: Ordering[B]) = {
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 47fb66744e..fccc9d83e6 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -224,7 +224,7 @@ extends AbstractMap[K, V]
override def put(key: K, value: V): Option[V] = {
val h = hashOf(key)
val k = key
- var i = seekEntryOrOpen(h, k)
+ val i = seekEntryOrOpen(h, k)
if (i < 0) {
val j = i & IndexMask
_hashes(j) = h
@@ -251,7 +251,7 @@ extends AbstractMap[K, V]
override def update(key: K, value: V): Unit = {
val h = hashOf(key)
val k = key
- var i = seekEntryOrOpen(h, k)
+ val i = seekEntryOrOpen(h, k)
if (i < 0) {
val j = i & IndexMask
_hashes(j) = h
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 2d43b352c5..011fd415ee 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -30,8 +30,8 @@ import parallel.mutable.ParArray
*
* @tparam A the type of this arraybuffer's elements.
*
- * @define Coll `ArrayBuffer`
- * @define coll arraybuffer
+ * @define Coll `mutable.ArrayBuffer`
+ * @define coll array buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
* is defined in object `ArrayBuffer`.
@@ -128,21 +128,22 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this }
/** Inserts new elements at the index `n`. Opposed to method
- * `update`, this method will not replace an element with a
+ * `update`, this method will not replace an element with a new
* one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
* @param seq the traversable object providing all elements to insert.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString)
- val xs = seq.toList
- val len = xs.length
- ensureSize(size0 + len)
+ val len = seq.size
+ val newSize = size0 + len
+ ensureSize(newSize)
+
copy(n, n + len, size0 - n)
- xs.copyToArray(array.asInstanceOf[scala.Array[Any]], n)
- size0 += len
+ seq.copyToArray(array.asInstanceOf[Array[Any]], n)
+ size0 = newSize
}
/** Removes the element on a given index position. It takes time linear in
@@ -150,7 +151,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
*
* @param n the index which refers to the first element to delete.
* @param count the number of elements to delete
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
override def remove(n: Int, count: Int) {
require(count >= 0, "removing negative number of elements")
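A short usage sketch of the two `ArrayBuffer` methods touched here (illustration only):
{{{
import scala.collection.mutable.ArrayBuffer

val buf = ArrayBuffer(1, 2, 5)
buf.insertAll(2, Seq(3, 4))   // inserts before index 2: ArrayBuffer(1, 2, 3, 4, 5)
buf.remove(1, 2)              // removes two elements from index 1: ArrayBuffer(1, 4, 5)
// buf.insertAll(9, Seq(6))   // an index outside 0..size throws IndexOutOfBoundsException
}}}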
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 43d23acc1a..e92d48cfeb 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -110,7 +110,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def |= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) | other.word(i)
this
@@ -121,8 +121,10 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def &= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
- for (i <- 0 until other.nwords)
+ // Different from other operations: no need to ensure capacity because
+ // anything beyond the capacity is 0. Since we use other.word which is 0
+ // off the end, we also don't need to make sure we stay in bounds there.
+ for (i <- 0 until nwords)
elems(i) = elems(i) & other.word(i)
this
}
@@ -132,7 +134,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def ^= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) ^ other.word(i)
this
@@ -143,7 +145,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def &~= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) & ~other.word(i)
this
@@ -160,6 +162,9 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
*
* @return an immutable set containing all the elements of this set.
*/
+ @deprecated("If this BitSet contains a value that is 128 or greater, the result of this method is an 'immutable' " +
+ "BitSet that shares state with this mutable BitSet. Thus, if the mutable BitSet is modified, it will violate the " +
+ "immutability of the result.", "2.11.6")
def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems)
override def clone(): BitSet = {
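A sketch of the capacity behaviour described in the new comment (illustration only):
{{{
import scala.collection.mutable

val a = mutable.BitSet(1, 2, 200)   // spans several words
val b = mutable.BitSet(2, 3)
a &= b                              // no growth needed: bits beyond b's words are simply cleared
// a == BitSet(2)
b |= mutable.BitSet(300)            // |= grows b just enough to hold bit 300
// b == BitSet(2, 3, 300)
}}}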
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 671b79f8c2..fd95e74fbc 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -41,7 +41,7 @@ import generic._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
@SerialVersionUID(-8144992287952814767L)
class DoubleLinkedList[A]() extends AbstractSeq[A]
with LinearSeq[A]
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index a43fe34c99..aafe34f50a 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -56,10 +56,10 @@ import scala.annotation.migration
* @define Coll `DoubleLinkedList`
* @define coll double linked list
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self =>
- /** A reference to the node in the linked list preceeding the current node. */
+ /** A reference to the node in the linked list preceding the current node. */
var prev: This = _
// returns that list if this list is empty
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 65d9c35052..b48a32fa37 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -449,7 +449,7 @@ private[collection] object HashTable {
// h
/* OLD VERSION
- * quick, but bad for sequence 0-10000 - little enthropy in higher bits
+ * quick, but bad for sequence 0-10000 - little entropy in higher bits
* since 2003 */
// var h: Int = hcode + ~(hcode << 9)
// h = h ^ (h >>> 14)
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 31a4749960..7acdeeff18 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -50,7 +50,7 @@ self =>
trait Sliced extends super.Sliced with Transformed[A] {
override def length = endpoints.width
def update(idx: Int, elem: A) =
- if (idx + from < until) self.update(idx + from, elem)
+ if (idx >= 0 && idx + from < until) self.update(idx + from, elem)
else throw new IndexOutOfBoundsException(idx.toString)
}
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index b64504be3d..275f490675 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -160,6 +160,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
override def clear() {
clearTable()
firstEntry = null
+ lastEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 1768c946ed..756a2f73c1 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -112,6 +112,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
override def clear() {
clearTable()
firstEntry = null
+ lastEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
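Both linked hash collections need the extra reset; a sketch of the scenario it addresses (illustration only): inserting after `clear()` should start a fresh insertion order rather than link to a stale tail.
{{{
import scala.collection.mutable.LinkedHashMap

val m = LinkedHashMap("a" -> 1, "b" -> 2)
m.clear()        // resets firstEntry and, with this change, lastEntry as well
m += ("c" -> 3)
m.toList         // List((c,3)): only the entry added after clear()
}}}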
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 092698ac0b..b3500367af 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -76,7 +76,7 @@ import generic._
* }}}
*/
@SerialVersionUID(-7308240733518833071L)
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
class LinkedList[A]() extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, LinkedList]
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 987b83d23b..a9d385bc5b 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -55,7 +55,7 @@ import scala.annotation.tailrec
*
* }}}
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self =>
var elem: A = _
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 5e838d0d88..8faaf97741 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -132,7 +132,7 @@ final class ListBuffer[A]
*
* @param n the index of the element to replace.
* @param x the new element.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def update(n: Int, x: A) {
// We check the bounds early, so that we don't trigger copying.
@@ -217,7 +217,7 @@ final class ListBuffer[A]
*
* @param n the index where a new element will be inserted.
* @param seq the iterable object providing all elements to insert.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
// We check the bounds early, so that we don't trigger copying.
@@ -330,7 +330,7 @@ final class ListBuffer[A]
* @param n the index which refers to the element to delete.
* @return n the element that was formerly at position `n`.
* @note an element must exists at position `n`.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def remove(n: Int): A = {
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString())
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index 984ae6f7cc..c124f35cd7 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -19,7 +19,7 @@ import generic.CanBuildFrom
* on a map that will no longer have elements removed but will be
* used heavily may save both time and storage space.
*
- * This map is not indended to contain more than 2^29 entries (approximately
+ * This map is not intended to contain more than 2^29 entries (approximately
* 500 million). The maximum capacity is 2^30, but performance will degrade
* rapidly as 2^30 is approached.
*
@@ -81,7 +81,7 @@ extends AbstractMap[Long, V]
private def toIndex(k: Long): Int = {
// Part of the MurmurHash3 32 bit finalizer
val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt
- var x = (h ^ (h >>> 16)) * 0x85EBCA6B
+ val x = (h ^ (h >>> 16)) * 0x85EBCA6B
(x ^ (x >>> 13)) & mask
}
@@ -311,7 +311,7 @@ extends AbstractMap[Long, V]
}
}
else {
- var i = seekEntryOrOpen(key)
+ val i = seekEntryOrOpen(key)
if (i < 0) {
val j = i & IndexMask
_keys(j) = key
@@ -388,12 +388,14 @@ extends AbstractMap[Long, V]
nextPair = anotherPair
anotherPair = null
}
- nextPair = null
+ else nextPair = null
ans
}
}
override def foreach[A](f: ((Long,V)) => A) {
+ if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V]))
+ if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V]))
var i,j = 0
while (i < _keys.length & j < _size) {
val k = _keys(i)
@@ -403,8 +405,6 @@ extends AbstractMap[Long, V]
}
i += 1
}
- if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V]))
- if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V]))
}
override def clone(): LongMap[V] = {
@@ -417,6 +417,8 @@ extends AbstractMap[Long, V]
/** Applies a function to all keys of this map. */
def foreachKey[A](f: Long => A) {
+ if ((extraKeys & 1) == 1) f(0L)
+ if ((extraKeys & 2) == 2) f(Long.MinValue)
var i,j = 0
while (i < _keys.length & j < _size) {
val k = _keys(i)
@@ -426,12 +428,12 @@ extends AbstractMap[Long, V]
}
i += 1
}
- if ((extraKeys & 1) == 1) f(0L)
- if ((extraKeys & 2) == 2) f(Long.MinValue)
}
/** Applies a function to all values of this map. */
def foreachValue[A](f: V => A) {
+ if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V])
+ if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V])
var i,j = 0
while (i < _keys.length & j < _size) {
val k = _keys(i)
@@ -441,8 +443,6 @@ extends AbstractMap[Long, V]
}
i += 1
}
- if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V])
- if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V])
}
/** Creates a new `LongMap` with different values.
@@ -450,6 +450,8 @@ extends AbstractMap[Long, V]
* collection immediately.
*/
def mapValuesNow[V1](f: V => V1): LongMap[V1] = {
+ val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
+ val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false)
val kz = java.util.Arrays.copyOf(_keys, _keys.length)
val vz = new Array[AnyRef](_values.length)
@@ -462,8 +464,6 @@ extends AbstractMap[Long, V]
}
i += 1
}
- val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
- val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz)
lm
}
@@ -472,6 +472,8 @@ extends AbstractMap[Long, V]
* Note: the default, if any, is not transformed.
*/
def transformValues(f: V => V): this.type = {
+ if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef]
+ if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef]
var i,j = 0
while (i < _keys.length & j < _size) {
val k = _keys(i)
@@ -481,26 +483,8 @@ extends AbstractMap[Long, V]
}
i += 1
}
- if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef]
- if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef]
this
}
-
- /*
- override def toString = {
- val sb = new StringBuilder("LongMap(")
- var n = 0
- foreach{ case (k,v) =>
- if (n > 0) sb ++= ", "
- sb ++= k.toString
- sb ++= " -> "
- sb ++= v.toString
- n += 1
- }
- sb += ')'
- sb.result
- }
- */
}
object LongMap {
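A sketch of the special-key handling these hunks make consistent (illustration only): the keys `0L` and `Long.MinValue` are stored out of band but are now covered by the traversal and transformation methods.
{{{
import scala.collection.mutable.LongMap

val m = LongMap(0L -> "zero", Long.MinValue -> "min", 5L -> "five")
m.foreachKey(k => println(k))     // visits 0, Long.MinValue and 5
m.transformValues(_.toUpperCase)  // also transforms the values bound to the special keys
// m(0L) == "ZERO" && m(Long.MinValue) == "MIN"
}}}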
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 6230fc23aa..44af886cf5 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -18,6 +18,8 @@ import scala.collection.parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
* $mapTags
+ * @define Coll `mutable.Map`
+ * @define coll mutable map
* @since 2.8
*
* @define mapNote
@@ -131,7 +133,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
/** Creates a new map containing the key/value mappings provided by the specified traversable object
* and all the key/value mappings of this map.
*
- * Note that existing mappings from this map with the same key as those in `xs` will be overriden.
+ * Note that existing mappings from this map with the same key as those in `xs` will be overridden.
*
* @param xs the traversable object.
* @return a new map containing mappings of this map and those provided by `xs`.
@@ -176,6 +178,10 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
*
* Otherwise, computes value from given expression `op`, stores with key
* in map and returns that value.
+ *
+ * Concurrent map implementations may evaluate the expression `op`
+ * multiple times, or may evaluate `op` without inserting the result.
+ *
* @param key the key to test
* @param op the computation yielding the value to associate with `key`, if
* `key` is previously unbound.
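A sketch of the single-threaded contract (illustration only); per the note added here, concurrent map implementations may evaluate `op` more than once:
{{{
import scala.collection.mutable

val cache = mutable.Map.empty[String, Int]
def costly(s: String): Int = { println("computing " + s); s.length }

cache.getOrElseUpdate("scala", costly("scala"))  // evaluates op, stores and returns 5
cache.getOrElseUpdate("scala", costly("scala"))  // key already bound: op is not evaluated again
}}}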
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 78dfc35268..ac2ebf31d8 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -65,10 +65,9 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
*/
protected def makeSet: Set[B] = new HashSet[B]
- /** Assigns the specified `value` to a specified `key`, replacing
- * the existing value assigned to that `key` if it is equal to
- * the specified value. Otherwise, simply adds another binding to
- * the `key`.
+ /** Assigns the specified `value` to a specified `key`. If the key
+ * already has a binding equal to `value`, nothing is changed;
+ * otherwise a new binding is added for that `key`.
*
* @param key The key to which to bind the new value.
* @param value The value to bind to the key.
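A sketch of the clarified `addBinding` semantics (illustration only):
{{{
import scala.collection.mutable.{HashMap, MultiMap, Set}

val mm = new HashMap[String, Set[Int]] with MultiMap[String, Int]
mm.addBinding("a", 1)
mm.addBinding("a", 2)
mm.addBinding("a", 1)   // an equal binding already exists, so nothing changes
// mm("a") == Set(1, 2)
}}}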
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index a0d3ee0ef0..646023f469 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -13,7 +13,6 @@ package mutable
import generic._
import immutable.{List, Nil}
-// !!! todo: convert to LinkedListBuffer?
/**
* This class is used internally to represent mutable lists. It is the
* basis for the implementation of the class `Queue`.
@@ -22,6 +21,8 @@ import immutable.{List, Nil}
* @author Martin Odersky
* @version 2.8
* @since 1
+ * @define Coll `mutable.MutableList`
+ * @define coll mutable list
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]]
* section on `Mutable Lists` for more information.
*/
@@ -111,9 +112,21 @@ extends AbstractSeq[A]
}
}
- /** Returns an iterator over all elements of this list.
+ /** Returns an iterator over up to `length` elements of this list.
*/
- override def iterator: Iterator[A] = first0.iterator
+ override def iterator: Iterator[A] = if (isEmpty) Iterator.empty else
+ new AbstractIterator[A] {
+ var elems = first0
+ var count = len
+ def hasNext = count > 0 && elems.nonEmpty
+ def next() = {
+ if (!hasNext) throw new NoSuchElementException
+ count = count - 1
+ val e = elems.elem
+ elems = if (count == 0) null else elems.next
+ e
+ }
+ }
override def last = {
if (isEmpty) throw new NoSuchElementException("MutableList.empty.last")
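Since `MutableList` underlies `Queue`, a small sketch of the iterator contract above (illustration only): at most `length` elements are produced.
{{{
import scala.collection.mutable.Queue

val q = Queue(1, 2, 3)
q.dequeue()         // 1
q.iterator.toList   // List(2, 3): the iterator honours the list's current length
}}}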
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index aade2ed6fb..24f5761cf5 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -31,7 +31,7 @@ object OpenHashMap {
/** A mutable hash map based on an open hashing scheme. The precise scheme is
* undefined, but it should make a reasonable effort to ensure that an insert
- * with consecutive hash codes is not unneccessarily penalised. In particular,
+ * with consecutive hash codes is not unnecessarily penalised. In particular,
* mappings of consecutive integer keys should work without significant
* performance loss.
*
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index b949bec48a..d3c4161e3b 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -16,6 +16,11 @@ import generic._
* To prioritize elements of type A there must be an implicit
* Ordering[A] available at creation.
*
+ * Only the `dequeue` and `dequeueAll` methods will return elements in priority
+ * order (while removing elements from the heap). Standard collection methods
+ * including `drop` and `iterator` will remove or traverse the heap in whichever
+ * order seems most convenient.
+ *
* @tparam A type of the elements in this priority queue.
* @param ord implicit ordering used to compare the elements of type `A`.
*
@@ -121,7 +126,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
/** Returns the element with the highest priority in the queue,
* and removes this element from the queue.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the element with the highest priority.
*/
def dequeue(): A =
@@ -242,13 +247,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return a priority queue with the same elements.
*/
override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
-
- // def printstate() {
- // println("-----------------------")
- // println("Size: " + resarr.p_size0)
- // println("Internal array: " + resarr.p_array.toList)
- // println(toString)
- // }
}
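A sketch of the ordering caveat documented above (illustration only):
{{{
import scala.collection.mutable.PriorityQueue

val pq = PriorityQueue(3, 1, 4, 1, 5)
pq.dequeue()         // 5: dequeue removes elements in priority order
pq.dequeue()         // 4
pq.iterator.toList   // the remaining elements in heap order, not necessarily sorted
}}}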
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 7c890fe309..03d387a535 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -58,7 +58,7 @@ extends MutableList[A]
/** Returns the first element in the queue, and removes this element
* from the queue.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the first element of the queue.
*/
def dequeue(): A =
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index d749167870..81a71adc91 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -16,19 +16,20 @@ import scala.annotation.migration
import parallel.mutable.ParSet
/** A template trait for mutable sets of type `mutable.Set[A]`.
+ *
+ * This trait provides most of the operations of a `mutable.Set` independently of its representation.
+ * It is typically inherited by concrete implementations of sets.
+ *
+ * $setNote
+ *
* @tparam A the type of the elements of the set
* @tparam This the type of the set itself.
*
- * $setnote
- *
* @author Martin Odersky
* @version 2.8
* @since 2.8
*
- * @define setnote
- * @note
- * This trait provides most of the operations of a `mutable.Set` independently of its representation.
- * It is typically inherited by concrete implementations of sets.
+ * @define setNote
*
* To implement a concrete mutable set, you need to provide implementations
* of the following methods:
@@ -36,13 +37,13 @@ import parallel.mutable.ParSet
* def contains(elem: A): Boolean
* def iterator: Iterator[A]
* def += (elem: A): this.type
- * def -= (elem: A): this.type</pre>
+ * def -= (elem: A): this.type
* }}}
* If you wish that methods like `take`,
* `drop`, `filter` return the same kind of set,
* you should also override:
* {{{
- * def empty: This</pre>
+ * def empty: This
* }}}
* It is also good idea to override methods `foreach` and
* `size` for efficiency.
@@ -207,7 +208,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
/** Send a message to this scriptable object.
*
* @param cmd the message to send.
- * @throws `Predef.UnsupportedOperationException`
+ * @throws UnsupportedOperationException
* if the message was not understood.
*/
@deprecated("Scripting is deprecated.", "2.11.0")
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 53b6c59939..1a92f23b7b 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -125,7 +125,7 @@ extends AbstractSeq[A]
* the element from the stack. An error is signaled if there is no
* element on the stack.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the top element
*/
def top: A =
@@ -133,7 +133,7 @@ extends AbstractSeq[A]
/** Removes the top element from the stack.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the top element
*/
def pop(): A = {
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 498e9e461e..c56d40786e 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -22,6 +22,8 @@ import immutable.StringLike
* @author Martin Odersky
* @version 2.8
* @since 2.7
+ * @define Coll `mutable.IndexedSeq`
+ * @define coll string builder
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]]
* section on `StringBuilders` for more information.
*/
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 1f89199bdc..2212486bcf 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -85,7 +85,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
def classTagCompanion = UnrolledBuffer
- /** Concatenates the targer unrolled buffer to this unrolled buffer.
+ /** Concatenates the target unrolled buffer to this unrolled buffer.
*
* The specified buffer `that` is cleared after this operation. This is
* an O(1) operation.
@@ -208,7 +208,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
def newBuilder[T](implicit t: ClassTag[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T]
val waterline = 50
- val waterlineDelim = 100
+ val waterlineDelim = 100 // TODO -- fix this name! It's a denominator, not a delimiter. (But it's part of the API so we can't just change it.)
private[collection] val unrolledlength = 32
/** Unrolled buffer node.
@@ -300,27 +300,35 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
if (next eq null) true else false // checks if last node was thrown out
} else false
- @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
- // divide this node at the appropriate position and insert all into head
- // update new next
- val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff)
- Array.copy(array, idx, newnextnode.array, 0, size - idx)
- newnextnode.size = size - idx
- newnextnode.next = next
-
- // update this
- nullout(idx, size)
- size = idx
- next = null
-
- // insert everything from iterable to this
- var curr = this
- for (elem <- t) curr = curr append elem
- curr.next = newnextnode
-
- // try to merge the last node of this with the newnextnode
- if (curr.tryMergeWithNext()) buffer.lastPtr = curr
- } else insertAll(idx - size, t, buffer)
+ @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = {
+ if (idx < size) {
+ // divide this node at the appropriate position and insert all into head
+ // update new next
+ val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff)
+ Array.copy(array, idx, newnextnode.array, 0, size - idx)
+ newnextnode.size = size - idx
+ newnextnode.next = next
+
+ // update this
+ nullout(idx, size)
+ size = idx
+ next = null
+
+ // insert everything from iterable to this
+ var curr = this
+ for (elem <- t) curr = curr append elem
+ curr.next = newnextnode
+
+ // try to merge the last node of this with the newnextnode and fix tail pointer if needed
+ if (curr.tryMergeWithNext()) buffer.lastPtr = curr
+ else if (newnextnode.next eq null) buffer.lastPtr = newnextnode
+ }
+ else if (idx == size || (next eq null)) {
+ var curr = this
+ for (elem <- t) curr = curr append elem
+ }
+ else next.insertAll(idx - size, t, buffer)
+ }
private def nullout(from: Int, until: Int) {
var idx = from
while (idx < until) {
@@ -338,7 +346,7 @@ object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
tryMergeWithNext()
}
- override def toString = array.take(size).mkString("Unrolled[" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "")
+ override def toString = array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "")
}
}
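A usage sketch of `insertAll` on the public buffer, which delegates to the node-level method rewritten above (illustration only):
{{{
import scala.collection.mutable.UnrolledBuffer

val ub = UnrolledBuffer(1, 2, 5)
ub.insertAll(2, Seq(3, 4))   // splits the node at index 2 and splices the new elements in
ub.toList                    // List(1, 2, 3, 4, 5)
}}}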
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index 53fca9f779..8740bda835 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -93,7 +93,7 @@ object WrappedArray {
def empty[T <: AnyRef]: WrappedArray[T] = EmptyWrappedArray.asInstanceOf[WrappedArray[T]]
// If make is called explicitly we use whatever we're given, even if it's
- // empty. This may be unnecesssary (if WrappedArray is to honor the collections
+ // empty. This may be unnecessary (if WrappedArray is to honor the collections
// contract all empty ones must be equal, so discriminating based on the reference
// equality of an empty array should not come up) but we may as well be
// conservative since wrapRefArray contributes most of the unnecessary allocations.
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index 26b061b2a5..6a2b6de75a 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -18,7 +18,7 @@ package scala
*
* == Using Collections ==
*
- * It is convienient to treat all collections as either
+ * It is convenient to treat all collections as either
* a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as
* these traits define the vast majority of operations
* on a collection.
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2ceeb18eef..a5ba8c49ad 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -23,9 +23,6 @@ import scala.collection.parallel.mutable.ParArrayCombiner
*
* @author Aleksandar Prokopec
* @since 2.9
- *
- * @define Coll `ParIterable`
- * @define coll parallel iterable
*/
trait ParIterable[+T]
extends GenIterable[T]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 445edd23cb..016255dca4 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -150,7 +150,8 @@ import scala.collection.parallel.ParallelCollectionImplicits._
* @define indexsignalling
* This method will use `indexFlag` signalling capabilities. This means
* that splitters may set and read the `indexFlag` state.
- *
+ * @define Coll `ParIterable`
+ * @define coll parallel iterable
*/
trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]]
extends GenIterableLike[T, Repr]
@@ -743,7 +744,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* The index flag is initially set to maximum integer value.
*
* @param pred the predicate used to test the elements
- * @return the longest prefix of this $coll of elements that satisy the predicate `pred`
+ * @return the longest prefix of this $coll of elements that satisfy the predicate `pred`
*/
def takeWhile(pred: T => Boolean): Repr = {
val cbf = combinerFactory
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index d2b15c727a..ee1334ba55 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -24,6 +24,8 @@ import scala.collection.generic.Signalling
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 4e9a2e5751..4feda5ff07 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -20,6 +20,8 @@ import scala.collection.Set
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `ParSet`
+ * @define coll parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 42027f5bac..5d99394a50 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -22,6 +22,8 @@ import scala.collection.generic.Shrinkable
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 9367f1424d..4e2d3e0e4c 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -14,9 +14,6 @@ import scala.collection.parallel.Combiner
/** A mutable variant of `ParSet`.
*
- * @define Coll `mutable.ParSet`
- * @define coll mutable parallel set
- *
* @author Aleksandar Prokopec
*/
trait ParSet[T]
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 13af5ed649..08aa3b024b 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -21,6 +21,8 @@ import scala.collection.generic.Shrinkable
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `mutable.ParSet`
+ * @define coll mutable parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 91c54fa8f1..d77dcb0658 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -206,7 +206,7 @@ package parallel {
* Methods `beforeCombine` and `afterCombine` are called before and after
* combining the buckets, respectively, given that the argument to `combine`
* is not `this` (as required by the `combine` contract).
- * They can be overriden in subclasses to provide custom behaviour by modifying
+ * They can be overridden in subclasses to provide custom behaviour by modifying
* the receiver (which will be the return value).
*/
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 875d811b9b..4c82d6e15b 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -70,9 +70,9 @@ object Platform {
* @param elemClass the `Class` object of the component type of the array
* @param length the length of the new array.
* @return an array of the given component type as an `AnyRef`.
- * @throws `java.lang.NullPointerException` If `elemClass` is `null`.
- * @throws `java.lang.IllegalArgumentException` if componentType is [[scala.Unit]] or `java.lang.Void.TYPE`
- * @throws `java.lang.NegativeArraySizeException` if the specified length is negative
+ * @throws java.lang.NullPointerException If `elemClass` is `null`.
+ * @throws java.lang.IllegalArgumentException if componentType is [[scala.Unit]] or `java.lang.Void.TYPE`
+ * @throws java.lang.NegativeArraySizeException if the specified length is negative
*/
@inline
def createArray(elemClass: Class[_], length: Int): AnyRef =
@@ -80,7 +80,7 @@ object Platform {
/** Assigns the value of 0 to each element in the array.
* @param arr A non-null Array[Int].
- * @throws `java.lang.NullPointerException` If `arr` is `null`.
+ * @throws java.lang.NullPointerException If `arr` is `null`.
*/
@inline
def arrayclear(arr: Array[Int]) { java.util.Arrays.fill(arr, 0) }
@@ -92,9 +92,9 @@ object Platform {
*
* @param name the fully qualified name of the desired class.
* @return the `Class` object for the class with the specified name.
- * @throws `java.lang.LinkageError` if the linkage fails
- * @throws `java.lang.ExceptionInInitializerError` if the initialization provoked by this method fails
- * @throws `java.lang.ClassNotFoundException` if the class cannot be located
+ * @throws java.lang.LinkageError if the linkage fails
+ * @throws java.lang.ExceptionInInitializerError if the initialization provoked by this method fails
+ * @throws java.lang.ClassNotFoundException if the class cannot be located
* @example {{{
* val a = scala.compat.Platform.getClassForName("java.lang.Integer") // returns the Class[_] for java.lang.Integer
* }}}
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index 067244bd1c..89ad7d8c0e 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -10,8 +10,10 @@
package scala.concurrent
-/** This class ...
+/** This class provides a simple FIFO queue of data objects,
+ * which are read by one or more reader threads.
*
+ * @tparam A type of data exchanged
* @author Martin Odersky
* @version 1.0, 10/03/2003
*/
@@ -20,11 +22,14 @@ class Channel[A] {
var elem: A = _
var next: LinkedList[A] = null
}
- private var written = new LinkedList[A] // FIFO buffer, realized through
+ private var written = new LinkedList[A] // FIFO queue, realized through
private var lastWritten = written // aliasing of a linked list
private var nreaders = 0
- /**
+ /** Append a value to the FIFO queue to be read by `read`.
+ * This operation is nonblocking and can be executed by any thread.
+ *
+ * @param x object to enqueue to this channel
*/
def write(x: A) = synchronized {
lastWritten.elem = x
@@ -33,6 +38,11 @@ class Channel[A] {
if (nreaders > 0) notify()
}
+ /** Retrieve the next waiting object from the FIFO queue,
+ * blocking if necessary until an object is available.
+ *
+ * @return next object dequeued from this channel
+ */
def read: A = synchronized {
while (written.next == null) {
try {
@@ -45,5 +55,4 @@ class Channel[A] {
written = written.next
x
}
-
}
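A minimal sketch of the newly documented write/read protocol (illustration only):
{{{
import scala.concurrent.Channel

val ch = new Channel[Int]
new Thread(new Runnable { def run() = ch.write(42) }).start()
ch.read   // blocks until a value has been written, then returns 42
}}}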
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index a1e94c8876..e380c55880 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -61,28 +61,44 @@ or import scala.concurrent.ExecutionContext.Implicits.global.""")
trait ExecutionContext {
/** Runs a block of code on this execution context.
+ *
+ * @param runnable the task to execute
*/
def execute(runnable: Runnable): Unit
/** Reports that an asynchronous computation failed.
+ *
+ * @param cause the cause of the failure
*/
def reportFailure(@deprecatedName('t) cause: Throwable): Unit
- /** Prepares for the execution of a task. Returns the prepared
- * execution context. A valid implementation of `prepare` is one
- * that simply returns `this`.
+ /** Prepares for the execution of a task. Returns the prepared execution context.
+ *
+ * `prepare` should be called at the site where an `ExecutionContext` is received (for
+ * example, through an implicit method parameter). The returned execution context may
+ * then be used to execute tasks. The role of `prepare` is to save any context relevant
+ * to an execution's ''call site'', so that this context may be restored at the
+ * ''execution site''. (These are often different: for example, execution may be
+ * suspended through a `Promise`'s future until the `Promise` is completed, which may
+ * be done in another thread, on another stack.)
+ *
+ * Note: a valid implementation of `prepare` is one that simply returns `this`.
+ *
+ * @return the prepared execution context
*/
def prepare(): ExecutionContext = this
}
/**
- * Union interface since Java does not support union types
+ * An [[ExecutionContext]] that is also a
+ * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]].
*/
trait ExecutionContextExecutor extends ExecutionContext with Executor
/**
- * Union interface since Java does not support union types
+ * An [[ExecutionContext]] that is also a
+ * Java [[http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]].
*/
trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService
@@ -91,38 +107,72 @@ trait ExecutionContextExecutorService extends ExecutionContextExecutor with Exec
*/
object ExecutionContext {
/**
- * This is the explicit global ExecutionContext,
- * call this when you want to provide the global ExecutionContext explicitly
+ * The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global
+ * `ExecutionContext` explicitly.
+ *
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
+ *
+ * @return the global `ExecutionContext`
*/
def global: ExecutionContextExecutor = Implicits.global
object Implicits {
/**
- * This is the implicit global ExecutionContext,
- * import this when you want to provide the global ExecutionContext implicitly
+ * The implicit global `ExecutionContext`. Import `global` when you want to provide the global
+ * `ExecutionContext` implicitly.
+ *
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*/
implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
+ *
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
+ * @param reporter a function for error reporting
+ * @return the `ExecutionContext` using the given `ExecutorService`
*/
def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService =
impl.ExecutionContextImpl.fromExecutorService(e, reporter)
- /** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter.
+ /** Creates an `ExecutionContext` from the given `ExecutorService` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]].
+ *
+ * If it is guaranteed that none of the executed tasks are blocking, a single-threaded `ExecutorService`
+ * can be used to create an `ExecutionContext` as follows:
+ *
+ * {{{
+ * import java.util.concurrent.Executors
+ * val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())
+ * }}}
+ *
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
+ * @return the `ExecutionContext` using the given `ExecutorService`
*/
def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
/** Creates an `ExecutionContext` from the given `Executor`.
+ *
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
+ * @param reporter a function for error reporting
+ * @return the `ExecutionContext` using the given `Executor`
*/
def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor =
impl.ExecutionContextImpl.fromExecutor(e, reporter)
- /** Creates an `ExecutionContext` from the given `Executor` with the default Reporter.
+ /** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]].
+ *
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
+ * @return the `ExecutionContext` using the given `Executor`
*/
def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
- /** The default reporter simply prints the stack trace of the `Throwable` to System.err.
+ /** The default reporter simply prints the stack trace of the `Throwable` to [[http://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]].
+ *
+ * @return the function for error reporting
*/
def defaultReporter: Throwable => Unit = _.printStackTrace()
}
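A sketch of the factory parameters documented above, assuming a caller-supplied thread pool (illustration only):
{{{
import java.util.concurrent.Executors
import scala.concurrent.ExecutionContext

val pool = Executors.newFixedThreadPool(2)
implicit val ec: ExecutionContext =
  ExecutionContext.fromExecutorService(pool, t => println("task failed: " + t))
}}}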
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 4ed0687334..914646320c 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -102,7 +102,7 @@ trait Future[+T] extends Awaitable[T] {
/* Callbacks */
- /** When this future is completed successfully (i.e. with a value),
+ /** When this future is completed successfully (i.e., with a value),
* apply the provided partial function to the value if the partial function
* is defined at that value.
*
@@ -118,7 +118,7 @@ trait Future[+T] extends Awaitable[T] {
case _ =>
}
- /** When this future is completed with a failure (i.e. with a throwable),
+ /** When this future is completed with a failure (i.e., with a throwable),
* apply the provided callback to the throwable.
*
* $caughtThrowables
@@ -485,7 +485,7 @@ object Future {
* The result becomes available once the asynchronous computation is completed.
*
* @tparam T the type of the result
- * @param body the asychronous computation
+ * @param body the asynchronous computation
* @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index 3d0597ca22..91e55d30cb 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -11,7 +11,7 @@ package scala.concurrent
import java.util.concurrent.{ExecutorService, Executor}
import scala.language.implicitConversions
-/** The `JavaConversions` object provides implicit converstions supporting
+/** The `JavaConversions` object provides implicit conversions supporting
* interoperability between Scala and Java concurrency classes.
*
* @author Philipp Haller
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 1c00c0e91f..8d18da2d38 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -14,8 +14,8 @@ package scala.concurrent
*
* @author Martin Odersky
* @version 1.0, 10/03/2003
- * @deprecated("Use java.util.concurrent.locks.Lock", "2.11.0")
*/
+@deprecated("Use java.util.concurrent.locks.Lock", "2.11.2")
class Lock {
var available = true
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index eb8044ed3b..0f4e98db57 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -66,10 +66,7 @@ trait Promise[T] {
*
* @return This promise
*/
- final def completeWith(other: Future[T]): this.type = {
- other onComplete { this complete _ }
- this
- }
+ final def completeWith(other: Future[T]): this.type = tryCompleteWith(other)
/** Attempts to complete this promise with the specified future, once that future is completed.
*
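A sketch of `completeWith`, which now simply forwards to `tryCompleteWith` (illustration only):
{{{
import scala.concurrent.{Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global

val p = Promise[Int]()
p.completeWith(Future(21 * 2))
p.future.foreach(println)   // eventually prints 42
}}}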
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index d5dc3d7e3f..9634f6d900 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -13,6 +13,7 @@ import java.util.concurrent.TimeUnit
/** A class to provide safe concurrent access to a mutable cell.
* All methods are synchronized.
*
+ * @tparam A type of the contained value
* @author Martin Odersky
* @version 1.0, 10/03/2003
*/
@@ -20,6 +21,12 @@ class SyncVar[A] {
private var isDefined: Boolean = false
private var value: Option[A] = None
+ /**
+ * Waits for this SyncVar to become defined and returns
+ * the result, without modifying the stored value.
+ *
+ * @return value that is held in this container
+ */
def get: A = synchronized {
while (!isDefined) wait()
value.get
@@ -57,8 +64,12 @@ class SyncVar[A] {
value
}
- /** Waits for this SyncVar to become defined and returns
- * the result */
+ /**
+ * Waits for this SyncVar to become defined and returns
+ * the result, unsetting the stored value before returning.
+ *
+ * @return value that was held in this container
+ */
def take(): A = synchronized {
try get
finally unsetVal()
@@ -82,7 +93,7 @@ class SyncVar[A] {
// [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
- @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
+ @deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0")
// NOTE: Used by SBT 0.13.0-M2 and below
def set(x: A): Unit = setVal(x)
@@ -102,7 +113,7 @@ class SyncVar[A] {
// [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, unset has been
// deprecated in order to eventually be able to make "unsetting" private
- @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
+ @deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0")
// NOTE: Used by SBT 0.13.0-M2 and below
def unset(): Unit = synchronized {
isDefined = false
@@ -129,4 +140,3 @@ class SyncVar[A] {
}
}
-
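A sketch of the `get`/`take` distinction documented above (illustration only):
{{{
import scala.concurrent.SyncVar

val sv = new SyncVar[String]
new Thread(new Runnable { def run() = sv.put("ready") }).start()
sv.get      // waits for a value and returns it, leaving it in place
sv.take()   // returns "ready" and empties the SyncVar
}}}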
diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala
index 61cbe47530..a25a478602 100644
--- a/src/library/scala/concurrent/duration/Deadline.scala
+++ b/src/library/scala/concurrent/duration/Deadline.scala
@@ -25,15 +25,15 @@ package scala.concurrent.duration
*/
case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] {
/**
- * Return a deadline advanced (i.e. moved into the future) by the given duration.
+ * Return a deadline advanced (i.e., moved into the future) by the given duration.
*/
def +(other: FiniteDuration): Deadline = copy(time = time + other)
/**
- * Return a deadline moved backwards (i.e. towards the past) by the given duration.
+ * Return a deadline moved backwards (i.e., towards the past) by the given duration.
*/
def -(other: FiniteDuration): Deadline = copy(time = time - other)
/**
- * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative).
+ * Calculate time difference between this and the other deadline, where the result is directed (i.e., may be negative).
*/
def -(other: Deadline): FiniteDuration = time - other.time
/**
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 1b50b7fa56..182c2d172a 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -182,6 +182,7 @@ object Duration {
def compare(other: Duration) = if (other eq this) 0 else 1
def unary_- : Duration = this
def toUnit(unit: TimeUnit): Double = Double.NaN
+ private def readResolve(): AnyRef = Undefined // Instructs deserialization to use this same instance
}
sealed abstract class Infinite extends Duration {
@@ -230,7 +231,7 @@ object Duration {
* but itself. This value closely corresponds to Double.PositiveInfinity,
* matching its semantics in arithmetic operations.
*/
- val Inf: Infinite = new Infinite {
+ val Inf: Infinite = new Infinite {
override def toString = "Duration.Inf"
def compare(other: Duration) = other match {
case x if x eq Undefined => -1 // Undefined != Undefined
@@ -239,6 +240,7 @@ object Duration {
}
def unary_- : Duration = MinusInf
def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity
+ private def readResolve(): AnyRef = Inf // Instructs deserialization to use this same instance
}
/**
@@ -251,6 +253,7 @@ object Duration {
def compare(other: Duration) = if (other eq this) 0 else -1
def unary_- : Duration = Inf
def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity
+ private def readResolve(): AnyRef = MinusInf // Instructs deserialization to use this same instance
}
// Java Factories
@@ -621,7 +624,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
}
def -(other: Duration) = other match {
case x: FiniteDuration => add(-x.length, x.unit)
- case _ => other
+ case _ => -other
}
def *(factor: Double) =
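Two behavioural points from the hunks above, sketched under the 2.11 duration API: readResolve keeps Undefined, Inf and MinusInf unique across Java serialization, and subtracting an infinite duration from a finite one now yields the negated infinity. The round-trip helper below is illustrative only.

    import java.io._
    import scala.concurrent.duration._

    object DurationFixesSketch {
      // Illustrative Java-serialization round trip.
      private def roundTrip[A <: AnyRef](a: A): A = {
        val bytes = new ByteArrayOutputStream()
        val out = new ObjectOutputStream(bytes)
        out.writeObject(a)
        out.close()
        new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray)).readObject().asInstanceOf[A]
      }

      def main(args: Array[String]): Unit = {
        // readResolve: deserialization hands back the same singleton instance.
        assert(roundTrip(Duration.Inf) eq Duration.Inf)

        // The `-` fix: a finite duration minus Inf is MinusInf, not Inf.
        assert((1.second - Duration.Inf) eq Duration.MinusInf)
      }
    }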
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index cc1350f5a9..d159dda414 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -12,6 +12,75 @@ import scala.concurrent.duration.Duration
import scala.annotation.implicitNotFound
/** This package object contains primitives for concurrent and parallel programming.
+ *
+ * == Guide ==
+ *
+ * A more detailed guide to Futures and Promises, including discussion and examples,
+ * can be found at
+ * [[http://docs.scala-lang.org/overviews/core/futures.html]].
+ *
+ * == Common Imports ==
+ *
+ * When working with Futures, you will often find that importing the whole concurrent
+ * package is convenient; furthermore, you are likely to need an implicit ExecutionContext
+ * in scope for many operations involving Futures and Promises:
+ *
+ * {{{
+ * import scala.concurrent._
+ * import ExecutionContext.Implicits.global
+ * }}}
+ *
+ * == Specifying Durations ==
+ *
+ * Operations often require a duration to be specified. A duration DSL is available
+ * to make defining these easier:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * val d: Duration = 10.seconds
+ * }}}
+ *
+ * == Using Futures For Non-blocking Computation ==
+ *
+ * Basic use of futures is easy with the factory method on Future, which executes a
+ * provided function asynchronously, handing you back a future result of that function
+ * without blocking the current thread. In order to create the Future you will need
+ * either an implicit or explicit ExecutionContext to be provided:
+ *
+ * {{{
+ * import scala.concurrent._
+ * import ExecutionContext.Implicits.global // implicit execution context
+ *
+ * val firstZebra: Future[Int] = Future {
+ * val source = scala.io.Source.fromFile("/etc/dictionaries-common/words")
+ * source.toSeq.indexOfSlice("zebra")
+ * }
+ * }}}
+ *
+ * == Avoid Blocking ==
+ *
+ * Although blocking is possible in order to await results (with a mandatory timeout duration):
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * Await.result(firstZebra, 10.seconds)
+ * }}}
+ *
+ * and although this is sometimes necessary, in particular for testing purposes, blocking
+ * is generally discouraged when working with Futures and concurrency, in order to avoid
+ * potential deadlocks and improve performance. Instead, use callbacks or combinators to
+ * remain in the future domain:
+ *
+ * {{{
+ * val animalRange: Future[Int] = for {
+ * aardvark <- firstAardvark
+ * zebra <- firstZebra
+ * } yield zebra - aardvark
+ *
+ * animalRange.onSuccess {
+ * case x if x > 500000 => println("It's a long way from Aardvark to Zebra")
+ * }
+ * }}}
*/
package object concurrent {
type ExecutionException = java.util.concurrent.ExecutionException
@@ -47,14 +116,19 @@ package object concurrent {
* Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`.
*
* @param body A piece of code which contains potentially blocking or long running calls.
- * @throws `CancellationException` if the computation was cancelled
- * @throws `InterruptedException` in the case that a wait within the blocking `body` was interrupted
+ * @throws CancellationException if the computation was cancelled
+ * @throws InterruptedException in the case that a wait within the blocking `body` was interrupted
*/
@throws(classOf[Exception])
def blocking[T](body: =>T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission)
}
package concurrent {
+ /**
+ * This marker trait is used by [[Await]] to ensure that [[Awaitable.ready]] and [[Awaitable.result]]
+ * are not directly called by user code. An implicit instance of this trait is only available when
+ * user code is currently calling the methods on [[Await]].
+ */
@implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.")
sealed trait CanAwait
@@ -65,6 +139,11 @@ package concurrent {
/**
* `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances.
+ *
+ * While occasionally useful, e.g., for testing, it is recommended that you avoid Await
+ * when possible, in favor of callbacks, combinators like onComplete, and use in
+ * for-comprehensions. Await will block the thread on which it runs, and could cause
+ * performance and deadlock issues.
*/
object Await {
/**
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 1c87a1f421..52fa525b24 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -93,7 +93,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
val buf = new Array[Char](bufferSize)
var n = 0
while (n != -1) {
- n = charReader.read(buf)
+ n = allReader.read(buf)
if (n>0) sb.appendAll(buf, 0, n)
}
sb.result
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index 74c3e06839..9f0b56b4fe 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -169,9 +169,20 @@ object Source {
createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec)
}
-/** The class `Source` implements an iterable representation of source data.
- * Calling method `reset` returns an identical, resetted source, where
- * possible.
+/** An iterable representation of source data.
+ * It may be reset with the optional `reset` method.
+ *
+ * Subclasses must supply [[scala.io.Source@iter the underlying iterator]].
+ *
+ * Error handling may be customized by overriding the [[scala.io.Source@report report]] method.
+ *
+ * The [[scala.io.Source@ch current input]] and [[scala.io.Source@pos position]],
+ * as well as the [[scala.io.Source@next next character]] methods delegate to
+ * [[scala.io.Source$Positioner the positioner]].
+ *
+ * The default positioner encodes line and column numbers in the position passed to `report`.
+ * This behavior can be changed by supplying a
+ * [[scala.io.Source@withPositioning(pos:Source.this.Positioner):Source.this.type custom positioner]].
*
* @author Burak Emir
* @version 1.0
diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala
index 64836ecd6e..0f9656436b 100644
--- a/src/library/scala/io/StdIn.scala
+++ b/src/library/scala/io/StdIn.scala
@@ -4,7 +4,7 @@ package io
import java.text.MessageFormat
/** private[scala] because this is not functionality we should be providing
- * in the standard library, at least not in this idiosyncractic form.
+ * in the standard library, at least not in this idiosyncratic form.
* Factored into trait because it is better code structure regardless.
*/
private[scala] trait StdIn {
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index c638f531bb..2eb5514a18 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala
/**
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
index 1f411c412a..51118b43be 100644
--- a/src/library/scala/languageFeature.scala
+++ b/src/library/scala/languageFeature.scala
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala
import scala.annotation.meta
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index bcbed645a7..d6e2963ad8 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -364,7 +364,7 @@ object BigDecimal {
* to a decimal text representation, and build a `BigDecimal` based on that.
* `BigDecimal.binary` will expand the binary fraction to the requested or default
* precision. `BigDecimal.exact` will expand the binary fraction to the
- * full number of digits, thus producing the exact decimal value corrsponding to
+ * full number of digits, thus producing the exact decimal value corresponding to
* the binary fraction of that floating-point number. `BigDecimal` equality
* matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`.
* Note that since `0.1f != 0.1`, the same is not true for `Float`. Instead,
@@ -417,7 +417,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
private final def computeHashCode(): Unit = {
computedHashCode =
if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode
- else if (isValidDouble) doubleValue.##
+ else if (isDecimalDouble) doubleValue.##
else {
val temp = bigDecimal.stripTrailingZeros
scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
@@ -431,7 +431,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* with unequal `hashCode`s. These hash codes agree with `BigInt`
* for whole numbers up ~4934 digits (the range of IEEE 128 bit floating
* point). Beyond this, hash codes will disagree; this prevents the
- * explicit represention of the `BigInt` form for `BigDecimal` values
+ * explicit representation of the `BigInt` form for `BigDecimal` values
* with large exponents.
*/
override def hashCode(): Int = {
@@ -477,7 +477,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning.
* By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want.
*/
- @deprecated("Validity has two distinct meanings. Use `isExactBinaryDouble` or `equivalentToDouble` instead.", "2.11")
+ @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11")
def isValidDouble = {
val d = toDouble
!d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0
@@ -617,10 +617,10 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
*/
def abs: BigDecimal = if (signum < 0) unary_- else this
- /** Returns the sign of this BigDecimal, i.e.
+ /** Returns the sign of this BigDecimal;
* -1 if it is less than 0,
- * +1 if it is greater than 0
- * 0 if it is equal to 0
+ * +1 if it is greater than 0,
+ * 0 if it is equal to 0.
*/
def signum: Int = this.bigDecimal.signum()
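A small sketch of the distinction behind the revised deprecation message and the hashCode change above, assuming the 2.11 BigDecimal factories (`decimal`, `binary`, `exact`) and the `isDecimalDouble` predicate referenced in the Scaladoc:

    object BigDecimalDoubleMeanings {
      def main(args: Array[String]): Unit = {
        val d = 0.1
        val dec = BigDecimal.decimal(d) // decimal text expansion of the Double

        // Per the Scaladoc above, equality follows the decimal expansion of Double...
        assert(dec == d)
        assert(dec.isDecimalDouble)

        // ...so hashing via isDecimalDouble keeps equal values hashing alike.
        assert(dec.## == d.##)
      }
    }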
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 689fc0c3e1..abc7371d9f 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -282,10 +282,10 @@ final class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNum
*/
def abs: BigInt = new BigInt(this.bigInteger.abs())
- /** Returns the sign of this BigInt, i.e.
+ /** Returns the sign of this BigInt;
* -1 if it is less than 0,
- * +1 if it is greater than 0
- * 0 if it is equal to 0
+ * +1 if it is greater than 0,
+ * 0 if it is equal to 0.
*/
def signum: Int = this.bigInteger.signum()
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index d1a4e7c35c..827cccc77e 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -26,7 +26,7 @@ import scala.language.{implicitConversions, higherKinds}
* val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
*
* // sort by 2nd element
- * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)
+ * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
*
* // sort by the 3rd element, then 1st
* Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1)))
@@ -284,6 +284,9 @@ object Ordering extends LowPriorityOrderingImplicits {
override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x)
override def lt(x: Float, y: Float): Boolean = outer.lt(y, x)
override def gt(x: Float, y: Float): Boolean = outer.gt(y, x)
+ override def min(x: Float, y: Float): Float = outer.max(x, y)
+ override def max(x: Float, y: Float): Float = outer.min(x, y)
+
}
}
implicit object Float extends FloatOrdering
@@ -309,6 +312,8 @@ object Ordering extends LowPriorityOrderingImplicits {
override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x)
override def lt(x: Double, y: Double): Boolean = outer.lt(y, x)
override def gt(x: Double, y: Double): Boolean = outer.gt(y, x)
+ override def min(x: Double, y: Double): Double = outer.max(x, y)
+ override def max(x: Double, y: Double): Double = outer.min(x, y)
}
}
implicit object Double extends DoubleOrdering
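The overrides added above make the reversed Float and Double orderings swap min and max along with the comparison methods. A tiny sketch of the intended post-fix behaviour:

    import scala.math.Ordering

    object ReverseMinMaxSketch {
      def main(args: Array[String]): Unit = {
        val rev = Ordering.Double.reverse

        // Under the reversed ordering, 1.0 compares greater than 2.0,
        // so min now picks 2.0 and max picks 1.0, consistent with compare/lt/gt.
        assert(rev.min(1.0, 2.0) == 2.0)
        assert(rev.max(1.0, 2.0) == 1.0)
      }
    }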
diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala
index 9e35381528..8d7fc32535 100644
--- a/src/library/scala/math/PartialOrdering.scala
+++ b/src/library/scala/math/PartialOrdering.scala
@@ -15,17 +15,24 @@ package math
* latter.
*
* A [[http://en.wikipedia.org/wiki/Partial_order partial ordering]] is a
- * binary relation on a type `T` that is also an equivalence relation on
- * values of type `T`. This relation is exposed as the `lteq` method of
- * the `PartialOrdering` trait. This relation must be:
+ * binary relation on a type `T`, exposed as the `lteq` method of this trait.
+ * This relation must be:
*
* - reflexive: `lteq(x, x) == '''true'''`, for any `x` of type `T`.
- * - anti-symmetric: `lteq(x, y) == '''true'''` and `lteq(y, x) == true`
- * then `equiv(x, y)`, for any `x` and `y` of type `T`.
+ * - anti-symmetric: if `lteq(x, y) == '''true'''` and
+ * `lteq(y, x) == '''true'''`
+ * then `equiv(x, y) == '''true'''`, for any `x` and `y` of type `T`.
* - transitive: if `lteq(x, y) == '''true'''` and
* `lteq(y, z) == '''true'''` then `lteq(x, z) == '''true'''`,
* for any `x`, `y`, and `z` of type `T`.
*
+ * Additionally, a partial ordering induces an
+ * [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]
+ * on a type `T`: `x` and `y` of type `T` are equivalent if and only if
+ * `lteq(x, y) && lteq(y, x) == '''true'''`. This equivalence relation is
+ * exposed as the `equiv` method, inherited from the
+ * [[scala.math.Equiv Equiv]] trait.
+ *
* @author Geoffrey Washburn
* @version 1.0, 2008-04-0-3
* @since 2.7
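As a hypothetical illustration of the laws listed above, subset inclusion is a partial ordering on sets, and the induced `equiv` (inherited from `Equiv`) coincides with set equality:

    import scala.math.PartialOrdering

    object SubsetOrdering extends PartialOrdering[Set[Int]] {
      // lteq is the partial order: x precedes y iff x is a subset of y.
      def lteq(x: Set[Int], y: Set[Int]): Boolean = x subsetOf y

      // tryCompare returns None for incomparable sets.
      def tryCompare(x: Set[Int], y: Set[Int]): Option[Int] =
        (lteq(x, y), lteq(y, x)) match {
          case (true, true)   => Some(0)
          case (true, false)  => Some(-1)
          case (false, true)  => Some(1)
          case (false, false) => None
        }
    }

    // SubsetOrdering.equiv(Set(1, 2), Set(2, 1)) == true  (induced equivalence)
    // SubsetOrdering.tryCompare(Set(1), Set(2)) == None   (incomparable)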
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index 33c5cee783..9dd96183da 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -2,8 +2,7 @@ package scala
package reflect
import java.lang.{ Class => jClass }
-import scala.language.{implicitConversions, existentials}
-import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
+import scala.runtime.ScalaRunTime.arrayElementClass
/**
*
@@ -70,26 +69,35 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial
* `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
* is uncheckable, but we have an instance of `ClassTag[T]`.
*/
- def unapply(x: Any): Option[T] = unapply_impl(x)
- def unapply(x: Byte): Option[T] = unapply_impl(x)
- def unapply(x: Short): Option[T] = unapply_impl(x)
- def unapply(x: Char): Option[T] = unapply_impl(x)
- def unapply(x: Int): Option[T] = unapply_impl(x)
- def unapply(x: Long): Option[T] = unapply_impl(x)
- def unapply(x: Float): Option[T] = unapply_impl(x)
- def unapply(x: Double): Option[T] = unapply_impl(x)
- def unapply(x: Boolean): Option[T] = unapply_impl(x)
- def unapply(x: Unit): Option[T] = unapply_impl(x)
+ def unapply(x: Any): Option[T] =
+ if (null != x && (
+ (runtimeClass.isInstance(x))
+ || (x.isInstanceOf[Byte] && runtimeClass.isAssignableFrom(classOf[Byte]))
+ || (x.isInstanceOf[Short] && runtimeClass.isAssignableFrom(classOf[Short]))
+ || (x.isInstanceOf[Char] && runtimeClass.isAssignableFrom(classOf[Char]))
+ || (x.isInstanceOf[Int] && runtimeClass.isAssignableFrom(classOf[Int]))
+ || (x.isInstanceOf[Long] && runtimeClass.isAssignableFrom(classOf[Long]))
+ || (x.isInstanceOf[Float] && runtimeClass.isAssignableFrom(classOf[Float]))
+ || (x.isInstanceOf[Double] && runtimeClass.isAssignableFrom(classOf[Double]))
+ || (x.isInstanceOf[Boolean] && runtimeClass.isAssignableFrom(classOf[Boolean]))
+ || (x.isInstanceOf[Unit] && runtimeClass.isAssignableFrom(classOf[Unit])))
+ ) Some(x.asInstanceOf[T])
+ else None
- private def unapply_impl[U: ClassTag](x: U): Option[T] =
- if (x == null) None
- else {
- val staticClass = classTag[U].runtimeClass
- val dynamicClass = x.getClass
- val effectiveClass = if (staticClass.isPrimitive) staticClass else dynamicClass
- val conforms = runtimeClass.isAssignableFrom(effectiveClass)
- if (conforms) Some(x.asInstanceOf[T]) else None
- }
+ // TODO: deprecate overloads in 2.12.0, remove in 2.13.0
+ def unapply(x: Byte) : Option[T] = unapplyImpl(x, classOf[Byte])
+ def unapply(x: Short) : Option[T] = unapplyImpl(x, classOf[Short])
+ def unapply(x: Char) : Option[T] = unapplyImpl(x, classOf[Char])
+ def unapply(x: Int) : Option[T] = unapplyImpl(x, classOf[Int])
+ def unapply(x: Long) : Option[T] = unapplyImpl(x, classOf[Long])
+ def unapply(x: Float) : Option[T] = unapplyImpl(x, classOf[Float])
+ def unapply(x: Double) : Option[T] = unapplyImpl(x, classOf[Double])
+ def unapply(x: Boolean) : Option[T] = unapplyImpl(x, classOf[Boolean])
+ def unapply(x: Unit) : Option[T] = unapplyImpl(x, classOf[Unit])
+
+ private[this] def unapplyImpl(x: Any, primitiveCls: java.lang.Class[_]): Option[T] =
+ if (runtimeClass.isInstance(x) || runtimeClass.isAssignableFrom(primitiveCls)) Some(x.asInstanceOf[T])
+ else None
// case class accessories
override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
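The consolidated `unapply` above lets a `ClassTag` act as an extractor from `Any`, with boxed primitives covered by the explicit branches. An illustrative sketch:

    import scala.reflect.classTag

    object ClassTagExtractorSketch {
      def main(args: Array[String]): Unit = {
        val StringTag = classTag[String]
        val IntTag    = classTag[Int]

        def describe(x: Any): String = x match {
          case StringTag(s) => s"a String of length ${s.length}"
          case IntTag(i)    => s"an Int: ${i + 1}" // boxed Int handled by the primitive branch
          case _            => "something else"
        }

        println(describe("hello")) // a String of length 5
        println(describe(41))      // an Int: 42
        println(describe(3.14))    // something else
      }
    }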
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 803c980058..2f7643bccf 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -64,6 +64,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
// TODO undeprecated until Scala reflection becomes non-experimental
// @deprecated("Use type tags and manually check the corresponding class or type instead", "2.10.0")
+@SerialVersionUID(1L)
abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals {
override def <:<(that: ClassManifest[_]): Boolean =
(that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal)
@@ -72,6 +73,7 @@ abstract class AnyValManifest[T <: AnyVal](override val toString: String) extend
case _ => false
}
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ @transient
override val hashCode = System.identityHashCode(this)
}
@@ -228,6 +230,7 @@ object ManifestFactory {
private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_],
override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) {
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ @transient
override val hashCode = System.identityHashCode(this)
}
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 82a3b00ac4..9cb1dee41c 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -28,7 +28,7 @@ import scala.math.ScalaNumber;
* @version 2.0 */
public final class BoxesRunTime
{
- private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
+ private static final int CHAR = 0, /* BYTE = 1, SHORT = 2, */ INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
/** We don't need to return BYTE and SHORT, as everything which might
* care widens to INT.
@@ -43,10 +43,6 @@ public final class BoxesRunTime
return OTHER;
}
- private static String boxDescription(Object a) {
- return "" + a.getClass().getSimpleName() + "(" + a + ")";
- }
-
/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */
public static java.lang.Boolean boxToBoolean(boolean b) {
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index 2d5f832e1f..a8fdfc1059 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -16,7 +16,7 @@ import java.lang.{ Class => JClass }
import scala.annotation.tailrec
/** An element of a polymorphic object cache.
- * This class is refered to by the `CleanUp` phase. Each `PolyMethodCache` chain
+ * This class is referred to by the `CleanUp` phase. Each `PolyMethodCache` chain
* must only relate to one method as `PolyMethodCache` does not identify
* the method name and argument types. In practice, one variable will be
* generated per call point, and will uniquely relate to the method called
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 5fb24f2a36..18fcbf8276 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -62,7 +62,7 @@ object ScalaRunTime {
}
/** Return the class object representing an unboxed value type,
- * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
+ * e.g., classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
@@ -251,7 +251,7 @@ object ScalaRunTime {
*
* The primary motivation for this method is to provide a means for
* correctly obtaining a String representation of a value, while
- * avoiding the pitfalls of naïvely calling toString on said value.
+ * avoiding the pitfalls of naively calling toString on said value.
* In particular, it addresses the fact that (a) toString cannot be
* called on null and (b) depending on the apparent type of an
* array, toString may or may not print it in a human-readable form.
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index ce7d7afc9e..74e67bb9e7 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -44,5 +44,10 @@ final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends
new ArrayCharSequence(xs, start1, start1 + newlen)
}
}
- override def toString = xs drop start take length mkString ""
+ override def toString = {
+ val start = math.max(this.start, 0)
+ val end = math.min(xs.length, start + length)
+
+ if (start >= end) "" else new String(xs, start, end - start)
+ }
}
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index b28f6d4269..512c4fbc27 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -17,6 +17,10 @@ import scala.language.{ higherKinds, implicitConversions }
/** This interface is intended as a minimal interface, not complicated
* by the requirement to resolve type constructors, for implicit search (which only
* needs to find an implicit conversion to Traversable for our purposes.)
+ * @define Coll `ZippedTraversable2`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
*/
trait ZippedTraversable2[+El1, +El2] extends Any {
def foreach[U](f: (El1, El2) => U): Unit
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 7c501380a3..ffd44acf81 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -14,7 +14,12 @@ import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
import scala.language.{ higherKinds, implicitConversions }
-/** See comment on ZippedTraversable2. */
+/** See comment on ZippedTraversable2.
+ * @define Coll `ZippedTraversable3`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
+ */
trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
def foreach[U](f: (El1, El2, El3) => U): Unit
}
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 04c7b5108c..17ae8cb69c 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -20,7 +20,7 @@ package sys
* @since 2.9
*/
trait Prop[+T] {
- /** The full name of the property, e.g. "java.awt.headless".
+ /** The full name of the property, e.g., "java.awt.headless".
*/
def key: String
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 39f66f5030..d2ebf8c044 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -21,6 +21,8 @@ import scala.language.implicitConversions
* System properties. If a security manager is in place which prevents
* the properties from being read or written, the AccessControlException
* will be caught and discarded.
+ * @define Coll `collection.mutable.Map`
+ * @define coll mutable map
*
* @author Paul Phillips
* @version 2.9
diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala
index 386bd84113..e493603bc2 100644
--- a/src/library/scala/sys/package.scala
+++ b/src/library/scala/sys/package.scala
@@ -61,16 +61,15 @@ package object sys {
def env: immutable.Map[String, String] = immutable.Map(System.getenv().asScala.toSeq: _*)
/** Register a shutdown hook to be run when the VM exits.
- * The newly created thread is marked as a daemon so it will not
- * interfere with VM shutdown. The hook is automatically registered:
- * the returned value can be ignored, but is available in case the
- * Thread requires further modification. It can also be unregistered
- * by calling ShutdownHookThread#remove().
+ * The hook is automatically registered: the returned value can be ignored,
+ * but is available in case the Thread requires further modification.
+ * It can also be unregistered by calling ShutdownHookThread#remove().
*
* Note that shutdown hooks are NOT guaranteed to be run.
*
* @param body the body of code to run at shutdown
* @return the Thread which will run the shutdown hook.
+ * @see [[scala.sys.ShutdownHookThread]]
*/
def addShutdownHook(body: => Unit): ShutdownHookThread = ShutdownHookThread(body)
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index b31bbf0540..066b2f5373 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -203,7 +203,7 @@ object BasicIO {
/** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. */
def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput))
- /** Retruns a `ProcessIO` connected to stdout, stderr and the provided `in` */
+ /** Returns a `ProcessIO` connected to stdout, stderr and the provided `in` */
def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr)
/** Send all the input from the stream to stderr, and closes the input stream
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index ae347221ef..6072894007 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -88,7 +88,7 @@ object ProcessLogger {
/** Creates a [[scala.sys.process.ProcessLogger]] that sends all output to the corresponding
* function.
*
- * @param fout This function will receive standard outpout.
+ * @param fout This function will receive standard output.
*
* @param ferr This function will receive standard error.
*/
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index 1340a6c415..b1976ad4b6 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -119,7 +119,7 @@ package scala.sys {
* ==Handling Input and Output==
*
* In the underlying Java model, once a `Process` has been started, one can
- * get `java.io.InputStream` and `java.io.OutpuStream` representing its
+ * get `java.io.InputStream` and `java.io.OutputStream` representing its
* output and input respectively. That is, what one writes to an
* `OutputStream` is turned into input to the process, and the output of a
* process can be read from an `InputStream` -- of which there are two, one
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index b1a932be7e..e196d403c2 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -274,7 +274,7 @@ object Either {
*/
final case class LeftProjection[+A, +B](e: Either[A, B]) {
/**
- * Returns the value from this `Left` or throws `Predef.NoSuchElementException`
+ * Returns the value from this `Left` or throws `java.util.NoSuchElementException`
* if this is a `Right`.
*
* {{{
@@ -282,7 +282,7 @@ object Either {
* Right(12).left.get // NoSuchElementException
* }}}
*
- * @throws Predef.NoSuchElementException if the projection is [[scala.util.Right]]
+ * @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]]
*/
def get = e match {
case Left(a) => a
@@ -440,14 +440,14 @@ object Either {
/**
* Returns the value from this `Right` or throws
- * `Predef.NoSuchElementException` if this is a `Left`.
+ * `java.util.NoSuchElementException` if this is a `Left`.
*
* {{{
* Right(12).right.get // 12
* Left(12).right.get // NoSuchElementException
* }}}
*
- * @throws Predef.NoSuchElementException if the projection is `Left`.
+ * @throws java.util.NoSuchElementException if the projection is `Left`.
*/
def get = e match {
case Left(_) => throw new NoSuchElementException("Either.right.value on Left")
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index d597feb898..367488f116 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -62,10 +62,10 @@ private[scala] trait PropertiesTrait {
def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt
- // for values based on propFilename
- def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
+ // for values based on propFilename, falling back to System properties
+ def scalaPropOrElse(name: String, alt: String): String = scalaPropOrNone(name).getOrElse(alt)
def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
- def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name))
+ def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)).orElse(propOrNone("scala." + name))
/** The numeric portion of the runtime Scala version, if this is a final
* release. If for instance the versionString says "version 2.9.0.final",
@@ -107,7 +107,7 @@ private[scala] trait PropertiesTrait {
val versionString = "version " + scalaPropOrElse("version.number", "(unknown)")
val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2013, LAMP/EPFL")
- /** This is the encoding to use reading in source files, overridden with -encoding
+ /** This is the encoding to use when reading in source files, overridden with -encoding.
* Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
*/
def sourceEncoding = scalaPropOrElse("file.encoding", "UTF-8")
@@ -155,9 +155,12 @@ private[scala] trait PropertiesTrait {
// This is looking for javac, tools.jar, etc.
// Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
// and finally the system property based javaHome.
- def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))
+ def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))
- def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
+ // private[scala] for 2.12
+ private[this] def versionFor(command: String) = f"Scala $command $versionString -- $copyrightString"
+
+ def versionMsg = versionFor(propCategory)
def scalaCmd = if (isWin) "scala.bat" else "scala"
def scalacCmd = if (isWin) "scalac.bat" else "scalac"
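The change above makes `scalaPropOrNone` (and therefore `scalaPropOrElse`) fall back from the property file bundled in the scala jar to a `scala.`-prefixed system property. A sketch of the lookup order; the property name below is illustrative, not one the library defines:

    import scala.util.Properties

    object ScalaPropFallbackSketch {
      def main(args: Array[String]): Unit = {
        // Not present in the bundled *.properties file, so the lookup
        // falls back to the system property "scala.nightly".
        System.setProperty("scala.nightly", "yes")

        println(Properties.scalaPropOrNone("nightly"))       // Some(yes), via the fallback
        println(Properties.scalaPropOrElse("nightly", "no")) // yes
      }
    }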
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 8d68c5be38..2d38c9d4a0 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -121,15 +121,21 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable {
(bf(xs) ++= buf).result()
}
+ @deprecated("Preserved for backwards binary compatibility. To remove in 2.12.x.", "2.11.6")
+ final def `scala$util$Random$$isAlphaNum$1`(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
+
/** Returns a Stream of pseudorandomly chosen alphanumeric characters,
* equally chosen from A-Z, a-z, and 0-9.
*
* @since 2.8
*/
def alphanumeric: Stream[Char] = {
- def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
+ def nextAlphaNum: Char = {
+ val chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+ chars charAt (self nextInt chars.length)
+ }
- Stream continually nextPrintableChar filter isAlphaNum
+ Stream continually nextAlphaNum
}
}
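The rewritten `alphanumeric` above draws directly from the 62-character alphabet instead of filtering `nextPrintableChar`, so every generated character is used; typical usage is unchanged. A short, illustrative sketch:

    import scala.util.Random

    object AlphanumericSketch {
      def main(args: Array[String]): Unit = {
        val rnd = new Random(42) // fixed seed, illustrative only
        val token = rnd.alphanumeric.take(16).mkString
        println(token)
        assert(token.length == 16 && token.forall(_.isLetterOrDigit))
      }
    }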
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index b0cf122f2a..f65c77f5a0 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -164,8 +164,8 @@ sealed abstract class Try[+T] {
def flatten[U](implicit ev: T <:< Try[U]): Try[U]
/**
- * Completes this `Try` with an exception wrapped in a `Success`. The exception is either the exception that the
- * `Try` failed with (if a `Failure`) or an `UnsupportedOperationException`.
+ * Inverts this `Try`. If this is a `Failure`, returns its exception wrapped in a `Success`.
+ * If this is a `Success`, returns a `Failure` containing an `UnsupportedOperationException`.
*/
def failed: Try[Throwable]
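A tiny sketch of the clarified `failed` contract:

    import scala.util.{Failure, Success}

    object TryFailedSketch {
      def main(args: Array[String]): Unit = {
        val boom = new RuntimeException("boom")

        // A Failure inverts to a Success carrying its exception.
        assert(Failure(boom).failed == Success(boom))

        // A Success inverts to a Failure of UnsupportedOperationException.
        Success(1).failed match {
          case Failure(_: UnsupportedOperationException) => println("inverted as documented")
          case other                                     => println(s"unexpected: $other")
        }
      }
    }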
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index be6d03a145..aa30887ba0 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -155,7 +155,7 @@ object Exception {
/** A `Catch` object which catches everything. */
final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "<everything>"
- /** A `Catch` object witch catches non-fatal exceptions. */
+ /** A `Catch` object which catches non-fatal exceptions. */
final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "<non-fatal>"
/** Creates a `Catch` object which will catch any of the supplied exceptions.
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 1bfaeb255b..4e5537954f 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -191,7 +191,7 @@ private[hashing] class MurmurHash3 {
* This is based on the earlier MurmurHash3 code by Rex Kerr, but the
* MurmurHash3 algorithm was since changed by its creator Austin Appleby
* to remedy some weaknesses and improve performance. This represents the
- * latest and supposedly final version of the algortihm (revision 136).
+ * latest and supposedly final version of the algorithm (revision 136).
*
* @see [[http://code.google.com/p/smhasher]]
*/
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 6743b9e42a..6d3d015b1a 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
/**
* This package is concerned with regular expression (regex) matching against strings,
* with the main goal of pulling out information from those matches, or replacing
@@ -28,117 +27,127 @@
* into a [[java.lang.String]].
*
*/
-package scala
-package util.matching
+package scala.util.matching
import scala.collection.AbstractIterator
import java.util.regex.{ Pattern, Matcher }
-/** This class provides methods for creating and using regular expressions.
- * It is based on the regular expressions of the JDK since 1.4.
+/** A regular expression is used to determine whether a string matches a pattern
+ * and, if it does, to extract or transform the parts that match.
*
- * Its main goal is to extract strings that match a pattern, or the subgroups
- * that make it up. For that reason, it is usually used with for comprehensions
- * and matching (see methods for examples).
+ * This class delegates to the [[java.util.regex]] package of the Java Platform.
+ * See the documentation for [[java.util.regex.Pattern]] for details about
+ * the regular expression syntax for pattern strings.
*
- * A Regex is created from a [[java.lang.String]] representation of the
- * regular expression pattern^1^. That pattern is compiled
- * during construction, so frequently used patterns should be declared outside
- * loops if performance is of concern. Possibly, they might be declared on a
- * companion object, so that they need only to be initialized once.
+ * An instance of `Regex` represents a compiled regular expression pattern.
+ * Since compilation is expensive, frequently used `Regex`es should be constructed
+ * once, outside of loops and perhaps in a companion object.
*
- * The canonical way of creating regex patterns is by using the method `r`, provided
- * on [[java.lang.String]] through an implicit conversion into
- * [[scala.collection.immutable.WrappedString]]. Using triple quotes to write these
- * strings avoids having to quote the backslash character (`\`).
+ * The canonical way to create a `Regex` is by using the method `r`, provided
+ * implicitly for strings:
*
- * Using the constructor directly, on the other hand, makes
- * it possible to declare names for subgroups in the pattern.
+ * {{{
+ * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
+ * }}}
*
- * For example, both declarations below generate the same regex, but the second
- * one associate names with the subgroups.
+ * Since escapes are not processed in multi-line string literals, using triple quotes
+ * avoids having to escape the backslash character, so that `"\\d"` can be written `"""\d"""`.
+ *
+ * To extract the capturing groups when a `Regex` is matched, use it as
+ * an extractor in a pattern match:
*
* {{{
- * val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
- * val dateP2 = new scala.util.matching.Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day")
+ * "2004-01-20" match {
+ * case date(year, month, day) => s"$year was a good year for PLs."
+ * }
* }}}
*
- * There are two ways of using a `Regex` to find a pattern: calling methods on
- * Regex, such as `findFirstIn` or `findAllIn`, or using it as an extractor in a
- * pattern match.
+ * To check only whether the `Regex` matches, ignoring any groups,
+ * use a sequence wildcard:
+ *
+ * {{{
+ * "2004-01-20" match {
+ * case date(_*) => "It's a date!"
+ * }
+ * }}}
*
- * Note that, when calling `findAllIn`, the resulting [[scala.util.matching.Regex.MatchIterator]]
- * needs to be initialized (by calling `hasNext` or `next()`, or causing these to be
- * called) before information about a match can be retrieved:
+ * That works because a `Regex` extractor produces a sequence of strings.
+ * Extracting only the year from a date could also be expressed with
+ * a sequence wildcard:
*
* {{{
- * val msg = "I love Scala"
+ * "2004-01-20" match {
+ * case date(year, _*) => s"$year was a good year for PLs."
+ * }
+ * }}}
*
- * // val start = " ".r.findAllIn(msg).start // throws an IllegalStateException
+ * In a pattern match, `Regex` normally matches the entire input.
+ * However, an unanchored `Regex` finds the pattern anywhere
+ * in the input.
*
- * val matches = " ".r.findAllIn(msg)
- * matches.hasNext // initializes the matcher
- * val start = matches.start
+ * {{{
+ * val embeddedDate = date.unanchored
+ * "Date: 2004-01-20 17:25:18 GMT (10 years, 28 weeks, 5 days, 17 hours and 51 minutes ago)" match {
+ * case embeddedDate("2004", "01", "20") => "A Scala is born."
+ * }
* }}}
*
- * When Regex is used as an extractor in a pattern match, note that it
- * only succeeds if the whole text can be matched. For this reason, one usually
- * calls a method to find the matching substrings, and then use it as an extractor
- * to break match into subgroups.
+ * To find or replace matches of the pattern, use the various find and replace methods.
+ * There is a flavor of each method that produces matched strings and
+ * another that produces `Match` objects.
*
- * As an example, the above patterns can be used like this:
+ * For example, pattern matching with an unanchored `Regex`, as in the previous example,
+ * is the same as using `findFirstMatchIn`, except that the findFirst methods return an `Option`,
+ * or `None` for no match:
*
* {{{
- * val dateP1(year, month, day) = "2011-07-15"
+ * val dates = "Important dates in history: 2004-01-20, 1958-09-05, 2010-10-06, 2011-07-15"
+ * val firstDate = date findFirstIn dates getOrElse "No date found."
+ * val firstYear = for (m <- date findFirstMatchIn dates) yield m group 1
+ * }}}
*
- * // val dateP1(year, month, day) = "Date 2011-07-15" // throws an exception at runtime
+ * To find all matches:
*
- * val copyright: String = dateP1 findFirstIn "Date of this document: 2011-07-15" match {
- * case Some(dateP1(year, month, day)) => "Copyright "+year
- * case None => "No copyright"
- * }
+ * {{{
+ * val allYears = for (m <- date findAllMatchIn dates) yield m group 1
+ * }}}
*
- * val copyright: Option[String] = for {
- * dateP1(year, month, day) <- dateP1 findFirstIn "Last modified 2011-07-15"
- * } yield year
-
- * def getYears(text: String): Iterator[String] = for (dateP1(year, _, _) <- dateP1 findAllIn text) yield year
- * def getFirstDay(text: String): Option[String] = for (m <- dateP2 findFirstMatchIn text) yield m group "day"
+ * But `findAllIn` returns a special iterator of strings that can be queried for the `MatchData`
+ * of the last match:
+ *
+ * {{{
+ * val mi = date findAllIn dates
+ * val oldies = mi filter (_ => (mi group 1).toInt < 1960) map (s => s"$s: An oldie but goodie.")
* }}}
*
- * Regex does not provide a method that returns a [[scala.Boolean]]. One can
- * use [[java.lang.String]] `matches` method, or, if `Regex` is preferred,
- * either ignore the return value or test the `Option` for emptyness. For example:
+ * Note that `findAllIn` finds matches that don't overlap. (See [[findAllIn]] for more examples.)
*
* {{{
- * def hasDate(text: String): Boolean = (dateP1 findFirstIn text).nonEmpty
- * def printLinesWithDates(lines: Traversable[String]) {
- * lines foreach { line =>
- * dateP1 findFirstIn line foreach { _ => println(line) }
- * }
- * }
+ * val num = """(\d+)""".r
+ * val all = (num findAllIn "123").toList // List("123"), not List("123", "23", "3")
* }}}
*
- * There are also methods that can be used to replace the patterns
- * on a text. The substitutions can be simple replacements, or more
- * complex functions. For example:
+ * Text replacement can be performed unconditionally or as a function of the current match:
*
* {{{
- * val months = Map( 1 -> "Jan", 2 -> "Feb", 3 -> "Mar",
- * 4 -> "Apr", 5 -> "May", 6 -> "Jun",
- * 7 -> "Jul", 8 -> "Aug", 9 -> "Sep",
- * 10 -> "Oct", 11 -> "Nov", 12 -> "Dec")
- *
- * import scala.util.matching.Regex.Match
- * def reformatDate(text: String) = dateP2 replaceAllIn ( text, (m: Match) =>
- * "%s %s, %s" format (months(m group "month" toInt), m group "day", m group "year")
- * )
+ * val redacted = date replaceAllIn (dates, "XXXX-XX-XX")
+ * val yearsOnly = date replaceAllIn (dates, m => m group 1)
+ * val months = (0 to 11) map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" }
+ * val reformatted = date replaceAllIn (dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" })
* }}}
*
- * You can use special pattern syntax constructs like `(?idmsux-idmsux)`¹ to switch
- * various regex compilation options like `CASE_INSENSITIVE` or `UNICODE_CASE`.
+ * Pattern matching the `Match` against the `Regex` that created it does not reapply the `Regex`.
+ * In the expression for `reformatted`, each `date` match is computed once. But it is possible to apply a
+ * `Regex` to a `Match` resulting from a different pattern:
+ *
+ * {{{
+ * val docSpree = """2011(?:-\d{2}){2}""".r
+ * val docView = date replaceAllIn (dates, _ match {
+ * case docSpree() => "Historic doc spree!"
+ * case _ => "Something else happened"
+ * })
+ * }}}
*
- * @note ¹ A detailed description is available in [[java.util.regex.Pattern]].
* @see [[java.util.regex.Pattern]]
*
* @author Thibaud Hottelier
@@ -154,9 +163,8 @@ import java.util.regex.{ Pattern, Matcher }
* interpreted as a reference to a group in the matched pattern, with numbers
* 1 through 9 corresponding to the first nine groups, and 0 standing for the
* whole match. Any other character is an error. The backslash (`\`) character
- * will be interpreted as an escape character, and can be used to escape the
- * dollar sign. One can use [[scala.util.matching.Regex]]'s `quoteReplacement`
- * to automatically escape these characters.
+ * will be interpreted as an escape character and can be used to escape the
+ * dollar sign. Use `Regex.quoteReplacement` to escape these characters.
*/
@SerialVersionUID(-2094783597747625537L)
class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable {
@@ -164,51 +172,84 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
import Regex._
- /**
- * @param regex A string representing a regular expression
- * @param groupNames A mapping from names to indices in capture groups
- */
+ /** Compile a regular expression, supplied as a string, into a pattern that
+ * can be matched against inputs.
+ *
+ * If group names are supplied, they can be used this way:
+ *
+ * {{{
+ * val namedDate = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day")
+ * val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year"
+ * }}}
+ *
+ * This constructor does not support options as flags, which must be
+ * supplied as inline flags in the pattern string: `(?idmsux-idmsux)`.
+ *
+ * @param regex The regular expression to compile.
+ * @param groupNames Names of capturing groups.
+ */
def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*)
/** Tries to match a [[java.lang.CharSequence]].
+ *
* If the match succeeds, the result is a list of the matching
* groups (or a `null` element if a group did not match any input).
* If the pattern specifies no groups, then the result will be an empty list
* on a successful match.
*
* This method attempts to match the entire input by default; to find the next
- * matching subsequence, use an unanchored Regex.
-
+ * matching subsequence, use an unanchored `Regex`.
+ *
* For example:
*
* {{{
* val p1 = "ab*c".r
* val p1Matches = "abbbc" match {
- * case p1() => true
+ * case p1() => true // no groups
* case _ => false
* }
* val p2 = "a(b*)c".r
+ * val p2Matches = "abbbc" match {
+ * case p2(_*) => true // any groups
+ * case _ => false
+ * }
* val numberOfB = "abbbc" match {
- * case p2(b) => Some(b.length)
+ * case p2(b) => Some(b.length) // one group
* case _ => None
* }
* val p3 = "b*".r.unanchored
* val p3Matches = "abbbc" match {
- * case p3() => true
+ * case p3() => true // find the b's
* case _ => false
* }
+ * val p4 = "a(b*)(c+)".r
+ * val p4Matches = "abbbcc" match {
+ * case p4(_*) => true // multiple groups
+ * case _ => false
+ * }
+ * val allGroups = "abbbcc" match {
+ * case p4(all @ _*) => all mkString "/" // "bbb/cc"
+ * case _ => ""
+ * }
+ * val cGroup = "abbbcc" match {
+ * case p4(_, c) => c
+ * case _ => ""
+ * }
* }}}
*
* @param s The string to match
* @return The matches
*/
- def unapplySeq(s: CharSequence): Option[List[String]] = {
- val m = pattern matcher s
- if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
- else None
+ def unapplySeq(s: CharSequence): Option[List[String]] = s match {
+ case null => None
+ case _ =>
+ val m = pattern matcher s
+ if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
+ else None
}
/** Tries to match the String representation of a [[scala.Char]].
+ *
* If the match succeeds, the result is the first matching
* group if any groups are defined, or an empty Sequence otherwise.
*
@@ -247,13 +288,16 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
}
/** Tries to match on a [[scala.util.matching.Regex.Match]].
+ *
* A previously failed match results in None.
+ *
* If a successful match was made against the current pattern, then that result is used.
+ *
* Otherwise, this Regex is applied to the previously matched input,
* and the result of that match is used.
*/
def unapplySeq(m: Match): Option[List[String]] =
- if (m.matched == null) None
+ if (m == null || m.matched == null) None
else if (m.matcher.pattern == this.pattern) Some((1 to m.groupCount).toList map m.group)
else unapplySeq(m.matched)
@@ -274,25 +318,48 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
// @see UnanchoredRegex
protected def runMatcher(m: Matcher) = m.matches()
- /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
+ /** Return all non-overlapping matches of this `Regex` in the given character
+ * sequence as a [[scala.util.matching.Regex.MatchIterator]],
* which is a special [[scala.collection.Iterator]] that returns the
- * matched strings, but can also be converted into a normal iterator
- * that returns objects of type [[scala.util.matching.Regex.Match]]
- * that can be queried for data such as the text that precedes the
- * match, subgroups, etc.
+ * matched strings but can also be queried for more data about the last match,
+ * such as capturing groups and start position.
+ *
+ * A `MatchIterator` can also be converted into an iterator
+ * that returns objects of type [[scala.util.matching.Regex.Match]],
+ * such as is normally returned by `findAllMatchIn`.
+ *
+ * Where potential matches overlap, the first possible match is returned,
+ * followed by the next match that follows the input consumed by the
+ * first match:
*
- * Attempting to retrieve information about a match before initializing
- * the iterator can result in [[java.lang.IllegalStateException]]s. See
- * [[scala.util.matching.Regex.MatchIterator]] for details.
+ * {{{
+ * val hat = "hat[^a]+".r
+ * val hathaway = "hathatthattthatttt"
+ * val hats = (hat findAllIn hathaway).toList // List(hath, hattth)
+ * val pos = (hat findAllMatchIn hathaway map (_.start)).toList // List(0, 7)
+ * }}}
+ *
+ * To return overlapping matches, it is possible to formulate a regular expression
+ * with lookahead (`?=`) that does not consume the overlapping region.
+ *
+ * {{{
+ * val madhatter = "(h)(?=(at[^a]+))".r
+ * val madhats = (madhatter findAllMatchIn hathaway map {
+ * case madhatter(x,y) => s"$x$y"
+ * }).toList // List(hath, hatth, hattth, hatttt)
+ * }}}
+ *
+ * Attempting to retrieve match information before performing the first match
+ * or after exhausting the iterator results in [[java.lang.IllegalStateException]].
+ * See [[scala.util.matching.Regex.MatchIterator]] for details.
*
* @param source The text to match against.
- * @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
+ * @return A [[scala.util.matching.Regex.MatchIterator]] of matched substrings.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
*/
def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames)
-
- /** Return all matches of this regexp in given character sequence as a
+ /** Return all non-overlapping matches of this regexp in given character sequence as a
* [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]].
*
* @param source The text to match against.
@@ -310,8 +377,8 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
}
}
- /** Return optionally first matching string of this regexp in given character sequence,
- * or None if it does not exist.
+ /** Return an optional first matching string of this `Regex` in the given character sequence,
+ * or None if there is no match.
*
* @param source The text to match against.
* @return An [[scala.Option]] of the first matching string in the text.
@@ -322,13 +389,11 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
if (m.find) Some(m.group) else None
}
- /** Return optionally first match of this regexp in given character sequence,
+ /** Return an optional first match of this `Regex` in the given character sequence,
* or None if it does not exist.
*
- * The main difference between this method and `findFirstIn` is that the (optional) return
- * type for this is [[scala.util.matching.Regex.Match]], through which more
- * data can be obtained about the match, such as the strings that precede and follow it,
- * or subgroups.
+ * If the match is successful, the [[scala.util.matching.Regex.Match]] can be queried for
+ * more data.
*
* @param source The text to match against.
* @return A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text.
@@ -339,30 +404,28 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
if (m.find) Some(new Match(source, m, groupNames)) else None
}
- /** Return optionally match of this regexp at the beginning of the
- * given character sequence, or None if regexp matches no prefix
+ /** Return an optional match of this `Regex` at the beginning of the
+ * given character sequence, or None if it matches no prefix
* of the character sequence.
*
- * The main difference from this method to `findFirstIn` is that this
- * method will not return any matches that do not begin at the start
- * of the text being matched against.
+ * Unlike `findFirstIn`, this method will only return a match at
+ * the beginning of the input.
*
* @param source The text to match against.
* @return A [[scala.Option]] of the matched prefix.
- * @example {{{"""[a-z]""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}}
+ * @example {{{"""\p{Lower}""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}}
*/
def findPrefixOf(source: CharSequence): Option[String] = {
val m = pattern.matcher(source)
if (m.lookingAt) Some(m.group) else None
}
- /** Return optionally match of this regexp at the beginning of the
- * given character sequence, or None if regexp matches no prefix
+ /** Return an optional match of this `Regex` at the beginning of the
+ * given character sequence, or None if it matches no prefix
* of the character sequence.
*
- * The main difference from this method to `findFirstMatchIn` is that
- * this method will not return any matches that do not begin at the
- * start of the text being matched against.
+ * Unlike `findFirstMatchIn`, this method will only return a match at
+ * the beginning of the input.
*
* @param source The text to match against.
* @return A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string.
@@ -396,7 +459,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
* import scala.util.matching.Regex
* val datePattern = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day")
* val text = "From 2011-07-15 to 2011-07-17"
- * val repl = datePattern replaceAllIn (text, m => m.group("month")+"/"+m.group("day"))
+ * val repl = datePattern replaceAllIn (text, m => s"${m group "month"}/${m group "day"}")
* }}}
*
* $replacementString
@@ -414,15 +477,15 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
/**
* Replaces some of the matches using a replacer function that returns an [[scala.Option]].
* The replacer function takes a [[scala.util.matching.Regex.Match]] so that extra
- * information can be btained from the match. For example:
+ * information can be obtained from the match. For example:
*
* {{{
* import scala.util.matching.Regex._
*
- * val map = Map("x" -> "a var", "y" -> """some $ and \ signs""")
+ * val vars = Map("x" -> "a var", "y" -> """some $ and \ signs""")
* val text = "A text with variables %x, %y and %z."
* val varPattern = """%(\w+)""".r
- * val mapper = (m: Match) => map get (m group 1) map (quoteReplacement(_))
+ * val mapper = (m: Match) => vars get (m group 1) map (quoteReplacement(_))
* val repl = varPattern replaceSomeIn (text, mapper)
* }}}
*
@@ -463,17 +526,25 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
pattern.split(toSplit)
/** Create a new Regex with the same pattern, but no requirement that
- * the entire String matches in extractor patterns. For instance, the strings
- * shown below lead to successful matches, where they would not otherwise.
+ * the entire String matches in extractor patterns.
+ *
+ * Normally, matching on `date` behaves as though the pattern were
+ * enclosed in anchors, `"^pattern$"`.
+ *
+ * The unanchored `Regex` behaves as though those anchors were removed.
+ *
+ * Note that this method does not actually strip any matchers from the pattern.
+ *
+ * Calling `anchored` returns the original `Regex`.
*
* {{{
- * val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
+ * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
*
- * val dateP1(year, month, day) = "Date 2011-07-15"
+ * val date(year, month, day) = "Date 2011-07-15" // OK
*
* val copyright: String = "Date of this document: 2011-07-15" match {
- * case dateP1(year, month, day) => "Copyright "+year
- * case _ => "No copyright"
+ * case date(year, month, day) => s"Copyright $year" // OK
+ * case _ => "No copyright"
* }
* }}}
*
@@ -488,6 +559,10 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
override def toString = regex
}
+/** A [[Regex]] that finds the first match when used in a pattern match.
+ *
+ * @see [[Regex#unanchored]]
+ */
trait UnanchoredRegex extends Regex {
override protected def runMatcher(m: Matcher) = m.find()
override def unanchored = this
@@ -503,70 +578,79 @@ object Regex {
*/
trait MatchData {
- /** The source from where the match originated */
+ /** The source from which the match originated */
val source: CharSequence
- /** The names of the groups, or some empty sequence if one defined */
+ /** The names of the groups, or an empty sequence if none defined */
val groupNames: Seq[String]
- /** The number of subgroups in the pattern (not all of these need to match!) */
+ /** The number of capturing groups in the pattern.
+ * (For a given successful match, some of those groups may not have matched any input.)
+ */
def groupCount: Int
/** The index of the first matched character, or -1 if nothing was matched */
def start: Int
/** The index of the first matched character in group `i`,
- * or -1 if nothing was matched for that group */
+ * or -1 if nothing was matched for that group.
+ */
def start(i: Int): Int
- /** The index of the last matched character, or -1 if nothing was matched */
+ /** The index following the last matched character, or -1 if nothing was matched. */
def end: Int
/** The index following the last matched character in group `i`,
- * or -1 if nothing was matched for that group */
+ * or -1 if nothing was matched for that group.
+ */
def end(i: Int): Int
- /** The matched string, or `null` if nothing was matched */
+ /** The matched string, or `null` if nothing was matched. */
def matched: String =
if (start >= 0) source.subSequence(start, end).toString
else null
/** The matched string in group `i`,
- * or `null` if nothing was matched */
+ * or `null` if nothing was matched.
+ */
def group(i: Int): String =
if (start(i) >= 0) source.subSequence(start(i), end(i)).toString
else null
- /** All matched subgroups, i.e. not including group(0) */
+ /** All capturing groups, i.e., not including group(0). */
def subgroups: List[String] = (1 to groupCount).toList map group
/** The char sequence before first character of match,
- * or `null` if nothing was matched */
+ * or `null` if nothing was matched.
+ */
def before: CharSequence =
if (start >= 0) source.subSequence(0, start)
else null
/** The char sequence before first character of match in group `i`,
- * or `null` if nothing was matched for that group */
+ * or `null` if nothing was matched for that group.
+ */
def before(i: Int): CharSequence =
if (start(i) >= 0) source.subSequence(0, start(i))
else null
/** Returns char sequence after last character of match,
- * or `null` if nothing was matched */
+ * or `null` if nothing was matched.
+ */
def after: CharSequence =
if (end >= 0) source.subSequence(end, source.length)
else null
/** The char sequence after last character of match in group `i`,
- * or `null` if nothing was matched for that group */
+ * or `null` if nothing was matched for that group.
+ */
def after(i: Int): CharSequence =
if (end(i) >= 0) source.subSequence(end(i), source.length)
else null
private lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex
- /** Returns the group with given name
+ /** Returns the group with given name.
*
* @param id The group name
* @return The requested group
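
A brief sketch exercising the `MatchData` accessors documented above (the sample text is invented):

    val word = """(\w)(\w*)""".r
    val m = word.findFirstMatchIn("  hello  ").get

    m.start       // 2
    m.end         // 7
    m.matched     // "hello"
    m.groupCount  // 2
    m.group(1)    // "h"
    m.subgroups   // List("h", "ello")
    m.before      // "  "
    m.after       // "  "
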
@@ -577,24 +661,22 @@ object Regex {
case Some(index) => group(index)
}
- /** The matched string; equivalent to `matched.toString` */
+ /** The matched string; equivalent to `matched.toString`. */
override def toString = matched
-
}
- /** Provides information about a succesful match.
- */
+ /** Provides information about a successful match. */
class Match(val source: CharSequence,
private[matching] val matcher: Matcher,
val groupNames: Seq[String]) extends MatchData {
- /** The index of the first matched character */
+ /** The index of the first matched character. */
val start = matcher.start
- /** The index following the last matched character */
+ /** The index following the last matched character. */
val end = matcher.end
- /** The number of subgroups */
+ /** The number of subgroups. */
def groupCount = matcher.groupCount
private lazy val starts: Array[Int] =
@@ -602,19 +684,19 @@ object Regex {
private lazy val ends: Array[Int] =
((0 to groupCount) map matcher.end).toArray
- /** The index of the first matched character in group `i` */
+ /** The index of the first matched character in group `i`. */
def start(i: Int) = starts(i)
- /** The index following the last matched character in group `i` */
+ /** The index following the last matched character in group `i`. */
def end(i: Int) = ends(i)
/** The match itself with matcher-dependent lazy vals forced,
- * so that match is valid even once matcher is advanced
+ * so that match is valid even once matcher is advanced.
*/
def force: this.type = { starts; ends; this }
}
- /** An extractor object for Matches, yielding the matched string
+ /** An extractor object for Matches, yielding the matched string.
*
* This can be used to help writing replacer functions when you
* are not interested in match data. For example:
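
A sketch of such a replacer function using the `Match` extractor (input text is invented, not the scaladoc's own example):

    import scala.util.matching.Regex.Match

    val shout = """\w+""".r replaceAllIn ("a simple example", _ match {
      case Match(s) => s.toUpperCase
    })
    // shout == "A SIMPLE EXAMPLE"
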
@@ -629,15 +711,15 @@ object Regex {
def unapply(m: Match): Some[String] = Some(m.matched)
}
- /** An extractor object that yields the groups in the match. Using an extractor
- * rather than the original regex avoids recomputing the match.
+ /** An extractor object that yields the groups in the match. Using this extractor
+ * rather than the original `Regex` ensures that the match is not recomputed.
*
* {{{
* import scala.util.matching.Regex.Groups
*
- * val datePattern = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
+ * val date = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
* val text = "The doc spree happened on 2011-07-15."
- * val day = datePattern replaceAllIn(text, _ match { case Groups(year, month, day) => month+"/"+day })
+ * val day = date replaceAllIn(text, _ match { case Groups(_, month, day) => s"$month/$day" })
* }}}
*/
object Groups {
@@ -666,7 +748,7 @@ object Regex {
nextSeen
}
- /** The next matched substring of `source` */
+ /** The next matched substring of `source`. */
def next(): String = {
if (!hasNext) throw new NoSuchElementException
nextSeen = false
@@ -675,28 +757,28 @@ object Regex {
override def toString = super[AbstractIterator].toString
- /** The index of the first matched character */
+ /** The index of the first matched character. */
def start: Int = matcher.start
- /** The index of the first matched character in group `i` */
+ /** The index of the first matched character in group `i`. */
def start(i: Int): Int = matcher.start(i)
- /** The index of the last matched character */
+ /** The index of the last matched character. */
def end: Int = matcher.end
- /** The index following the last matched character in group `i` */
+ /** The index following the last matched character in group `i`. */
def end(i: Int): Int = matcher.end(i)
- /** The number of subgroups */
+ /** The number of subgroups. */
def groupCount = matcher.groupCount
- /** Convert to an iterator that yields MatchData elements instead of Strings */
+ /** Convert to an iterator that yields MatchData elements instead of Strings. */
def matchData: Iterator[Match] = new AbstractIterator[Match] {
def hasNext = self.hasNext
def next = { self.next(); new Match(source, matcher, groupNames).force }
}
- /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
+ /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support. */
private[matching] def replacementData = new AbstractIterator[Match] with Replacement {
def matcher = self.matcher
def hasNext = self.hasNext
diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala
index 1cf55cb28d..8f811f950e 100644
--- a/src/manual/scala/man1/Command.scala
+++ b/src/manual/scala/man1/Command.scala
@@ -47,7 +47,7 @@ trait Command {
def copyright = Section("COPYRIGHT",
"This is open-source software, available to you under a BSD-like license. " &
- "See accomponying \"copyright\" or \"LICENSE\" file for copying conditions. " &
+ "See accompanying \"copyright\" or \"LICENSE\" file for copying conditions. " &
"There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A " &
"PARTICULAR PURPOSE.")
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 31d25d4801..3954ed588e 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -360,7 +360,7 @@ object scalac extends Command {
"ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"),
Definition(
MItalic("selectivecps"),
- MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"),
+ MItalic("@cps") & "-driven transform of selectiveanf assignments (CPS plugin)"),
Definition(
MItalic("uncurry"),
"uncurry, translate function values to anonymous classes"),
diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
index d618e086f4..f6e2d2a9ec 100644
--- a/src/partest-extras/scala/tools/partest/ASMConverters.scala
+++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala
@@ -2,70 +2,216 @@ package scala.tools.partest
import scala.collection.JavaConverters._
import scala.tools.asm
-import asm.tree.{ClassNode, MethodNode, InsnList}
+import asm.{tree => t}
/** Makes using ASM from ByteCodeTests more convenient.
*
* Wraps ASM instructions in case classes so that equals and toString work
* for the purpose of bytecode diffing and pretty printing.
*/
-trait ASMConverters {
- // wrap ASM's instructions so we get case class-style `equals` and `toString`
- object instructions {
- def fromMethod(meth: MethodNode): List[Instruction] = {
- val insns = meth.instructions
- val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) }
-
- asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]]
+object ASMConverters {
+
+ /**
+ * Transform the instructions of an ASM Method into a list of [[Instruction]]s.
+ */
+ def instructionsFromMethod(meth: t.MethodNode): List[Instruction] = new AsmToScala(meth).instructions
+
+ def convertMethod(meth: t.MethodNode): Method = new AsmToScala(meth).method
+
+ implicit class RichInstructionLists(val self: List[Instruction]) extends AnyVal {
+ def === (other: List[Instruction]) = equivalentBytecode(self, other)
+
+ def dropLinesFrames = self.filterNot(i => i.isInstanceOf[LineNumber] || i.isInstanceOf[FrameEntry])
+
+ private def referencedLabels(instruction: Instruction): Set[Instruction] = instruction match {
+ case Jump(op, label) => Set(label)
+ case LookupSwitch(op, dflt, keys, labels) => (dflt :: labels).toSet
+ case TableSwitch(op, min, max, dflt, labels) => (dflt :: labels).toSet
+ case LineNumber(line, start) => Set(start)
+ case _ => Set.empty
}
- sealed abstract class Instruction { def opcode: String }
- case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction
- case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction
- case class Op (opcode: String) extends Instruction
- case class IntOp (opcode: String, operand: Int) extends Instruction
- case class Jump (opcode: String, label: Label) extends Instruction
- case class Ldc (opcode: String, cst: Any) extends Instruction
- case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction
- case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction
- case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction
- case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction
- case class TypeOp (opcode: String, desc: String) extends Instruction
- case class VarOp (opcode: String, `var`: Int) extends Instruction
- case class Label (offset: Int) extends Instruction { def opcode: String = "" }
- case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" }
- case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" }
+ def dropStaleLabels = {
+ val definedLabels: Set[Instruction] = self.filter(_.isInstanceOf[Label]).toSet
+ val usedLabels: Set[Instruction] = self.flatMap(referencedLabels)(collection.breakOut)
+ self.filterNot(definedLabels diff usedLabels)
+ }
+
+ def dropNonOp = dropLinesFrames.dropStaleLabels
+ }
+
+ sealed abstract class Instruction extends Product {
+ def opcode: Int
+
+ // toString such that the first field, "opcode: Int", is printed textually.
+ final override def toString() = {
+ import scala.tools.asm.util.Printer.OPCODES
+ def opString(op: Int) = if (OPCODES.isDefinedAt(op)) OPCODES(op) else "?"
+ val printOpcode = opcode != -1
+
+ productPrefix + (
+ if (printOpcode) Iterator(opString(opcode)) ++ productIterator.drop(1)
+ else productIterator
+ ).mkString("(", ", ", ")")
+ }
}
- abstract class AsmToScala {
- import instructions._
+ case class Method(instructions: List[Instruction], handlers: List[ExceptionHandler], localVars: List[LocalVariable])
+
+ case class Field (opcode: Int, owner: String, name: String, desc: String) extends Instruction
+ case class Incr (opcode: Int, `var`: Int, incr: Int) extends Instruction
+ case class Op (opcode: Int) extends Instruction
+ case class IntOp (opcode: Int, operand: Int) extends Instruction
+ case class Jump (opcode: Int, label: Label) extends Instruction
+ case class Ldc (opcode: Int, cst: Any) extends Instruction
+ case class LookupSwitch(opcode: Int, dflt: Label, keys: List[Int], labels: List[Label]) extends Instruction
+ case class TableSwitch (opcode: Int, min: Int, max: Int, dflt: Label, labels: List[Label]) extends Instruction
+ case class Invoke (opcode: Int, owner: String, name: String, desc: String, itf: Boolean) extends Instruction
+ case class NewArray (opcode: Int, desc: String, dims: Int) extends Instruction
+ case class TypeOp (opcode: Int, desc: String) extends Instruction
+ case class VarOp (opcode: Int, `var`: Int) extends Instruction
+ case class Label (offset: Int) extends Instruction { def opcode: Int = -1 }
+ case class FrameEntry (`type`: Int, local: List[Any], stack: List[Any]) extends Instruction { def opcode: Int = -1 }
+ case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: Int = -1 }
+
+ case class ExceptionHandler(start: Label, end: Label, handler: Label, desc: Option[String])
+ case class LocalVariable(name: String, desc: String, signature: Option[String], start: Label, end: Label, index: Int)
+
+ class AsmToScala(asmMethod: t.MethodNode) {
+
+ def instructions: List[Instruction] = asmMethod.instructions.iterator.asScala.toList map apply
+
+ def method: Method = Method(instructions, convertHandlers(asmMethod), convertLocalVars(asmMethod))
- def labelIndex(l: asm.tree.AbstractInsnNode): Int
+ private def labelIndex(l: t.LabelNode): Int = asmMethod.instructions.indexOf(l)
+
+ private def op(i: t.AbstractInsnNode): Int = i.getOpcode
- def mapOver(is: List[Any]): List[Any] = is map {
- case i: asm.tree.AbstractInsnNode => apply(i)
+ private def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
+
+ // Heterogeneous List[Any] is used in FrameNode: type information about locals / stack values
+ // is stored in a List[Any] (Integer, String or LabelNode), see Javadoc of MethodNode#visitFrame.
+ // Opcodes (eg Opcodes.INTEGER) and Reference types (eg "java/lang/Object") are returned unchanged,
+ // LabelNodes are mapped to their LabelEntry.
+ private def mapOverFrameTypes(is: List[Any]): List[Any] = is map {
+ case i: t.LabelNode => applyLabel(i)
case x => x
}
- def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?"
- def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
- def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label]
- def apply(x: asm.tree.AbstractInsnNode): Instruction = x match {
- case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String)
- case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int)
- case i: asm.tree.InsnNode => Op (op(i))
- case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int)
- case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label))
- case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any)
- case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]])
- case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]])
- case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String)
- case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int)
- case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String)
- case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int)
- case i: asm.tree.LabelNode => Label (labelIndex(x))
- case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack)))
- case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label)
+ // avoids some casts
+ private def applyLabel(l: t.LabelNode) = this(l: t.AbstractInsnNode).asInstanceOf[Label]
+
+ private def apply(x: t.AbstractInsnNode): Instruction = x match {
+ case i: t.FieldInsnNode => Field (op(i), i.owner, i.name, i.desc)
+ case i: t.IincInsnNode => Incr (op(i), i.`var`, i.incr)
+ case i: t.InsnNode => Op (op(i))
+ case i: t.IntInsnNode => IntOp (op(i), i.operand)
+ case i: t.JumpInsnNode => Jump (op(i), applyLabel(i.label))
+ case i: t.LdcInsnNode => Ldc (op(i), i.cst: Any)
+ case i: t.LookupSwitchInsnNode => LookupSwitch (op(i), applyLabel(i.dflt), lst(i.keys) map (x => x: Int), lst(i.labels) map applyLabel)
+ case i: t.TableSwitchInsnNode => TableSwitch (op(i), i.min, i.max, applyLabel(i.dflt), lst(i.labels) map applyLabel)
+ case i: t.MethodInsnNode => Invoke (op(i), i.owner, i.name, i.desc, i.itf)
+ case i: t.MultiANewArrayInsnNode => NewArray (op(i), i.desc, i.dims)
+ case i: t.TypeInsnNode => TypeOp (op(i), i.desc)
+ case i: t.VarInsnNode => VarOp (op(i), i.`var`)
+ case i: t.LabelNode => Label (labelIndex(i))
+ case i: t.FrameNode => FrameEntry (i.`type`, mapOverFrameTypes(lst(i.local)), mapOverFrameTypes(lst(i.stack)))
+ case i: t.LineNumberNode => LineNumber (i.line, applyLabel(i.start))
+ }
+
+ private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = {
+ method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut)
+ }
+
+ private def convertLocalVars(method: t.MethodNode): List[LocalVariable] = {
+ method.localVariables.asScala.map(v => LocalVariable(v.name, v.desc, Option(v.signature), applyLabel(v.start), applyLabel(v.end), v.index))(collection.breakOut)
+ }
+ }
+
+ import collection.mutable.{Map => MMap}
+
+ /**
+ * Bytecode is equal modulo local variable numbering and label numbering.
+ */
+ def equivalentBytecode(as: List[Instruction], bs: List[Instruction], varMap: MMap[Int, Int] = MMap(), labelMap: MMap[Int, Int] = MMap()): Boolean = {
+ def same(v1: Int, v2: Int, m: MMap[Int, Int]) = {
+ if (m contains v1) m(v1) == v2
+ else if (m.valuesIterator contains v2) false // v2 is already associated with some different value v1
+ else { m(v1) = v2; true }
+ }
+ def sameVar(v1: Int, v2: Int) = same(v1, v2, varMap)
+ def sameLabel(l1: Label, l2: Label) = same(l1.offset, l2.offset, labelMap)
+ def sameLabels(ls1: List[Label], ls2: List[Label]) = (ls1 corresponds ls2)(sameLabel)
+
+ def sameFrameTypes(ts1: List[Any], ts2: List[Any]) = (ts1 corresponds ts2) {
+ case (t1: Label, t2: Label) => sameLabel(t1, t2)
+ case (x, y) => x == y
+ }
+
+ if (as.isEmpty) bs.isEmpty
+ else if (bs.isEmpty) false
+ else ((as.head, bs.head) match {
+ case (VarOp(op1, v1), VarOp(op2, v2)) => op1 == op2 && sameVar(v1, v2)
+ case (Incr(op1, v1, inc1), Incr(op2, v2, inc2)) => op1 == op2 && sameVar(v1, v2) && inc1 == inc2
+
+ case (l1 @ Label(_), l2 @ Label(_)) => sameLabel(l1, l2)
+ case (Jump(op1, l1), Jump(op2, l2)) => op1 == op2 && sameLabel(l1, l2)
+ case (LookupSwitch(op1, l1, keys1, ls1), LookupSwitch(op2, l2, keys2, ls2)) => op1 == op2 && sameLabel(l1, l2) && keys1 == keys2 && sameLabels(ls1, ls2)
+ case (TableSwitch(op1, min1, max1, l1, ls1), TableSwitch(op2, min2, max2, l2, ls2)) => op1 == op2 && min1 == min2 && max1 == max2 && sameLabel(l1, l2) && sameLabels(ls1, ls2)
+ case (LineNumber(line1, l1), LineNumber(line2, l2)) => line1 == line2 && sameLabel(l1, l2)
+ case (FrameEntry(tp1, loc1, stk1), FrameEntry(tp2, loc2, stk2)) => tp1 == tp2 && sameFrameTypes(loc1, loc2) && sameFrameTypes(stk1, stk2)
+
+ // this needs to go after the above. For example, Label(1) may not equal Label(1), if before
+ // the left 1 was associated with another right index.
+ case (a, b) if a == b => true
+
+ case _ => false
+ }) && equivalentBytecode(as.tail, bs.tail, varMap, labelMap)
+ }
+
+ def applyToMethod(method: t.MethodNode, instructions: List[Instruction]): Unit = {
+ val asmLabel = createLabelNodes(instructions)
+ instructions.foreach(visitMethod(method, _, asmLabel))
+ }
+
+ /**
+ * Convert back a [[Method]] to ASM land. The code is emitted into the parameter `asmMethod`.
+ */
+ def applyToMethod(asmMethod: t.MethodNode, method: Method): Unit = {
+ val asmLabel = createLabelNodes(method.instructions)
+ method.instructions.foreach(visitMethod(asmMethod, _, asmLabel))
+ method.handlers.foreach(h => asmMethod.visitTryCatchBlock(asmLabel(h.start), asmLabel(h.end), asmLabel(h.handler), h.desc.orNull))
+ method.localVars.foreach(v => asmMethod.visitLocalVariable(v.name, v.desc, v.signature.orNull, asmLabel(v.start), asmLabel(v.end), v.index))
+ }
+
+ private def createLabelNodes(instructions: List[Instruction]): Map[Label, asm.Label] = {
+ val labels = instructions collect {
+ case l: Label => l
}
+ assert(labels.distinct == labels, s"Duplicate labels in: $labels")
+ labels.map(l => (l, new asm.Label())).toMap
+ }
+
+ private def frameTypesToAsm(l: List[Any], asmLabel: Map[Label, asm.Label]): List[Object] = l map {
+ case l: Label => asmLabel(l)
+ case x => x.asInstanceOf[Object]
+ }
+
+ private def visitMethod(method: t.MethodNode, instruction: Instruction, asmLabel: Map[Label, asm.Label]): Unit = instruction match {
+ case Field(op, owner, name, desc) => method.visitFieldInsn(op, owner, name, desc)
+ case Incr(op, vr, incr) => method.visitIincInsn(vr, incr)
+ case Op(op) => method.visitInsn(op)
+ case IntOp(op, operand) => method.visitIntInsn(op, operand)
+ case Jump(op, label) => method.visitJumpInsn(op, asmLabel(label))
+ case Ldc(op, cst) => method.visitLdcInsn(cst)
+ case LookupSwitch(op, dflt, keys, labels) => method.visitLookupSwitchInsn(asmLabel(dflt), keys.toArray, (labels map asmLabel).toArray)
+ case TableSwitch(op, min, max, dflt, labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray: _*)
+ case Invoke(op, owner, name, desc, itf) => method.visitMethodInsn(op, owner, name, desc, itf)
+ case NewArray(op, desc, dims) => method.visitMultiANewArrayInsn(desc, dims)
+ case TypeOp(op, desc) => method.visitTypeInsn(op, desc)
+ case VarOp(op, vr) => method.visitVarInsn(op, vr)
+ case l: Label => method.visitLabel(asmLabel(l))
+ case FrameEntry(tp, local, stack) => method.visitFrame(tp, local.length, frameTypesToAsm(local, asmLabel).toArray, stack.length, frameTypesToAsm(stack, asmLabel).toArray)
+ case LineNumber(line, start) => method.visitLineNumber(line, asmLabel(start))
}
-}
\ No newline at end of file
+}
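
A minimal sketch of the new comparison entry point, assuming the partest-extras classes above are on the classpath; the instruction lists are made up for illustration:

    import scala.tools.asm.Opcodes._
    import scala.tools.partest.ASMConverters._

    // Same control flow, but locals and label offsets are numbered differently.
    val a = List(Label(0), VarOp(ILOAD, 1), Jump(IFEQ, Label(0)), Op(RETURN))
    val b = List(Label(3), VarOp(ILOAD, 2), Jump(IFEQ, Label(3)), Op(RETURN))

    equivalentBytecode(a, b)       // true: equal modulo var/label numbering
    equivalentBytecode(a, b.tail)  // false: different instruction streams
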
diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 7650a892fd..8459419fa5 100644
--- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -3,7 +3,7 @@ package scala.tools.partest
import scala.tools.nsc.util.JavaClassPath
import scala.collection.JavaConverters._
import scala.tools.asm.{ClassWriter, ClassReader}
-import scala.tools.asm.tree.{ClassNode, MethodNode, InsnList}
+import scala.tools.asm.tree._
import java.io.{FileOutputStream, FileInputStream, File => JFile, InputStream}
import AsmNode._
@@ -28,18 +28,18 @@ import AsmNode._
* See test/files/jvm/bytecode-test-example for an example of bytecode test.
*
*/
-abstract class BytecodeTest extends ASMConverters {
- import instructions._
+abstract class BytecodeTest {
+ import ASMConverters._
/** produce the output to be compared against a checkfile */
protected def show(): Unit
- def main(args: Array[String]): Unit = show
+ def main(args: Array[String]): Unit = show()
// asserts
def sameBytecode(methA: MethodNode, methB: MethodNode) = {
- val isa = instructions.fromMethod(methA)
- val isb = instructions.fromMethod(methB)
+ val isa = instructionsFromMethod(methA)
+ val isb = instructionsFromMethod(methB)
if (isa == isb) println("bytecode identical")
else diffInstructions(isa, isb)
}
@@ -81,18 +81,16 @@ abstract class BytecodeTest extends ASMConverters {
}
}
- // bytecode is equal modulo local variable numbering
- def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
- case _ if a == b => true
- case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
- case _ => false
- }
-
- def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
- val isa = fromMethod(methA)
- val isb = fromMethod(methB)
+ /**
+ * Compare the bytecodes of two methods.
+ *
+ * For the `similar` function, you probably want to pass [[ASMConverters.equivalentBytecode]].
+ */
+ def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (List[Instruction], List[Instruction]) => Boolean) = {
+ val isa = instructionsFromMethod(methA)
+ val isb = instructionsFromMethod(methB)
if (isa == isb) println("bytecode identical")
- else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
+ else if (similar(isa, isb)) println("bytecode similar")
else diffInstructions(isa, isb)
}
@@ -118,10 +116,8 @@ abstract class BytecodeTest extends ASMConverters {
sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
- val classBytes: InputStream = (for {
- classRep <- classpath.findClass(name)
- binary <- classRep.binary
- } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+ val classBytes: InputStream = classpath.findClassFile(name).map(_.input)
+ .getOrElse(sys.error(s"failed to load class '$name'; classpath = $classpath"))
val cr = new ClassReader(classBytes)
val cn = new ClassNode()
@@ -140,7 +136,7 @@ abstract class BytecodeTest extends ASMConverters {
object BytecodeTest {
/** Parse `file` as a class file, transforms the ASM representation with `f`,
- * and overwrites the orginal file.
+ * and overwrites the original file.
*/
def modifyClassFile(file: JFile)(f: ClassNode => ClassNode) {
val rfile = new reflect.io.File(file)
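
A sketch of how the reworked API fits together in a bytecode test; the class and method names (`Sample`, `fooOld`, `fooNew`) are hypothetical:

    import scala.tools.partest.BytecodeTest
    import scala.tools.partest.ASMConverters._

    object Test extends BytecodeTest {
      def show(): Unit = {
        val cn    = loadClassNode("Sample")
        val methA = getMethod(cn, "fooOld")
        val methB = getMethod(cn, "fooNew")
        sameBytecode(methA, methB)                              // exact comparison
        similarBytecode(methA, methB, equivalentBytecode(_, _)) // modulo var/label numbering
      }
    }
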
diff --git a/src/partest-extras/scala/tools/partest/ParserTest.scala b/src/partest-extras/scala/tools/partest/ParserTest.scala
new file mode 100644
index 0000000000..e4c92e3dc3
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/ParserTest.scala
@@ -0,0 +1,21 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ */
+
+package scala.tools.partest
+
+/** A class for testing parser output.
+ * Just supply the `code` and update the check file.
+ */
+abstract class ParserTest extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Ystop-after:parser -Xprint:parser"
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+}
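
A hypothetical use of the new helper; the snippet under test is an assumption:

    import scala.tools.partest.ParserTest

    object Test extends ParserTest {
      // The parsed-and-printed tree is compared against the test's check file.
      override def code = "class C { def f = if (true) 1 else 2 }"
    }
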
diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index a728e8bdef..5b65d6ab9b 100644
--- a/src/partest-extras/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -8,6 +8,7 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
import java.lang.reflect.{ Method => JMethod, Field => JField }
+import scala.util.matching.Regex.Match
/** A class for testing repl code.
* It filters the line of output that mentions a version number.
@@ -22,6 +23,9 @@ abstract class ReplTest extends DirectTest {
s.Xnojline.value = true
transformSettings(s)
}
+ /** True for SessionTest to preserve session text. */
+ def inSession: Boolean = false
+ /** True to preserve welcome text. */
def welcoming: Boolean = false
lazy val welcome = "(Welcome to Scala) version .*".r
def normalize(s: String) = s match {
@@ -36,7 +40,7 @@ abstract class ReplTest extends DirectTest {
val s = settings
log("eval(): settings = " + s)
//ILoop.runForTranscript(code, s).lines drop 1 // not always first line
- val lines = ILoop.runForTranscript(code, s).lines
+ val lines = ILoop.runForTranscript(code, s, inSession = inSession).lines
if (welcoming) lines map normalize
else lines filter unwelcoming
}
@@ -57,13 +61,30 @@ abstract class SessionTest extends ReplTest {
/** Session transcript, as a triple-quoted, multiline, marginalized string. */
def session: String
- /** Expected output, as an iterator. */
- def expected = session.stripMargin.lines
+ /** Expected output, as an iterator, optionally marginally stripped. */
+ def expected = if (stripMargins) session.stripMargin.lines else session.lines
+
+ /** Override with false if we should not strip margins because of leading continuation lines. */
+ def stripMargins: Boolean = true
+
+ /** Analogous to stripMargins, don't mangle continuation lines on echo. */
+ override def inSession: Boolean = true
/** Code is the command list culled from the session (or the expected session output).
- * Would be nicer if code were lazy lines.
+ * Would be nicer if code were lazy lines so you could generate arbitrarily long text.
+ * Retain user input: prompt lines and continuations, without the prefix; or pasted text plus ctl-D.
*/
- override final def code = expected filter (_ startsWith prompt) map (_ drop prompt.length) mkString "\n"
+ import SessionTest._
+ override final def code = input findAllMatchIn (expected mkString ("", "\n", "\n")) map {
+ case input(null, null, prompted) =>
+ def continued(m: Match): Option[String] = m match {
+ case margin(text) => Some(text)
+ case _ => None
+ }
+ margin.replaceSomeIn(prompted, continued)
+ case input(cmd, pasted, null) =>
+ cmd + pasted + "\u0004"
+ } mkString
final def prompt = "scala> "
@@ -75,3 +96,9 @@ abstract class SessionTest extends ReplTest {
if (evaled != wanted) Console print nest.FileManager.compareContents(wanted, evaled, "expected", "actual")
}
}
+object SessionTest {
+ // \R for line break is Java 8, \v for vertical space might suffice
+ val input = """(?m)^scala> (:pa.*\u000A)// Entering paste mode.*\u000A\u000A((?:.*\u000A)*)\u000A// Exiting paste mode.*\u000A|^scala> (.*\u000A(?:\s*\| .*\u000A)*)""".r
+
+ val margin = """(?m)^\s*\| (.*)$""".r
+}
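
A sketch of the shape of a `SessionTest`; the transcript content here is invented, and a real test's transcript must match the repl's actual output line for line:

    import scala.tools.partest.SessionTest

    object Test extends SessionTest {
      def session =
        """|Type in expressions to have them evaluated.
           |Type :help for more information.
           |
           |scala> 21 * 2
           |res0: Int = 42
           |
           |scala> :quit"""
    }
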
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
index d6b62e1d9e..848103f5cc 100644
--- a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
+++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
@@ -12,7 +12,7 @@ import java.util.Map;
* A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting
* call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class.
*
- * WARANING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
+ * WARNING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
* because it must be universally accessible for instrumentation needs. If you want to profile your test use
* {@link Instrumentation} instead.
*/
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
index b1b100fbb0..d97756c171 100644
--- a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -50,7 +50,7 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes {
mv.visitLdcInsn(name);
mv.visitLdcInsn(desc);
mv.visitMethodInsn(INVOKESTATIC, profilerClass, "methodCalled",
- "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+ "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", false);
}
}
return mv;
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index c942d759ce..4cc2cb86b2 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -60,7 +60,7 @@ package api
*
* object Test extends App {
* val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
- * def jarg(name: String) = jann(newTermName(name)).asInstanceOf[LiteralArgument].value
+ * def jarg(name: String) = jann(TermName(name)).asInstanceOf[LiteralArgument].value
*
* val classRef = jarg("classRef").typeValue
* println(showRaw(classRef)) // TypeRef(ThisType(<empty>), JavaAnnottee, List())
@@ -150,7 +150,7 @@ trait Constants {
*
* object Test extends App {
* val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
- * def jarg(name: String) = jann(newTermName(name)) match {
+ * def jarg(name: String) = jann(TermName(name)) match {
* // Constant is always wrapped into a Literal or LiteralArgument tree node
* case LiteralArgument(ct: Constant) => value
* case _ => sys.error("Not a constant")
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 5b6ff2325c..ad03718898 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -9,6 +9,7 @@ package api
import scala.reflect.runtime.{universe => ru}
import scala.annotation.compileTimeOnly
+import java.io.ObjectStreamException
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
@@ -83,7 +84,7 @@ trait Exprs { self: Universe =>
*
* It is equivalent to
* {{{
- * Select( expr.tree, newTermName("foo") )
+ * Select( expr.tree, TermName("foo") )
* }}}
*
* The following example code however does not compile
@@ -157,23 +158,23 @@ trait Exprs { self: Universe =>
|if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath,
|import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
+ @throws(classOf[ObjectStreamException])
private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[WeakTypeTag[T]].in(ru.rootMirror))
}
}
+@SerialVersionUID(1L)
private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakTypeTag[_]) extends Serializable {
- private def writeObject(out: java.io.ObjectOutputStream): Unit = {
- out.writeObject(treec)
- out.writeObject(tag)
- }
-
- private def readObject(in: java.io.ObjectInputStream): Unit = {
- treec = in.readObject().asInstanceOf[TreeCreator]
- tag = in.readObject().asInstanceOf[ru.WeakTypeTag[_]]
- }
+ import scala.reflect.runtime.universe.{Expr, runtimeMirror}
+ @throws(classOf[ObjectStreamException])
private def readResolve(): AnyRef = {
- import ru._
- Expr(rootMirror, treec)(tag)
+ val loader: ClassLoader = try {
+ Thread.currentThread().getContextClassLoader()
+ } catch {
+ case se: SecurityException => null
+ }
+ val m = runtimeMirror(loader)
+ Expr(m, treec)(tag.in(m))
}
}
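
The change above replaces the custom read/write hooks with a `readResolve` that rebuilds the `Expr` in a mirror for the context class loader. A rough sketch of the round trip this is meant to enable, assuming a 2.11.x scala-reflect on the classpath:

    import java.io._
    import scala.reflect.runtime.universe._

    val e: Expr[Int] = reify(21 * 2)

    val bytes = new ByteArrayOutputStream()
    new ObjectOutputStream(bytes).writeObject(e)

    val restored = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
      .readObject().asInstanceOf[Expr[Int]]

    println(showCode(restored.tree))  // the reified tree survives the round trip
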
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index bf4d6353df..bcad84a3f0 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -20,20 +20,20 @@ import scala.language.implicitConversions
*
* For example, to create a class named `C` one would write something like:
* {{{
- * ClassDef(Modifiers(NoFlags), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(NoFlags), TypeName("C"), Nil, ...)
* }}}
*
* Here, the flag set is empty.
*
* To make `C` private, one would write something like:
* {{{
- * ClassDef(Modifiers(PRIVATE), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(PRIVATE), TypeName("C"), Nil, ...)
* }}}
*
* Flags can also be combined with the vertical bar operator (`|`).
* For example, a private final class is written something like:
* {{{
- * ClassDef(Modifiers(PRIVATE | FINAL), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(PRIVATE | FINAL), TypeName("C"), Nil, ...)
* }}}
*
* The list of all available flags is defined in [[scala.reflect.api.FlagSets#FlagValues]], available via
diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala
index 673dbce6f5..c6352905d1 100644
--- a/src/reflect/scala/reflect/api/Liftables.scala
+++ b/src/reflect/scala/reflect/api/Liftables.scala
@@ -52,7 +52,7 @@ trait Liftables { self: Universe =>
object Unliftable extends StandardUnliftableInstances {
/** A helper method that simplifies creation of `Unliftable` instances.
* Takes a partial function which is defined on correct representations of `T`
- * and returns corresponing instances.
+ * and returns corresponding instances.
*
* For example to extract a reference to an object as object itself:
*
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 318fdb369a..96aab48e75 100644
--- a/src/reflect/scala/reflect/api/Mirror.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -58,7 +58,7 @@ abstract class Mirror[U <: Universe with Singleton] {
* scala> cm.staticPackage("scala")
* res2: scala.reflect.runtime.universe.ModuleSymbol = package scala
*
- * scala> res2.moduleClass.info member newTypeName("List")
+ * scala> res2.moduleClass.info member TypeName("List")
* res3: scala.reflect.runtime.universe.Symbol = type List
*
* scala> res3.fullName
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index ec420d184c..adaf829b32 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -292,7 +292,7 @@ trait Mirrors { self: Universe =>
* that can be used to create instances of the class, inspect its companion object or perform further reflections.
*
* To get a class symbol by the name of the class you would like to reflect,
- * use `<this mirror>.symbol.info.member(newTypeName(<name of the class>)).asClass`.
+ * use `<this mirror>.symbol.info.member(TypeName(<name of the class>)).asClass`.
* For further information about member lookup refer to `Symbol.info`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
@@ -338,7 +338,7 @@ trait Mirrors { self: Universe =>
* with getting a field or invoking a getter method of the field.
*
* If `symbol` represents a field of a base class with respect to the class of the receiver,
- * and this base field is overriden in the class of the receiver, then this method will retrieve
+ * and this base field is overridden in the class of the receiver, then this method will retrieve
* the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
*/
def get: Any
@@ -352,7 +352,7 @@ trait Mirrors { self: Universe =>
* with setting a field or invoking a setter method of the field.
*
* If `symbol` represents a field of a base class with respect to the class of the receiver,
- * and this base field is overriden in the class of the receiver, then this method will set
+ * and this base field is overridden in the class of the receiver, then this method will set
* the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
*/
def set(value: Any): Unit
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index fe5f47c25d..cc01225287 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -17,11 +17,11 @@ import scala.language.implicitConversions
* To search for the `map` method (which is a term) declared in the `List` class, one can do:
*
* {{{
- * scala> typeOf[List[_]].member(newTermName("map"))
+ * scala> typeOf[List[_]].member(TermName("map"))
* res0: reflect.runtime.universe.Symbol = method map
* }}}
*
- * To search for a type member, one can follow the same procedure, using `newTypeName` instead.
+ * To search for a type member, one can follow the same procedure, using `TypeName` instead.
*
* For more information about creating and using `Name`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
*
@@ -30,15 +30,15 @@ import scala.language.implicitConversions
*/
trait Names {
/** An implicit conversion from String to TermName.
- * Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
- * @group Names
+ * Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`.
+ * @group Names
*/
@deprecated("Use explicit `TermName(s)` instead", "2.11.0")
implicit def stringToTermName(s: String): TermName = TermName(s)
/** An implicit conversion from String to TypeName.
- * Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
- * @group Names
+ * Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`.
+ * @group Names
*/
@deprecated("Use explicit `TypeName(s)` instead", "2.11.0")
implicit def stringToTypeName(s: String): TypeName = TypeName(s)
@@ -72,10 +72,10 @@ trait Names {
* @group API
*/
abstract class NameApi {
- /** Checks wether the name is a term name */
+ /** Checks whether the name is a term name */
def isTermName: Boolean
- /** Checks wether the name is a type name */
+ /** Checks whether the name is a type name */
def isTypeName: Boolean
/** Returns a term name that wraps the same string as `this` */
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 92ae6d8b44..01b9759c70 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -46,15 +46,15 @@ import java.io.{ PrintWriter, StringWriter }
* {{{
* scala> showRaw(tree)
* res1: String = Block(List(
- * ClassDef(Modifiers(FINAL), newTypeName("C"), List(), Template(
- * List(Ident(newTypeName("AnyRef"))),
+ * ClassDef(Modifiers(FINAL), TypeName("C"), List(), Template(
+ * List(Ident(TypeName("AnyRef"))),
* noSelfType,
* List(
* DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
* Block(List(
* Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
* Literal(Constant(())))),
- * DefDef(Modifiers(), newTermName("x"), List(), List(), TypeTree(),
+ * DefDef(Modifiers(), TermName("x"), List(), List(), TypeTree(),
* Literal(Constant(2))))))),
* Literal(Constant(())))
* }}}
@@ -70,23 +70,23 @@ import java.io.{ PrintWriter, StringWriter }
*
* scala> showRaw(cm.mkToolBox().typecheck(tree), printTypes = true)
* res2: String = Block[1](List(
- * ClassDef[2](Modifiers(FINAL), newTypeName("C"), List(), Template[3](
- * List(Ident[4](newTypeName("AnyRef"))),
+ * ClassDef[2](Modifiers(FINAL), TypeName("C"), List(), Template[3](
+ * List(Ident[4](TypeName("AnyRef"))),
* noSelfType,
* List(
* DefDef[2](Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree[3](),
* Block[1](List(
- * Apply[4](Select[5](Super[6](This[3](newTypeName("C")), tpnme.EMPTY), ...))),
+ * Apply[4](Select[5](Super[6](This[3](TypeName("C")), tpnme.EMPTY), ...))),
* Literal[1](Constant(())))),
- * DefDef[2](Modifiers(), newTermName("x"), List(), List(), TypeTree[7](),
+ * DefDef[2](Modifiers(), TermName("x"), List(), List(), TypeTree[7](),
* Literal[8](Constant(2))))))),
* Literal[1](Constant(())))
* [1] TypeRef(ThisType(scala), scala.Unit, List())
* [2] NoType
- * [3] TypeRef(NoPrefix, newTypeName("C"), List())
+ * [3] TypeRef(NoPrefix, TypeName("C"), List())
* [4] TypeRef(ThisType(java.lang), java.lang.Object, List())
* [5] MethodType(List(), TypeRef(ThisType(java.lang), java.lang.Object, List()))
- * [6] SuperType(ThisType(newTypeName("C")), TypeRef(... java.lang.Object ...))
+ * [6] SuperType(ThisType(TypeName("C")), TypeRef(... java.lang.Object ...))
* [7] TypeRef(ThisType(scala), scala.Int, List())
* [8] ConstantType(Constant(2))
* }}}
@@ -112,10 +112,10 @@ import java.io.{ PrintWriter, StringWriter }
* // showRaw has already been discussed above
* scala> showRaw(tpe)
* res1: String = RefinedType(
- * List(TypeRef(ThisType(scala), newTypeName("AnyRef"), List())),
+ * List(TypeRef(ThisType(scala), TypeName("AnyRef"), List())),
* Scope(
- * newTermName("x"),
- * newTermName("y")))
+ * TermName("x"),
+ * TermName("y")))
* }}}
*
* `printIds` and/or `printKinds` can additionally be supplied as arguments in a call to
@@ -124,10 +124,10 @@ import java.io.{ PrintWriter, StringWriter }
* {{{
* scala> showRaw(tpe, printIds = true, printKinds = true)
* res2: String = RefinedType(
- * List(TypeRef(ThisType(scala#2043#PK), newTypeName("AnyRef")#691#TPE, List())),
+ * List(TypeRef(ThisType(scala#2043#PK), TypeName("AnyRef")#691#TPE, List())),
* Scope(
- * newTermName("x")#2540#METH,
- * newTermName("y")#2541#GET))
+ * TermName("x")#2540#METH,
+ * TermName("y")#2541#GET))
* }}}
*
* For more details about `Printer`s and other aspects of Scala reflection, see the
diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala
index e905aa4153..554b43afaf 100644
--- a/src/reflect/scala/reflect/api/Quasiquotes.scala
+++ b/src/reflect/scala/reflect/api/Quasiquotes.scala
@@ -3,7 +3,7 @@ package api
trait Quasiquotes { self: Universe =>
- /** Implicit class that introduces `q`, `tq`, `cq,` `p` and `fq` string interpolators
+ /** Implicit class that introduces `q`, `tq`, `cq,` `pq` and `fq` string interpolators
* that are also known as quasiquotes. With their help you can easily manipulate
* Scala reflection ASTs.
*
@@ -13,7 +13,7 @@ trait Quasiquotes { self: Universe =>
protected trait api {
// implementation is hardwired to `dispatch` method of `scala.tools.reflect.quasiquotes.Quasiquotes`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def apply[T](args: T*): Tree = macro ???
+ def apply[A >: Any](args: A*): Tree = macro ???
def unapply(scrutinee: Any): Any = macro ???
}
object q extends api
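
For reference, the five interpolators named above in use (requires scala-reflect; the snippets are illustrative):

    import scala.reflect.runtime.universe._

    val defn = q"def double(x: Int): Int = x * 2" // terms and definitions
    val tpt  = tq"Either[String, Int]"            // types
    val cas  = cq"Some(v) => v"                   // cases
    val pat  = pq"Some(v)"                        // patterns
    val enum = fq"x <- 1 to 3"                    // for-loop enumerators
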
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index 524b7ea14b..bf9cf5e334 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -128,7 +128,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
@@ -156,7 +156,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
@@ -181,7 +181,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
diff --git a/src/reflect/scala/reflect/api/StandardLiftables.scala b/src/reflect/scala/reflect/api/StandardLiftables.scala
index 66ac62cc9e..ebf15e4f57 100644
--- a/src/reflect/scala/reflect/api/StandardLiftables.scala
+++ b/src/reflect/scala/reflect/api/StandardLiftables.scala
@@ -230,6 +230,6 @@ trait StandardLiftables { self: Universe =>
val Symbol = TermName("Symbol")
val util = TermName("util")
val Vector = TermName("Vector")
- val WILDCARD = self.nme.WILDCARD
+ val WILDCARD = self.termNames.WILDCARD
}
}
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index dddd3c0e61..c01029d067 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -27,7 +27,7 @@ package api
* scala> class C[T] { def test[U](x: T)(y: U): Int = ??? }
* defined class C
*
- * scala> val test = typeOf[C[Int]].member(newTermName("test")).asMethod
+ * scala> val test = typeOf[C[Int]].member(TermName("test")).asMethod
* test: reflect.runtime.universe.MethodSymbol = method test
*
* scala> test.info
@@ -260,6 +260,9 @@ trait Symbols { self: Universe =>
* with an object definition (module class in scala compiler parlance).
* If yes, `isType` is also guaranteed to be true.
*
+ * Note to compiler developers: During the "mixin" phase, trait implementation class symbols
+ * receive the `lateMODULE` flag, hence `isImplClass && isModuleClass` becomes true.
+ *
* @group Tests
*/
def isModuleClass: Boolean = false
@@ -336,7 +339,7 @@ trait Symbols { self: Universe =>
@deprecated("Use `overrides` instead", "2.11.0")
def allOverriddenSymbols: List[Symbol]
- /** Returns all symbols overriden by this symbol.
+ /** Returns all symbols overridden by this symbol.
*
* @group Basics
*/
diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala
index 027c840955..000eaa1aa6 100644
--- a/src/reflect/scala/reflect/api/TreeCreator.scala
+++ b/src/reflect/scala/reflect/api/TreeCreator.scala
@@ -2,12 +2,12 @@ package scala
package reflect
package api
-/** This is an internal implementation class.
+/** A mirror-aware factory for trees.
*
* This class is used internally by Scala Reflection, and is not recommended for use in client code.
*
- * @group ReflectionAPI
+ * @group ReflectionAPI
*/
-abstract class TreeCreator {
+abstract class TreeCreator extends Serializable {
def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree
}
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index ff8926651b..2bf407ee19 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -33,7 +33,7 @@ package api
*
* The following creates an AST representing `print("Hello World")`:
* {{{
- * Apply(Select(Select(This(newTypeName("scala")), newTermName("Predef")), newTermName("print")), List(Literal(Constant("Hello World"))))
+ * Apply(Select(Select(This(TypeName("scala")), TermName("Predef")), TermName("print")), List(Literal(Constant("Hello World"))))
* }}}
*
* The following creates an AST from a literal 5, and then uses `showRaw` to print it in a readable format.
@@ -158,7 +158,7 @@ trait Trees { self: Universe =>
/** Do all parts of this tree satisfy predicate `p`? */
def forAll(p: Tree => Boolean): Boolean
- /** Tests whether two trees are structurall equal.
+ /** Tests whether two trees are structurally equal.
* Note that `==` on trees is reference equality.
*/
def equalsStructure(that : Tree): Boolean
@@ -1098,11 +1098,11 @@ trait Trees { self: Universe =>
* // a dummy node that carries the type of unapplication to patmat
* // the <unapply-selector> here doesn't have an underlying symbol
* // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable
- * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * Apply(Select(Ident(Foo), TermName("unapply")), List(Ident(TermName("<unapply-selector>")))),
* // arguments of the unapply => nothing synthetic here
- * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * List(Bind(TermName("x"), Ident(nme.WILDCARD)))),
* EmptyTree,
- * Ident(newTermName("x")))))
+ * Ident(TermName("x")))))
* }}}
*
* Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
@@ -2661,7 +2661,7 @@ trait Trees { self: Universe =>
* @group Traversal
*/
abstract class ModifiersExtractor {
- def apply(): Modifiers = Modifiers(NoFlags, tpnme.EMPTY, List())
+ def apply(): Modifiers = Modifiers(NoFlags, typeNames.EMPTY, List())
def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
def unapply(mods: Modifiers): Option[(FlagSet, Name, List[Tree])]
}
@@ -2674,7 +2674,7 @@ trait Trees { self: Universe =>
/** The factory for `Modifiers` instances.
* @group Traversal
*/
- def Modifiers(flags: FlagSet): Modifiers = Modifiers(flags, tpnme.EMPTY)
+ def Modifiers(flags: FlagSet): Modifiers = Modifiers(flags, typeNames.EMPTY)
/** An empty `Modifiers` object: no flags, empty visibility annotation and no Scala annotations.
* @group Traversal
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala
index 37fff90b43..cbd55b9428 100644
--- a/src/reflect/scala/reflect/api/TypeCreator.scala
+++ b/src/reflect/scala/reflect/api/TypeCreator.scala
@@ -8,6 +8,6 @@ package api
*
* @group ReflectionAPI
*/
-abstract class TypeCreator {
+abstract class TypeCreator extends Serializable {
def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type
}
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index 1dfc84be69..7db375ca61 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -9,6 +9,7 @@ package api
import java.lang.{ Class => jClass }
import scala.language.implicitConversions
+import java.io.ObjectStreamException
/**
* A `TypeTag[T]` encapsulates the runtime type representation of some type `T`.
@@ -233,6 +234,7 @@ trait TypeTags { self: Universe =>
val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
otherMirror.universe.WeakTypeTag[T](otherMirror1, tpec)
}
+ @throws(classOf[ObjectStreamException])
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false)
}
@@ -293,10 +295,13 @@ trait TypeTags { self: Universe =>
val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
otherMirror.universe.TypeTag[T](otherMirror1, tpec)
}
+ @throws(classOf[ObjectStreamException])
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
}
/* @group TypeTags */
+ // This class only exists to silence MIMA complaining about a binary incompatibility.
+ // Only the top-level class (api.PredefTypeCreator) should be used.
private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator {
def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = {
copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
@@ -304,8 +309,9 @@ trait TypeTags { self: Universe =>
}
/* @group TypeTags */
- private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
+ private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new api.PredefTypeCreator(copyIn)) {
override lazy val tpe: Type = _tpe
+ @throws(classOf[ObjectStreamException])
private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
}
@@ -341,20 +347,27 @@ trait TypeTags { self: Universe =>
def symbolOf[T: WeakTypeTag]: TypeSymbol
}
+// This class should be final, but we can't do that in Scala 2.11.x without breaking
+// binary compatibility.
+@SerialVersionUID(1L)
private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable {
- private def writeObject(out: java.io.ObjectOutputStream): Unit = {
- out.writeObject(tpec)
- out.writeBoolean(concrete)
- }
-
- private def readObject(in: java.io.ObjectInputStream): Unit = {
- tpec = in.readObject().asInstanceOf[TypeCreator]
- concrete = in.readBoolean()
+ import scala.reflect.runtime.universe.{TypeTag, WeakTypeTag, runtimeMirror}
+ @throws(classOf[ObjectStreamException])
+ private def readResolve(): AnyRef = {
+ val loader: ClassLoader = try {
+ Thread.currentThread().getContextClassLoader()
+ } catch {
+ case se: SecurityException => null
+ }
+ val m = runtimeMirror(loader)
+ if (concrete) TypeTag(m, tpec)
+ else WeakTypeTag(m, tpec)
}
+}
- private def readResolve(): AnyRef = {
- import scala.reflect.runtime.universe._
- if (concrete) TypeTag(rootMirror, tpec)
- else WeakTypeTag(rootMirror, tpec)
+/* @group TypeTags */
+private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = {
+ copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
}
}
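The `writeReplace`/`readResolve` pair introduced above is what lets a `TypeTag` survive Java serialization: on the way out the tag is replaced by a `SerializedTypeTag` carrying only its `TypeCreator`, and on the way in it is rebuilt against a mirror for the context class loader. A minimal round-trip sketch (the object and method names below are made up; only the reflection API calls come from the library):

import java.io._
import scala.reflect.runtime.universe._

object TypeTagRoundTrip {
  // serialize and immediately deserialize a value through in-memory streams
  def roundTrip[T <: AnyRef](value: T): T = {
    val buffer = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buffer)
    out.writeObject(value)
    out.close()
    val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
    in.readObject().asInstanceOf[T]
  }

  def main(args: Array[String]): Unit = {
    val original = typeTag[List[Int]]
    val restored = roundTrip(original)
    // the restored tag is a fresh instance reconstituted via readResolve,
    // but it denotes the same type as the original
    println(restored.tpe =:= original.tpe) // true
  }
}
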
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index f6995dd5de..cd7648a44a 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -469,7 +469,7 @@ trait Types {
def unapply(tpe: SingleType): Option[(Type, Symbol)]
/** @see [[InternalApi.singleType]] */
- @deprecated("Use `ClassSymbol.thisPrefix` or `internal.singleType` instead")
+ @deprecated("Use `ClassSymbol.thisPrefix` or `internal.singleType` instead", "2.11.0")
def apply(pre: Type, sym: Symbol)(implicit token: CompatToken): Type = internal.singleType(pre, sym)
}
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 19e9eef851..6863cdfd82 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -15,7 +15,6 @@ import scala.language.postfixOps
/** AnnotationInfo and its helpers */
trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
import definitions._
- import treeInfo._
// Common annotation code between Symbol and Type.
// For methods altering the annotation list, on Symbol it mutates
@@ -302,7 +301,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
*/
def defaultTargets = symbol.annotations map (_.symbol) filter isMetaAnnotation
// Test whether the typeSymbol of atp conforms to the given class.
- def matches(clazz: Symbol) = symbol isNonBottomSubClass clazz
+ def matches(clazz: Symbol) = !symbol.isInstanceOf[StubSymbol] && (symbol isNonBottomSubClass clazz)
// All subtrees of all args are considered.
def hasArgWhich(p: Tree => Boolean) = args exists (_ exists p)
@@ -388,11 +387,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
case Literal(const) => LiteralAnnotArg(const)
case Apply(ArrayModule, args) => ArrayAnnotArg(args map encodeJavaArg toArray)
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => NestedAnnotArg(treeToAnnotation(arg))
- case _ => throw new Exception("unexpected java argument shape $arg: literals, arrays and nested annotations are supported")
+ case _ => throw new Exception(s"unexpected java argument shape $arg: literals, arrays and nested annotations are supported")
}
def encodeJavaArgs(args: List[Tree]): List[(Name, ClassfileAnnotArg)] = args match {
case AssignOrNamedArg(Ident(name), arg) :: rest => (name, encodeJavaArg(arg)) :: encodeJavaArgs(rest)
- case arg :: rest => throw new Exception("unexpected java argument shape $arg: only AssignOrNamedArg trees are supported")
+ case arg :: rest => throw new Exception(s"unexpected java argument shape $arg: only AssignOrNamedArg trees are supported")
case Nil => Nil
}
val atp = tpt.tpe
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 0ca8611719..54f64153c1 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -144,7 +144,7 @@ trait BaseTypeSeqs {
"\n --- because ---\n"+msg)
}
- /** A merker object for a base type sequence that's no yet computed.
+ /** A marker object for a base type sequence that's not yet computed.
* used to catch inheritance cycles
*/
val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array())
@@ -152,7 +152,7 @@ trait BaseTypeSeqs {
/** Create a base type sequence consisting of a single type */
def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp))
- /** Create the base type sequence of a compound type wuth given tp.parents */
+ /** Create the base type sequence of a compound type with given tp.parents */
def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = {
val tsym = tp.typeSymbol
val parents = tp.parents
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 25d78f4e6f..5b20d9db8e 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -156,11 +156,11 @@ trait Definitions extends api.StandardDefinitions {
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
- lazy val JavaLangPackage = getPackage("java.lang")
+ lazy val JavaLangPackage = getPackage(TermName("java.lang"))
lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass
- lazy val ScalaPackage = getPackage("scala")
+ lazy val ScalaPackage = getPackage(TermName("scala"))
lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass
- lazy val RuntimePackage = getPackage("scala.runtime")
+ lazy val RuntimePackage = getPackage(TermName("scala.runtime"))
lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass
def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
@@ -453,7 +453,7 @@ trait Definitions extends api.StandardDefinitions {
// XML
lazy val ScalaXmlTopScope = getModuleIfDefined("scala.xml.TopScope")
- lazy val ScalaXmlPackage = getPackageIfDefined("scala.xml")
+ lazy val ScalaXmlPackage = getPackageIfDefined(TermName("scala.xml"))
// scala.reflect
lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type]
@@ -490,8 +490,10 @@ trait Definitions extends api.StandardDefinitions {
lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
- lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.blackbox.Context") // defined in scala-reflect.jar, so we need to be careful
- lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context") // defined in scala-reflect.jar, so we need to be careful
+ private def Context_210 = if (settings.isScala211) NoSymbol else getClassIfDefined("scala.reflect.macros.Context") // needed under -Xsource:2.10
+ lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.blackbox.Context").orElse(Context_210) // defined in scala-reflect.jar, so we need to be careful
+
+ lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context").orElse(Context_210) // defined in scala-reflect.jar, so we need to be careful
def MacroContextPrefix = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.prefix))
def MacroContextPrefixType = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType))
def MacroContextUniverse = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.universe))
@@ -512,6 +514,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
+ lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle")
+
// Option classes
lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type]
@@ -651,6 +655,7 @@ trait Definitions extends api.StandardDefinitions {
// tends to change the course of events by forcing types.
def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden)
def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
+ def tupleComponents(tp: Type) = tp.dealiasWiden.typeArgs
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
@@ -787,7 +792,7 @@ trait Definitions extends api.StandardDefinitions {
* The class defining the method is a supertype of `tp` that
* has a public no-arg primary constructor.
*/
- def samOf(tp: Type): Symbol = {
+ def samOf(tp: Type): Symbol = if (!settings.Xexperimental) NoSymbol else {
// if tp has a constructor, it must be public and must not take any arguments
// (not even an implicit argument list -- to keep it simple for now)
val tpSym = tp.typeSymbol
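The added guard means the single-abstract-method lookup is only attempted under `-Xexperimental`; otherwise `samOf` answers `NoSymbol` and ordinary typing rules apply. A small illustration of what that flag gates (plain user code, not part of this patch):

// With -Xexperimental, a function literal can be used where a type with a
// single abstract method is expected; without the flag this does not compile.
trait Callback { def run(x: Int): Int }

object SamDemo {
  val cb: Callback = (x: Int) => x + 1
  def main(args: Array[String]): Unit =
    println(cb.run(41)) // 42
}
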
@@ -834,12 +839,18 @@ trait Definitions extends api.StandardDefinitions {
def typeOfMemberNamedHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)())
def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe))
def typeOfMemberNamedDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe))
- def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp))
+ def typesOfSelectors(tp: Type) =
+ if (isTupleType(tp)) tupleComponents(tp)
+ else getterMemberTypes(tp, productSelectors(tp))
+
// SI-8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible)
// extractor to limit exposure to regressions like the reported problem with existentials.
// TODO fix the existential problem in the general case, see test/pending/pos/t8128.scala
private def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp baseType baseClass).typeArgs match {
- case x :: Nil => x
+ case x :: Nil =>
+ val x1 = x
+ val x2 = repackExistential(x1)
+ x2
case _ => or
}
@@ -896,12 +907,14 @@ trait Definitions extends api.StandardDefinitions {
)
}
- def EnumType(sym: Symbol) =
+ def EnumType(sym: Symbol) = {
// given (in java): "class A { enum E { VAL1 } }"
// - sym: the symbol of the actual enumeration value (VAL1)
// - .owner: the ModuleClassSymbol of the enumeration (object E)
// - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
- sym.owner.linkedClassOfClass.tpe
+ // SI-6613 Subsequent runs of the resident compiler demand the phase discipline here.
+ enteringPhaseNotLaterThan(picklerPhase)(sym.owner.linkedClassOfClass).tpe
+ }
/** Given a class symbol C with type parameters T1, T2, ... Tn
* which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn,
@@ -920,10 +933,10 @@ trait Definitions extends api.StandardDefinitions {
// members of class scala.Any
- // TODO these aren't final! They are now overriden in AnyRef/Object. Prior to the fix
+ // TODO these aren't final! They are now overridden in AnyRef/Object. Prior to the fix
// for SI-8129, they were actually *overloaded* by the members in AnyRef/Object.
// We should unfinalize these, override in AnyValClass, and make the overrides final.
- // Refchecks never actually looks at these, so its just for consistency.
+ // Refchecks never actually looks at these, so it's just for consistency.
lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL)
lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL)
@@ -1079,6 +1092,10 @@ trait Definitions extends api.StandardDefinitions {
lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation]
lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation]
+ // Java retention annotations
+ lazy val AnnotationRetentionAttr = requiredClass[java.lang.annotation.Retention]
+ lazy val AnnotationRetentionPolicyAttr = requiredClass[java.lang.annotation.RetentionPolicy]
+
// Annotations
lazy val BridgeClass = requiredClass[scala.annotation.bridge]
lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable]
@@ -1103,7 +1120,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaInlineClass = requiredClass[scala.inline]
lazy val ScalaNoInlineClass = requiredClass[scala.noinline]
lazy val SerialVersionUIDAttr = requiredClass[scala.SerialVersionUID]
- lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
+ lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(), List(nme.value -> LiteralAnnotArg(Constant(0))))
lazy val SpecializedClass = requiredClass[scala.specialized]
lazy val ThrowsClass = requiredClass[scala.throws[_]]
lazy val TransientAttr = requiredClass[scala.transient]
@@ -1131,7 +1148,7 @@ trait Definitions extends api.StandardDefinitions {
// Trying to allow for deprecated locations
sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
)
- lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet
+ lazy val metaAnnotations: Set[Symbol] = getPackage(TermName("scala.annotation.meta")).info.members filter (_ isSubClass StaticAnnotationClass) toSet
// According to the scala.annotation.meta package object:
// * By default, annotations on (`val`-, `var`- or plain) constructor parameters
@@ -1424,6 +1441,10 @@ trait Definitions extends api.StandardDefinitions {
lazy val isUnbox = unboxMethod.values.toSet[Symbol]
lazy val isBox = boxMethod.values.toSet[Symbol]
+ lazy val Boolean_and = definitions.Boolean_and
+ lazy val Boolean_or = definitions.Boolean_or
+ lazy val Boolean_not = definitions.Boolean_not
+
lazy val Option_apply = getMemberMethod(OptionModule, nme.apply)
lazy val List_apply = DefinitionsClass.this.List_apply
@@ -1441,7 +1462,7 @@ trait Definitions extends api.StandardDefinitions {
)
lazy val TagSymbols = TagMaterializers.keySet
lazy val Predef_conforms = (getMemberIfDefined(PredefModule, nme.conforms)
- orElse getMemberMethod(PredefModule, "conforms": TermName)) // TODO: predicate on -Xsource:2.10 (for now, needed for transition from M8 -> RC1)
+ orElse getMemberMethod(PredefModule, TermName("conforms"))) // TODO: predicate on -Xsource:2.10 (for now, needed for transition from M8 -> RC1)
lazy val Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
lazy val Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
lazy val Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
@@ -1489,6 +1510,13 @@ trait Definitions extends api.StandardDefinitions {
lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
+
+ def isPolymorphicSignature(sym: Symbol) = PolySigMethods(sym)
+ private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists)
+
+ lazy val Scala_Java8_CompatPackage = rootMirror.getPackageIfDefined("scala.compat.java8")
+ lazy val Scala_Java8_CompatPackage_JFunction = (0 to MaxTupleArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JFunction" + i)))
+ lazy val Scala_Java8_CompatPackage_JProcedure = (0 to MaxTupleArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JProcedure" + i)))
}
}
}
diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala
index 357abf765f..a330e0accb 100644
--- a/src/reflect/scala/reflect/internal/Depth.scala
+++ b/src/reflect/scala/reflect/internal/Depth.scala
@@ -21,8 +21,20 @@ final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] {
object Depth {
// A don't care value for the depth parameter in lubs/glbs and related operations.
- final val AnyDepth = new Depth(Int.MinValue)
+ // When passed this value, the recursion budget will be inferred from the shape of
+ // the `typeDepth` of the list of types.
+ final val AnyDepthValue = -3
+ final val AnyDepth = new Depth(AnyDepthValue)
+
final val Zero = new Depth(0)
- @inline final def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth)
+ // SI-9018: A negative depth is used to signal that we have breached the recursion limit.
+ // The LUB/GLB implementation will then truncate to Any/Nothing.
+ //
+ // We only really need one of these, but we allow representation of Depth(-1) and Depth(-2)
+ // to mimic the historical choice of 2.10.4.
+ @inline final def apply(depth: Int): Depth = {
+ if (depth < AnyDepthValue) AnyDepth
+ else new Depth(depth)
+ }
}
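The net effect of the clamping is that any value below `AnyDepthValue` collapses to `AnyDepth`, while -1 and -2 stay representable. A quick illustrative check (`scala.reflect.internal` is compiler-internal API, so this is a sketch rather than supported usage):

import scala.reflect.internal.Depth

object DepthClampDemo {
  def main(args: Array[String]): Unit = {
    assert(Depth(-10) == Depth.AnyDepth) // anything below AnyDepthValue is clamped
    assert(Depth(-1).depth == -1)        // -1 and -2 remain representable, as in 2.10.4
    assert(Depth(0) == Depth.Zero)
    println("clamping behaves as described above")
  }
}
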
diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala
index 7e9a568266..17883d12ad 100644
--- a/src/reflect/scala/reflect/internal/FreshNames.scala
+++ b/src/reflect/scala/reflect/internal/FreshNames.scala
@@ -7,6 +7,7 @@ package reflect
package internal
import scala.reflect.internal.util.FreshNameCreator
+import scala.util.matching.Regex
trait FreshNames { self: Names with StdNames =>
// SI-6879 Keeps track of counters that are supposed to be globally unique
@@ -23,17 +24,20 @@ trait FreshNames { self: Names with StdNames =>
// Extractor that matches names which were generated by some
// FreshNameCreator with known prefix. Extracts user-specified
// prefix that was used as a parameter to newName by stripping
- // global creator prefix and unique number in the end of the name.
+ // global creator prefix and unique numerical suffix.
+ // The creator prefix and numerical suffix may both be empty.
class FreshNameExtractor(creatorPrefix: String = "") {
- // quote prefix so that it can be used with replaceFirst
- // which expects regExp rather than simple string
- val quotedCreatorPrefix = java.util.regex.Pattern.quote(creatorPrefix)
-
- def unapply(name: Name): Option[String] = {
- val sname = name.toString
- // name should start with creatorPrefix and end with number
- if (!sname.startsWith(creatorPrefix) || !sname.matches("^.*\\d*$")) None
- else Some(NameTransformer.decode(sname.replaceFirst(quotedCreatorPrefix, "").replaceAll("\\d*$", "")))
+
+ // name should start with creatorPrefix and end with number
+ val freshlyNamed = {
+ val pre = if (!creatorPrefix.isEmpty) Regex quote creatorPrefix else ""
+ s"""$pre(.*?)\\d*""".r
}
+
+ def unapply(name: Name): Option[String] =
+ name.toString match {
+ case freshlyNamed(prefix) => Some(prefix)
+ case _ => None
+ }
}
}
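The rewritten extractor precompiles one regex instead of calling `replaceFirst`/`replaceAll` on every name. The standalone sketch below builds a pattern of the same shape outside the compiler; the creator prefix "fresh$" is just an example value:

import scala.util.matching.Regex

object FreshNameRegexDemo {
  def main(args: Array[String]): Unit = {
    val creatorPrefix = "fresh$"
    // same shape as above: quoted creator prefix, user prefix, trailing digits
    val freshlyNamed = (Regex.quote(creatorPrefix) + """(.*?)\d*""").r

    "fresh$x$1" match {
      case freshlyNamed(userPrefix) => println(userPrefix) // prints "x$"
      case _                        => println("not a fresh name")
    }
  }
}
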
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index dc4ad25ef2..494f62af06 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -301,7 +301,7 @@ trait Importers { to: SymbolTable =>
case (their: from.TypeTree, my: to.TypeTree) =>
if (their.wasEmpty) my.defineType(importType(their.tpe)) else my.setType(importType(their.tpe))
case (_, _) =>
- my.tpe = importType(their.tpe)
+ my.setType(importType(their.tpe))
}
}
}
diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala
index e9916cf7d1..ad4cec5b4d 100644
--- a/src/reflect/scala/reflect/internal/Internals.scala
+++ b/src/reflect/scala/reflect/internal/Internals.scala
@@ -9,7 +9,6 @@ import scala.ref.WeakReference
import scala.reflect.api.Universe
import scala.reflect.macros.Attachments
import scala.reflect.internal.util.FreshNameCreator
-import scala.reflect.internal.Flags._
import scala.reflect.internal.util.ListOfNil
trait Internals extends api.Internals {
@@ -129,7 +128,7 @@ trait Internals extends api.Internals {
def typeBounds(lo: Type, hi: Type): TypeBounds = self.TypeBounds(lo, hi)
def boundedWildcardType(bounds: TypeBounds): BoundedWildcardType = self.BoundedWildcardType(bounds)
- def subpatterns(tree: Tree): Option[List[Tree]] = tree.attachments.get[SubpatternsAttachment].map(_.patterns.map(_.duplicate))
+ def subpatterns(tree: Tree): Option[List[Tree]] = tree.attachments.get[SubpatternsAttachment].map(_.patterns.map(duplicateAndKeepPositions))
type Decorators = MacroDecoratorApi
lazy val decorators: Decorators = new MacroDecoratorApi {
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 4a35e024de..0cbb976a98 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -277,7 +277,7 @@ trait Mirrors extends api.Mirrors {
// TODO - having these as objects means they elude the attempt to
// add synchronization in SynchronizedSymbols. But we should either
- // flip on object overrides or find some other accomodation, because
+ // flip on object overrides or find some other accommodation, because
// lazy vals are unnecessarily expensive relative to objects and it
// is very beneficial for a handful of bootstrap symbols to have
// first class identities
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index ae9f2da4e5..32d12d305e 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -40,7 +40,10 @@ trait Names extends api.Names {
/** Hashtable for finding type names quickly. */
private val typeHashtable = new Array[TypeName](HASH_SIZE)
- /** The hashcode of a name. */
+ /**
+ * The hashcode of a name depends on the first, the last and the middle character,
+ * and the length of the name.
+ */
private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
if (len > 0)
(len * (41 * 41 * 41) +
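The hunk cuts off before the rest of the computation; the following standalone sketch has the shape the new doc comment describes (length, first, last and middle character) and is a reconstruction for illustration, not a verbatim copy of the file:

object NameHashSketch {
  // combine the length with the first, last and middle characters
  def nameHash(cs: Array[Char], offset: Int, len: Int): Int =
    if (len > 0)
      len * (41 * 41 * 41) +
        cs(offset) * (41 * 41) +
        cs(offset + len - 1) * 41 +
        cs(offset + (len >> 1))
    else 0

  def main(args: Array[String]): Unit = {
    val chars = "Predef".toCharArray
    println(nameHash(chars, 0, chars.length))
  }
}
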
@@ -104,10 +107,21 @@ trait Names extends api.Names {
// The logic order here is future-proofing against the possibility
// that name.toString will become an eager val, in which case the call
// to enterChars cannot follow the construction of the TermName.
- val ncStart = nc
- enterChars(cs, offset, len)
- if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
- else new TermName_R(ncStart, len, h)
+ var startIndex = 0
+ if (cs == chrs) {
+ // Optimize for subName, the new name is already stored in chrs
+ startIndex = offset
+ } else {
+ startIndex = nc
+ enterChars(cs, offset, len)
+ }
+ val next = termHashtable(h)
+ val termName =
+ if (cachedString ne null) new TermName_S(startIndex, len, next, cachedString)
+ else new TermName_R(startIndex, len, next)
+ // Add the new termName to the hashtable only after it's been fully constructed
+ termHashtable(h) = termName
+ termName
}
}
if (synchronizeNames) nameLock.synchronized(body) else body
@@ -117,11 +131,11 @@ trait Names extends api.Names {
newTermName(cs, offset, len, cachedString).toTypeName
/** Create a term name from string. */
- @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
+ @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
/** Create a type name from string. */
- @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
+ @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTypeName(s: String): TypeName = newTermName(s).toTypeName
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
@@ -145,40 +159,20 @@ trait Names extends api.Names {
newTermName(bs, offset, len).toTypeName
/**
- * Used only by the GenBCode backend, to represent bytecode-level types in a way that makes equals() and hashCode() efficient.
- * For bytecode-level types of OBJECT sort, its internal name (not its descriptor) is stored.
- * For those of ARRAY sort, its descriptor is stored ie has a leading '['
- * For those of METHOD sort, its descriptor is stored ie has a leading '('
+ * Used by the GenBCode backend to look up type names that are known to already exist. This method
+ * might be invoked in a multi-threaded setting. Invoking newTypeName instead might be unsafe.
*
- * can-multi-thread
- * TODO SI-6240 !!! JZ Really? the constructors TermName and TypeName publish unconstructed `this` references
- * into the hash tables; we could observe them here before the subclass constructor completes.
+ * can-multi-thread: names are added to the hash tables only after they are fully constructed.
*/
- final def lookupTypeName(cs: Array[Char]): TypeName = { lookupTypeNameIfExisting(cs, true) }
-
- final def lookupTypeNameIfExisting(cs: Array[Char], failOnNotFound: Boolean): TypeName = {
-
- val hterm = hashValue(cs, 0, cs.size) & HASH_MASK
- var nterm = termHashtable(hterm)
- while ((nterm ne null) && (nterm.length != cs.size || !equals(nterm.start, cs, 0, cs.size))) {
- nterm = nterm.next
- }
- if (nterm eq null) {
- if (failOnNotFound) { assert(false, "TermName not yet created: " + new String(cs)) }
- return null
- }
+ final def lookupTypeName(cs: Array[Char]): TypeName = {
+ val hash = hashValue(cs, 0, cs.length) & HASH_MASK
+ var typeName = typeHashtable(hash)
- val htype = hashValue(chrs, nterm.start, nterm.length) & HASH_MASK
- var ntype = typeHashtable(htype)
- while ((ntype ne null) && ntype.start != nterm.start) {
- ntype = ntype.next
+ while ((typeName ne null) && (typeName.length != cs.length || !equals(typeName.start, cs, 0, cs.length))) {
+ typeName = typeName.next
}
- if (ntype eq null) {
- if (failOnNotFound) { assert(false, "TypeName not yet created: " + new String(cs)) }
- return null
- }
-
- ntype
+ assert(typeName != null, s"TypeName ${new String(cs)} not yet created.")
+ typeName
}
// Classes ----------------------------------------------------------------------
@@ -515,43 +509,47 @@ trait Names extends api.Names {
/** TermName_S and TypeName_S have fields containing the string version of the name.
* TermName_R and TypeName_R recreate it each time toString is called.
*/
- private final class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
- protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
+ private final class TermName_S(index0: Int, len0: Int, next0: TermName, override val toString: String) extends TermName(index0, len0, next0) {
+ protected def createCompanionName(next: TypeName): TypeName = new TypeName_S(index, len, next, toString)
override def newName(str: String): TermName = newTermNameCached(str)
}
- private final class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
- protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
+ private final class TypeName_S(index0: Int, len0: Int, next0: TypeName, override val toString: String) extends TypeName(index0, len0, next0) {
override def newName(str: String): TypeName = newTypeNameCached(str)
}
- private final class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
- protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
+ private final class TermName_R(index0: Int, len0: Int, next0: TermName) extends TermName(index0, len0, next0) {
+ protected def createCompanionName(next: TypeName): TypeName = new TypeName_R(index, len, next)
override def toString = new String(chrs, index, len)
}
- private final class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
- protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
+ private final class TypeName_R(index0: Int, len0: Int, next0: TypeName) extends TypeName(index0, len0, next0) {
override def toString = new String(chrs, index, len)
}
// SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled
- sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) with TermNameApi {
+ sealed abstract class TermName(index0: Int, len0: Int, val next: TermName) extends Name(index0, len0) with TermNameApi {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
- val next: TermName = termHashtable(hash)
- termHashtable(hash) = this
+
def isTermName: Boolean = true
def isTypeName: Boolean = false
def toTermName: TermName = this
def toTypeName: TypeName = {
def body = {
+ // Re-computing the hash saves a field for storing it in the TermName
val h = hashValue(chrs, index, len) & HASH_MASK
var n = typeHashtable(h)
while ((n ne null) && n.start != index)
n = n.next
if (n ne null) n
- else createCompanionName(h)
+ else {
+ val next = typeHashtable(h)
+ val typeName = createCompanionName(next)
+ // Add the new typeName to the hashtable only after it's been fully constructed
+ typeHashtable(h) = typeName
+ typeName
+ }
}
if (synchronizeNames) nameLock.synchronized(body) else body
}
@@ -562,7 +560,7 @@ trait Names extends api.Names {
def nameKind = "term"
/** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
- protected def createCompanionName(h: Int): TypeName
+ protected def createCompanionName(next: TypeName): TypeName
}
implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
@@ -572,24 +570,22 @@ trait Names extends api.Names {
def unapply(name: TermName): Option[String] = Some(name.toString)
}
- sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) with TypeNameApi {
+ sealed abstract class TypeName(index0: Int, len0: Int, val next: TypeName) extends Name(index0, len0) with TypeNameApi {
type ThisNameType = TypeName
protected[this] def thisName: TypeName = this
- val next: TypeName = typeHashtable(hash)
- typeHashtable(hash) = this
-
def isTermName: Boolean = false
def isTypeName: Boolean = true
def toTermName: TermName = {
def body = {
+ // Re-computing the hash saves a field for storing it in the TypeName
val h = hashValue(chrs, index, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && n.start != index)
n = n.next
- if (n ne null) n
- else createCompanionName(h)
+ assert (n ne null, s"TypeName $this is missing its correspondent")
+ n
}
if (synchronizeNames) nameLock.synchronized(body) else body
}
@@ -601,8 +597,6 @@ trait Names extends api.Names {
def nameKind = "type"
override def decode = if (nameDebug) super.decode + "!" else super.decode
- /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
- protected def createCompanionName(h: Int): TermName
}
implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index 01fba1efc1..4d0e31b037 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -23,13 +23,10 @@ import scala.collection.mutable.ListBuffer
* Otherwise, the singleton consisting of the node itself.
*/
trait Positions extends api.Positions { self: SymbolTable =>
-
type Position = scala.reflect.internal.util.Position
val NoPosition = scala.reflect.internal.util.NoPosition
implicit val PositionTag = ClassTag[Position](classOf[Position])
- def inform(msg: String): Unit
-
def useOffsetPositions: Boolean = true
/** A position that wraps a set of trees.
@@ -100,7 +97,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
inform("\nWhile validating #" + tree.id)
inform(treeStatus(tree))
inform("\nChildren:")
- tree.children map (t => " " + treeStatus(t, tree)) foreach inform
+ tree.children foreach (t => inform(" " + treeStatus(t, tree)))
inform("=======")
throw new ValidateException(msg)
}
@@ -109,7 +106,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
if (!tree.isEmpty && tree.canHaveAttrs) {
if (settings.Yposdebug && (settings.verbose || settings.Yrangepos))
- println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
+ inform("[%10s] %s".format("validate", treeStatus(tree, encltree)))
if (!tree.pos.isDefined)
positionError("Unpositioned tree #"+tree.id) {
@@ -176,7 +173,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
case r :: rs1 =>
assert(!t.pos.isTransparent)
if (r.isFree && (r.pos includes t.pos)) {
-// println("subdividing "+r+"/"+t.pos)
+// inform("subdividing "+r+"/"+t.pos)
maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
} else {
if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
@@ -207,7 +204,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
/** Set position of all children of a node
* @param pos A target position.
* Uses the point of the position as the point of all positions it assigns.
- * Uses the start of this position as an Offset position for unpositioed trees
+ * Uses the start of this position as an Offset position for unpositioned trees
* without children.
* @param trees The children to position. All children must be positionable.
*/
@@ -225,7 +222,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
}
} catch {
case ex: Exception =>
- println("error while set children pos "+pos+" of "+trees)
+ inform("error while set children pos "+pos+" of "+trees)
throw ex
}
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index fcc377ba32..b44c4022f6 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -546,10 +546,11 @@ trait Printers extends api.Printers { self: SymbolTable =>
import Chars._
val decName = name.decoded
val bslash = '\\'
+ val isDot = (x: Char) => x == '.'
val brackets = List('[',']','(',')','{','}')
def addBackquotes(s: String) =
- if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch)) ||
+ if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch) || isDot(ch)) ||
(name.isOperatorName && decName.exists(isOperatorPart) && decName.exists(isScalaLetter) && !decName.contains(bslash))))
s"`$s`" else s
@@ -596,18 +597,26 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
}
- protected def emptyTree(tree: Tree) = tree match {
- case EmptyTree | build.SyntacticEmptyTypeTree() => true
- case _ => false
+ object EmptyTypeTree {
+ def unapply(tt: TypeTree): Boolean = tt match {
+ case build.SyntacticEmptyTypeTree() if tt.wasEmpty || tt.isEmpty => true
+ case _ => false
+ }
}
+ protected def isEmptyTree(tree: Tree) =
+ tree match {
+ case EmptyTree | EmptyTypeTree() => true
+ case _ => false
+ }
+
protected def originalTypeTrees(trees: List[Tree]) =
- trees.filter(!emptyTree(_)) map {
- case tt: TypeTree => tt.original
- case tree => tree
+ trees.filter(!isEmptyTree(_)) map {
+ case tt: TypeTree if tt.original != null => tt.original
+ case tree => tree
}
- val defaultClasses = List(tpnme.AnyRef)
+ val defaultClasses = List(tpnme.AnyRef, tpnme.Object)
val defaultTraitsForCase = List(tpnme.Product, tpnme.Serializable)
protected def removeDefaultTypesFromList(trees: List[Tree])(classesToRemove: List[Name] = defaultClasses)(traitsToRemove: List[Name]) = {
def removeDefaultTraitsFromList(trees: List[Tree], traitsToRemove: List[Name]): List[Tree] =
@@ -623,9 +632,10 @@ trait Printers extends api.Printers { self: SymbolTable =>
removeDefaultTraitsFromList(removeDefaultClassesFromList(trees, classesToRemove), traitsToRemove)
}
- protected def removeDefaultClassesFromList(trees: List[Tree], classesToRemove: List[Name] = defaultClasses) =
+ protected def removeDefaultClassesFromList(trees: List[Tree], classesToRemove: List[Name] = defaultClasses) =
originalTypeTrees(trees) filter {
case Select(Ident(sc), name) => !(classesToRemove.contains(name) && sc == nme.scala_)
+ case tt: TypeTree if tt.tpe != null => !(classesToRemove contains(newTypeName(tt.tpe.toString())))
case _ => true
}
@@ -637,7 +647,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
override def printOpt(prefix: String, tree: Tree) =
- if (!emptyTree(tree)) super.printOpt(prefix, tree)
+ if (!isEmptyTree(tree)) super.printOpt(prefix, tree)
override def printColumn(ts: List[Tree], start: String, sep: String, end: String) = {
super.printColumn(ts.filter(!syntheticToRemove(_)), start, sep, end)
@@ -752,7 +762,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl
// constructor's modifier
- if (ctorMods.hasFlag(AccessFlags)) {
+ if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) {
print(" ")
printModifiers(ctorMods, primaryCtorParam = false)
}
@@ -952,7 +962,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
def printTp = print("(", tp, ")")
tp match {
- case EmptyTree | build.SyntacticEmptyTypeTree() => printTp
+ case EmptyTree | EmptyTypeTree() => printTp
// case for untypechecked trees
case Annotated(annot, arg) if (expr ne null) && (arg ne null) && expr.equalsStructure(arg) => printTp // remove double arg - 5: 5: @unchecked
case tt: TypeTree if tt.original.isInstanceOf[Annotated] => printTp
@@ -963,7 +973,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
// print only fun when targs are TypeTrees with empty original
case TypeApply(fun, targs) =>
- if (targs.exists(emptyTree(_))) {
+ if (targs.exists(isEmptyTree(_))) {
print(fun)
} else super.printTree(tree)
@@ -984,8 +994,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
case treeInfo.Unapplied(body) =>
body match {
case Select(qual, name) if name == nme.unapply => print(qual)
- case TypeApply(Select(qual, name), args) if name == nme.unapply || name == nme.unapplySeq =>
- print(TypeApply(qual, args))
+ case TypeApply(Select(qual, name), _) if name == nme.unapply || name == nme.unapplySeq =>
+ print(qual)
case _ => print(body)
}
case _ => print(fun)
@@ -996,7 +1006,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
printSuper(st, printedName(qual), checkSymbol = false)
case th @ This(qual) =>
- if (tree.hasExistingSymbol && tree.symbol.isPackage) print(tree.symbol.fullName)
+ if (tree.hasExistingSymbol && tree.symbol.hasPackageFlag) print(tree.symbol.fullName)
else printThis(th, printedName(qual))
// remove this prefix from constructor invocation in typechecked trees: this.this -> this
@@ -1013,7 +1023,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
}) && (tr match { // check that Select contains package
case Select(q, _) => checkRootPackage(q)
case _: Ident | _: This => val sym = tr.symbol
- tr.hasExistingSymbol && sym.isPackage && sym.name != nme.ROOTPKG
+ tr.hasExistingSymbol && sym.hasPackageFlag && sym.name != nme.ROOTPKG
case _ => false
})
@@ -1061,7 +1071,11 @@ trait Printers extends api.Printers { self: SymbolTable =>
print("(", qualifier, ")#", blankForOperatorName(selector), printedName(selector))
case tt: TypeTree =>
- if (!emptyTree(tt)) print(tt.original)
+ if (!isEmptyTree(tt)) {
+ val original = tt.original
+ if (original != null) print(original)
+ else super.printTree(tree)
+ }
case AppliedTypeTree(tp, args) =>
// it's possible to have (=> String) => String type but Function1[=> String, String] is not correct
diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala
index ad8a2594dd..eddfec82e7 100644
--- a/src/reflect/scala/reflect/internal/ReificationSupport.scala
+++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala
@@ -7,7 +7,6 @@ import util._
trait ReificationSupport { self: SymbolTable =>
import definitions._
- import internal._
class ReificationSupportImpl extends ReificationSupportApi {
def selectType(owner: Symbol, name: String): TypeSymbol =
@@ -97,6 +96,8 @@ trait ReificationSupport { self: SymbolTable =>
def toStats(tree: Tree): List[Tree] = tree match {
case EmptyTree => Nil
case SyntacticBlock(stats) => stats
+ case defn if defn.isDef => defn :: Nil
+ case imp: Import => imp :: Nil
case _ => throw new IllegalArgumentException(s"can't flatten $tree")
}
@@ -121,7 +122,7 @@ trait ReificationSupport { self: SymbolTable =>
if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
copyValDef(vd)(mods = newmods | extraFlags)
case _ =>
- throw new IllegalArgumentException(s"$tree is not valid represenation of a parameter, " +
+ throw new IllegalArgumentException(s"$tree is not valid representation of a parameter, " +
"""consider reformatting it into q"val $name: $T = $default" shape""")
}
@@ -291,7 +292,7 @@ trait ReificationSupport { self: SymbolTable =>
if (ctorMods.isTrait)
result(ctorMods, Nil, edefs, body)
else {
- // undo conversion from (implicit ... ) to ()(implicit ... ) when its the only parameter section
+ // undo conversion from (implicit ... ) to ()(implicit ... ) when it's the only parameter section
val vparamssRestoredImplicits = ctorVparamss match {
case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit => tail
case other => other
@@ -864,7 +865,7 @@ trait ReificationSupport { self: SymbolTable =>
protected def mkCases(cases: List[Tree]): List[CaseDef] = cases.map {
case c: CaseDef => c
- case tree => throw new IllegalArgumentException("$tree is not valid representation of pattern match case")
+ case tree => throw new IllegalArgumentException(s"$tree is not valid representation of pattern match case")
}
object SyntacticPartialFunction extends SyntacticPartialFunctionExtractor {
diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala
new file mode 100644
index 0000000000..f2de83bc5d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Reporting.scala
@@ -0,0 +1,116 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc.
+ * @author Adriaan Moors
+ */
+
+package scala
+package reflect
+package internal
+
+/** Provides delegates to the reporter doing the actual work.
+ * All forwarding methods should be marked final,
+ * but some subclasses out of our reach still override them.
+ *
+ * Eventually, this interface should be reduced to one method: `reporter`,
+ * and clients should indirect themselves (reduce duplication of forwarders).
+ */
+trait Reporting { self : Positions =>
+ def reporter: Reporter
+ def currentRun: RunReporting
+
+ trait RunReporting {
+ val reporting: PerRunReporting = PerRunReporting
+ }
+
+ type PerRunReporting <: PerRunReportingBase
+ protected def PerRunReporting: PerRunReporting
+ abstract class PerRunReportingBase {
+ def deprecationWarning(pos: Position, msg: String): Unit
+
+ /** Have we already supplemented the error message of a compiler crash? */
+ private[this] var supplementedError = false
+ def supplementErrorMessage(errorMessage: String): String =
+ if (supplementedError) errorMessage
+ else {
+ supplementedError = true
+ supplementTyperState(errorMessage)
+ }
+
+ }
+
+ // overridden in Global
+ def supplementTyperState(errorMessage: String): String = errorMessage
+
+ def supplementErrorMessage(errorMessage: String) = currentRun.reporting.supplementErrorMessage(errorMessage)
+
+ @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2")
+ def inform(msg: String): Unit = inform(NoPosition, msg)
+ @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2")
+ def warning(msg: String): Unit = warning(NoPosition, msg)
+ // globalError(msg: String) used to abort -- not sure that was a good idea, so I made it more regular
+ // (couldn't find any uses that relied on old behavior)
+ @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2")
+ def globalError(msg: String): Unit = globalError(NoPosition, msg)
+
+ def abort(msg: String): Nothing = {
+ val augmented = supplementErrorMessage(msg)
+ // Needs to call error to make sure the compile fails.
+ globalError(augmented)
+ throw new FatalError(augmented)
+ }
+
+ @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2")
+ def inform(pos: Position, msg: String) = reporter.echo(pos, msg)
+ @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2")
+ def warning(pos: Position, msg: String) = reporter.warning(pos, msg)
+ @deprecatedOverriding("This forwards to the corresponding method in reporter -- override reporter instead", "2.11.2")
+ def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
+}
+
+import util.Position
+
+/** Report information, warnings and errors.
+ *
+ * This describes the (future) external interface for issuing information, warnings and errors.
+ * Currently, scala.tools.nsc.Reporter is used by sbt/ide/partest.
+ */
+abstract class Reporter {
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit
+
+ def echo(pos: Position, msg: String): Unit = info0(pos, msg, INFO, force = true)
+ def warning(pos: Position, msg: String): Unit = info0(pos, msg, WARNING, force = false)
+ def error(pos: Position, msg: String): Unit = info0(pos, msg, ERROR, force = false)
+
+ type Severity
+ val INFO: Severity
+ val WARNING: Severity
+ val ERROR: Severity
+
+ def count(severity: Severity): Int
+ def resetCount(severity: Severity): Unit
+
+ def errorCount: Int = count(ERROR)
+ def warningCount: Int = count(WARNING)
+
+ def hasErrors: Boolean = count(ERROR) > 0
+ def hasWarnings: Boolean = count(WARNING) > 0
+
+ def reset(): Unit = {
+ resetCount(INFO)
+ resetCount(WARNING)
+ resetCount(ERROR)
+ }
+
+ def flush(): Unit = { }
+}
+
+// TODO: move into superclass once partest cuts tie on Severity
+abstract class ReporterImpl extends Reporter {
+ class Severity(val id: Int)(name: String) { var count: Int = 0 ; override def toString = name}
+ object INFO extends Severity(0)("INFO")
+ object WARNING extends Severity(1)("WARNING")
+ object ERROR extends Severity(2)("ERROR")
+
+ def count(severity: Severity): Int = severity.count
+ def resetCount(severity: Severity): Unit = severity.count = 0
+}
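A concrete reporter only has to supply `info0`; the severities, counters and reset logic come from `ReporterImpl`. A minimal sketch of such a subclass (the class name and output format below are made up):

import scala.reflect.internal.ReporterImpl
import scala.reflect.internal.util.{NoPosition, Position}

class StderrReporter extends ReporterImpl {
  // called by echo/warning/error in the Reporter base class
  protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = {
    severity.count += 1
    Console.err.println(s"[$severity] $msg")
  }
}

object StderrReporterDemo {
  def main(args: Array[String]): Unit = {
    val reporter = new StderrReporter
    reporter.warning(NoPosition, "something looks suspicious")
    println(reporter.warningCount) // 1
    println(reporter.hasErrors)    // false
  }
}
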
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index cf3f356daa..103f885ad4 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -48,22 +48,17 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
* This is necessary because when run from reflection every scope needs to have a
* SynchronizedScope as mixin.
*/
- class Scope protected[Scopes] (initElems: ScopeEntry = null, initFingerPrints: Long = 0L) extends ScopeApi with MemberScopeApi {
+ class Scope protected[Scopes]() extends ScopeApi with MemberScopeApi {
- protected[Scopes] def this(base: Scope) = {
- this(base.elems)
- nestinglevel = base.nestinglevel + 1
- }
-
- private[scala] var elems: ScopeEntry = initElems
+ private[scala] var elems: ScopeEntry = _
/** The number of times this scope is nested in another
*/
- private var nestinglevel = 0
+ private[Scopes] var nestinglevel = 0
/** the hash table
*/
- private var hashtable: Array[ScopeEntry] = null
+ private[Scopes] var hashtable: Array[ScopeEntry] = null
/** a cache for all elements, to be used by symbol iterator.
*/
@@ -84,8 +79,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
*/
private val MIN_HASH = 8
- if (size >= MIN_HASH) createHash()
-
/** Returns a new scope with the same content as this one. */
def cloneScope: Scope = newScopeWith(this.toList: _*)
@@ -435,7 +428,14 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** Create a new scope nested in another one with which it shares its elements */
- def newNestedScope(outer: Scope): Scope = new Scope(outer)
+ final def newNestedScope(outer: Scope): Scope = {
+ val nested = newScope // not `new Scope`, we must allow the runtime reflection universe to mixin SynchronizedScopes!
+ nested.elems = outer.elems
+ nested.nestinglevel = outer.nestinglevel + 1
+ if (outer.hashtable ne null)
+ nested.hashtable = java.util.Arrays.copyOf(outer.hashtable, outer.hashtable.length)
+ nested
+ }
/** Create a new scope with given initial elements */
def newScopeWith(elems: Symbol*): Scope = {
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 614e71b597..cca33253be 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -27,7 +27,7 @@ trait StdAttachments {
def importAttachment(importer: Importer): this.type
}
- /** Attachment that doesn't contain any reflection artificats and can be imported as-is. */
+ /** Attachment that doesn't contain any reflection artifacts and can be imported as-is. */
trait PlainAttachment extends ImportableAttachment {
def importAttachment(importer: Importer): this.type = this
}
@@ -42,7 +42,7 @@ trait StdAttachments {
*/
case object BackquotedIdentifierAttachment extends PlainAttachment
- /** Identifies trees are either result or intermidiate value of for loop desugaring.
+ /** Identifies trees are either result or intermediate value of for loop desugaring.
*/
case object ForAttachment extends PlainAttachment
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 6848c357c5..c0562b0679 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -99,17 +99,35 @@ trait StdNames {
val SINGLETON_SUFFIX: String = ".type"
- val ANON_CLASS_NAME: NameType = "$anon"
- val ANON_FUN_NAME: NameType = "$anonfun"
- val EMPTY: NameType = ""
- val EMPTY_PACKAGE_NAME: NameType = "<empty>"
- val IMPL_CLASS_SUFFIX = "$class"
- val IMPORT: NameType = "<import>"
- val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
- val MODULE_VAR_SUFFIX: NameType = "$module"
- val PACKAGE: NameType = "package"
- val ROOT: NameType = "<root>"
- val SPECIALIZED_SUFFIX: NameType = "$sp"
+ val ANON_CLASS_NAME: NameType = "$anon"
+ val DELAMBDAFY_LAMBDA_CLASS_NAME: NameType = "$lambda"
+ val ANON_FUN_NAME: NameType = "$anonfun"
+ val EMPTY: NameType = ""
+ val EMPTY_PACKAGE_NAME: NameType = "<empty>"
+ val IMPL_CLASS_SUFFIX = "$class"
+ val IMPORT: NameType = "<import>"
+ val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
+ val MODULE_VAR_SUFFIX: NameType = "$module"
+ val PACKAGE: NameType = "package"
+ val ROOT: NameType = "<root>"
+ val SPECIALIZED_SUFFIX: NameType = "$sp"
+
+ val NESTED_IN: String = "$nestedIn"
+ val NESTED_IN_ANON_CLASS: String = NESTED_IN + ANON_CLASS_NAME.toString.replace("$", "")
+ val NESTED_IN_ANON_FUN: String = NESTED_IN + ANON_FUN_NAME.toString.replace("$", "")
+ val NESTED_IN_LAMBDA: String = NESTED_IN + DELAMBDAFY_LAMBDA_CLASS_NAME.toString.replace("$", "")
+
+ /**
+ * Ensures that name mangling does not accidentally make a class respond `true` to any of
+ * isAnonymousClass, isAnonymousFunction, isDelambdafyFunction, e.g. by introducing "$anon".
+ */
+ def ensureNonAnon(name: String) = {
+ name
+ .replace(nme.ANON_CLASS_NAME.toString, NESTED_IN_ANON_CLASS)
+ .replace(nme.ANON_FUN_NAME.toString, NESTED_IN_ANON_FUN)
+ .replace(nme.DELAMBDAFY_LAMBDA_CLASS_NAME.toString, NESTED_IN_LAMBDA)
+ }
+
// value types (and AnyRef) are all used as terms as well
// as (at least) arguments to the @specialize annotation.
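With the constants above, `ensureNonAnon` rewrites the anonymous-class, anonymous-function and delambdafy markers so a mangled name no longer satisfies `isAnonymousClass` and friends. The snippet below mirrors just the first of those replacements on an invented input, without depending on the compiler:

object EnsureNonAnonSketch {
  def main(args: Array[String]): Unit = {
    // "$anon" becomes "$nestedIn" + "anon", exactly as NESTED_IN_ANON_CLASS is built above
    val rewritten = "Foo$$anon$1".replace("$anon", "$nestedIn" + "anon")
    println(rewritten) // Foo$$nestedInanon$1
  }
}
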
@@ -127,6 +145,7 @@ trait StdNames {
final val AnyRef: NameType = "AnyRef"
final val Array: NameType = "Array"
final val List: NameType = "List"
+ final val Option: NameType = "Option"
final val Seq: NameType = "Seq"
final val Symbol: NameType = "Symbol"
final val WeakTypeTag: NameType = "WeakTypeTag"
@@ -246,6 +265,7 @@ trait StdNames {
final val Unliftable: NameType = "Unliftable"
final val Name: NameType = "Name"
final val Tree: NameType = "Tree"
+ final val Text: NameType = "Text"
final val TermName: NameType = "TermName"
final val Type : NameType = "Type"
final val TypeName: NameType = "TypeName"
@@ -473,7 +493,7 @@ trait StdNames {
)
def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
- def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
+ def superName(name: Name, mix: Name = EMPTY): TermName = newTermName(SUPER_PREFIX_STRING + name + (if (mix.isEmpty) "" else "$" + mix))
/** The name of an accessor for protected symbols. */
def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name)
@@ -776,6 +796,7 @@ trait StdNames {
val values : NameType = "values"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
+ val xml: NameType = "xml"
val zero: NameType = "zero"
// quasiquote interpolators:
@@ -1057,6 +1078,7 @@ trait StdNames {
val reflPolyCacheName: NameType = "reflPoly$Cache"
val reflParamsCacheName: NameType = "reflParams$Cache"
val reflMethodName: NameType = "reflMethod$Method"
+ val argument: NameType = "<argument>"
}
@@ -1143,6 +1165,7 @@ trait StdNames {
final val GetClassLoader: TermName = newTermName("getClassLoader")
final val GetMethod: TermName = newTermName("getMethod")
final val Invoke: TermName = newTermName("invoke")
+ final val InvokeExact: TermName = newTermName("invokeExact")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
index c088e8f57c..4763e77a34 100644
--- a/src/reflect/scala/reflect/internal/SymbolPairs.scala
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -8,7 +8,6 @@ package reflect
package internal
import scala.collection.mutable
-import Flags._
import util.HashSet
import scala.annotation.tailrec
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index c76dedbff4..ef63078f90 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -46,16 +46,12 @@ abstract class SymbolTable extends macros.Universe
with pickling.Translations
with FreshNames
with Internals
+ with Reporting
{
val gen = new InternalTreeGen { val global: SymbolTable.this.type = SymbolTable.this }
def log(msg: => AnyRef): Unit
- def deprecationWarning(pos: Position, msg: String): Unit = warning(msg)
- def warning(msg: String): Unit = Console.err.println(msg)
- def inform(msg: String): Unit = Console.err.println(msg)
- def globalError(msg: String): Unit = abort(msg)
- def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
protected def elapsedMessage(msg: String, start: Long) =
msg + " in " + (TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - start) + "ms"
@@ -82,9 +78,6 @@ abstract class SymbolTable extends macros.Universe
/** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t))
- /** Overridden when we know more about what was happening during a failure. */
- def supplementErrorMessage(msg: String): String = msg
-
private[scala] def printCaller[T](msg: String)(result: T) = {
Console.err.println("%s: %s\nCalled from: %s".format(msg, result,
(new Throwable).getStackTrace.drop(2).take(50).mkString("\n")))
@@ -345,7 +338,6 @@ abstract class SymbolTable extends macros.Universe
case _ => false
}
if (pkgModule.isModule && !fromSource) {
- // println("open "+pkgModule)//DEBUG
openPackageModule(pkgModule, pkgClass)
}
}
@@ -363,6 +355,14 @@ abstract class SymbolTable extends macros.Universe
cache
}
+ /**
+ * Removes a cache from the per-run caches. This is useful for testing: it allows running the
+ * compiler and then inspecting the state of a cache.
+ */
+ def unrecordCache[T <: Clearable](cache: T): Unit = {
+ caches = caches.filterNot(_.get eq cache)
+ }
+
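A sketch of the testing pattern the comment describes; `perRunCaches.newMap` and `unrecordCache` come from this file, the rest is hypothetical test scaffolding:

    val seen = perRunCaches.newMap[Symbol, Int]()  // registered as a per-run cache
    // ... run the compiler under test ...
    val snapshot = seen.toMap                      // inspect the cache state after the run
    perRunCaches.unrecordCache(seen)               // detach it so later runs don't clear it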
def clearAll() = {
debuglog("Clearing " + caches.size + " caches.")
caches foreach (ref => Option(ref.get).foreach(_.clear))
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 2ce54d2259..abe966920b 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -55,28 +55,30 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
new FreeTypeSymbol(name, origin) initFlags flags
- /** The original owner of a class. Used by the backend to generate
- * EnclosingMethod attributes.
+ /**
+ * This map stores the original owner the first time the owner of a symbol is re-assigned.
+ * The original owner of a symbol is needed in some places in the backend. Ideally, owners should
+ * be versioned like the type history.
*/
- val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
+ private val originalOwnerMap = perRunCaches.newMap[Symbol, Symbol]()
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
// lots of these to be declared (or more realistically, discovered.)
- protected def saveOriginalOwner(sym: Symbol) {
- if (originalOwner contains sym) ()
- else originalOwner(sym) = sym.rawowner
- }
- protected def originalEnclosingMethod(sym: Symbol): Symbol = {
- if (sym.isMethod || sym == NoSymbol) sym
- else {
- val owner = originalOwner.getOrElse(sym, sym.rawowner)
- if (sym.isLocalDummy) owner.enclClass.primaryConstructor
- else originalEnclosingMethod(owner)
+ // could be private since 2.11.6, but left protected to avoid potential breakages (eg ensime)
+ protected def saveOriginalOwner(sym: Symbol): Unit = {
+ // some synthetic symbols have NoSymbol as owner initially
+ if (sym.owner != NoSymbol) {
+ if (originalOwnerMap contains sym) ()
+ else defineOriginalOwner(sym, sym.rawowner)
}
}
+ def defineOriginalOwner(sym: Symbol, owner: Symbol): Unit = {
+ originalOwnerMap(sym) = owner
+ }
+
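A minimal sketch, inside the SymbolTable cake, of the interplay with the `originalOwner` accessor added further down in this diff (`sym` is hypothetical):

    sym.originalOwner                // == sym.rawowner: nothing recorded yet
    sym.owner = sym.owner.enclClass  // lambdalift-style re-assignment; the old owner is
                                     // saved via saveOriginalOwner before rawowner changes
    sym.originalOwner                // still answers the pre-lambdalift owner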
def symbolOf[T: WeakTypeTag]: TypeSymbol = weakTypeOf[T].typeSymbolDirect.asType
abstract class SymbolContextApiImpl extends SymbolApi {
@@ -153,11 +155,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def toTypeConstructor: Type = typeConstructor
def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
- def getter: Symbol = getter(owner)
- def setter: Symbol = setter(owner)
+ def getter: Symbol = getterIn(owner)
+ def setter: Symbol = setterIn(owner)
def companion: Symbol = {
- if (isModule && !isPackage) companionSymbol
+ if (isModule && !hasPackageFlag) companionSymbol
else if (isModuleClass && !isPackageClass) sourceModule.companionSymbol
else if (isClass && !isModuleClass && !isPackageClass) companionSymbol
else NoSymbol
@@ -176,7 +178,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
with HasFlags
with Annotatable[Symbol]
with Attachable {
-
// makes sure that all symbols that runtime reflection deals with are synchronized
private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol]
private def isAprioriThreadsafe = isThreadsafe(AllOps)
@@ -185,7 +186,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
type AccessBoundaryType = Symbol
type AnnotationType = AnnotationInfo
- // TODO - don't allow names to be renamed in this unstructured a fashion.
+ // TODO - don't allow names to be renamed in this unstructured fashion.
// Rename as little as possible. Enforce invariants on all renames.
type TypeOfClonedSymbol >: Null <: Symbol { type NameType = Symbol.this.NameType }
@@ -686,7 +687,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to fix the core of the compiler risk stability a few weeks before the final release.
* upd. Haha, "a few weeks before the final release". This surely sounds familiar :)
*
- * However we do need to fix this for runtime reflection, since this idionsynchrazy is not something
+ * However we do need to fix this for runtime reflection, since this idiosyncrasy is not something
* we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a
* runtime reflection universe, and if yes and if we've not yet loaded the requested info, then to commence initialization.
*/
@@ -738,27 +739,41 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def hasGetter = isTerm && nme.isLocalName(name)
- /** A little explanation for this confusing situation.
- * Nested modules which have no static owner when ModuleDefs
- * are eliminated (refchecks) are given the lateMETHOD flag,
- * which makes them appear as methods after refchecks.
- * Here's an example where one can see all four of FF FT TF TT
- * for (isStatic, isMethod) at various phases.
+ /**
+ * Nested modules which have no static owner when ModuleDefs are eliminated (refchecks) are
+ * given the lateMETHOD flag, which makes them appear as methods after refchecks.
+ *
+ * Note: the lateMETHOD flag is added lazily in the info transformer of the RefChecks phase.
+ * This means that forcing the `sym.info` may change the value of `sym.isMethod`. Forcing the
+ * info is the responsibility of the caller. Doing it eagerly here was tried (0ccdb151f) but
+ * has proven to lead to bugs (SI-8907).
*
- * trait A1 { case class Quux() }
- * object A2 extends A1 { object Flax }
- * // -- namer object Quux in trait A1
- * // -M flatten object Quux in trait A1
- * // S- flatten object Flax in object A2
- * // -M posterasure object Quux in trait A1
- * // -M jvm object Quux in trait A1
- * // SM jvm object Quux in object A2
+ * Here's an example where one can see all four of FF FT TF TT for (isStatic, isMethod) at
+ * various phases.
*
- * So "isModuleNotMethod" exists not for its achievement in
- * brevity, but to encapsulate the relevant condition.
+ * trait A1 { case class Quux() }
+ * object A2 extends A1 { object Flax }
+ * // -- namer object Quux in trait A1
+ * // -M flatten object Quux in trait A1
+ * // S- flatten object Flax in object A2
+ * // -M posterasure object Quux in trait A1
+ * // -M jvm object Quux in trait A1
+ * // SM jvm object Quux in object A2
+ *
+ * So "isModuleNotMethod" exists not for its achievement in brevity, but to encapsulate the
+ * relevant condition.
*/
def isModuleNotMethod = isModule && !isMethod
- def isStaticModule = isModuleNotMethod && isStatic
+
+ // After RefChecks, the `isStatic` check is mostly redundant: all non-static modules should
+ // be methods (and vice versa). There's a corner case on the vice-versa with mixed-in module
+ // symbols:
+ // trait T { object A }
+ // object O extends T
+ // The module symbol A is cloned into T$impl (addInterfaces), and then cloned into O (mixin).
+ // Since the original A is not static, it's turned into a method. The clone in O however is
+ // static (owned by a module), but it's also a method.
+ def isStaticModule = isModuleNotMethod && isStatic
final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR)
final def isThisSym = isTerm && owner.thisSym == this
@@ -775,12 +790,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
- isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro
+ isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro && !isSpecialized
final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
+ final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME)
+ final def isDelambdafyTarget = isArtifact && isMethod && (name containsName tpnme.ANON_FUN_NAME)
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+ // TODO introduce a flag for these?
+ final def isPatternTypeVariable: Boolean =
+ isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock
+
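For instance (illustration only), the lower-case type bound in a type pattern is the kind of symbol the new predicate is meant to pick out:

    def describe(x: Any): Option[Any] = x match {
      case xs: List[a] => xs.headOption  // `a` is an abstract type local to the block,
                                         // neither existential nor a type parameter/skolem
      case _           => None
    }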
/** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters.
* Implementation in TermSymbol.
@@ -909,10 +930,31 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
final def isModuleVar = hasFlag(MODULEVAR)
- /** Is this symbol static (i.e. with no outer instance)?
- * Q: When exactly is a sym marked as STATIC?
- * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
- * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ /**
+ * Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or
+ * any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ *
+ * TODO: should this only be invoked on class / module symbols? because there's also `isStaticMember`.
+ *
+ * Note: the result of `isStatic` changes over time.
+ *   - Lambdalift moves local definitions to the class level; the `owner` field is modified.
+ * object T { def foo { object O } }
+ *     After lambdalift, OModule.isStatic is true.
+ *
+ * - After flatten, nested classes are moved to the package level. Invoking `owner` on a
+ * class returns a package class, for which `isStaticOwner` is true. For example,
+ * class C { object O }
+ * OModuleClass.isStatic is true after flatten. Using phase travel to get before flatten,
+ * method `owner` returns the class C.
+ *
+ * Why not make a stable version of `isStatic`? Maybe some parts of the compiler depend on the
+ * current implementation. For example
+ * trait T { def foo = 1 }
+ * The method `foo` in the implementation class T$impl will be `isStatic`, because trait
+ * impl classes get the `lateMODULE` flag (T$impl.isStaticOwner is true).
*/
def isStatic = (this hasFlag STATIC) || owner.isStaticOwner
@@ -1006,7 +1048,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isIncompleteIn(base: Symbol): Boolean =
this.isDeferred ||
(this hasFlag ABSOVERRIDE) && {
- val supersym = superSymbol(base)
+ val supersym = superSymbolIn(base)
supersym == NoSymbol || supersym.isIncompleteIn(base)
}
@@ -1099,13 +1141,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
- /** In general when seeking the owner of a symbol, one should call `owner`.
- * The other possibilities include:
- * - call `safeOwner` if it is expected that the target may be NoSymbol
- * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol
+ /**
+ * The owner of a symbol. Changes over time to adapt to the structure of the trees:
+ * - Up to lambdalift, the owner is the lexically enclosing definition. For definitions
+ * in a local block, the owner is also the next enclosing definition.
+ * - After lambdalift, all local method and class definitions (those not owned by a class
+ * or package class) change their owner to the enclosing class. This is done through
+ * a destructive "sym.owner = sym.owner.enclClass". The old owner is saved by
+ * saveOriginalOwner.
+ * - After flatten, all classes are owned by a PackageClass. This is done through a
+ * phase check (if after flatten) in the (overridden) method "def owner" in
+ * ModuleSymbol / ClassSymbol. The `rawowner` field is not modified.
+ * - Owners are also changed in other situations, for example when moving trees into a new
+ * lexical context, e.g. in the named/default arguments transformation, or when translating
+ * extension method definitions.
*
- * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev.
- * `assertOwner` aborts compilation immediately if called on NoSymbol.
+ * In general when seeking the owner of a symbol, one should call `owner`.
+ * The other possibilities include:
+ * - call `safeOwner` if it is expected that the target may be NoSymbol
+ * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol
+ *
+ * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev.
+ * `assertOwner` aborts compilation immediately if called on NoSymbol.
*/
def owner: Symbol = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
@@ -1114,6 +1171,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
final def assertOwner: Symbol = if (this eq NoSymbol) abort("no-symbol does not have an owner") else owner
+ /**
+ * The initial owner of this symbol.
+ */
+ def originalOwner: Symbol = originalOwnerMap.getOrElse(this, rawowner)
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
@@ -1127,7 +1189,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def ownerChain: List[Symbol] = this :: owner.ownerChain
- def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain
// Non-classes skip self and return rest of owner chain; overridden in ClassSymbol.
def enclClassChain: List[Symbol] = owner.enclClassChain
@@ -1406,11 +1467,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def info: Type = try {
var cnt = 0
while (validTo == NoPeriod) {
- //if (settings.debug.value) System.out.println("completing " + this);//DEBUG
assert(infos ne null, this.name)
assert(infos.prev eq null, this.name)
val tp = infos.info
- //if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug
if ((_rawflags & LOCKED) != 0L) { // rolled out once for performance
lock {
@@ -1419,6 +1478,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
} else {
_rawflags |= LOCKED
+ // TODO more commented-out lines - this should be solved in one way or another
// activeLocks += 1
// lockedSyms += this
}
@@ -1540,13 +1600,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
assert(isCompilerUniverse)
if (infos == null || runId(infos.validFrom) == currentRunId) {
infos
- } else if (isPackageClass) {
- // SI-7801 early phase package scopes are mutated in new runs (Namers#enterPackage), so we have to
- // discard transformed infos, rather than just marking them as from this run.
- val oldest = infos.oldest
- oldest.validFrom = validTo
- this.infos = oldest
- oldest
+ } else if (infos ne infos.oldest) {
+ // SI-8871 Discard all but the first element of type history. Specialization only works in the resident
+ // compiler / REPL if we re-run its info transformer in this run to correctly populate its
+ // per-run caches, e.g. typeEnv
+ adaptInfos(infos.oldest)
} else {
val prev1 = adaptInfos(infos.prev)
if (prev1 ne infos.prev) prev1
@@ -1971,12 +2029,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList
/** The symbol accessed by this accessor (getter or setter) function. */
- final def accessed: Symbol = accessed(owner.info)
-
- /** The symbol accessed by this accessor function, but with given owner type. */
- final def accessed(ownerTp: Type): Symbol = {
+ final def accessed: Symbol = {
assert(hasAccessorFlag, this)
- ownerTp decl localName
+ val localField = owner.info decl localName
+
+ if (localField == NoSymbol && this.hasFlag(MIXEDIN)) {
+ // SI-8087: private[this] fields don't have a `localName`. When searching the accessed field
+ // for a mixin accessor of such a field, we need to look for `name` instead.
+ // The phase travel ensures that the field is found (`owner` is the trait class symbol, the
+ // field gets removed from there in later phases).
+ enteringPhase(picklerPhase)(owner.info).decl(name).suchThat(!_.isAccessor)
+ } else {
+ localField
+ }
}
/** The module corresponding to this module class (note that this
@@ -1995,7 +2060,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* where it is the outer class of the enclosing class.
*/
final def outerClass: Symbol =
- if (owner.isClass) owner
+ if (this == NoSymbol) {
+ // ideally we shouldn't get here, but it's better to harden against this than suffer the infinite loop in SI-9133
+ devWarningDumpStack("NoSymbol.outerClass", 15)
+ NoSymbol
+ } else if (owner.isClass) owner
else if (isClassLocalToConstructor) owner.enclClass.outerClass
else owner.outerClass
@@ -2062,16 +2131,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* is not one. */
def enclosingPackage: Symbol = enclosingPackageClass.companionModule
- /** Return the original enclosing method of this symbol. It should return
- * the same thing as enclMethod when called before lambda lift,
- * but it preserves the original nesting when called afterwards.
- *
- * @note This method is NOT available in the presentation compiler run. The
- * originalOwner map is not populated for memory considerations (the symbol
- * may hang on to lazy types and in turn to whole (outdated) compilation units.
- */
- def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this)
-
/** The method or class which logically encloses the current symbol.
* If the symbol is defined in the initialization part of a template
* this is the template's primary constructor, otherwise it is
@@ -2108,6 +2167,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (isClass) this else moduleClass
} else owner.enclosingTopLevelClass
+ /** The top-level class or local dummy symbol containing this symbol. */
+ def enclosingTopLevelClassOrDummy: Symbol =
+ if (isTopLevel) {
+ if (isClass) this else moduleClass.orElse(this)
+ } else owner.enclosingTopLevelClassOrDummy
+
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
def isCoDefinedWith(that: Symbol) = (
!rawInfoIsNoType
@@ -2311,13 +2376,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
Nil
)
+ @deprecated("Use `superSymbolIn` instead", "2.11.0")
+ final def superSymbol(base: Symbol): Symbol = superSymbolIn(base)
+
/** The symbol accessed by a super in the definition of this symbol when
* seen from class `base`. This symbol is always concrete.
* pre: `this.owner` is in the base class sequence of `base`.
*/
- @deprecated("Use `superSymbolIn` instead", "2.11.0")
- final def superSymbol(base: Symbol): Symbol = superSymbolIn(base)
-
final def superSymbolIn(base: Symbol): Symbol = {
var bcs = base.info.baseClasses dropWhile (owner != _) drop 1
var sym: Symbol = NoSymbol
@@ -2329,12 +2394,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
sym
}
- /** The getter of this value or setter definition in class `base`, or NoSymbol if
- * none exists.
- */
@deprecated("Use `getterIn` instead", "2.11.0")
final def getter(base: Symbol): Symbol = getterIn(base)
+ /** The getter of this value or setter definition in class `base`, or NoSymbol if none exists. */
final def getterIn(base: Symbol): Symbol =
base.info decl getterName filter (_.hasAccessorFlag)
@@ -2342,11 +2405,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def setterName: TermName = name.setterName
def localName: TermName = name.localName
- /** The setter of this value or getter definition, or NoSymbol if none exists */
@deprecated("Use `setterIn` instead", "2.11.0")
final def setter(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol =
setterIn(base, hasExpandedName)
+ /** The setter of this value or getter definition, or NoSymbol if none exists. */
final def setterIn(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol =
base.info decl setterNameInBase(base, hasExpandedName) filter (_.hasAccessorFlag)
@@ -2474,7 +2537,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
else if (isPackageClass) ("package class", "package", "PKC")
- else if (isPackage) ("package", "package", "PK")
+ else if (hasPackageFlag) ("package", "package", "PK")
else if (isPackageObject) ("package object", "package", "PKO")
else if (isPackageObjectClass) ("package object class", "package", "PKOC")
else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC")
@@ -2757,8 +2820,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def outerSource: Symbol =
// SI-6888 Approximate the name to workaround the deficiencies in `nme.originalName`
- // in the face of clases named '$'. SI-2806 remains open to address the deeper problem.
- if (originalName endsWith (nme.OUTER)) initialize.referenced
+ // in the face of classes named '$'. SI-2806 remains open to address the deeper problem.
+ if (unexpandedName endsWith (nme.OUTER)) initialize.referenced
else NoSymbol
def setModuleClass(clazz: Symbol): TermSymbol = {
@@ -2788,8 +2851,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
accessed.expandName(base)
}
else if (hasGetter) {
- getter(owner).expandName(base)
- setter(owner).expandName(base)
+ getterIn(owner).expandName(base)
+ setterIn(owner).expandName(base)
}
name = nme.expandedName(name.toTermName, base)
}
@@ -2811,6 +2874,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def owner = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
+ // a module symbol may have the lateMETHOD flag after refchecks, see isModuleNotMethod
if (!isMethod && needsFlatClasses) rawowner.owner
else rawowner
}
@@ -3375,10 +3439,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
trait StubSymbol extends Symbol {
devWarning("creating stub symbol to defer error: " + missingMessage)
- protected def missingMessage: String
+ def missingMessage: String
/** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
- override final def failIfStub() = {MissingRequirementError.signal(missingMessage)} //
+ override final def failIfStub() =
+ MissingRequirementError.signal(missingMessage)
/** Fail the stub by reporting an error to the reporter, setting the IS_ERROR flag
* on this symbol, and returning the dummy value `alt`.
@@ -3403,8 +3468,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def rawInfo = fail(NoType)
override def companionSymbol = fail(NoSymbol)
}
- class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
- class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubClassSymbol(owner0: Symbol, name0: TypeName, val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubTermSymbol(owner0: Symbol, name0: TermName, val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
trait FreeSymbol extends Symbol {
def origin: String
@@ -3455,6 +3520,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclClassChain = Nil
override def enclClass: Symbol = this
override def enclosingTopLevelClass: Symbol = this
+ override def enclosingTopLevelClassOrDummy: Symbol = this
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
override def associatedFile = NoAbstractFile
@@ -3471,7 +3537,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def rawInfo: Type = NoType
override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
- override def originalEnclosingMethod = this
}
protected def makeNoSymbol: NoSymbol = new NoSymbol
@@ -3513,7 +3578,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param syms the prototypical symbols
* @param symFn the function to create new symbols
* @param tpe the prototypical type
- * @return the new symbol-subsituted type
+ * @return the new symbol-substituted type
*/
def deriveType(syms: List[Symbol], symFn: Symbol => Symbol)(tpe: Type): Type = {
val syms1 = deriveSymbols(syms, symFn)
@@ -3528,7 +3593,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param as arguments to be passed to symFn together with symbols from syms (must be same length)
* @param symFn the function to create new symbols based on `as`
* @param tpe the prototypical type
- * @return the new symbol-subsituted type
+ * @return the new symbol-substituted type
*/
def deriveType2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol)(tpe: Type): Type = {
val syms1 = deriveSymbols2(syms, as, symFn)
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 9066c73393..b2248ad518 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -362,10 +362,10 @@ abstract class TreeGen {
if (body forall treeInfo.isInterfaceMember) None
else Some(
atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant(()))))))
}
else {
- // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
+ // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1
val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
@@ -376,7 +376,7 @@ abstract class TreeGen {
// therefore here we emit a dummy which gets populated when the template is named and typechecked
Some(
atPos(wrappingPos(superPos, lvdefs ::: vparamss1.flatten).makeTransparent) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
+ DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant(()))))))
}
}
constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false))
@@ -451,10 +451,10 @@ abstract class TreeGen {
def mkSyntheticUnit() = Literal(Constant(())).updateAttachment(SyntheticUnitAttachment)
/** Create block of statements `stats` */
- def mkBlock(stats: List[Tree]): Tree =
+ def mkBlock(stats: List[Tree], doFlatten: Boolean = true): Tree =
if (stats.isEmpty) mkSyntheticUnit()
else if (!stats.last.isTerm) Block(stats, mkSyntheticUnit())
- else if (stats.length == 1) stats.head
+ else if (stats.length == 1 && doFlatten) stats.head
else Block(stats.init, stats.last)
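Given the definitions above, the behaviour of the new `doFlatten` switch is (sketch; `stat` stands for some term-level Tree):

    mkBlock(Nil)                            // Literal(Constant(())) with SyntheticUnitAttachment
    mkBlock(stat :: Nil)                    // stat itself -- the single-statement block is flattened
    mkBlock(stat :: Nil, doFlatten = false) // Block(Nil, stat) -- the wrapping Block is preserved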
/** Create a block that wraps multiple statements but don't
@@ -713,7 +713,7 @@ abstract class TreeGen {
val rhsUnchecked = mkUnchecked(rhs)
- // TODO: clean this up -- there is too much information packked into mkPatDef's `pat` argument
+ // TODO: clean this up -- there is too much information packed into mkPatDef's `pat` argument
// when it's a simple identifier (case Some((name, tpt)) -- above),
// pat should have the type ascription that was specified by the user
// however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 7cf749c048..4657fa0000 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -51,6 +51,11 @@ abstract class TreeInfo {
case _ => false
}
+ def isConstructorWithDefault(t: Tree) = t match {
+ case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
+ case _ => false
+ }
+
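A sketch of the trees this matches:

    // For `class C(x: Int = 1)` the primary constructor is a
    //   DefDef(_, nme.CONSTRUCTOR, _, List(List(valdefForX)), _, _)
    // whose parameter mods satisfy hasDefault, so isConstructorWithDefault returns true;
    // a constructor without any defaulted parameter (or any non-constructor DefDef) yields false.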
/** Is tree a pure (i.e. non-side-effecting) definition?
*/
def isPureDef(tree: Tree): Boolean = tree match {
@@ -504,13 +509,6 @@ abstract class TreeInfo {
case _ => false
}
- /** The parameter ValDefs of a method definition that have vararg types of the form T*
- */
- def repeatedParams(tree: Tree): List[ValDef] = tree match {
- case DefDef(_, _, _, vparamss, _, _) => vparamss.flatten filter (vd => isRepeatedParamType(vd.tpt))
- case _ => Nil
- }
-
/** Is tpt a by-name parameter type of the form => T? */
def isByNameParamType(tpt: Tree) = tpt match {
case TypeTree() => definitions.isByNameParamType(tpt.tpe)
@@ -590,7 +588,7 @@ abstract class TreeInfo {
private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol
- /** Is this pattern node a synthetic catch-all case, added during PartialFuction synthesis before we know
+ /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
* whether the user provided cases are exhaustive. */
def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
@@ -817,7 +815,7 @@ abstract class TreeInfo {
object Unapplied {
// Duplicated with `spliceApply`
def unapply(tree: Tree): Option[Tree] = tree match {
- // SI-7868 Admit Select() to account for numeric widening, e.g. <unappplySelector>.toInt
+ // SI-7868 Admit Select() to account for numeric widening, e.g. <unapplySelector>.toInt
case Apply(fun, (Ident(nme.SELECTOR_DUMMY)| Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil)
=> Some(fun)
case Apply(fun, _) => unapply(fun)
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 9dc4baee32..e3f95f9fd8 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -8,8 +8,8 @@ package reflect
package internal
import Flags._
-import pickling.PickleFormat._
import scala.collection.{ mutable, immutable }
+import scala.reflect.macros.Attachments
import util.Statistics
trait Trees extends api.Trees {
@@ -87,7 +87,7 @@ trait Trees extends api.Trees {
private[scala] def copyAttrs(tree: Tree): this.type = {
rawatt = tree.rawatt
- tpe = tree.tpe
+ setType(tree.tpe)
if (hasSymbolField) symbol = tree.symbol
this
}
@@ -1075,6 +1075,13 @@ trait Trees extends api.Trees {
override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this }
override def tpe_=(t: Type) = setType(t)
+ // We silently ignore attempts to add attachments to `EmptyTree`. See SI-8947 for an
+ // example of a bug in macro expansion that this solves.
+ override def setAttachments(attachments: Attachments {type Pos = Position}): this.type = attachmentWarning()
+ override def updateAttachment[T: ClassTag](attachment: T): this.type = attachmentWarning()
+ override def removeAttachment[T: ClassTag]: this.type = attachmentWarning()
+ private def attachmentWarning(): this.type = {devWarning(s"Attempt to mutate attachments on $self ignored"); this}
+
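A quick sketch of the new behaviour:

    EmptyTree.updateAttachment("anything")  // silently ignored, only a devWarning is logged
    EmptyTree.attachments.all               // still empty: EmptyTree stays pristine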
private def requireLegal(value: Any, allowed: Any, what: String) = (
if (value != allowed) {
log(s"can't set $what for $self to value other than $allowed")
@@ -1569,6 +1576,7 @@ trait Trees extends api.Trees {
*/
class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
+ private var mutatedSymbols: List[Symbol] = Nil
override def transform(tree: Tree): Tree = {
def subst(from: List[Symbol], to: List[Symbol]) {
if (!from.isEmpty)
@@ -1587,6 +1595,7 @@ trait Trees extends api.Trees {
|TreeSymSubstituter: updated info of symbol ${tree.symbol}
| Old: ${showRaw(tree.symbol.info, printTypes = true, printIds = true)}
| New: ${showRaw(newInfo, printTypes = true, printIds = true)}""")
+ mutatedSymbols ::= tree.symbol
tree.symbol updateInfo newInfo
}
case _ =>
@@ -1606,7 +1615,23 @@ trait Trees extends api.Trees {
} else
super.transform(tree)
}
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
+ def apply[T <: Tree](tree: T): T = {
+ val tree1 = transform(tree)
+ invalidateSingleTypeCaches(tree1)
+ tree1.asInstanceOf[T]
+ }
+ private def invalidateSingleTypeCaches(tree: Tree): Unit = {
+ if (mutatedSymbols.nonEmpty)
+ for (t <- tree if t.tpe != null)
+ for (tp <- t.tpe) {
+ tp match {
+ case s: SingleType if mutatedSymbols contains s.sym =>
+ s.underlyingPeriod = NoPeriod
+ s.underlyingCache = NoType
+ case _ =>
+ }
+ }
+ }
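Why the flush matters, as a sketch:

    // A SingleType caches its underlying type per period, so
    //   SingleType(pre, p).underlying
    // computed before `p.updateInfo(...)` above would otherwise keep answering with p's old
    // info; resetting underlyingPeriod/underlyingCache forces recomputation on the next access.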
override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to)
}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index eb56f4ba81..f74d976b82 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -209,7 +209,7 @@ trait Types
case object UnmappableTree extends TermTree {
override def toString = "<unmappable>"
- super.tpe_=(NoType)
+ super.setType(NoType)
override def tpe_=(t: Type) = if (t != NoType) {
throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
}
@@ -247,7 +247,7 @@ trait Types
def companion = {
val sym = typeSymbolDirect
- if (sym.isModule && !sym.isPackage) sym.companionSymbol.tpe
+ if (sym.isModule && !sym.hasPackageFlag) sym.companionSymbol.tpe
else if (sym.isModuleClass && !sym.isPackageClass) sym.sourceModule.companionSymbol.tpe
else if (sym.isClass && !sym.isModuleClass && !sym.isPackageClass) sym.companionSymbol.info
else NoType
@@ -731,7 +731,7 @@ trait Types
* `substThis(from, to).substSym(symsFrom, symsTo)`.
*
* `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that
- * symbol substitution occured before `ThisType` substitution. Consequently, in substitution
+ * symbol substitution occurred before `ThisType` substitution. Consequently, in substitution
* of a `SingleType(ThisType(`from`), sym), symbols were rebound to `from` rather than `to`.
*/
def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type =
@@ -1687,7 +1687,7 @@ trait Types
*/
private var refs: Array[RefMap] = _
- /** The initialization state of the class: UnInialized --> Initializing --> Initialized
+ /** The initialization state of the class: UnInitialized --> Initializing --> Initialized
* Syncnote: This var need not be protected with synchronized, because
* it is accessed only from expansiveRefs, which is called only from
* Typer.
@@ -1972,17 +1972,17 @@ trait Types
require(sym.isNonClassType, sym)
/* Syncnote: These are pure caches for performance; no problem to evaluate these
- * several times. Hence, no need to protected with synchronzied in a mutli-threaded
+ * several times. Hence, no need to protect them with synchronized in a multi-threaded
* usage scenario.
*/
private var relativeInfoCache: Type = _
- private var memberInfoCache: Type = _
+ private var relativeInfoPeriod: Period = NoPeriod
- private[Types] def relativeInfo = {
- val memberInfo = pre.memberInfo(sym)
- if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) {
- memberInfoCache = memberInfo
+ private[Types] def relativeInfo = /*trace(s"relativeInfo(${safeToString}})")*/{
+ if (relativeInfoPeriod != currentPeriod) {
+ val memberInfo = pre.memberInfo(sym)
relativeInfoCache = transformInfo(memberInfo)
+ relativeInfoPeriod = currentPeriod
}
relativeInfoCache
}
@@ -2605,7 +2605,7 @@ trait Types
// derived from the existentially quantified type into the typing environment
// (aka \Gamma, which tracks types for variables and constraints/kinds for types)
// as a nice bonus, delaying this until we need it avoids cyclic errors
- def tpars = underlying.typeSymbol.initialize.typeParams
+ def tpars = underlying.typeSymbolDirect.initialize.typeParams
def newSkolem(quant: Symbol) = owner.newExistentialSkolem(quant, origin)
def newSharpenedSkolem(quant: Symbol, tparam: Symbol): Symbol = {
@@ -2643,7 +2643,7 @@ trait Types
* nowhere inside a type argument
* - no quantified type argument contains a quantified variable in its bound
* - the typeref's symbol is not itself quantified
- * - the prefix is not quanitified
+ * - the prefix is not quantified
*/
def isRepresentableWithWildcards = {
val qset = quantified.toSet
@@ -3101,7 +3101,7 @@ trait Types
// addressed here: all lower bounds are retained and their intersection calculated when the
// bounds are solved.
//
- // In a side-effect free universe, checking tp and tp.parents beofre checking tp.baseTypeSeq
+ // In a side-effect free universe, checking tp and tp.parents before checking tp.baseTypeSeq
// would be pointless. In this case, each check we perform causes us to lose specificity: in
// the end the best we'll do is the least specific type we tested against, since the typevar
// does not see these checks as "probes" but as requirements to fulfill.
@@ -3332,7 +3332,7 @@ trait Types
*
* SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`,
* which contain `ErasedValueType`-s. In order to correctly consider the overriding
- * and overriden signatures as equivalent in `run/t6385.scala`, it is critical that
+ * and overridden signatures as equivalent in `run/t6385.scala`, it is critical that
* this type contains the erasure of the wrapped type, rather than the unerased type
* of the value class itself, as was originally done.
*
@@ -3658,7 +3658,7 @@ trait Types
// JZ: We used to register this as a perRunCache so it would be cleared eagerly at
// the end of the compilation run. But, that facility didn't actually clear this map (SI-8129)!
// When i fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that?
- // I've removed the registration for now. I don't think its particularly harmful anymore
+ // I've removed the registration for now. I don't think it's particularly harmful anymore
// as a) this is now a weak set, and b) it is discarded completely before the next run.
uniqueRunId = currentRunId
}
@@ -4113,8 +4113,8 @@ trait Types
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Depth): Boolean = {
def isSubArg(t1: Type, t2: Type, variance: Variance) = (
- (variance.isContravariant || isSubType(t1, t2, depth))
- && (variance.isCovariant || isSubType(t2, t1, depth))
+ (variance.isCovariant || isSubType(t2, t1, depth)) // The order of these two checks can be material for performance (SI-8478)
+ && (variance.isContravariant || isSubType(t1, t2, depth))
)
corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg)
@@ -4535,7 +4535,7 @@ trait Types
/** Adds the @uncheckedBound annotation if the given `tp` has type arguments */
final def uncheckedBounds(tp: Type): Type = {
- if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibilty with older scala-reflect.jar
+ if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibility with older scala-reflect.jar
else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
}
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
index cfe2ad8b87..ef22df3f2e 100644
--- a/src/reflect/scala/reflect/internal/Variances.scala
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -32,7 +32,7 @@ trait Variances {
/** Is every symbol in the owner chain between `site` and the owner of `sym`
* either a term symbol or private[this]? If not, add `sym` to the set of
- * esacped locals.
+ * escaped locals.
* @pre sym.isLocalToThis
*/
@tailrec final def checkForEscape(sym: Symbol, site: Symbol) {
@@ -79,7 +79,7 @@ trait Variances {
// Unsound pre-2.11 behavior preserved under -Xsource:2.10
if (settings.isScala211 || sym.isOverridingSymbol) Invariant
else {
- deprecationWarning(sym.pos, s"Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond")
+ currentRun.reporting.deprecationWarning(sym.pos, s"Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond")
Bivariant
}
)
diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
index a44bb54734..662d841c91 100644
--- a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
+++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
@@ -5,7 +5,7 @@ package annotations
/**
* An annotation that designates the annotated type should not be checked for violations of
* type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized
- * code the uses an inferred type of an expression as the type of an artifict val/def (for example,
+ * code the uses an inferred type of an expression as the type of an artifact val/def (for example,
* a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]].
*
* @since 2.10.3
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
index 8615e34fad..241638e88e 100644
--- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -196,10 +196,10 @@ object ByteCodecs {
*
* Sometimes returns (length+1) of the decoded array. Example:
*
- * scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * scala> val enc = scala.reflect.internal.pickling.ByteCodecs.encode(Array(1,2,3))
* enc: Array[Byte] = Array(2, 5, 13, 1)
*
- * scala> scala.reflect.generic.ByteCodecs.decode(enc)
+ * scala> scala.reflect.internal.pickling.ByteCodecs.decode(enc)
* res43: Int = 4
*
* scala> enc
diff --git a/src/reflect/scala/reflect/internal/pickling/Translations.scala b/src/reflect/scala/reflect/internal/pickling/Translations.scala
index e56cf796cb..d924cb3a0c 100644
--- a/src/reflect/scala/reflect/internal/pickling/Translations.scala
+++ b/src/reflect/scala/reflect/internal/pickling/Translations.scala
@@ -62,21 +62,22 @@ trait Translations {
}
def picklerTag(tpe: Type): Int = tpe match {
- case NoType => NOtpe
- case NoPrefix => NOPREFIXtpe
- case _: ThisType => THIStpe
- case _: SingleType => SINGLEtpe
- case _: SuperType => SUPERtpe
- case _: ConstantType => CONSTANTtpe
- case _: TypeBounds => TYPEBOUNDStpe
- case _: TypeRef => TYPEREFtpe
- case _: RefinedType => REFINEDtpe
- case _: ClassInfoType => CLASSINFOtpe
- case _: MethodType => METHODtpe
- case _: PolyType => POLYtpe
- case _: NullaryMethodType => POLYtpe // bad juju, distinct ints are not at a premium!
- case _: ExistentialType => EXISTENTIALtpe
- case _: AnnotatedType => ANNOTATEDtpe
+ case NoType => NOtpe
+ case NoPrefix => NOPREFIXtpe
+ case _: ThisType => THIStpe
+ case _: SingleType => SINGLEtpe
+ case _: SuperType => SUPERtpe
+ case _: ConstantType => CONSTANTtpe
+ case _: TypeBounds => TYPEBOUNDStpe
+ case _: TypeRef => TYPEREFtpe
+ case _: RefinedType => REFINEDtpe
+ case _: ClassInfoType => CLASSINFOtpe
+ case _: MethodType => METHODtpe
+ case _: PolyType => POLYtpe
+ case _: NullaryMethodType => POLYtpe // bad juju, distinct ints are not at a premium!
+ case _: ExistentialType => EXISTENTIALtpe
+ case StaticallyAnnotatedType(_, _) => ANNOTATEDtpe
+ case _: AnnotatedType => picklerTag(tpe.underlying)
}
def picklerSubTag(tree: Tree): Int = tree match {
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 64a1a44722..1fc7aebab0 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -211,7 +211,12 @@ abstract class UnPickler {
def fromName(name: Name) = name.toTermName match {
case nme.ROOT => loadingMirror.RootClass
case nme.ROOTPKG => loadingMirror.RootPackage
- case _ => adjust(owner.info.decl(name))
+ case _ =>
+ val decl = owner match {
+ case stub: StubSymbol => NoSymbol // SI-8502 Don't call .info and fail the stub
+ case _ => owner.info.decl(name)
+ }
+ adjust(decl)
}
def nestedObjectSymbol: Symbol = {
// If the owner is overloaded (i.e. a method), it's not possible to select the
@@ -243,8 +248,14 @@ abstract class UnPickler {
} getOrElse "")
}
+ def localDummy = {
+ if (nme.isLocalDummyName(name))
+ owner.newLocalDummy(NoPosition)
+ else NoSymbol
+ }
+
// (1) Try name.
- fromName(name) orElse {
+ localDummy orElse fromName(name) orElse {
// (2) Try with expanded name. Can happen if references to private
// symbols are read from outside: for instance when checking the children
// of a class. See #1722.
@@ -254,12 +265,13 @@ abstract class UnPickler {
// (4) Call the mirror's "missing" hook.
adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse {
// (5) Create a stub symbol to defer hard failure a little longer.
- val fullName = s"${owner.fullName}.$name"
+ val advice = moduleAdvice(s"${owner.fullName}.$name")
val missingMessage =
- s"""|bad symbolic reference to $fullName encountered in class file '$filename'.
- |Cannot access ${name.longString} in ${owner.kindString} ${owner.fullName}. The current classpath may be
- |missing a definition for $fullName, or $filename may have been compiled against a version that's
- |incompatible with the one found on the current classpath.${moduleAdvice(fullName)}""".stripMargin
+ s"""|missing or invalid dependency detected while loading class file '$filename'.
+ |Could not access ${name.longString} in ${owner.kindString} ${owner.fullName},
+ |because it (or its dependencies) is missing. Check your build definition for
+ |missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
+ |A full rebuild may help if '$filename' was compiled against an incompatible version of ${owner.fullName}.$advice""".stripMargin
owner.newStubSymbol(name, missingMessage)
}
}
@@ -290,6 +302,27 @@ abstract class UnPickler {
def pflags = flags & PickledFlags
def finishSym(sym: Symbol): Symbol = {
+ /**
+ * member symbols (symbols owned by a class) are added to the class's scope, with a number
+ * of exceptions:
+ *
+ * (.) ...
+ * (1) `local child` represents local child classes, see comment in Pickler.putSymbol.
+ * Since it is not a member, it should not be entered in the owner's scope.
+ * (2) Similarly, we ignore local dummy symbols, as seen in SI-8868
+ */
+ def shouldEnterInOwnerScope = {
+ sym.owner.isClass &&
+ sym != classRoot &&
+ sym != moduleRoot &&
+ !sym.isModuleClass &&
+ !sym.isRefinementClass &&
+ !sym.isTypeParameter &&
+ !sym.isExistentiallyBound &&
+ sym.rawname != tpnme.LOCAL_CHILD && // (1)
+ !nme.isLocalDummyName(sym.rawname) // (2)
+ }
+
markFlagsCompleted(sym)(mask = AllFlags)
sym.privateWithin = privateWithin
sym.info = (
@@ -302,8 +335,7 @@ abstract class UnPickler {
newLazyTypeRefAndAlias(inforef, readNat())
}
)
- if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
- !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound)
+ if (shouldEnterInOwnerScope)
symScope(sym.owner) enter sym
sym
@@ -362,14 +394,24 @@ abstract class UnPickler {
case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz)
}
+ def readThisType(): Type = {
+ val sym = readSymbolRef() match {
+ case stub: StubSymbol if !stub.isClass =>
+ // SI-8502 This allows us to create a stub for an unpickled reference to `missingPackage.Foo`.
+ stub.owner.newStubSymbol(stub.name.toTypeName, stub.missingMessage)
+ case sym => sym
+ }
+ ThisType(sym)
+ }
+
// We're stuck with the order types are pickled in, but with judicious use
// of named parameters we can recapture a declarative flavor in a few cases.
// But it's still a rat's nest of adhockery.
(tag: @switch) match {
case NOtpe => NoType
case NOPREFIXtpe => NoPrefix
- case THIStpe => ThisType(readSymbolRef())
- case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef())
+ case THIStpe => readThisType()
+ case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // SI-7596 account for overloading
case SUPERtpe => SuperType(readTypeRef(), readTypeRef())
case CONSTANTtpe => ConstantType(readConstantRef())
case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes())
@@ -681,10 +723,24 @@ abstract class UnPickler {
private val p = phase
protected def completeInternal(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
- if (p ne null)
- slowButSafeEnteringPhase(p) (sym setInfo tp)
+
+ // This is a temporary fix that allows reading classes generated by an older, buggy pickler.
+ // See the generation of the LOCAL_CHILD class in Pickler.scala. In an earlier version, the
+ // pickler did not add the ObjectTpe superclass, it used a trait as the first parent. This
+ // tripped an assertion in AddInterfaces which checks that the first parent is not a trait.
+ // This workaround can probably be removed in 2.12, because the 2.12 compiler is supposed
+ // to only read classfiles generated by 2.12.
+ val fixLocalChildTp = if (sym.rawname == tpnme.LOCAL_CHILD) tp match {
+ case ClassInfoType(superClass :: traits, decls, typeSymbol) if superClass.typeSymbol.isTrait =>
+ ClassInfoType(definitions.ObjectTpe :: superClass :: traits, decls, typeSymbol)
+ case _ => tp
+ } else tp
+
+ if (p ne null) {
+ slowButSafeEnteringPhase(p)(sym setInfo fixLocalChildTp)
+ }
if (currentRunId != definedAtRunId)
- sym.setInfo(adaptToNewRunMap(tp))
+ sym.setInfo(adaptToNewRunMap(fixLocalChildTp))
}
catch {
case e: MissingRequirementError => throw toTypeError(e)
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index a494c7f0d0..38893d8db3 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -31,6 +31,9 @@ abstract class MutableSettings extends AbsSettings {
v = arg
postSetHook()
}
+
+ /** Returns Some(value) if the value was set by the user, and None otherwise. */
+ def valueSetByUser: Option[T] = if (isSetByUser) Some(value) else None
}
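With the `Xexperimental` setting visible in the surrounding context, a call site might use the new accessor to distinguish an explicit user choice from the default (sketch):

    settings.Xexperimental.valueSetByUser match {
      case Some(enabled) => // the user passed the flag explicitly, honour their choice
      case None          => // flag left at its default
    }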
def Xexperimental: BooleanSetting
diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
index de54f3768e..83a5d23e7c 100644
--- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
@@ -12,7 +12,7 @@ import TypesStats._
trait FindMembers {
this: SymbolTable =>
- /** Implementatation of `Type#{findMember, findMembers}` */
+ /** Implementation of `Type#{findMember, findMembers}` */
private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) {
protected val initBaseClasses: List[Symbol] = tpe.baseClasses
@@ -155,7 +155,7 @@ trait FindMembers {
&& ( (member.owner eq other.owner) // same owner, therefore overload
|| (member.flags & PRIVATE) != 0 // (unqualified) private members never participate in overriding
|| (other.flags & PRIVATE) != 0 // ... as overrider or overridee.
- || !(memberTypeLow(member) matches memberTypeHi(other)) // do the member types match? If so, its an override. Otherwise it's an overload.
+ || !(memberTypeLow(member) matches memberTypeHi(other)) // do the member types match? If so, it's an override. Otherwise it's an overload.
)
)
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 876685e24a..123b44aa05 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -347,7 +347,9 @@ private[internal] trait GlbLubs {
def lubsym(proto: Symbol): Symbol = {
val prototp = lubThisType.memberInfo(proto)
val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
+ // SI-7602 With erroneous code, we could end up with overloaded symbols after filtering
+ // so `suchThat` is unsuitable.
+ t.nonPrivateMember(proto.name).filter(sym =>
sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
if (syms contains NoSymbol) NoSymbol
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index 564cbb1ce3..f79099213a 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -16,8 +16,9 @@ private[internal] trait TypeConstraints {
private lazy val _undoLog = new UndoLog
def undoLog = _undoLog
+ import TypeConstraints.UndoPair
class UndoLog extends Clearable {
- private type UndoPairs = List[(TypeVar, TypeConstraint)]
+ type UndoPairs = List[UndoPair[TypeVar, TypeConstraint]]
//OPT this method is public so we can do `manual inlining`
var log: UndoPairs = List()
@@ -29,7 +30,7 @@ private[internal] trait TypeConstraints {
def undoTo(limit: UndoPairs) {
assertCorrectThread()
while ((log ne limit) && log.nonEmpty) {
- val (tv, constr) = log.head
+ val UndoPair(tv, constr) = log.head
tv.constr = constr
log = log.tail
}
@@ -40,7 +41,7 @@ private[internal] trait TypeConstraints {
* which is already synchronized.
*/
private[reflect] def record(tv: TypeVar) = {
- log ::= ((tv, tv.constr.cloneInternal))
+ log ::= UndoPair(tv, tv.constr.cloneInternal)
}
def clear() {
@@ -74,7 +75,7 @@ private[internal] trait TypeConstraints {
/* Syncnote: Type constraints are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of any
- * variables should be ncessesary.
+ * variables should be necessary.
*/
/** Guard these lists against AnyClass and NothingClass appearing,
@@ -266,3 +267,9 @@ private[internal] trait TypeConstraints {
tvars forall (tv => tv.instWithinBounds || util.andFalse(logBounds(tv)))
}
}
+
+private[internal] object TypeConstraints {
+ // UndoPair is declared in the companion object so that it does not hold an outer pointer reference
+ final case class UndoPair[TypeVar <: SymbolTable#TypeVar,
+ TypeConstraint <: TypeConstraints#TypeConstraint](tv: TypeVar, tConstraint: TypeConstraint)
+}
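A brief sketch of the outer-pointer issue the comment refers to (names hypothetical):

    trait SymbolTableLike {            // stand-in for the cake
      case class NestedPair(x: Int)    // instances carry an $outer reference to the enclosing
    }                                  // instance, retaining the whole symbol table
    object TopLevelHolder {
      final case class FreePair(x: Int) // nothing from the symbol table is captured
    }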
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index f06420de96..c705ca7069 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -422,6 +422,22 @@ private[internal] trait TypeMaps {
}
}
+ /**
+ * Get rid of BoundedWildcardType where variance allows us to do so.
+ * Invariant: `wildcardExtrapolation(tp) =:= tp`
+ *
+ * For example, the MethodType given by `def bla(x: (_ >: String)): (_ <: Int)`
+ * is both a subtype and a supertype of `def bla(x: String): Int`.
+ */
+ object wildcardExtrapolation extends TypeMap(trackVariance = true) {
+ def apply(tp: Type): Type =
+ tp match {
+ case BoundedWildcardType(TypeBounds(lo, AnyTpe)) if variance.isContravariant => lo
+ case BoundedWildcardType(TypeBounds(NothingTpe, hi)) if variance.isCovariant => hi
+ case tp => mapOver(tp)
+ }
+ }
+
/** Might the given symbol be important when calculating the prefix
* of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
* the result will be `tp` unchanged if `pre` is trivial and `clazz`
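For readers outside the compiler: `wildcardExtrapolation` replaces a bounded wildcard with one of its bounds when the variance of the position makes that sound, exactly as the `def bla` example in the doc comment suggests. A toy model of the rule, with strings standing in for types (not the compiler's Type or Variance API):

    sealed trait Variance
    case object Covariant extends Variance       // e.g. a method result position
    case object Contravariant extends Variance   // e.g. a method parameter position

    final case class Bounds(lo: String, hi: String)

    def extrapolate(b: Bounds, v: Variance): String = v match {
      case Contravariant if b.hi == "Any"     => b.lo                        // (_ >: String) parameter ~> String
      case Covariant     if b.lo == "Nothing" => b.hi                        // (_ <: Int) result       ~> Int
      case _                                  => s"_ >: ${b.lo} <: ${b.hi}"  // otherwise leave it alone
    }

    extrapolate(Bounds("String", "Any"), Contravariant)   // "String"
    extrapolate(Bounds("Nothing", "Int"), Covariant)      // "Int"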
diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
index f0c7d0f050..dd4f044818 100644
--- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
@@ -5,7 +5,6 @@ package transform
trait PostErasure {
val global: SymbolTable
import global._
- import definitions._
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index 10a8b4c812..30dcbc21ca 100644
--- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -5,16 +5,16 @@
package scala
package reflect.internal.util
-import scala.reflect.io.AbstractFile
+import scala.collection.{ mutable, immutable }
+import scala.reflect.io.{ AbstractFile, Streamable }
+import java.net.{ URL, URLConnection, URLStreamHandler }
import java.security.cert.Certificate
import java.security.{ ProtectionDomain, CodeSource }
-import java.net.{ URL, URLConnection, URLStreamHandler }
-import scala.collection.{ mutable, immutable }
+import java.util.{ Collections => JCollections, Enumeration => JEnumeration }
-/**
- * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
+/** A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
*
- * @author Lex Spoon
+ * @author Lex Spoon
*/
class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
extends ClassLoader(parent)
@@ -22,7 +22,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
{
protected def classNameToPath(name: String): String =
if (name endsWith ".class") name
- else name.replace('.', '/') + ".class"
+ else s"${name.replace('.', '/')}.class"
protected def findAbstractFile(name: String): AbstractFile = {
var file: AbstractFile = root
@@ -56,35 +56,25 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
file
}
- // parent delegation in JCL uses getResource; so either add parent.getResAsStream
- // or implement findResource, which we do here as a study in scarlet (my complexion
- // after looking at CLs and URLs)
- override def findResource(name: String): URL = findAbstractFile(name) match {
+ override protected def findClass(name: String): Class[_] = {
+ val bytes = classBytes(name)
+ if (bytes.length == 0)
+ throw new ClassNotFoundException(name)
+ else
+ defineClass(name, bytes, 0, bytes.length, protectionDomain)
+ }
+ override protected def findResource(name: String): URL = findAbstractFile(name) match {
case null => null
- case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
+ case file => new URL(null, s"memory:${file.path}", new URLStreamHandler {
override def openConnection(url: URL): URLConnection = new URLConnection(url) {
- override def connect() { }
+ override def connect() = ()
override def getInputStream = file.input
}
})
}
-
- // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
- override def getResourceAsStream(name: String) = findAbstractFile(name) match {
- case null => super.getResourceAsStream(name)
- case file => file.input
- }
- // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
- override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match {
- case null => super.classBytes(name)
- case file => file.toByteArray
- }
- override def findClass(name: String): Class[_] = {
- val bytes = classBytes(name)
- if (bytes.length == 0)
- throw new ClassNotFoundException(name)
- else
- defineClass(name, bytes, 0, bytes.length, protectionDomain)
+ override protected def findResources(name: String): JEnumeration[URL] = findResource(name) match {
+ case null => JCollections.enumeration(JCollections.emptyList[URL]) //JCollections.emptyEnumeration[URL]
+ case url => JCollections.enumeration(JCollections.singleton(url))
}
lazy val protectionDomain = {
@@ -106,15 +96,13 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
throw new UnsupportedOperationException()
}
- override def getPackage(name: String): Package = {
- findAbstractDir(name) match {
- case null => super.getPackage(name)
- case file => packages.getOrElseUpdate(name, {
- val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
- ctor.setAccessible(true)
- ctor.newInstance(name, null, null, null, null, null, null, null, this)
- })
- }
+ override def getPackage(name: String): Package = findAbstractDir(name) match {
+ case null => super.getPackage(name)
+ case file => packages.getOrElseUpdate(name, {
+ val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
+ ctor.setAccessible(true)
+ ctor.newInstance(name, null, null, null, null, null, null, null, this)
+ })
}
override def getPackages(): Array[Package] =
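A sketch of how the reworked loader is typically driven, with an in-memory `VirtualDirectory` as the root (usage only; the class name is hypothetical):

    import scala.reflect.io.VirtualDirectory
    import scala.reflect.internal.util.AbstractFileClassLoader

    val out    = new VirtualDirectory("(memory)", None)   // e.g. a REPL-style compilation target
    val loader = new AbstractFileClassLoader(out, getClass.getClassLoader)

    // findClass defines classes from bytes found under the root; findResource/findResources answer
    // "memory:" URLs for files present there and return null/empty otherwise, so the usual
    // parent-first delegation of loadClass and getResource is preserved.
    // val clazz = loader.loadClass("Foo")   // works once Foo.class has been written into `out`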
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index d128521be8..a743d8962a 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -181,6 +181,9 @@ trait Collections {
final def mapFrom[A, A1 >: A, B](xs: List[A])(f: A => B): Map[A1, B] = {
Map[A1, B](xs map (x => (x, f(x))): _*)
}
+ final def linkedMapFrom[A, A1 >: A, B](xs: List[A])(f: A => B): mutable.LinkedHashMap[A1, B] = {
+ mutable.LinkedHashMap[A1, B](xs map (x => (x, f(x))): _*)
+ }
final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
val lb = new ListBuffer[B]
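The only difference between `linkedMapFrom` and `mapFrom` above is the backing collection: `LinkedHashMap` preserves the insertion order of `xs`, which matters when the result is later iterated for output. A standalone equivalent for illustration:

    import scala.collection.mutable

    def linkedMapFrom[A, B](xs: List[A])(f: A => B): mutable.LinkedHashMap[A, B] =
      mutable.LinkedHashMap(xs.map(x => (x, f(x))): _*)

    linkedMapFrom(List("bb", "a", "ccc"))(_.length).toList
    // List((bb,2), (a,1), (ccc,3)): insertion order is kept, unlike a plain Map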
diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index 63ea6e2c49..41011f6c6b 100644
--- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -53,8 +53,10 @@ trait ScalaClassLoader extends JClassLoader {
}
/** An InputStream representing the given class name, or null if not found. */
- def classAsStream(className: String) =
- getResourceAsStream(className.replaceAll("""\.""", "/") + ".class")
+ def classAsStream(className: String) = getResourceAsStream {
+ if (className endsWith ".class") className
+ else s"${className.replace('.', '/')}.class" // classNameToPath
+ }
/** Run the main method of a class to be loaded by this classloader */
def run(objectName: String, arguments: Seq[String]) {
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index 4fccad74ac..a2642628a4 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -40,7 +40,7 @@ abstract class SourceFile {
def lineToString(index: Int): String = {
val start = lineToOffset(index)
var end = start
- while (!isEndOfLine(end) && end <= length) end += 1
+ while (end < length && !isEndOfLine(end)) end += 1
new String(content, start, end - start)
}
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index a9a7c7780d..3a7a7626fb 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -7,13 +7,13 @@ import scala.collection.generic.Clearable
import scala.collection.mutable.{Set => MSet}
/**
- * A HashSet where the elements are stored weakly. Elements in this set are elligible for GC if no other
+ * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
* hard references are associated with them. Its primary use case is as a canonical reference
* identity holder (aka "hash-consing") via findEntryOrUpdate
*
* This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException
*
- * This set implmeentation is not in general thread safe without external concurrency control. However it behaves
+ * This set implementation is not in general thread safe without external concurrency control. However it behaves
* properly when GC concurrently collects elements in this set.
*/
final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] {
@@ -26,7 +26,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
/**
* queue of Entries that hold elements scheduled for GC
- * the removeStaleEntries() method works through the queue to remeove
+ * the removeStaleEntries() method works through the queue to remove
* stale entries from the table
*/
private[this] val queue = new ReferenceQueue[A]
@@ -62,7 +62,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
/**
- * find the bucket associated with an elements's hash code
+ * find the bucket associated with an element's hash code
*/
private[this] def bucketFor(hash: Int): Int = {
// spread the bits around to try to avoid accidental collisions using the
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index ac1159b2ac..bcefcc471f 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -48,14 +48,16 @@ object AbstractFile {
else null
/**
- * If the specified URL exists and is a readable zip or jar archive,
- * returns an abstract directory backed by it. Otherwise, returns
- * `null`.
+ * If the specified URL exists and is a regular file or a directory, returns an
+ * abstract regular file or an abstract directory, respectively, backed by it.
+ * Otherwise, returns `null`.
*/
- def getURL(url: URL): AbstractFile = {
- if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
- else ZipArchive fromURL url
- }
+ def getURL(url: URL): AbstractFile =
+ if (url.getProtocol == "file") {
+ val f = new java.io.File(url.getPath)
+ if (f.isDirectory) getDirectory(f)
+ else getFile(f)
+ } else null
def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
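The rewritten `getURL` now answers only `file:` URLs, mapping them to an abstract directory or an abstract regular file; every other protocol falls through to `null`. A usage sketch (the path is hypothetical):

    import java.net.URL
    import scala.reflect.io.AbstractFile

    val af = AbstractFile.getURL(new URL("file:///tmp/classes"))
    // a directory-backed AbstractFile if /tmp/classes is a directory, a file-backed one if it is
    // a regular file, and null for jar:, http: or any other protocol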
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 45f38db745..1cb4f2fe6f 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -75,10 +75,10 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
}
/** Does this abstract file denote an existing file? */
- def create() { unsupported() }
+ def create(): Unit = unsupported()
/** Delete the underlying file or directory (recursively). */
- def delete() { unsupported() }
+ def delete(): Unit = unsupported()
/**
* Returns the abstract file in this abstract directory with the
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 8260189459..0c63acb86c 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -74,12 +74,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
def container = unsupported()
def absolute = unsupported()
- private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
- its flatMap { f =>
- if (f.isDirectory) walkIterator(f.iterator)
- else Iterator(f)
- }
- }
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
// have to keep this name for compat with sbt's compiler-interface
@@ -87,6 +81,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
override def underlyingSource = Some(self)
override def toString = self.path + "(" + path + ")"
}
+
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class DirEntry(path: String) extends Entry(path) {
val entries = mutable.HashMap[String, Entry]()
@@ -125,14 +120,15 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
}
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class FileZipArchive(file: JFile) extends ZipArchive(file) {
- def iterator: Iterator[Entry] = {
+ lazy val (root, allDirs) = {
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
val zipFile = try {
new ZipFile(file)
} catch {
case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
}
- val root = new DirEntry("/")
- val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+
val enum = zipFile.entries()
while (enum.hasMoreElements) {
@@ -150,11 +146,11 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) {
dir.entries(f.name) = f
}
}
-
- try root.iterator
- finally dirs.clear()
+ (root, dirs)
}
+ def iterator: Iterator[Entry] = root.iterator
+
def name = file.getName
def path = file.getPath
def input = File(file).inputStream()
@@ -244,11 +240,9 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) {
val manifest = new Manifest(input)
val iter = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_))
- while (iter.hasNext) {
- val zipEntry = iter.next()
+ for (zipEntry <- iter) {
val dir = getDir(dirs, zipEntry)
- if (zipEntry.isDirectory) dir
- else {
+ if (!zipEntry.isDirectory) {
class FileEntry() extends Entry(zipEntry.getName) {
override def lastModified = zipEntry.getTime()
override def input = resourceInputStream(path)
@@ -284,14 +278,14 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) {
private def resourceInputStream(path: String): InputStream = {
new FilterInputStream(null) {
override def read(): Int = {
- if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
+ if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path)
if(in == null) throw new RuntimeException(path + " not found")
- super.read();
+ super.read()
}
override def close(): Unit = {
- super.close();
- in = null;
+ super.close()
+ in = null
}
}
}
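The structural change in `FileZipArchive` above: the archive is now parsed once into a cached `(root, allDirs)` pair, and `iterator` just walks the cached tree instead of re-reading the zip (and clearing the directory map) on every call. The pattern in isolation, with a hypothetical loader standing in for the zip parsing:

    // caching a parsed structure behind a lazy val (a sketch, not the compiler's ZipArchive)
    final class Catalog(parse: () => Map[String, List[String]]) {
      lazy val (root, allDirs) = {
        val dirs = parse()                              // expensive: runs once, on first access
        (dirs.getOrElse("/", Nil), dirs)
      }
      def iterator: Iterator[String] = root.iterator    // cheap: reuses the cached result
    }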
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 5ccdc15a03..b5c340645a 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -35,7 +35,7 @@ abstract class Attachments { self =>
def all: Set[Any] = Set.empty
private def matchesTag[T: ClassTag](datum: Any) =
- classTag[T].runtimeClass == datum.getClass
+ classTag[T].runtimeClass.isInstance(datum)
/** An underlying payload of the given class type `T`. */
def get[T: ClassTag]: Option[T] =
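The `matchesTag` change is behavioral rather than cosmetic: comparing `runtimeClass` with `datum.getClass` matched only the exact class, while `isInstance` also accepts subclasses of `T`. A standalone check of the difference:

    import scala.reflect.{ClassTag, classTag}

    def matchesExactly[T: ClassTag](datum: Any)  = classTag[T].runtimeClass == datum.getClass
    def matchesInstance[T: ClassTag](datum: Any) = classTag[T].runtimeClass.isInstance(datum)

    class Payload
    class RichPayload extends Payload

    matchesExactly[Payload](new RichPayload)    // false: exact class comparison
    matchesInstance[Payload](new RichPayload)   // true: a subclass attachment now matches T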
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index 69ede42cc7..1eb6832b5b 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -47,7 +47,7 @@ trait Enclosures {
/** Tries to guess a position for the enclosing application.
* But that is simple, right? Just dereference `pos` of `macroApplication`? Not really.
- * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion.
+ * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion.
 * Surprisingly, quite often we can do this by navigating the `enclosingMacros` stack.
*/
def enclosingPosition: Position
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index 720b754649..5fc0fd5078 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -13,7 +13,7 @@ trait Parsers {
/** Parses a string with a Scala expression into an abstract syntax tree.
* Only works for expressions, i.e. parsing a package declaration will fail.
- * @throws [[scala.reflect.macros.ParseException]]
+ * @throws scala.reflect.macros.ParseException
*/
def parse(code: String): Tree
}
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index d0dccb469d..bd608601dc 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -2,8 +2,6 @@ package scala
package reflect
package macros
-import scala.reflect.internal.{Mode => InternalMode}
-
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
@@ -72,7 +70,7 @@ trait Typers {
* `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false
* `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false
*
- * @throws [[scala.reflect.macros.TypecheckException]]
+ * @throws scala.reflect.macros.TypecheckException
*/
def typecheck(tree: Tree, mode: TypecheckMode = TERMmode, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
@@ -84,7 +82,7 @@ trait Typers {
* Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
* Unlike in `typecheck`, `silent` is true by default.
*
- * @throws [[scala.reflect.macros.TypecheckException]]
+ * @throws scala.reflect.macros.TypecheckException
*/
def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
@@ -96,7 +94,7 @@ trait Typers {
* Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
* Unlike in `typecheck`, `silent` is true by default.
*
- * @throws [[scala.reflect.macros.TypecheckException]]
+ * @throws scala.reflect.macros.TypecheckException
*/
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 1eb67215bb..3b57169565 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -44,7 +44,7 @@ abstract class Universe extends scala.reflect.api.Universe {
* it is imperative that you either call `untypecheck` or do `changeOwner(tree, x, y)`.
*
* Since at the moment `untypecheck` has fundamental problem that can sometimes lead to tree corruption,
- * `changeOwner` becomes an indispensible tool in building 100% robust macros.
+ * `changeOwner` becomes an indispensable tool in building 100% robust macros.
* Future versions of the reflection API might obviate the need in taking care of
* these low-level details, but at the moment this is what we've got.
*/
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index f5bddb1784..ce60ade9f5 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -38,7 +38,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader)
- // overriden by ReflectGlobal
+ // overridden by ReflectGlobal
def rootClassLoader: ClassLoader = this.getClass.getClassLoader
trait JavaClassCompleter
@@ -142,7 +142,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
object ConstantArg {
def enumToSymbol(enum: Enum[_]): Symbol = {
val staticPartOfEnum = classToScala(enum.getClass).companionSymbol
- staticPartOfEnum.info.declaration(enum.name: TermName)
+ staticPartOfEnum.info.declaration(TermName(enum.name))
}
def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match {
@@ -172,7 +172,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
// currently I'm simply sorting the methods to guarantee stability of the output
override lazy val assocs: List[(Name, ClassfileAnnotArg)] = (
jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m =>
- (m.getName: TermName) -> toAnnotArg(m.getReturnType -> m.invoke(jann))
+ TermName(m.getName) -> toAnnotArg(m.getReturnType -> m.invoke(jann))
)
)
}
@@ -428,9 +428,12 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
var i = 0
while (i < args1.length) {
val arg = args(i)
- if (i >= paramCount) args1(i) = arg // don't transform varargs
- else if (isByName(i)) args1(i) = () => arg // don't transform by-name value class params
- else if (isDerivedValueClass(i)) args1(i) = paramUnboxers(i).invoke(arg)
+ args1(i) = (
+ if (i >= paramCount) arg // don't transform varargs
+ else if (isByName(i)) () => arg // don't transform by-name value class params
+ else if (isDerivedValueClass(i)) paramUnboxers(i).invoke(arg) // do get the underlying value
+ else arg // don't molest anything else
+ )
i += 1
}
jinvoke(args1)
@@ -588,6 +591,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
// don't use classOf[scala.reflect.ScalaSignature] here, because it will use getClass.getClassLoader, not mirror's classLoader
// don't use asInstanceOf either because of the same reason (lol, I cannot believe I fell for it)
// don't use structural types to simplify reflective invocations because of the same reason
+ // TODO SI-9296 duplicated code, refactor
def loadAnnotation(name: String): Option[java.lang.annotation.Annotation] =
tryJavaClass(name) flatMap { annotClass =>
val anns = jclazz.getAnnotations
@@ -760,8 +764,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass)
}
- def enter(sym: Symbol, mods: JavaAccFlags) =
- ( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym
+ def enter(sym: Symbol, mods: JavaAccFlags) = followStatic(clazz, module, mods).info.decls enter sym
def enterEmptyCtorIfNecessary(): Unit = {
if (jclazz.getConstructors.isEmpty)
@@ -801,34 +804,33 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
* If Java modifiers `mods` contain STATIC, return the module class
* of the companion module of `clazz`, otherwise the class `clazz` itself.
*/
- private def followStatic(clazz: Symbol, mods: JavaAccFlags) =
- if (mods.isStatic) clazz.companionModule.moduleClass else clazz
+ private def followStatic(clazz: Symbol, mods: JavaAccFlags): Symbol = followStatic(clazz, clazz.companionModule, mods)
- /** Methods which need to be treated with care
- * because they either are getSimpleName or call getSimpleName:
+ private def followStatic(clazz: Symbol, module: Symbol, mods: JavaAccFlags): Symbol =
+ // SI-8196 `orElse(clazz)` needed for implementation details of the backend, such as the static
+ // field containing the cache for structural calls.
+ if (mods.isStatic) module.moduleClass.orElse(clazz) else clazz
+
+ /**
+ * Certain methods of the Java reflection API cannot be used on classfiles created by Scala.
+ * See the comment in test/files/jvm/javaReflection/Test.scala. The methods are
*
* public String getSimpleName()
* public boolean isAnonymousClass()
* public boolean isLocalClass()
* public String getCanonicalName()
- *
- * A typical manifestation:
- *
- * // java.lang.Error: sOwner(class Test$A$1) has failed
- * // Caused by: java.lang.InternalError: Malformed class name
- * // at java.lang.Class.getSimpleName(Class.java:1133)
- * // at java.lang.Class.isAnonymousClass(Class.java:1188)
- * // at java.lang.Class.isLocalClass(Class.java:1199)
- * // (see t5256c.scala for more details)
+ * public boolean isSynthetic()
*
* TODO - find all such calls and wrap them.
* TODO - create mechanism to avoid the recurrence of unwrapped calls.
*/
implicit class RichClass(jclazz: jClass[_]) {
- // `jclazz.isLocalClass` doesn't work because of problems with `getSimpleName`
- // hence we have to approximate by removing the `isAnonymousClass` check
-// def isLocalClass0: Boolean = jclazz.isLocalClass
- def isLocalClass0: Boolean = jclazz.getEnclosingMethod != null || jclazz.getEnclosingConstructor != null
+ // As explained in the javaReflection test, Class.isLocalClass is true for all non-member
+ // nested classes in Scala. This is fine per se; however, the implementation may throw an
+ // InternalError. We therefore re-implement it here.
+ // TODO: this method should be renamed to `isLocalOrAnonymousClass`.
+ // Due to binary compatibility, that is only possible in 2.12; we cannot introduce a new alias in 2.11.
+ def isLocalClass0: Boolean = jclazz.getEnclosingClass != null && !jclazz.isMemberClass
}
/**
@@ -939,7 +941,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
val ownerModule: ModuleSymbol =
if (split > 0) packageNameToScala(fullname take split) else this.RootPackage
val owner = ownerModule.moduleClass
- val name = (fullname: TermName) drop split + 1
+ val name = TermName(fullname) drop split + 1
val opkg = owner.info decl name
if (opkg.hasPackageFlag)
opkg.asModule
@@ -990,7 +992,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
if (name.startsWith(nme.NAME_JOIN_STRING)) coreLookup(name drop 1) else NoSymbol
}
if (nme.isModuleName(simpleName))
- coreLookup(nme.stripModuleSuffix(simpleName).toTermName) map (_.moduleClass)
+ coreLookup(simpleName.dropModule.toTermName) map (_.moduleClass)
else
coreLookup(simpleName)
}
@@ -1193,7 +1195,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
* - top-level classes
* - Scala classes that were generated via jclassToScala
* - classes that have a class owner that has a corresponding Java class
- * @throws A `ClassNotFoundException` for all Scala classes not in one of these categories.
+ * @throws ClassNotFoundException for all Scala classes not in one of these categories.
*/
@throws(classOf[ClassNotFoundException])
def classToJava(clazz: ClassSymbol): jClass[_] = classCache.toJava(clazz) {
@@ -1284,16 +1286,12 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
jclazz getDeclaredConstructor (effectiveParamClasses: _*)
}
- private def jArrayClass(elemClazz: jClass[_]): jClass[_] = {
- jArray.newInstance(elemClazz, 0).getClass
- }
-
/** The Java class that corresponds to given Scala type.
* Pre: Scala type is already transformed to Java level.
*/
def typeToJavaClass(tpe: Type): jClass[_] = tpe match {
case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
- case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
+ case TypeRef(_, ArrayClass, List(elemtpe)) => ScalaRunTime.arrayClass(typeToJavaClass(elemtpe))
case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
case SingleType(_, sym: ModuleSymbol) => classToJava(sym.moduleClass.asClass)
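The removed `jArrayClass` helper duplicated what `ScalaRunTime.arrayClass` already provides, so the array case of `typeToJavaClass` now calls the standard helper. For example:

    import scala.runtime.ScalaRunTime

    ScalaRunTime.arrayClass(classOf[String])       // class [Ljava.lang.String;
    ScalaRunTime.arrayClass(classOf[Array[Int]])   // class [[I (arrays of arrays work too)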
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index b5446694ed..7848753e69 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -5,7 +5,7 @@ package runtime
import scala.reflect.internal.{TreeInfo, SomePhase}
import scala.reflect.internal.{SymbolTable => InternalSymbolTable}
import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable}
-import scala.reflect.api.{TreeCreator, TypeCreator, Universe}
+import scala.reflect.api.{TypeCreator, Universe}
/** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
*
@@ -14,15 +14,27 @@ import scala.reflect.api.{TreeCreator, TypeCreator, Universe}
* @contentDiagram hideNodes "*Api" "*Extractor"
*/
class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with ReflectSetup with RuntimeSymbolTable { self =>
-
- override def inform(msg: String): Unit = log(msg)
def picklerPhase = SomePhase
def erasurePhase = SomePhase
lazy val settings = new Settings
- private val isLogging = sys.props contains "scala.debug.reflect"
+ private val isLogging = sys.props contains "scala.debug.reflect"
def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg)
+ // TODO: why put output under isLogging? Calls to inform are already conditional on debug/verbose/...
+ import scala.reflect.internal.{Reporter, ReporterImpl}
+ override def reporter: Reporter = new ReporterImpl {
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = log(msg)
+ }
+
+ // minimal Run to get Reporting wired
+ def currentRun = new RunReporting {}
+ class PerRunReporting extends PerRunReportingBase {
+ def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
+ }
+ protected def PerRunReporting = new PerRunReporting
+
+
type TreeCopier = InternalTreeCopierOps
implicit val TreeCopierTag: ClassTag[TreeCopier] = ClassTag[TreeCopier](classOf[TreeCopier])
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index dcd262c288..1c0aa7cf6d 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -170,6 +170,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.dropSingletonType
this.abstractTypesToBounds
this.dropIllegalStarTypes
+ this.wildcardExtrapolation
this.IsDependentCollector
this.ApproximateDependentMap
this.wildcardToTypeVarMap
@@ -309,6 +310,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.QuasiquoteClass_api_unapply
definitions.ScalaSignatureAnnotation
definitions.ScalaLongSignatureAnnotation
+ definitions.MethodHandle
definitions.OptionClass
definitions.OptionModule
definitions.SomeClass
@@ -360,6 +362,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.AnnotationClass
definitions.ClassfileAnnotationClass
definitions.StaticAnnotationClass
+ definitions.AnnotationRetentionAttr
+ definitions.AnnotationRetentionPolicyAttr
definitions.BridgeClass
definitions.ElidableMethodClass
definitions.ImplicitNotFoundClass
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index c56bc28d90..9ce6331e33 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -65,10 +65,15 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
class LazyPackageType extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
assert(sym.isPackageClass)
- sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
+ // Time travel to a phase before refchecks avoids an initialization issue. `openPackageModule`
+ // creates a module symbol and invokes `companionModule` while the `infos` field is
+ // still null. This calls `isModuleNotMethod`, which forces the `info` if run after refchecks.
+ slowButSafeEnteringPhaseNotLaterThan(picklerPhase) {
+ sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
// override def safeToString = pkgClass.toString
- openPackageModule(sym)
- markAllCompleted(sym)
+ openPackageModule(sym)
+ markAllCompleted(sym)
+ }
}
}
@@ -91,7 +96,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
//
// Short of significantly changing SymbolLoaders I see no other way than just
// to slap a global lock on materialization in runtime reflection.
- class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
+ class PackageScope(pkgClass: Symbol) extends Scope
with SynchronizedScope {
assert(pkgClass.isType)
@@ -102,7 +107,8 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
if (isCompilerUniverse) super.enter(sym)
else {
val existing = super.lookupEntry(sym.name)
- assert(existing == null || existing.sym.isMethod, s"pkgClass = $pkgClass, sym = $sym, existing = $existing")
+ def eitherIsMethod(sym1: Symbol, sym2: Symbol) = sym1.isMethod || sym2.isMethod
+ assert(existing == null || eitherIsMethod(existing.sym, sym), s"pkgClass = $pkgClass, sym = $sym, existing = $existing")
super.enter(sym)
}
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index 02155578f8..092bbd711f 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -2,8 +2,6 @@ package scala
package reflect
package runtime
-import scala.reflect.internal.Flags._
-
/**
 * This symbol table trait fills in the definitions so that class information is obtained by reflection.
 * It can be used either from a reflective universe (class scala.reflect.runtime.JavaUniverse), or else from
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index c90901410a..4a8585d616 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -37,8 +37,7 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
// Scopes
- override def newScope = new Scope() with SynchronizedScope
- override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
+ override def newScope = new Scope with SynchronizedScope
trait SynchronizedScope extends Scope {
// we can keep this lock fine-grained, because methods of Scope don't do anything extraordinary, which makes deadlocks impossible
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index f5e16c6640..4f0c0253e9 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -2,8 +2,7 @@ package scala
package reflect
package runtime
-import scala.reflect.io.AbstractFile
-import scala.collection.{ immutable, mutable }
+import scala.collection.immutable
import scala.reflect.internal.Flags._
private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
@@ -40,7 +39,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
* Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number
* of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make
* sure that things stay deterministic). However, in case of symbols there's hope, because it's only during
- * initializaton that symbols are thread-unsafe. After everything's set up, symbols become immutable
+ * initialization that symbols are thread-unsafe. After everything's set up, symbols become immutable
* (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe.
*
* Note that by saying "symbols become immutable" I mean literally that. In a very common case of PackageClassSymbol's,
@@ -103,10 +102,10 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
*
* Just a volatile var is fine, because:
* 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized
- * that effecively guards `Symbol.initialize`), which means that there can't be update conflicts.
+ * that effectively guards `Symbol.initialize`), which means that there can't be update conflicts.
* 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone
- * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet)
- * or a no-op (if the symbol is already inited), and that is fine in both cases.
+ * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't initialized yet)
+ * or a no-op (if the symbol is already initialized), and that is fine in both cases.
*
* upd. It looks like we also need to keep track of a mask of initialized flags to make sure
* that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag).
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
index 5edc051461..586b8a5257 100644
--- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -11,12 +11,16 @@ private[reflect] trait ThreadLocalStorage {
trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
// TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
- val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+ // (we would need a version that uses weak keys)
+ private val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]())
def get: T = {
if (values containsKey currentThread) values.get(currentThread)
else {
val value = initialValue
- values.putIfAbsent(currentThread, value)
+ // since the key is currentThread, and `values` is private, it
+ // would be impossible for a value to have been set after the
+ // above containsKey check. `putIfAbsent` is not necessary.
+ values.put(currentThread, value)
value
}
}
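The storage switch above: a `ConcurrentHashMap` keyed by `Thread` keeps dead threads (and their values) reachable forever, while a synchronized `WeakHashMap` lets entries for terminated threads be collected. The shape of the pattern on its own (a hypothetical wrapper, not the compiler trait):

    import java.util.{Collections, WeakHashMap}

    final class PerThread[T](initialValue: => T) {
      private val values = Collections.synchronizedMap(new WeakHashMap[Thread, T]())
      def get: T = {
        val t = Thread.currentThread()
        if (values.containsKey(t)) values.get(t)
        else {
          val v = initialValue
          values.put(t, v)   // only the current thread ever writes its own key, so plain put suffices
          v
        }
      }
      def set(newValue: T): Unit = values.put(Thread.currentThread(), newValue)
    }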
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index 3c9bbccba3..77eb610a84 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -30,8 +30,9 @@ package runtime {
import c.universe._
val runtimeClass = c.reifyEnclosingRuntimeClass
if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class")
- val runtimeUniverse = Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("universe"))
- val currentMirror = Apply(Select(runtimeUniverse, newTermName("runtimeMirror")), List(Select(runtimeClass, newTermName("getClassLoader"))))
+ val scalaPackage = Select(Ident(TermName("_root_")), TermName("scala"))
+ val runtimeUniverse = Select(Select(Select(scalaPackage, TermName("reflect")), TermName("runtime")), TermName("universe"))
+ val currentMirror = Apply(Select(runtimeUniverse, TermName("runtimeMirror")), List(Select(runtimeClass, TermName("getClassLoader"))))
c.Expr[Nothing](currentMirror)(c.WeakTypeTag.Nothing)
}
}
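Spelling the selection as `_root_.scala.reflect.runtime.universe` keeps the expansion immune to a user definition named `scala` in scope at the call site. The hazard in miniature (needs scala-reflect on the classpath; `fake` is a made-up name):

    object Demo {
      object scala { object fake }                      // shadows the top-level scala package inside Demo
      // val u = scala.reflect.runtime.universe         // would not compile here: `scala` is the object above
      val u = _root_.scala.reflect.runtime.universe     // _root_ always reaches the real package
    }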
diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
index 43f0ea1256..34057ed341 100644
--- a/src/repl/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -8,7 +8,6 @@ package tools.nsc
import io.{ File }
import util.{ ClassPath, ScalaClassLoader }
-import Properties.{ versionString, copyrightString }
import GenericRunnerCommand._
object JarRunner extends CommonRunner {
@@ -28,79 +27,78 @@ object JarRunner extends CommonRunner {
}
/** An object that runs Scala code. It has three possible
- * sources for the code to run: pre-compiled code, a script file,
- * or interactive entry.
- */
+ * sources for the code to run: pre-compiled code, a script file,
+ * or interactive entry.
+ */
class MainGenericRunner {
- def errorFn(ex: Throwable): Boolean = {
- ex.printStackTrace()
- false
- }
- def errorFn(str: String): Boolean = {
- Console.err println str
- false
+ def errorFn(str: String, e: Option[Throwable] = None, isFailure: Boolean = true): Boolean = {
+ if (str.nonEmpty) Console.err println str
+ e foreach (_.printStackTrace())
+ !isFailure
}
def process(args: Array[String]): Boolean = {
val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x))
- import command.{ settings, howToRun, thingToRun }
- def sampleCompiler = new Global(settings) // def so its not created unless needed
-
- if (!command.ok) return errorFn("\n" + command.shortUsageMsg)
- else if (settings.version) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
- else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler)
-
- def isE = !settings.execute.isDefault
- def dashe = settings.execute.value
-
- def isI = !settings.loadfiles.isDefault
- def dashi = settings.loadfiles.value
-
- // Deadlocks on startup under -i unless we disable async.
- if (isI)
- settings.Yreplsync.value = true
-
- def combinedCode = {
- val files = if (isI) dashi map (file => File(file).slurp()) else Nil
- val str = if (isE) List(dashe) else Nil
-
- files ++ str mkString "\n\n"
- }
-
- def runTarget(): Either[Throwable, Boolean] = howToRun match {
- case AsObject =>
- ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments)
- case AsScript =>
- ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments)
- case AsJar =>
- JarRunner.runJar(settings, thingToRun, command.arguments)
- case Error =>
- Right(false)
- case _ =>
- // We start the repl when no arguments are given.
- Right(new interpreter.ILoop process settings)
+ import command.{ settings, howToRun, thingToRun, shortUsageMsg, shouldStopWithInfo }
+ def sampleCompiler = new Global(settings) // def so it's not created unless needed
+
+ def run(): Boolean = {
+ def isE = !settings.execute.isDefault
+ def dashe = settings.execute.value
+
+ def isI = !settings.loadfiles.isDefault
+ def dashi = settings.loadfiles.value
+
+ // Deadlocks on startup under -i unless we disable async.
+ if (isI)
+ settings.Yreplsync.value = true
+
+ def combinedCode = {
+ val files = if (isI) dashi map (file => File(file).slurp()) else Nil
+ val str = if (isE) List(dashe) else Nil
+
+ files ++ str mkString "\n\n"
+ }
+
+ def runTarget(): Either[Throwable, Boolean] = howToRun match {
+ case AsObject =>
+ ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments)
+ case AsScript =>
+ ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments)
+ case AsJar =>
+ JarRunner.runJar(settings, thingToRun, command.arguments)
+ case Error =>
+ Right(false)
+ case _ =>
+ // We start the repl when no arguments are given.
+ Right(new interpreter.ILoop process settings)
+ }
+
+ /** If -e and -i were both given, we want to execute the -e code after the
+ * -i files have been included, so they are read into strings and prepended to
+ * the code given in -e. The -i option is documented to only make sense
+ * interactively so this is a pretty reasonable assumption.
+ *
+ * This all needs a rewrite though.
+ */
+ if (isE) {
+ ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments)
+ }
+ else runTarget() match {
+ case Left(ex) => errorFn("", Some(ex)) // there must be a useful message of hope to offer here
+ case Right(b) => b
+ }
}
- /** If -e and -i were both given, we want to execute the -e code after the
- * -i files have been included, so they are read into strings and prepended to
- * the code given in -e. The -i option is documented to only make sense
- * interactively so this is a pretty reasonable assumption.
- *
- * This all needs a rewrite though.
- */
- if (isE) {
- ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments)
- }
- else runTarget() match {
- case Left(ex) => errorFn(ex)
- case Right(b) => b
- }
+ if (!command.ok)
+ errorFn(f"%n$shortUsageMsg")
+ else if (shouldStopWithInfo)
+ errorFn(command getInfoMessage sampleCompiler, isFailure = false)
+ else
+ run()
}
}
object MainGenericRunner extends MainGenericRunner {
- def main(args: Array[String]) {
- if (!process(args))
- sys.exit(1)
- }
+ def main(args: Array[String]): Unit = if (!process(args)) sys.exit(1)
}
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
index e66e4eff29..df49e6a2e4 100644
--- a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
@@ -30,7 +30,7 @@ class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends Par
|Failed to initialize compiler: %s not found.
|** Note that as of 2.8 scala does not assume use of the java classpath.
|** For the old behavior pass -usejavacp to scala, or if using a Settings
- |** object programatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
+ |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
)
value
}
diff --git a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
index d8efcda8b5..a8d537e314 100644
--- a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
@@ -112,8 +112,12 @@ trait VariColumnTabulator extends Tabulator {
def layout(ncols: Int): Option[(Int, Seq[Int], Seq[Seq[String]])] = {
val nrows = items.size /% ncols
val xwise = isAcross || ncols >= items.length
- def maxima(sss: Seq[Seq[String]]) =
- (0 until (ncols min items.size)) map (i => (sss map (ss => ss(i).length)).max)
+ // max width item in each column
+ def maxima(rows: Seq[Seq[String]]) =
+ (0 until (ncols min items.size)) map { col =>
+ val widths = for (r <- rows if r.size > col) yield r(col).length
+ widths.max
+ }
def resulting(rows: Seq[Seq[String]]) = {
val columnWidths = maxima(rows) map (_ + marginSize)
val linelen = columnWidths.sum
@@ -124,9 +128,10 @@ trait VariColumnTabulator extends Tabulator {
else if (xwise) resulting((items grouped ncols).toSeq)
else {
val cols = (items grouped nrows).toList
- val rows = for (i <- 0 until nrows) yield
- for (j <- 0 until ncols) yield
- if (j < cols.size && i < cols(j).size) cols(j)(i) else ""
+ val rows =
+ for (i <- 0 until nrows) yield
+ for (j <- 0 until ncols) yield
+ if (j < cols.size && i < cols(j).size) cols(j)(i) else ""
resulting(rows)
}
}
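The `maxima` fix above: when items are laid out column-wise, the last row can be shorter than `ncols`, so measuring `ss(i)` unconditionally could read past the end of a short row; the new version only measures cells that actually exist. A standalone version of the guarded computation:

    // widths per column over possibly ragged rows (a sketch of the same guard)
    def maxima(rows: Seq[Seq[String]], ncols: Int): Seq[Int] =
      (0 until ncols) map { col =>
        val widths = for (r <- rows if r.size > col) yield r(col).length
        if (widths.isEmpty) 0 else widths.max
      }

    maxima(Seq(Seq("alpha", "bb"), Seq("c")), ncols = 2)   // Vector(5, 2): the short row is skipped in column 1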
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index a96bed4696..4221126caa 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -12,6 +12,7 @@ import scala.annotation.tailrec
import Predef.{ println => _, _ }
import interpreter.session._
import StdReplTags._
+import scala.tools.asm.ClassReader
import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import scala.reflect.classTag
@@ -19,6 +20,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader }
import ScalaClassLoader._
import scala.reflect.io.{ File, Directory }
import scala.tools.util._
+import io.AbstractFile
import scala.collection.generic.Clearable
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
@@ -75,6 +77,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def history = in.history
// classpath entries added via :cp
+ @deprecated("Use reset, replay or require to update class path", since = "2.11")
var addedClasspath: String = ""
/** A reverse list of commands to replay if the user requests a :replay */
@@ -124,22 +127,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
/** print a friendly help message */
- def helpCommand(line: String): Result = {
- if (line == "") helpSummary()
- else uniqueCommand(line) match {
- case Some(lc) => echo("\n" + lc.help)
- case _ => ambiguousError(line)
- }
+ def helpCommand(line: String): Result = line match {
+ case "" => helpSummary()
+ case CommandMatch(cmd) => echo(f"%n${cmd.help}")
+ case _ => ambiguousError(line)
}
private def helpSummary() = {
- val usageWidth = commands map (_.usageMsg.length) max
- val formatStr = "%-" + usageWidth + "s %s"
+ val usageWidth = commands map (_.usageMsg.length) max
+ val formatStr = s"%-${usageWidth}s %s"
- echo("All commands can be abbreviated, e.g. :he instead of :help.")
+ echo("All commands can be abbreviated, e.g., :he instead of :help.")
- commands foreach { cmd =>
- echo(formatStr.format(cmd.usageMsg, cmd.help))
- }
+ for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help))
}
private def ambiguousError(cmd: String): Result = {
matchingCommands(cmd) match {
@@ -148,14 +147,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
Result(keepRunning = true, None)
}
+ // this lets us add commands willy-nilly and only requires enough of the command to disambiguate
private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
- private def uniqueCommand(cmd: String): Option[LoopCommand] = {
- // this lets us add commands willy-nilly and only requires enough command to disambiguate
- matchingCommands(cmd) match {
- case List(x) => Some(x)
- // exact match OK even if otherwise appears ambiguous
- case xs => xs find (_.name == cmd)
- }
+ private object CommandMatch {
+ def unapply(name: String): Option[LoopCommand] =
+ matchingCommands(name) match {
+ case x :: Nil => Some(x)
+ case xs => xs find (_.name == name) // accept an exact match
+ }
}
/** Show the history */
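`CommandMatch` packages the lookup as an extractor: a unique prefix selects its command, while an ambiguous prefix is accepted only when it names a command exactly. A standalone sketch of the rule (the command list here is hypothetical):

    final case class Cmd(name: String)
    val commands = List(Cmd("help"), Cmd("history"), Cmd("reset"), Cmd("replay"))

    object CommandMatch {
      def unapply(input: String): Option[Cmd] =
        commands.filter(_.name startsWith input) match {
          case x :: Nil => Some(x)                   // unique prefix
          case xs       => xs.find(_.name == input)  // otherwise require an exact match
        }
    }

    def describe(input: String) = input match {
      case CommandMatch(cmd) => s":$input runs ${cmd.name}"
      case _                 => s":$input is ambiguous or unknown"
    }

    describe("res")   // :res runs reset
    describe("h")     // :h is ambiguous or unknown (help, history)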
@@ -207,7 +206,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** Standard commands **/
lazy val standardCommands = List(
- cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
cmd("edit", "<id>|<line>", "edit history", editCommand),
cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
historyCommand,
@@ -220,11 +218,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
- nullary("replay", "reset execution and replay all previous commands", replay),
- nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
+ cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand),
+ cmd("require", "<path>", "add a jar to the classpath", require),
+ cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand),
cmd("save", "<path>", "save replayable session to a file", saveCommand),
shCommand,
- cmd("settings", "[+|-]<options>", "+enable/-disable flags, set compiler options", changeSettings),
+ cmd("settings", "<options>", "update compiler options, if possible; see reset", changeSettings),
nullary("silent", "disable/enable automatic printing of results", verbosity),
cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
@@ -304,57 +303,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
}
- private def changeSettings(args: String): Result = {
- def showSettings() = {
- for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
- }
- def updateSettings() = {
- // put aside +flag options
- val (pluses, rest) = (args split "\\s+").toList partition (_.startsWith("+"))
- val tmps = new Settings
- val (ok, leftover) = tmps.processArguments(rest, processAll = true)
- if (!ok) echo("Bad settings request.")
- else if (leftover.nonEmpty) echo("Unprocessed settings.")
- else {
- // boolean flags set-by-user on tmp copy should be off, not on
- val offs = tmps.userSetSettings filter (_.isInstanceOf[Settings#BooleanSetting])
- val (minuses, nonbools) = rest partition (arg => offs exists (_ respondsTo arg))
- // update non-flags
- settings.processArguments(nonbools, processAll = true)
- // also snag multi-value options for clearing, e.g. -Ylog: and -language:
- for {
- s <- settings.userSetSettings
- if s.isInstanceOf[Settings#MultiStringSetting] || s.isInstanceOf[Settings#PhasesSetting]
- if nonbools exists (arg => arg.head == '-' && arg.last == ':' && (s respondsTo arg.init))
- } s match {
- case c: Clearable => c.clear()
- case _ =>
- }
- def update(bs: Seq[String], name: String=>String, setter: Settings#Setting=>Unit) = {
- for (b <- bs)
- settings.lookupSetting(name(b)) match {
- case Some(s) =>
- if (s.isInstanceOf[Settings#BooleanSetting]) setter(s)
- else echo(s"Not a boolean flag: $b")
- case _ =>
- echo(s"Not an option: $b")
- }
- }
- update(minuses, identity, _.tryToSetFromPropertyValue("false")) // turn off
- update(pluses, "-" + _.drop(1), _.tryToSet(Nil)) // turn on
- }
- }
- if (args.isEmpty) showSettings() else updateSettings()
+ private def changeSettings(line: String): Result = {
+ def showSettings() = for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
+ if (line.isEmpty) showSettings() else { updateSettings(line) ; () }
+ }
+ private def updateSettings(line: String) = {
+ val (ok, rest) = settings.processArguments(words(line), processAll = false)
+ ok && rest.isEmpty
}
private def javapCommand(line: String): Result = {
if (javap == null)
- ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
+ s":javap unavailable, no tools.jar at $jdkHome. Set JDK_HOME."
else if (line == "")
":javap [-lcsvp] [path1 path2 ...]"
else
javap(words(line)) foreach { res =>
- if (res.isError) return "Failed: " + res.value
+ if (res.isError) return s"Failed: ${res.value}"
else res.show()
}
}
@@ -402,7 +367,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private val crashRecovery: PartialFunction[Throwable, Boolean] = {
case ex: Throwable =>
- echo(intp.global.throwableAsString(ex))
+ val (err, explain) = (
+ if (intp.isInitializeComplete)
+ (intp.global.throwableAsString(ex), "")
+ else
+ (ex.getMessage, "The compiler did not initialize.\n")
+ )
+ echo(err)
ex match {
case _: NoSuchMethodError | _: NoClassDefFoundError =>
@@ -410,7 +381,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
throw ex
case _ =>
def fn(): Boolean =
- try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
+ try in.readYesOrNo(explain + replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
catch { case _: RuntimeException => false }
if (fn()) replay()
@@ -419,39 +390,56 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
true
}
+ // after processing a line: OK means continue, ERR means break, EOF means all done
+ object LineResults extends Enumeration {
+ type LineResult = Value
+ val EOF, ERR, OK = Value
+ }
+ import LineResults.LineResult
+
// return false if repl should exit
def processLine(line: String): Boolean = {
import scala.concurrent.duration._
- Await.ready(globalFuture, 60.seconds)
+ Await.ready(globalFuture, 10.minutes) // Long timeout here to avoid test failures under heavy load.
- (line ne null) && (command(line) match {
+ command(line) match {
case Result(false, _) => false
case Result(_, Some(line)) => addReplay(line) ; true
case _ => true
- })
+ }
}
private def readOneLine() = {
+ import scala.io.AnsiColor.{ MAGENTA, RESET }
out.flush()
- in readLine prompt
+ in readLine (
+ if (replProps.colorOk)
+ MAGENTA + prompt + RESET
+ else
+ prompt
+ )
}
/** The main read-eval-print loop for the repl. It calls
* command() for each line of input, and stops when
* command() returns false.
*/
- @tailrec final def loop() {
- if ( try processLine(readOneLine()) catch crashRecovery )
- loop()
+ @tailrec final def loop(): LineResult = {
+ import LineResults._
+ readOneLine() match {
+ case null => EOF
+ case line => if (try processLine(line) catch crashRecovery) loop() else ERR
+ }
}
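The control flow of the new loop, reduced to a self-contained sketch; the reader and processor are passed in, and the string results stand in for the LineResults values.

@annotation.tailrec
def driver(readLine: () => String, process: String => Boolean): String =
  readLine() match {
    case null => "EOF"                                            // reader exhausted
    case line => if (process(line)) driver(readLine, process) else "ERR"
  }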
/** interpret all lines from a specified file */
- def interpretAllFrom(file: File) {
+ def interpretAllFrom(file: File, verbose: Boolean = false) {
savingReader {
savingReplayStack {
file applyReader { reader =>
- in = SimpleReader(reader, out, interactive = false)
- echo("Loading " + file + "...")
+ in = if (verbose) new SimpleReader(reader, out, interactive = true) with EchoReader
+ else SimpleReader(reader, out, interactive = false)
+ echo(s"Loading $file...")
loop()
}
}
@@ -459,8 +447,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
/** create a new interpreter and replay the given commands */
- def replay() {
- reset()
+ def replayCommand(line: String): Unit = {
+ def run(destructive: Boolean): Unit = {
+ if (destructive) createInterpreter() else reset()
+ replay()
+ }
+ if (line.isEmpty) run(destructive = false)
+ else if (updateSettings(line)) run(destructive = true)
+ }
+ /** Announces as it replays. */
+ def replay(): Unit = {
if (replayCommandStack.isEmpty)
echo("Nothing to replay.")
else for (cmd <- replayCommands) {
@@ -469,21 +465,28 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
echo("")
}
}
- def resetCommand() {
- echo("Resetting interpreter state.")
- if (replayCommandStack.nonEmpty) {
- echo("Forgetting this session history:\n")
- replayCommands foreach echo
- echo("")
- replayCommandStack = Nil
+ /** `reset` the interpreter in an attempt to start fresh.
+ * Supplying settings creates a new compiler.
+ */
+ def resetCommand(line: String): Unit = {
+ def run(destructive: Boolean): Unit = {
+ echo("Resetting interpreter state.")
+ if (replayCommandStack.nonEmpty) {
+ echo("Forgetting this session history:\n")
+ replayCommands foreach echo
+ echo("")
+ replayCommandStack = Nil
+ }
+ if (intp.namedDefinedTerms.nonEmpty)
+ echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
+ if (intp.definedTypes.nonEmpty)
+ echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
+ if (destructive) createInterpreter() else reset()
}
- if (intp.namedDefinedTerms.nonEmpty)
- echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
- if (intp.definedTypes.nonEmpty)
- echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
-
- reset()
+ if (line.isEmpty) run(destructive = false)
+ else if (updateSettings(line)) run(destructive = true)
}
+ /** Resets without announcements. */
def reset() {
intp.reset()
unleashAndSetPhase()
@@ -591,13 +594,17 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
res
}
- def loadCommand(arg: String) = {
- var shouldReplay: Option[String] = None
- withFile(arg)(f => {
- interpretAllFrom(f)
- shouldReplay = Some(":load " + arg)
- })
- Result(keepRunning = true, shouldReplay)
+ def loadCommand(arg: String): Result = {
+ def run(file: String, verbose: Boolean) = withFile(file) { f =>
+ interpretAllFrom(f, verbose)
+ Result recording s":load $arg"
+ } getOrElse Result.default
+
+ words(arg) match {
+ case "-v" :: file :: Nil => run(file, verbose = true)
+ case file :: Nil => run(file, verbose = false)
+ case _ => echo("usage: :load -v file") ; Result.default
+ }
}
def saveCommand(filename: String): Result = (
@@ -606,17 +613,63 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else File(filename).printlnAll(replayCommands: _*)
)
+ @deprecated("Use reset, replay or require to update class path", since = "2.11")
def addClasspath(arg: String): Unit = {
val f = File(arg).normalize
if (f.exists) {
addedClasspath = ClassPath.join(addedClasspath, f.path)
- val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
- echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
- replay()
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClassPathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString))
}
else echo("The path '" + f + "' doesn't seem to exist.")
}
+  /** Adds a jar file to the current classpath. The jar will only be added if it
+ * does not contain classes that already exist on the current classpath.
+ *
+ * Importantly, `require` adds jars to the classpath ''without'' resetting
+ * the state of the interpreter. This is in contrast to `replay` which can
+ * be used to add jars to the classpath and which creates a new instance of
+ * the interpreter and replays all interpreter expressions.
+ */
+ def require(arg: String): Unit = {
+ val f = File(arg).normalize
+
+ val jarFile = AbstractFile.getDirectory(new java.io.File(arg))
+ if (jarFile == null) {
+ echo(s"Cannot load '$arg'")
+ return
+ }
+
+ def flatten(f: AbstractFile): Iterator[AbstractFile] =
+ if (f.isClassContainer) f.iterator.flatMap(flatten)
+ else Iterator(f)
+
+ val entries = flatten(jarFile)
+
+ def classNameOf(classFile: AbstractFile): String = {
+ val input = classFile.input
+ try {
+ val reader = new ClassReader(input)
+ reader.getClassName.replace('/', '.')
+ } finally {
+ input.close()
+ }
+ }
+ def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined
+ val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined)
+
+ if (!f.exists) echo(s"The path '$f' doesn't seem to exist.")
+ else if (exists) echo(s"The path '$f' cannot be loaded, because existing classpath entries conflict.") // TODO tell me which one
+ else {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClassPathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString))
+ }
+ }
+
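As a stand-alone illustration of the duplicate check in `require`: the class name recorded in a classfile's bytes can be read with ASM's ClassReader. The helper name below is hypothetical.

import scala.tools.asm.ClassReader

def classNameOfBytes(bytes: Array[Byte]): String =
  new ClassReader(bytes).getClassName.replace('/', '.') // internal name -> dotted name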
def powerCmd(): Result = {
if (isReplPower) "Already in power mode."
else enablePowerMode(isDuringInit = false)
@@ -646,20 +699,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
/** Run one command submitted by the user. Two values are returned:
- * (1) whether to keep running, (2) the line to record for replay,
- * if any. */
+ * (1) whether to keep running, (2) the line to record for replay, if any.
+ */
def command(line: String): Result = {
- if (line startsWith ":") {
- val cmd = line.tail takeWhile (x => !x.isWhitespace)
- uniqueCommand(cmd) match {
- case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
- case _ => ambiguousError(cmd)
- }
- }
+ if (line startsWith ":") colonCommand(line.tail)
else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler
else Result(keepRunning = true, interpretStartingWith(line))
}
+ private val commandish = """(\S+)(?:\s+)?(.*)""".r
+
+ private def colonCommand(line: String): Result = line.trim match {
+ case "" => helpSummary()
+ case commandish(CommandMatch(cmd), rest) => cmd(rest)
+ case commandish(name, _) => ambiguousError(name)
+ case _ => echo("?")
+ }
+
private def readWhile(cond: String => Boolean) = {
Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
}
@@ -683,13 +739,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
val code = file match {
case Some(name) =>
- withFile(name)(f => {
+ withFile(name) { f =>
shouldReplay = Some(s":paste $arg")
val s = f.slurp.trim
if (s.isEmpty) echo(s"File contains no code: $f")
else echo(s"Pasting file $f...")
s
- }) getOrElse ""
+ } getOrElse ""
case None =>
echo("// Entering paste mode (ctrl-D to finish)\n")
val text = (readWhile(_ => true) mkString "\n").trim
@@ -818,7 +874,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
)
catch {
case ex @ (_: Exception | _: NoClassDefFoundError) =>
- echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.")
+ echo(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.")
SimpleReader()
}
}
@@ -845,6 +901,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case _ =>
}
}
+
+ // start an interpreter with the given settings
def process(settings: Settings): Boolean = savingContextLoader {
this.settings = settings
createInterpreter()
@@ -859,7 +917,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
loadFiles(settings)
printWelcome()
- try loop()
+ try loop() match {
+ case LineResults.EOF => out print Properties.shellInterruptedString
+ case _ =>
+ }
catch AbstractOrMissingHandler()
finally closeInterpreter()
@@ -876,25 +937,30 @@ object ILoop {
// Designed primarily for use by test code: take a String with a
// bunch of code, and prints out a transcript of what it would look
// like if you'd just typed it into the repl.
- def runForTranscript(code: String, settings: Settings): String = {
+ def runForTranscript(code: String, settings: Settings, inSession: Boolean = false): String = {
import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
stringFromStream { ostream =>
Console.withOut(ostream) {
val output = new JPrintWriter(new OutputStreamWriter(ostream), true) {
- override def write(str: String) = {
- // completely skip continuation lines
- if (str forall (ch => ch.isWhitespace || ch == '|')) ()
+ // skip margin prefix for continuation lines, unless preserving session text for test
+ override def write(str: String) =
+ if (!inSession && (str forall (ch => ch.isWhitespace || ch == '|'))) () // repl.paste.ContinueString
else super.write(str)
- }
}
val input = new BufferedReader(new StringReader(code.trim + "\n")) {
override def readLine(): String = {
- val s = super.readLine()
- // helping out by printing the line being interpreted.
- if (s != null)
+ mark(1) // default buffer is 8k
+ val c = read()
+ if (c == -1 || c == 4) {
+ null
+ } else {
+ reset()
+ val s = super.readLine()
+ // helping out by printing the line being interpreted.
output.println(s)
- s
+ s
+ }
}
}
val repl = new ILoop(input, output)
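The mark/reset trick in the overridden readLine above, as a minimal sketch against a plain BufferedReader; the helper is illustrative only.

import java.io.BufferedReader

def readOrNull(in: BufferedReader): String = {
  in.mark(1)                     // remember position before peeking one char
  in.read() match {
    case -1 | 4 => null          // end of stream, or a literal ctrl-D in the transcript
    case _      => in.reset(); in.readLine()
  }
}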
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 9c853fb514..c281126d5f 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -15,12 +15,16 @@ import scala.concurrent.{ Future, ExecutionContext }
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
-import scala.tools.util.PathResolver
+import scala.tools.util.PathResolverFactory
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps, ClassPath, MergedClassPath }
+import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
+import scala.tools.nsc.backend.JavaPlatform
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+import java.net.URL
+import java.io.File
/** An interpreter for Scala code.
*
@@ -41,7 +45,7 @@ import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine
* all variables defined by that code. To extract the result of an
* interpreted line to show the user, a second "result object" is created
* which imports the variables exported by the above object and then
- * exports members called "$eval" and "$print". To accomodate user expressions
+ * exports members called "$eval" and "$print". To accommodate user expressions
* that read from variables or methods defined in previous statements, "import"
* statements are used.
*
@@ -82,9 +86,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private var _classLoader: util.AbstractFileClassLoader = null // active classloader
private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
+ private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL
+
def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
- else new PathResolver(settings).result.asURLs // the compiler's classpath
+ else PathResolverFactory.create(settings).resultAsURLs // the compiler's classpath
)
def settings = initialSettings
// Run the code body with the given boolean settings flipped to true.
@@ -110,15 +116,17 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
lazy val reporter: ReplReporter = new ReplReporter(this)
import formatting._
- import reporter.{ printMessage, withoutTruncating }
+ import reporter.{ printMessage, printUntruncatedMessage }
// This exists mostly because using the reporter too early leads to deadlock.
private def echo(msg: String) { Console println msg }
private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
private def _initialize() = {
try {
- // todo. if this crashes, REPL will hang
- new _compiler.Run() compileSources _initSources
+ // if this crashes, REPL will hang its head in shame
+ val run = new _compiler.Run()
+ assert(run.typerPhase != NoPhase, "REPL requires a typer phase.")
+ run compileSources _initSources
_initializeComplete = true
true
}
@@ -235,6 +243,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
}
+ /**
+ * Adds all specified jars to the compile and runtime classpaths.
+ *
+ * @note Currently only supports jars, not directories.
+ * @param urls The list of items to add to the compile and runtime classpaths.
+ */
+ def addUrlsToClassPath(urls: URL*): Unit = {
+ new Run // force some initialization
+ urls.foreach(_runtimeClassLoader.addURL) // Add jars to runtime classloader
+ global.extendCompilerClassPath(urls: _*) // Add jars to compile-time classpath
+ }
+
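The runtime half of addUrlsToClassPath boils down to a URLClassLoader whose protected addURL is exposed. Below is a generic JDK-level sketch, not the ScalaClassLoader wrapper used here.

import java.net.{ URL, URLClassLoader }

class AppendableLoader(parent: ClassLoader) extends URLClassLoader(Array.empty[URL], parent) {
  def append(url: URL): Unit = addURL(url) // addURL is protected in URLClassLoader
}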
/** Parent classloader. Overridable. */
protected def parentClassLoader: ClassLoader =
settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
@@ -289,31 +309,47 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def shift[T](op: => T): T = exitingFlatten(op)
}
- def originalPath(name: String): String = originalPath(name: TermName)
+ def originalPath(name: String): String = originalPath(TermName(name))
def originalPath(name: Name): String = typerOp path name
def originalPath(sym: Symbol): String = typerOp path sym
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
+
def translatePath(path: String) = {
val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
sym.toOption map flatPath
}
+
+ /** If path represents a class resource in the default package,
+ * see if the corresponding symbol has a class file that is a REPL artifact
+ * residing at a different resource path. Translate X.class to $line3/$read$$iw$$iw$X.class.
+ */
+ def translateSimpleResource(path: String): Option[String] = {
+ if (!(path contains '/') && (path endsWith ".class")) {
+ val name = path stripSuffix ".class"
+ val sym = if (name endsWith "$") symbolOfTerm(name.init) else symbolOfIdent(name)
+ def pathOf(s: String) = s"${s.replace('.', '/')}.class"
+ sym.toOption map (s => pathOf(flatPath(s)))
+ } else {
+ None
+ }
+ }
def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath
+ /** If unable to find a resource foo.class, try taking foo as a symbol in scope
+   *  and using its java class name as a resource to load.
+ *
+ * $intp.classLoader classBytes "Bippy" or $intp.classLoader getResource "Bippy.class" just work.
+ */
private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
- /** Overridden here to try translating a simple name to the generated
- * class name if the original attempt fails. This method is used by
- * getResourceAsStream as well as findClass.
- */
- override protected def findAbstractFile(name: String): AbstractFile =
- super.findAbstractFile(name) match {
- case null if _initializeComplete => translatePath(name) map (super.findAbstractFile(_)) orNull
- case file => file
- }
+ override protected def findAbstractFile(name: String): AbstractFile = super.findAbstractFile(name) match {
+ case null if _initializeComplete => translateSimpleResource(name) map super.findAbstractFile orNull
+ case file => file
+ }
}
private def makeClassLoader(): util.AbstractFileClassLoader =
- new TranslatingClassLoader(parentClassLoader match {
- case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
+ new TranslatingClassLoader({
+ _runtimeClassLoader = new URLClassLoader(compilerClasspath, parentClassLoader)
+ _runtimeClassLoader
})
// Set the current Java "context" class loader to this interpreter's class loader
@@ -384,6 +420,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def compileSourcesKeepingRun(sources: SourceFile*) = {
val run = new Run()
+ assert(run.typerPhase != NoPhase, "REPL requires a typer phase.")
reporter.reset()
run compileSources sources.toList
(!reporter.hasErrors, run)
@@ -606,7 +643,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
else {
// don't truncate stack traces
- withoutTruncating(printMessage(result))
+ printUntruncatedMessage(result)
IR.Error
}
}
@@ -790,7 +827,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
((pos, msg)) :: loop(filtered)
}
- val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
+ val warnings = loop(run.reporting.allConditionalWarnings)
if (warnings.nonEmpty)
mostRecentWarnings = warnings
}
@@ -1069,8 +1106,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op)
def symbolOfIdent(id: String): Symbol = symbolOfType(id) orElse symbolOfTerm(id)
- def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName))
- def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName))
+ def symbolOfType(id: String): Symbol = tryTwice(replScope lookup TypeName(id))
+ def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup TermName(id))
def symbolOfName(id: Name): Symbol = replScope lookup id
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
@@ -1118,7 +1155,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def apply(line: String): Result = debugging(s"""parse("$line")""") {
var isIncomplete = false
- reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
+ currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true) {
reporter.reset()
val trees = newUnitParser(line).parseStats()
if (reporter.hasErrors) Error
@@ -1171,6 +1208,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
finally isettings.unwrapStrings = saved
}
+ def withoutTruncating[A](body: => A): A = reporter withoutTruncating body
+
def symbolDefString(sym: Symbol) = {
TypeStrings.quieter(
exitingTyper(sym.defString),
@@ -1243,9 +1282,11 @@ object IMain {
def getProgram(statements: String*): String = null
- def getScriptEngine: ScriptEngine = new IMain(this, new Settings() {
- usemanifestcp.value = true
- })
+ def getScriptEngine: ScriptEngine = {
+ val settings = new Settings()
+ settings.usemanifestcp.value = true
+ new IMain(this, settings)
+ }
}
// The two name forms this is catching are the two sides of this assignment:
diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index 28ddf2939c..ed69d449cb 100644
--- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -23,6 +23,7 @@ trait InteractiveReader {
def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
case 'y' => true
case 'n' => false
+ case -1 => false // EOF
case _ => alt
}
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
index 915fd57bf8..1ccade2172 100644
--- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -8,8 +8,9 @@ package tools.nsc
package interpreter
import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.asm.Opcodes
import scala.tools.nsc.util.ScalaClassLoader
-import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer }
+import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, StringWriter, Writer }
import java.util.{ Locale }
import java.util.concurrent.ConcurrentLinkedQueue
import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
@@ -18,39 +19,47 @@ import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
import scala.reflect.io.{ AbstractFile, Directory, File, Path }
import scala.io.Source
import scala.util.{ Try, Success, Failure }
-import scala.util.Properties.lineSeparator
+import scala.util.Properties.{ lineSeparator => EOL }
import scala.util.matching.Regex
-import scala.collection.JavaConverters
+import scala.collection.JavaConverters._
import scala.collection.generic.Clearable
import java.net.URL
import scala.language.reflectiveCalls
+import PartialFunction.{ cond => when }
import Javap._
+/** Javap command implementation. Supports the platform tool for Java 6 or 7+.
+ *  Adds a few options for the REPL world, to show the bodies of `App` classes and closures.
+ */
class JavapClass(
val loader: ScalaClassLoader,
val printWriter: PrintWriter,
intp: Option[IMain] = None
-) extends scala.tools.util.Javap {
+) extends Javap {
import JavapTool.ToolArgs
import JavapClass._
lazy val tool = JavapTool()
- /** Run the tool. Option args start with "-".
+ /** Run the tool. Option args start with "-", except that "-" itself
+ * denotes the last REPL result.
* The default options are "-protected -verbose".
* Byte data for filename args is retrieved with findBytes.
+ * @return results for invoking JpResult.show()
*/
def apply(args: Seq[String]): List[JpResult] = {
- val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1)
- val (flags, upgraded) = upgrade(options)
+ val (options, classes) = args partition (s => (s startsWith "-") && s.length > 1)
+ val (flags, upgraded) = upgrade(options)
import flags.{ app, fun, help, raw }
- val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases
- if (help || claases.isEmpty)
+
+ val targets = if (fun && !help) FunFinder(loader, intp).funs(classes) else classes
+
+ if (help || classes.isEmpty)
List(JpResult(JavapTool.helper(printWriter)))
else if (targets.isEmpty)
- List(JpResult("No anonfuns found."))
+ List(JpResult("No closures found."))
else
- tool(raw, upgraded)(targets map (claas => targeted(claas, app)))
+ tool(raw, upgraded)(targets map (targeted(_, app))) // JavapTool.apply
}
/** Cull our tool options. */
@@ -67,19 +76,22 @@ class JavapClass(
case f: Failure[_] => (path, Failure(f.exception))
}
- /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */
+ /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar").
+ * @return the path to use for filtering, and the byte array
+ */
private def bytesFor(path: String, app: Boolean) = Try {
def last = intp.get.mostRecentVar // fail if no intp
- def req = path match {
- case "-" => last
- case HashSplit(prefix, member) =>
- if (prefix != null) prefix
- else if (member != null) member
- else "#"
- }
- val targetedBytes = if (app) findAppBody(req) else (req, findBytes(req))
- if (targetedBytes._2.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '$path'")
- targetedBytes
+ val req = path match {
+ case "-" => last
+ case HashSplit(prefix, _) if prefix != null => prefix
+ case HashSplit(_, member) if member != null => member
+ case s => s
+ }
+ val targetedBytes = if (app) findAppBody(req) else (path, findBytes(req))
+ targetedBytes match {
+ case (_, bytes) if bytes.isEmpty => throw new FileNotFoundException(s"Could not find class bytes for '$path'")
+ case ok => ok
+ }
}
private def findAppBody(path: String): (String, Array[Byte]) = {
@@ -88,16 +100,12 @@ class JavapClass(
// assumes only the first match is of interest (because only one endpoint is generated).
def findNewStyle(bytes: Array[Byte]) = {
import scala.tools.asm.ClassReader
- import scala.tools.asm.tree.ClassNode
- import PartialFunction.cond
- import JavaConverters._
- val rdr = new ClassReader(bytes)
- val nod = new ClassNode
- rdr.accept(nod, 0)
//foo/Bar.delayedEndpoint$foo$Bar$1
val endpoint = "delayedEndpoint".r.unanchored
- def isEndPoint(s: String) = (s contains '$') && cond(s) { case endpoint() => true }
- nod.methods.asScala collectFirst { case m if isEndPoint(m.name) => m.name }
+ def isEndPoint(s: String) = (s contains '$') && when(s) { case endpoint() => true }
+ new ClassReader(bytes) withMethods { methods =>
+ methods collectFirst { case m if isEndPoint(m.name) => m.name }
+ }
}
// try new style, and add foo#delayedEndpoint$bar$1 to filter on the endpoint
def asNewStyle(bytes: Array[Byte]) = Some(bytes) filter (_.nonEmpty) flatMap { bs =>
@@ -121,8 +129,7 @@ class JavapClass(
def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
- /** Assume the string is a path and try to find the classfile
- * it represents.
+ /** Assume the string is a path and try to find the classfile it represents.
*/
def tryFile(path: String): Option[Array[Byte]] =
(Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
@@ -201,46 +208,67 @@ class JavapClass(
w
}
- /** Create a Showable with output massage.
- * @param raw show ugly repl names
- * @param target attempt to filter output to show region of interest
- * @param preamble other messages to output
- */
- def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
- // ReplStrippingWriter clips and scrubs on write(String)
- // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
- def show() =
- if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
- else writeLines()
- private def writeLines() {
- // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
- // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
- val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
- var filtering = false // true if in region matching filter
- // true to output
- def checkFilter(line: String) = if (filterOn.isEmpty) true else {
- // cheap heuristic, todo maybe parse for the java sig.
- // method sigs end in paren semi
- def isAnyMethod = line.endsWith(");")
- def isOurMethod = {
- val lparen = line.lastIndexOf('(')
- val blank = line.lastIndexOf(' ', lparen)
- (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get)
+ def filterLines(target: String, text: String): String = {
+ // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
+ // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+ val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
+ var filtering = false // true if in region matching filter
+ // turn filtering on/off given the pattern of interest
+ def filterStatus(line: String, pattern: String) = {
+ def isSpecialized(method: String) = (method startsWith pattern+"$") && (method endsWith "$sp")
+ def isAnonymized(method: String) = (pattern == "$anonfun") && (method startsWith "$anonfun$")
+ // cheap heuristic, todo maybe parse for the java sig.
+ // method sigs end in paren semi
+ def isAnyMethod = line endsWith ");"
+ // take the method name between the space char and left paren.
+ // accept exact match or something that looks like what we might be asking for.
+ def isOurMethod = {
+ val lparen = line lastIndexOf '('
+ val blank = line.lastIndexOf(' ', lparen)
+ if (blank < 0) false
+ else {
+ val method = line.substring(blank+1, lparen)
+ (method == pattern || isSpecialized(method) || isAnonymized(method))
}
- filtering = if (filtering) {
+ }
+ filtering =
+ if (filtering) {
// next blank line terminates section
- // for -public, next line is next method, more or less
- line.trim.nonEmpty && !isAnyMethod
+ // in non-verbose mode, next line is next method, more or less
+ line.trim.nonEmpty && (!isAnyMethod || isOurMethod)
} else {
isAnyMethod && isOurMethod
}
- filtering
- }
- for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line))
- printWriter write line+lineSeparator
- printWriter.flush()
+ filtering
}
+ // do we output this line?
+ def checkFilter(line: String) = filterOn map (filterStatus(line, _)) getOrElse true
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ for {
+ line <- Source.fromString(text).getLines()
+ if checkFilter(line)
+ } pw println line
+ pw.flush()
+ sw.toString
}
+
+ /** Create a Showable with output massage.
+ * @param raw show ugly repl names
+ * @param target attempt to filter output to show region of interest
+ * @param preamble other messages to output
+ */
+ def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable =
+ new Showable {
+ private def writeLines() = filterLines(target, preamble + written)
+ val output = writeLines()
+
+ // ReplStrippingWriter clips and scrubs on write(String)
+ // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
+ def show() =
+ if (raw && intp.isDefined) intp.get withoutUnwrapping { printWriter.write(output, 0, output.length) }
+ else intp.get withoutTruncating(printWriter write output)
+ }
}
class JavapTool6 extends JavapTool {
@@ -275,12 +303,13 @@ class JavapClass(
override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
(inputs map {
- case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
+ case (klass, Success(ba)) => JpResult(showable(raw, klass, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
case (_, Failure(e)) => JpResult(e.toString)
}).toList orFailed List(noToolError)
}
class JavapTool7 extends JavapTool {
+ import JavapTool._
type Task = {
def call(): Boolean // true = ok
//def run(args: Array[String]): Int // all args
@@ -290,10 +319,10 @@ class JavapClass(
//object TaskResult extends Enumeration {
// val Ok, Error, CmdErr, SysErr, Abnormal = Value
//}
- val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
- override protected def failed = TaskClaas eq null
+ val TaskClass = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
+ override protected def failed = TaskClass eq null
- val TaskCtor = TaskClaas.getConstructor(
+ val TaskCtor = TaskClass.getConstructor(
classOf[Writer],
classOf[JavaFileManager],
classOf[DiagnosticListener[_]],
@@ -312,19 +341,14 @@ class JavapClass(
/** All diagnostic messages.
* @param locale Locale for diagnostic messages, null by default.
*/
- def messages(implicit locale: Locale = null) = {
- import JavaConverters._
- diagnostics.asScala.map(_ getMessage locale).toList
- }
+ def messages(implicit locale: Locale = null) = diagnostics.asScala.map(_ getMessage locale).toList
+ // don't filter this message if raw, since the names are likely to differ
+ private val container = "Binary file .* contains .*".r
def reportable(raw: Boolean): String = {
- // don't filter this message if raw, since the names are likely to differ
- val container = "Binary file .* contains .*".r
- val m = if (raw) messages
- else messages filter (_ match { case container() => false case _ => true })
+ val m = if (raw) messages else messages filterNot (when(_) { case container() => true })
clear()
- if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
- else ""
+ if (m.nonEmpty) m mkString ("", EOL, EOL) else ""
}
}
val reporter = new JavaReporter
@@ -344,8 +368,12 @@ class JavapClass(
import Kind._
import StandardLocation._
import JavaFileManager.Location
- import java.net.URI
- def uri(name: String): URI = new URI(name) // new URI("jfo:" + name)
+ import java.net.{ URI, URISyntaxException }
+
+ // name#fragment is OK, but otherwise fragile
+ def uri(name: String): URI =
+ try new URI(name) // new URI("jfo:" + name)
+ catch { case _: URISyntaxException => new URI("dummy") }
def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2
def managedFile(name: String, kind: Kind) = kind match {
@@ -379,19 +407,18 @@ class JavapClass(
def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
// eventually, use the tool interface
- def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = {
+ def task(options: Seq[String], classes: Seq[String], inputs: Seq[Input]): Task = {
//ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
- //getTask(writer, fileManager, reporter, options.asJava, claases.asJava)
- import JavaConverters.asJavaIterableConverter
- TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava)
+ //getTask(writer, fileManager, reporter, options.asJava, classes.asJava)
+ TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, classes.asJava)
.orFailed (throw new IllegalStateException)
}
// a result per input
- private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] =
+ private def applyOne(raw: Boolean, options: Seq[String], klass: String, inputs: Seq[Input]): Try[JpResult] =
Try {
- task(options, Seq(claas), inputs).call()
+ task(options, Seq(klass), inputs).call()
} map {
- case true => JpResult(showable(raw, claas))
+ case true => JpResult(showable(raw, klass))
case _ => JpResult(reporter.reportable(raw))
} recoverWith {
case e: java.lang.reflect.InvocationTargetException => e.getCause match {
@@ -402,7 +429,7 @@ class JavapClass(
reporter.clear()
}
override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
- case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get
+ case (klass, Success(_)) => applyOne(raw, options, klass, inputs).get
case (_, Failure(e)) => JpResult(e.toString)
}).toList orFailed List(noToolError)
}
@@ -462,7 +489,7 @@ class JavapClass(
object ToolArgs {
def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
case ((t,others), s) => s match {
- case "-fun" => (t copy (fun=true), others)
+ case "-fun" => (t copy (fun=true), others :+ "-private")
case "-app" => (t copy (app=true), others)
case "-help" => (t copy (help=true), others)
case "-raw" => (t copy (raw=true), others)
@@ -528,24 +555,28 @@ class JavapClass(
val DefaultOptions = List("-protected", "-verbose")
- def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
-
private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
- private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
+ def isAvailable = Seq(Env, Tool) exists (hasClass(loader, _))
- def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
+ /** Select the tool implementation for this platform. */
+ def apply() = if (hasClass(loader, Tool)) new JavapTool7 else new JavapTool6
}
}
object JavapClass {
+ import scala.tools.asm.ClassReader
+ import scala.tools.asm.tree.{ ClassNode, MethodNode }
+
def apply(
loader: ScalaClassLoader = ScalaClassLoader.appLoader,
printWriter: PrintWriter = new PrintWriter(System.out, true),
intp: Option[IMain] = None
) = new JavapClass(loader, printWriter, intp)
- val HashSplit = "(.*?)(?:#([^#]*))?".r
+ /** Match foo#bar, both groups are optional (may be null). */
+ val HashSplit = "([^#]+)?(?:#(.+)?)?".r
+
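The revised HashSplit pattern splits an optional class prefix from an optional member; a quick illustration:

val Demo = "([^#]+)?(?:#(.+)?)?".r
def split(s: String) = s match { case Demo(prefix, member) => (Option(prefix), Option(member)) }
// split("Foo#bar") == (Some("Foo"), Some("bar")); split("Foo") == (Some("Foo"), None); split("#bar") == (None, Some("bar"))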
// We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
// or a resource path com/acme/Widget.class; but not widget.out
implicit class MaybeClassLike(val s: String) extends AnyVal {
@@ -564,9 +595,9 @@ object JavapClass {
else (s take i, Some(s drop i+1))
}
}
- implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
+ implicit class ClassLoaderOps(val loader: ScalaClassLoader) extends AnyVal {
private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
- def parents: List[ClassLoader] = parentsOf(cl)
+ def parents: List[ClassLoader] = parentsOf(loader)
/* all file locations */
def locations = {
def alldirs = parents flatMap (_ match {
@@ -580,11 +611,11 @@ object JavapClass {
/* only the file location from which the given class is loaded */
def locate(k: String): Option[Path] = {
Try {
- val claas = try cl loadClass k catch {
+ val klass = try loader loadClass k catch {
case _: NoClassDefFoundError => null // let it snow
}
// cf ScalaClassLoader.originOfClass
- claas.getProtectionDomain.getCodeSource.getLocation
+ klass.getProtectionDomain.getCodeSource.getLocation
} match {
case Success(null) => None
case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
@@ -592,44 +623,66 @@ object JavapClass {
}
}
/* would classBytes succeed with a nonempty array */
- def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
+ def resourceable(className: String): Boolean = loader.getResource(className.asClassResource) != null
+
+ /* class reader of class bytes */
+ def classReader(resource: String): ClassReader = new ClassReader(loader classBytes resource)
+ }
+ implicit class `class reader convenience`(val reader: ClassReader) extends AnyVal {
+ def withMethods[A](f: Seq[MethodNode] => A): A = {
+ val cls = new ClassNode
+ reader.accept(cls, 0)
+ f(cls.methods.asScala)
+ }
}
implicit class PathOps(val p: Path) extends AnyVal {
import scala.tools.nsc.io.Jar
def isJar = Jar isJarOrZip p
}
+ implicit class `fun with files`(val f: AbstractFile) extends AnyVal {
+ def descend(path: Seq[String]): Option[AbstractFile] = {
+ def lookup(f: AbstractFile, path: Seq[String]): Option[AbstractFile] = path match {
+ case p if p.isEmpty => Option(f)
+ case p => Option(f.lookupName(p.head, directory = true)) flatMap (lookup(_, p.tail))
+ }
+ lookup(f, path)
+ }
+ }
implicit class URLOps(val url: URL) extends AnyVal {
def isFile: Boolean = url.getProtocol == "file"
}
object FunFinder {
def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
}
+ // FunFinder.funs(ks) finds anonfuns
class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
+ // manglese for closure: typename, $anonfun or lambda, opt method, digits
+ val closure = """(.*)\$(\$anonfun|lambda)(?:\$+([^$]+))?\$(\d+)""".r
+
+ // manglese for closure
+ val cleese = "(?:anonfun|lambda)"
+
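What the closure pattern above extracts from a mangled name, shown on a single illustrative input:

val Closure = """(.*)\$(\$anonfun|lambda)(?:\$+([^$]+))?\$(\d+)""".r
"Foo$$anonfun$bar$1" match {
  case Closure(typename, kind, method, n) => (typename, kind, method, n) // ("Foo", "$anonfun", "bar", "1")
}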
// class k, candidate f without prefix
- def isFunOfClass(k: String, f: String) = {
- val p = (s"${Regex quote k}\\$$+anonfun").r
- (p findPrefixOf f).nonEmpty
- }
+ def isFunOfClass(k: String, f: String) = (s"${Regex quote k}\\$$+$cleese".r findPrefixOf f).nonEmpty
+
// class k, candidate f without prefix, method m
- def isFunOfMethod(k: String, m: String, f: String) = {
- val p = (s"${Regex quote k}\\$$+anonfun\\$$${Regex quote m}\\$$").r
- (p findPrefixOf f).nonEmpty
- }
- def isFunOfTarget(k: String, m: Option[String], f: String) =
- if (m.isEmpty) isFunOfClass(k, f)
- else isFunOfMethod(k, m.get, f)
- def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
- for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
- }
- // path prefix p, class k, dir d
- def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
- val subdir = Path(p)
- for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
+ def isFunOfMethod(k: String, m: String, f: String) =
+ (s"${Regex quote k}\\$$+$cleese\\$$+${Regex quote m}\\$$".r findPrefixOf f).nonEmpty
+
+ def isFunOfTarget(target: Target, f: String) =
+ target.member map (isFunOfMethod(target.name, _, f)) getOrElse isFunOfClass(target.name, f)
+
+ def listFunsInAbsFile(target: Target)(d: AbstractFile) =
+ for (f <- d; if !f.isDirectory && isFunOfTarget(target, f.name)) yield f.name
+
+ def listFunsInDir(target: Target)(d: Directory) = {
+ val subdir = Path(target.prefix)
+ for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(target, f.name))
yield f.name
}
- // path prefix p, class k, jar file f
- def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
+
+ def listFunsInJar(target: Target)(f: File) = {
import java.util.jar.JarEntry
import scala.tools.nsc.io.Jar
def maybe(e: JarEntry) = {
@@ -638,78 +691,120 @@ object JavapClass {
if (parts.length < 2) ("", e.getName)
else (parts.init mkString "/", parts.last)
}
- if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
+ if (path == target.prefix && isFunOfTarget(target, name)) Some(name) else None
}
(new Jar(f) map maybe).flatten
}
def loadable(name: String) = loader resourceable name
- // translated class, optional member, opt member to filter on, whether it is repl output
- def translate(s: String): (String, Option[String], Option[String], Boolean) = {
+ case class Target(path: String, member: Option[String], filter: Option[String], isRepl: Boolean, isModule: Boolean) {
+ val splat = path split "\\."
+ val name = splat.last
+ val prefix = if (splat.length > 1) splat.init mkString "/" else ""
+ val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ val targetName = s"$name${ if (isModule) "$" else "" }"
+ }
+ // translated class, optional member, opt member to filter on, whether it is repl output and a module
+ def translate(s: String): Target = {
val (k0, m0) = s.splitHashMember
- val k = k0.asClassName
+ val isModule = k0 endsWith "$"
+ val k = (k0 stripSuffix "$").asClassName
val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
// class is either something replish or available to loader
// $line.$read$$etc$Foo#member
- ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
+ ((intp flatMap (_ translatePath k) filter (loadable) map (x => Target(x stripSuffix "$", member, filter, true, isModule)))
// s = "f" and $line.$read$$etc$#f is what we're after,
// ignoring any #member (except take # as filter on #apply)
- orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
- getOrElse ((k, member, filter, false)))
+ orElse (intp flatMap (_ translateEnclosingClass k) map (x => Target(x stripSuffix "$", Some(k), filter, true, isModule)))
+ getOrElse (Target(k, member, filter, false, isModule)))
}
/** Find the classnames of anonfuns associated with k,
* where k may be an available class or a symbol in scope.
*/
- def funsOf(k0: String): Seq[String] = {
+ def funsOf(selection: String): Seq[String] = {
// class is either something replish or available to loader
- val (k, member, filter, isReplish) = translate(k0)
- val splat = k split "\\."
- val name = splat.last
- val prefix = if (splat.length > 1) splat.init mkString "/" else ""
- val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ val target = translate(selection)
+
// reconstitute an anonfun with a package
// if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
def packaged(s: String) = {
- val p = if (pkg.isEmpty) s else s"$pkg.$s"
- val pm = filter map (p + "#" + _)
- pm getOrElse p
+ val p = if (target.pkg.isEmpty) s else s"${target.pkg}.$s"
+ target.filter map (p + "#" + _) getOrElse p
}
- // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
- val fs = if (isReplish) {
- def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
- if (p.isEmpty) Option(d)
- else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
- }
- outed(intp.get.replOutput.dir, splat.init) map { d =>
- listFunsInAbsFile(name, member, d) map packaged
- }
- } else {
- loader locate k map { w =>
- if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
- else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
- else Nil
+ // find closure classes in repl outdir or try asking the classloader where to look
+ val fs =
+ if (target.isRepl)
+ (intp.get.replOutput.dir descend target.splat.init) map { d =>
+ listFunsInAbsFile(target)(d) map (_.asClassName) map packaged
+ }
+ else
+ loader locate target.path map {
+ case d if d.isDirectory => listFunsInDir(target)(d.toDirectory) map packaged
+ case j if j.isJar => listFunsInJar(target)(j.toFile) map packaged
+ case _ => Nil
+ }
+ val res = fs map (_.to[Seq]) getOrElse Seq()
+ // on second thought, we don't care about lambda method classes, just the impl methods
+ val rev =
+ res flatMap {
+ case x @ closure(_, "lambda", _, _) => labdaMethod(x, target)
+ //target.member flatMap (_ => labdaMethod(x, target)) getOrElse s"${target.name}#$$anonfun"
+ case x => Some(x)
+ }
+ rev
+ }
+ // given C$lambda$$g$n for member g and n in 1..N, find the C.accessor$x
+ // and the C.$anonfun$x it forwards to.
+    def lambdaMethod(lambda: String, target: Target): Option[String] = {
+ import scala.tools.asm.ClassReader
+ import scala.tools.asm.Opcodes.INVOKESTATIC
+ import scala.tools.asm.tree.{ ClassNode, MethodInsnNode }
+ def callees(s: String): List[(String, String)] = {
+ loader classReader s withMethods { ms =>
+ val nonBridgeApplyMethods = ms filter (_.name == "apply") filter (n => (n.access & Opcodes.ACC_BRIDGE) == 0)
+ val instructions = nonBridgeApplyMethods flatMap (_.instructions.toArray)
+ instructions.collect {
+ case i: MethodInsnNode => (i.owner, i.name)
+ }.toList
}
}
- fs match {
- case Some(xs) => xs.to[Seq] // maybe empty
- case None => Seq() // nothing found, e.g., junk input
+ callees(lambda) match {
+ case (k, _) :: Nil if target.isModule && !(k endsWith "$") => None
+ case (k, m) :: _ => Some(s"${k}#${m}")
+ case _ => None
}
}
- def funs(ks: Seq[String]) = ks flatMap funsOf _
+ /** Translate the supplied targets to patterns for anonfuns.
+ * Pattern is typename $ label [[$]$func] $n where label is $anonfun or lambda,
+ * and lambda includes the extra dollar, func is a method name, and n is an int.
+ * The typename for a nested class is dollar notation, Betty$Bippy.
+ *
+ * If C has anonfun closure classes, then use C$$anonfun$f$1 (various names, C# filters on apply).
+ * If C has lambda closure classes, then use C#$anonfun (special-cased by output filter).
+ */
+ def funs(ks: Seq[String]): Seq[String] = ks flatMap funsOf
}
}
+trait Javap {
+ def loader: ScalaClassLoader
+ def printWriter: PrintWriter
+ def apply(args: Seq[String]): List[Javap.JpResult]
+ def tryFile(path: String): Option[Array[Byte]]
+ def tryClass(path: String): Array[Byte]
+}
+
object Javap {
def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
def apply(path: String): Unit = apply(Seq(path))
def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
- trait Showable {
+ private[interpreter] trait Showable {
def show(): Unit
}
- sealed trait JpResult extends scala.tools.util.JpResult {
+ sealed trait JpResult {
type ResultType
def isError: Boolean
def value: ResultType
@@ -735,8 +830,13 @@ object Javap {
def isError = false
def show() = value.show() // output to tool's PrintWriter
}
- implicit class Lastly[A](val t: Try[A]) extends AnyVal {
- private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
- def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
- }
+}
+
+object NoJavap extends Javap {
+ import Javap._
+ def loader: ScalaClassLoader = getClass.getClassLoader
+ def printWriter: PrintWriter = new PrintWriter(System.err, true)
+ def apply(args: Seq[String]): List[JpResult] = Nil
+ def tryFile(path: String): Option[Array[Byte]] = None
+ def tryClass(path: String): Array[Byte] = Array()
}
diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 12d6ee5112..9f555aee14 100644
--- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -76,8 +76,11 @@ trait LoopCommands {
// the default result means "keep running, and don't record that line"
val default = Result(keepRunning = true, None)
+ // "keep running, and record this line"
+ def recording(line: String) = Result(keepRunning = true, Option(line))
+
// most commands do not want to micromanage the Result, but they might want
- // to print something to the console, so we accomodate Unit and String returns.
+ // to print something to the console, so we accommodate Unit and String returns.
implicit def resultFromUnit(x: Unit): Result = default
implicit def resultFromString(msg: String): Result = {
echoCommandMessage(msg)
@@ -85,4 +88,3 @@ trait LoopCommands {
}
}
}
-
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index f4cbcb50fe..bcba7b6dfd 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -102,6 +102,18 @@ trait MemberHandlers {
class GenericHandler(member: Tree) extends MemberHandler(member)
+ import scala.io.AnsiColor.{ BOLD, BLUE, GREEN, RESET }
+
+ def color(c: String, s: String) =
+ if (replProps.colorOk) string2code(BOLD) + string2code(c) + s + string2code(RESET)
+ else s
+
+ def colorName(s: String) =
+ color(BLUE, string2code(s))
+
+ def colorType(s: String) =
+ color(GREEN, string2code(s))
+
class ValHandler(member: ValDef) extends MemberDefHandler(member) {
val maxStringElements = 1000 // no need to mkString billions of elements
override def definesValue = true
@@ -116,18 +128,23 @@ trait MemberHandlers {
else any2stringOf(path, maxStringElements)
val vidString =
- if (replProps.vids) s"""" + " @ " + "%%8x".format(System.identityHashCode($path)) + " """.trim
+ if (replProps.vids) s"""" + f"@$${System.identityHashCode($path)}%8x" + """"
else ""
- """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
+ val nameString = colorName(prettyName) + vidString
+ val typeString = colorType(req typeOf name)
+ s""" + "$nameString: $typeString = " + $resultString"""
}
}
}
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
override def definesValue = flattensToEmpty(member.vparamss) // true if 0-arity
- override def resultExtractionCode(req: Request) =
- if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
+ override def resultExtractionCode(req: Request) = {
+ val nameString = colorName(name)
+ val typeString = colorType(req typeOf name)
+ if (mods.isPublic) s""" + "$nameString: $typeString\\n"""" else ""
+ }
}
abstract class MacroHandler(member: DefDef) extends MemberDefHandler(member) {
diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index f69a5b487d..8d8140b638 100644
--- a/src/repl/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -155,7 +155,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
}
object InternalInfo extends LowPriorityInternalInfo { }
- /** Now dealing with the problem of acidentally calling a method on Type
+ /** Now dealing with the problem of accidentally calling a method on Type
* when you're holding a Symbol and seeing the Symbol converted to the
* type of Symbol rather than the type of the thing represented by the
* symbol, by only implicitly installing one method, "?", and the rest
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
index 51fab3082e..07d619bca5 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -55,6 +55,8 @@ trait ReplGlobal extends Global {
// newNamer(rootContext(unit)).enterSym(unit.body)
}
}
+ // add to initial or terminal phase to sanity check Run at construction
+ override val requires = List("typer") // ensure they didn't -Ystop-after:parser
}
override protected def computePhaseDescriptors: List[SubComponent] = {
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index 36e6dbbccc..8c4faf7278 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -13,6 +13,9 @@ class ReplProps {
private def bool(name: String) = BooleanProp.keyExists(name)
private def int(name: String) = IntProp(name)
+ // This property is used in TypeDebugging. Let's recycle it.
+ val colorOk = bool("scala.color")
+
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
val trace = bool("scala.repl.trace")
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
index b20166d070..e6f5a4089e 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -9,11 +9,47 @@ package interpreter
import reporters._
import IMain._
+import scala.reflect.internal.util.Position
+
/** Like ReplGlobal, a layer for ensuring extra functionality.
*/
class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) {
def printUntruncatedMessage(msg: String) = withoutTruncating(printMessage(msg))
+ /** Whether very long lines can be truncated. This exists so important
+ * debugging information (like printing the classpath) is not rendered
+ * invisible due to the max message length.
+ */
+ private var _truncationOK: Boolean = !intp.settings.verbose
+ def truncationOK = _truncationOK
+ def withoutTruncating[T](body: => T): T = {
+ val saved = _truncationOK
+ _truncationOK = false
+ try body
+ finally _truncationOK = saved
+ }
+
+ override def warning(pos: Position, msg: String): Unit = withoutTruncating(super.warning(pos, msg))
+ override def error(pos: Position, msg: String): Unit = withoutTruncating(super.error(pos, msg))
+
+ import scala.io.AnsiColor.{ RED, YELLOW, RESET }
+
+ def severityColor(severity: Severity): String = severity match {
+ case ERROR => RED
+ case WARNING => YELLOW
+ case INFO => RESET
+ }
+
+ override def print(pos: Position, msg: String, severity: Severity) {
+ val prefix = (
+ if (replProps.colorOk)
+ severityColor(severity) + clabel(severity) + RESET
+ else
+ clabel(severity)
+ )
+ printMessage(pos, prefix + msg)
+ }
+
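The coloring added to print above pairs each severity with an ANSI code and resets afterwards; a stand-alone sketch, where the string severities stand in for the reporter's Severity values:

import scala.io.AnsiColor.{ RED, YELLOW, RESET }

def coloredLabel(severity: String, colorOk: Boolean): String = {
  val (color, label) = severity match {
    case "ERROR"   => (RED, "error: ")
    case "WARNING" => (YELLOW, "warning: ")
    case _         => (RESET, "")
  }
  if (colorOk) color + label + RESET else label
}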
override def printMessage(msg: String) {
// Avoiding deadlock if the compiler starts logging before
// the lazy val is complete.
@@ -31,4 +67,5 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i
if (intp.totalSilence) ()
else super.displayPrompt()
}
+
}
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
index 43da5c6f12..1664546cab 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -28,5 +28,8 @@ trait ReplStrings {
def any2stringOf(x: Any, maxlen: Int) =
"scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
- def words(s: String) = (s.trim split "\\s+" filterNot (_ == "")).toList
+ // no escaped or nested quotes
+ private[this] val inquotes = """(['"])(.*?)\1""".r
+ def unquoted(s: String) = s match { case inquotes(_, w) => w ; case _ => s }
+ def words(s: String) = (s.trim split "\\s+" filterNot (_ == "") map unquoted).toList
}
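
For context, the quote-stripping added above strips a token only when it is wrapped entirely in matching single or double quotes, with no escaping or nesting, and `words` applies it after splitting on whitespace. A minimal standalone sketch of the same behaviour (the object name and sample input are illustrative only):

    import scala.util.matching.Regex

    object UnquoteSketch {
      // same pattern as the patch: a whole token wrapped in matching quotes
      private val inquotes: Regex = """(['"])(.*?)\1""".r
      def unquoted(s: String): String = s match { case inquotes(_, w) => w; case _ => s }
      def words(s: String): List[String] =
        (s.trim split "\\s+" filterNot (_ == "") map unquoted).toList

      def main(args: Array[String]): Unit =
        println(words("""save "out.txt" now"""))  // prints List(save, out.txt, now)
    }
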
diff --git a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
index 6634dc6944..49b8433a8c 100644
--- a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -22,14 +22,19 @@ extends InteractiveReader
def reset() = ()
def redrawLine() = ()
- def readOneLine(prompt: String): String = {
- if (interactive) {
- out.print(prompt)
- out.flush()
- }
- in.readLine()
+
+ // InteractiveReader internals
+ protected def readOneLine(prompt: String): String = {
+ echo(prompt)
+ readOneLine()
+ }
+ protected def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader")
+
+ protected def readOneLine(): String = in.readLine()
+ protected def echo(s: String): Unit = if (interactive) {
+ out.print(s)
+ out.flush()
}
- def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader")
}
object SimpleReader {
@@ -39,3 +44,13 @@ object SimpleReader {
def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
new SimpleReader(in, out, interactive)
}
+
+// pretend we are a console for verbose purposes
+trait EchoReader extends SimpleReader {
+ // if there is more input, then maybe echo the prompt and the input
+ override def readOneLine(prompt: String) = {
+ val input = readOneLine()
+ if (input != null) echo(f"$prompt$input%n")
+ input
+ }
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index 079097d7a2..56f1e65376 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -11,6 +11,7 @@ import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ClassTag, classTag}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
import scala.util.control.Exception.catching
+import scala.util.Try
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
@@ -196,4 +197,14 @@ package object interpreter extends ReplConfig with ReplStrings {
}
}
}
+
+ /* debug assist
+ private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal {
+ import StringContext._, runtime.ScalaRunTime.stringOf
+ def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf)
+ } debug assist */
+ private[nsc] implicit class `try lastly`[A](val t: Try[A]) extends AnyVal {
+ private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
+ def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+ }
}
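
The `lastly` combinator defined just above behaves like a `finally` clause for `Try` values: the side effect runs whether the computation succeeded or failed, and the original result is returned unchanged. A small standalone sketch of the same idea, without the value-class and `private[nsc]` modifiers of the patch (object name and sample values are illustrative only):

    import scala.util.Try

    object LastlySketch {
      implicit class TryLastly[A](val t: Try[A]) {
        private def effect[X](last: => Unit)(a: X): Try[A] = { last; t }
        // run `last` on success and on failure, then hand back the original Try
        def lastly(last: => Unit): Try[A] = t.transform(effect(last) _, effect(last) _)
      }

      def main(args: Array[String]): Unit = {
        println(Try("42".toInt)   lastly println("cleanup ran")) // cleanup ran, then Success(42)
        println(Try("oops".toInt) lastly println("cleanup ran")) // cleanup ran, then Failure(...)
      }
    }
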
diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index 36a1405b11..034416e844 100644
--- a/src/scaladoc/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -543,7 +543,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Tests if a file exists and prints a warning in case it doesn't. Always
* returns the file, even if it doesn't exist.
*
- * @param file A file to test for existance.
+ * @param file A file to test for existence.
* @return The same file.
*/
private def existing(file: File): File = {
diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index 52a0c20a11..32a6ba0ce3 100644
--- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -18,14 +18,10 @@ class ScalaDoc {
val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString)
def process(args: Array[String]): Boolean = {
- var reporter: ConsoleReporter = null
+ var reporter: ScalaDocReporter = null
val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"),
msg => reporter.printMessage(msg))
- reporter = new ConsoleReporter(docSettings) {
- // need to do this so that the Global instance doesn't trash all the
- // symbols just because there was an error
- override def hasErrors = false
- }
+ reporter = new ScalaDocReporter(docSettings)
val command = new ScalaDoc.Command(args.toList, docSettings)
def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
@@ -50,12 +46,18 @@ class ScalaDoc {
}
finally reporter.printSummary()
- // not much point in returning !reporter.hasErrors when it has
- // been overridden with constant false.
- true
+ !reporter.reallyHasErrors
}
}
+class ScalaDocReporter(settings: Settings) extends ConsoleReporter(settings) {
+
+ // we sometimes need to lie so that the Global instance doesn't
+ // trash all the symbols just because there was an error
+ override def hasErrors = false
+ def reallyHasErrors = super.hasErrors
+}
+
object ScalaDoc extends ScalaDoc {
class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
override def cmdName = "scaladoc"
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
index dce52af56a..47ddfb8aa9 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
@@ -95,11 +95,11 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val documentError: PartialFunction[Throwable, Unit] = {
case NoCompilerRunException =>
reporter.info(null, "No documentation generated with unsuccessful compiler run", force = false)
- case _: ClassNotFoundException =>
- ()
+ case e @ (_:ClassNotFoundException | _:IllegalAccessException | _:InstantiationException | _:SecurityException | _:ClassCastException) =>
+ reporter.error(null, s"Cannot load the doclet class ${settings.docgenerator.value} (specified with ${settings.docgenerator.name}): $e. Leaving the default settings will generate the html version of scaladoc.")
}
- /** Generate document(s) for all `files` containing scaladoc documenataion.
+ /** Generate document(s) for all `files` containing scaladoc documentation.
* @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */
def document(files: List[String]) {
def generate() = {
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index 6dc3e5a62b..f03b848af6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -15,13 +15,14 @@ import DocParser.Parsed
* right after parsing so it can read `DocDefs` from source code which would
* otherwise cause the compiler to go haywire.
*/
-class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
def this() = this(new Settings(Console println _))
// the usual global initialization
locally { new Run() }
+ override def forScaladoc = true
override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index 84545e9201..a11ca38a86 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
@@ -11,4 +11,6 @@ trait Index {
type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]]
def firstLetterIndex: Map[Char, SymbolMap]
+
+ def hasDeprecatedMembers: Boolean
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index e5c64c6f45..cbf8ff22ba 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -39,12 +39,12 @@ trait ScaladocAnalyzer extends Analyzer {
for (useCase <- comment.useCases) {
typer1.silent(_.asInstanceOf[ScaladocTyper].defineUseCases(useCase)) match {
case SilentTypeError(err) =>
- unit.warning(useCase.pos, err.errMsg)
+ reporter.warning(useCase.pos, err.errMsg)
case _ =>
}
for (useCaseSym <- useCase.defined) {
if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+ reporter.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
}
}
}
@@ -190,8 +190,8 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
typeParams.nonEmpty || version.nonEmpty || since.nonEmpty
}
def isDirty = unclean(unmooredParser parseComment doc)
- if ((doc ne null) && (settings.lint || isDirty))
- unit.warning(doc.pos, "discarding unmoored doc comment")
+ if ((doc ne null) && (settings.warnDocDetached || isDirty))
+ reporter.warning(doc.pos, "discarding unmoored doc comment")
}
override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
@@ -208,7 +208,7 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
super.skipDocComment()
}
override def skipBlockComment(): Unit = {
- inDocComment = false
+ inDocComment = false // ??? this means docBuffer won't receive contents of this comment???
docBuffer = new StringBuilder("/*")
super.skipBlockComment()
}
@@ -217,9 +217,10 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
def foundStarComment(start: Int, end: Int) = try {
val str = docBuffer.toString
val pos = Position.range(unit.source, start, start, end)
- unit.comment(pos, str)
- if (inDocComment)
+ if (inDocComment) {
+ signalParsedDocComment(str, pos)
lastDoc = DocComment(str, pos)
+ }
true
} finally {
docBuffer = null
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
index 2ea3a0eb7c..4b40d25c17 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -11,6 +11,7 @@ import reporters.Reporter
import typechecker.Analyzer
import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
trait ScaladocGlobalTrait extends Global {
outer =>
diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 67529f4178..44683f1755 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -66,7 +66,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
val docsourceurl = StringSetting (
"-doc-source-url",
"url",
- "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')",
+ s"A URL pattern used to link to the source file; the following variables are available: €{TPL_NAME}, €{TPL_OWNER} and respectively €{FILE_PATH}. For example, for `scala.collection.Seq`, the variables will be expanded to `Seq`, `scala.collection` and respectively `scala/collection/Seq` (without the backquotes). To obtain a relative path for €{FILE_PATH} instead of an absolute one, use the ${sourcepath.name} setting.",
""
)
@@ -249,7 +249,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
val idx = s.indexOf("#")
if (idx > 0) {
val (first, last) = s.splitAt(idx)
- Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1)))
+ Some(new File(first).getCanonicalPath -> appendIndex(last.substring(1)))
} else {
error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'")
None
diff --git a/src/scaladoc/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
index 11520c810e..edf5112d7b 100644
--- a/src/scaladoc/scala/tools/nsc/doc/Universe.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
@@ -5,6 +5,8 @@
package scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.diagram.DotRunner
+
/**
* Class to hold common dependencies across Scaladoc classes.
* @author Pedro Furlanetto
@@ -13,4 +15,5 @@ package scala.tools.nsc.doc
trait Universe {
def settings: Settings
def rootPackage: model.Package
+ def dotRunner: DotRunner
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index a933c35c99..fb4ed34571 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -131,18 +131,19 @@ trait CommentFactoryBase { this: MemberLookupBase =>
/** Javadoc tags that should be replaced by something useful, such as wiki
* syntax, or that should be dropped. */
private val JavadocTags =
- new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
+ new Regex("""\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""")
/** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
- private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
- case "code" => "`" + mtch.group(2) + "`"
- case "docRoot" => ""
- case "inheritDoc" => ""
- case "link" => "`" + mtch.group(2) + "`"
- case "linkplain" => "`" + mtch.group(2) + "`"
- case "literal" => mtch.group(2)
- case "value" => "`" + mtch.group(2) + "`"
- case _ => ""
+ private def javadocReplacement(mtch: Regex.Match): String = {
+ mtch.group(1) match {
+ case "code" => "<code>" + mtch.group(2) + "</code>"
+ case "docRoot" => ""
+ case "link" => "`[[" + mtch.group(2) + "]]`"
+ case "linkplain" => "[[" + mtch.group(2) + "]]"
+ case "literal" => "`" + mtch.group(2) + "`"
+ case "value" => "`" + mtch.group(2) + "`"
+ case _ => ""
+ }
}
/** Safe HTML tags that can be kept. */
@@ -280,13 +281,16 @@ trait CommentFactoryBase { this: MemberLookupBase =>
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
case line :: ls if (lastTagKey.isDefined) =>
- val key = lastTagKey.get
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + line) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
+ val newtags = if (!line.isEmpty) {
+ val key = lastTagKey.get
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + line) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ tags + (key -> value)
+ } else tags
+ parse0(docBody, newtags, lastTagKey, ls, inCodeBlock)
case line :: ls =>
if (docBody.length > 0) docBody append endOfLine
@@ -314,18 +318,18 @@ trait CommentFactoryBase { this: MemberLookupBase =>
val bodyTags: mutable.Map[TagKey, List[Body]] =
mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, site))} toSeq: _*)
- def oneTag(key: SimpleTagKey): Option[Body] =
+ def oneTag(key: SimpleTagKey, filterEmpty: Boolean = true): Option[Body] =
((bodyTags remove key): @unchecked) match {
- case Some(r :: rs) =>
+ case Some(r :: rs) if !(filterEmpty && r.blocks.isEmpty) =>
if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
Some(r)
- case None => None
+ case _ => None
}
def allTags(key: SimpleTagKey): List[Body] =
- (bodyTags remove key) getOrElse Nil
+ (bodyTags remove key).getOrElse(Nil).filterNot(_.blocks.isEmpty)
- def allSymsOneTag(key: TagKey): Map[String, Body] = {
+ def allSymsOneTag(key: TagKey, filterEmpty: Boolean = true): Map[String, Body] = {
val keys: Seq[SymbolTagKey] =
bodyTags.keys.toSeq flatMap {
case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
@@ -341,7 +345,23 @@ trait CommentFactoryBase { this: MemberLookupBase =>
reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
(key.symbol, bs.head)
}
- Map.empty[String, Body] ++ pairs
+ Map.empty[String, Body] ++ (if (filterEmpty) pairs.filterNot(_._2.blocks.isEmpty) else pairs)
+ }
+
+ def linkedExceptions: Map[String, Body] = {
+ val m = allSymsOneTag(SimpleTagKey("throws"), filterEmpty = false)
+
+ m.map { case (name,body) =>
+ val link = memberLookup(pos, name, site)
+ val newBody = body match {
+ case Body(List(Paragraph(Chain(content)))) =>
+ val descr = Text(" ") +: content
+ val entityLink = EntityLink(Monospace(Text(name)), link)
+ Body(List(Paragraph(Chain(entityLink +: descr))))
+ case _ => body
+ }
+ (name, newBody)
+ }
}
val com = createComment (
@@ -349,13 +369,13 @@ trait CommentFactoryBase { this: MemberLookupBase =>
authors0 = allTags(SimpleTagKey("author")),
see0 = allTags(SimpleTagKey("see")),
result0 = oneTag(SimpleTagKey("return")),
- throws0 = allSymsOneTag(SimpleTagKey("throws")),
+ throws0 = linkedExceptions,
valueParams0 = allSymsOneTag(SimpleTagKey("param")),
typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
version0 = oneTag(SimpleTagKey("version")),
since0 = oneTag(SimpleTagKey("since")),
todo0 = allTags(SimpleTagKey("todo")),
- deprecated0 = oneTag(SimpleTagKey("deprecated")),
+ deprecated0 = oneTag(SimpleTagKey("deprecated"), filterEmpty = false),
note0 = allTags(SimpleTagKey("note")),
example0 = allTags(SimpleTagKey("example")),
constructor0 = oneTag(SimpleTagKey("constructor")),
@@ -666,7 +686,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
def summary(): Inline = {
- val i = inline(check("."))
+ val i = inline(checkSentenceEnded())
Summary(
if (jump("."))
Chain(List(i, Text(".")))
@@ -680,11 +700,10 @@ trait CommentFactoryBase { this: MemberLookupBase =>
jump("[[")
val parens = 2 + repeatJump('[')
val stop = "]" * parens
- //println("link with " + parens + " matching parens")
- val target = readUntil { check(stop) || check(" ") }
+ val target = readUntil { check(stop) || isWhitespaceOrNewLine(char) }
val title =
if (!check(stop)) Some({
- jump(" ")
+ jumpWhitespaceOrNewLine()
inline(check(stop))
})
else None
@@ -723,49 +742,15 @@ trait CommentFactoryBase { this: MemberLookupBase =>
*/
def normalizeIndentation(_code: String): String = {
- val code = _code.trim
- var maxSkip = Integer.MAX_VALUE
- var crtSkip = 0
- var wsArea = true
- var index = 0
- var firstLine = true
- var emptyLine = true
-
- while (index < code.length) {
- code(index) match {
- case ' ' =>
- if (wsArea)
- crtSkip += 1
- case c =>
- wsArea = (c == '\n')
- maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
- crtSkip = if (c == '\n') 0 else crtSkip
- firstLine = if (c == '\n') false else firstLine
- emptyLine = if (c == '\n') true else false
- }
- index += 1
- }
+ val code = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + remove all leading '\n'
+ val lines = code.split("\n")
- if (maxSkip == 0)
- code
- else {
- index = 0
- val builder = new StringBuilder
- while (index < code.length) {
- builder.append(code(index))
- if (code(index) == '\n') {
- // we want to skip as many spaces are available, if there are less spaces (like on empty lines, do not
- // over-consume them)
- index += 1
- val limit = index + maxSkip
- while ((index < code.length) && (code(index) == ' ') && index < limit)
- index += 1
- }
- else
- index += 1
- }
- builder.toString
- }
+ // maxSkip - size of the longest common whitespace prefix of non-empty lines
+ val nonEmptyLines = lines.filter(_.trim.nonEmpty)
+ val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min
+
+ // remove common whitespace prefix
+ lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n")
}
def checkParaEnded(): Boolean = {
@@ -785,6 +770,16 @@ trait CommentFactoryBase { this: MemberLookupBase =>
})
}
+ def checkSentenceEnded(): Boolean = {
+ (char == '.') && {
+ val poff = offset
+ nextChar() // read '.'
+ val ok = char == endOfText || char == endOfLine || isWhitespace(char)
+ offset = poff
+ ok
+ }
+ }
+
def reportError(pos: Position, message: String) {
reporter.warning(pos, message)
}
@@ -889,6 +884,8 @@ trait CommentFactoryBase { this: MemberLookupBase =>
def jumpWhitespace() = jumpUntil(!isWhitespace(char))
+ def jumpWhitespaceOrNewLine() = jumpUntil(!isWhitespaceOrNewLine(char))
+
/* READERS */
final def readUntil(c: Char): String = {
@@ -928,5 +925,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
/* CHARS CLASSES */
def isWhitespace(c: Char) = c == ' ' || c == '\t'
+
+ def isWhitespaceOrNewLine(c: Char) = isWhitespace(c) || c == '\n'
}
}
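
The rewritten `normalizeIndentation` above right-trims the snippet, drops leading blank lines, computes the smallest leading-space count over the non-empty lines, and strips that common prefix from every non-empty line. A standalone sketch of the same logic with a worked input (the object name and sample string are illustrative only):

    object IndentSketch {
      def normalizeIndentation(_code: String): String = {
        val code  = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + drop leading '\n'
        val lines = code.split("\n")
        val nonEmptyLines = lines.filter(_.trim.nonEmpty)
        // maxSkip - size of the longest common whitespace prefix of non-empty lines
        val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min
        lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n")
      }

      def main(args: Array[String]): Unit =
        // the two common leading spaces are stripped; the deeper line keeps its extra indentation
        println(normalizeIndentation("\n  val x = 1\n    val y = 2\n"))
    }
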
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index d721a96ad7..8313d842e5 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -13,8 +13,6 @@ import io.{ Streamable, Directory }
import scala.collection._
import page.diagram._
-import html.page.diagram.DiagramGenerator
-
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
* @author Gilles Dubochet */
@@ -97,7 +95,9 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"selected2.png",
"selected-right-implicits.png",
"selected-implicits.png",
- "unselected.png"
+ "unselected.png",
+
+ "permalink.png"
)
/** Generates the Scaladoc site for a model into the site root.
@@ -119,28 +119,29 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
finally out.close()
}
- DiagramGenerator.initialize(universe.settings)
-
libResources foreach (s => copyResource("lib/" + s))
new page.Index(universe, index) writeFor this
new page.IndexScript(universe, index) writeFor this
-
- writeTemplates(_ writeFor this)
-
- for (letter <- index.firstLetterIndex) {
- new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
+ if (index.hasDeprecatedMembers)
+ new page.DeprecatedIndex(universe, index) writeFor this
+ try {
+ writeTemplates(_ writeFor this)
+ for (letter <- index.firstLetterIndex) {
+ new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
+ }
+ } finally {
+ DiagramStats.printStats(universe.settings)
+ universe.dotRunner.cleanup()
}
-
- DiagramGenerator.cleanup()
}
def writeTemplates(writeForThis: HtmlPage => Unit) {
val written = mutable.HashSet.empty[DocTemplateEntity]
- val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings)
def writeTemplate(tpl: DocTemplateEntity) {
if (!(written contains tpl)) {
+ val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings, universe.dotRunner)
writeForThis(new page.Template(universe, diagramGenerator, tpl))
written += tpl
tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index f6373e9e97..86155845b0 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -14,6 +14,7 @@ import base.comment._
import model._
import scala.xml.NodeSeq
+import scala.xml.Elem
import scala.xml.dtd.{DocType, PublicID}
import scala.collection._
import java.io.Writer
@@ -205,18 +206,91 @@ abstract class HtmlPage extends Page { thisPage =>
case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep)
}
- /** Returns the _big image name corresponding to the DocTemplate Entity (upper left icon) */
- def docEntityKindToBigImage(ety: DocTemplateEntity) =
- if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png"
- else if (ety.isTrait) "trait_big.png"
- else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png"
- else if (ety.isClass) "class_big.png"
- else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png"
- else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
- else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png"
- else if (ety.isObject) "object_big.png"
- else if (ety.isPackage) "package_big.png"
- else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+ object Image extends Enumeration {
+ val Trait, Class, Type, Object, Package = Value
+ }
+
+ /** Returns the _big image name and the alt attribute
+ * corresponding to the DocTemplateEntity (upper left icon) */
+ def docEntityKindToBigImage(ety: DocTemplateEntity) = {
+ def entityToImage(e: DocTemplateEntity) =
+ if (e.isTrait) Image.Trait
+ else if (e.isClass) Image.Class
+ else if (e.isAbstractType || e.isAliasType) Image.Type
+ else if (e.isObject) Image.Object
+ else if (e.isPackage) Image.Package
+ else {
+ // FIXME: an entity *should* fall into one of the above categories,
+ // but AnyRef is somehow not
+ Image.Class
+ }
+
+ val image = entityToImage(ety)
+ val companionImage = ety.companion filter {
+ e => e.visibility.isPublic && ! e.inSource.isEmpty
+ } map { entityToImage }
+
+ (image, companionImage) match {
+ case (from, Some(to)) =>
+ ((from + "_to_" + to + "_big.png").toLowerCase, from + "/" + to)
+ case (from, None) =>
+ ((from + "_big.png").toLowerCase, from.toString)
+ }
+ }
+
+ def permalink(template: Entity, isSelf: Boolean = true): Elem =
+ <span class="permalink">
+ <a href={ memberToUrl(template, isSelf) } title="Permalink" target="_top">
+ <img src={ relativeLinkTo(List("permalink.png", "lib")) } alt="Permalink" />
+ </a>
+ </span>
+
+ def docEntityKindToCompanionTitle(ety: DocTemplateEntity, baseString: String = "See companion") =
+ ety.companion match{
+ case Some(companion) =>
+ s"$baseString${
+ if(companion.isObject) " object"
+ else if(companion.isTrait) " trait"
+ else if(companion.isClass) " class"
+ else ""
+ }"
+ case None => baseString
+ }
+
+ def companionAndPackage(tpl: DocTemplateEntity): Elem =
+ <span class="morelinks">{
+ tpl.companion match {
+ case Some(companionTpl) =>
+ val objClassTrait =
+ if (companionTpl.isObject) s"object ${tpl.name}"
+ else if (companionTpl.isTrait) s"trait ${companionTpl.name}"
+ else s"class ${companionTpl.name}"
+ <div>
+ Related Docs:
+ <a href={relativeLinkTo(tpl.companion.get)} title={docEntityKindToCompanionTitle(tpl)}>{objClassTrait}</a>
+ | {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")}
+ </div>
+ case None =>
+ <div>Related Doc:
+ {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")}
+ </div>
+ }
+ }</span>
+
+ def memberToUrl(template: Entity, isSelf: Boolean = true): String = {
+ val (signature: Option[String], containingTemplate: TemplateEntity) = template match {
+ case dte: DocTemplateEntity if (!isSelf) => (Some(dte.signature), dte.inTemplate)
+ case dte: DocTemplateEntity => (None, dte)
+ case me: MemberEntity => (Some(me.signature), me.inTemplate)
+ case tpl => (None, tpl)
+ }
+
+ def hashFromPath(templatePath: List[String]): String =
+ ((templatePath.head.replace(".html", "") :: templatePath.tail).reverse).mkString(".")
+
+ val containingTemplatePath = templateToPath(containingTemplate)
+ val url = "../" * (containingTemplatePath.size - 1) + "index.html"
+ val hash = hashFromPath(containingTemplatePath)
+ s"$url#$hash" + signature.map("@" + _).getOrElse("")
+ }
}
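
The permalink URLs assembled above combine a relative `index.html` path with a fragment derived from the reversed template path, plus an optional `@signature` suffix. A minimal sketch of just that construction, assuming a path list shaped like List("Seq.html", "collection", "scala") with the file name first (the sample list and object name are assumptions for illustration):

    object PermalinkSketch {
      // mirrors hashFromPath above: strip ".html" from the file name,
      // then reverse the segments and join them with '.'
      def hashFromPath(templatePath: List[String]): String =
        (templatePath.head.replace(".html", "") :: templatePath.tail).reverse.mkString(".")

      def main(args: Array[String]): Unit = {
        val path = List("Seq.html", "collection", "scala") // assumed shape: file first, enclosing packages after
        val url  = "../" * (path.size - 1) + "index.html"  // climb back up to the documentation root
        println(s"$url#${hashFromPath(path)}")             // ../../index.html#scala.collection.Seq
      }
    }
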
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
index 910148532d..9ab3999447 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -52,7 +52,7 @@ private[html] object SyntaxHigh {
"Triple", "TypeTag", "Unit")
def apply(data: String): NodeSeq = {
- val buf = data.getBytes
+ val buf = data.toCharArray
val out = new StringBuilder
def compare(offset: Int, key: String): Int = {
@@ -60,7 +60,7 @@ private[html] object SyntaxHigh {
var j = 0
val l = key.length
while (i < buf.length && j < l) {
- val bch = buf(i).toChar
+ val bch = buf(i)
val kch = key charAt j
if (bch < kch) return -1
else if (bch > kch) return 1
@@ -94,13 +94,13 @@ private[html] object SyntaxHigh {
def line(i: Int): Int =
if (i == buf.length || buf(i) == '\n') i
else {
- out append buf(i).toChar
+ out append buf(i)
line(i+1)
}
var level = 0
def multiline(i: Int, star: Boolean): Int = {
if (i == buf.length) return i
- val ch = buf(i).toChar
+ val ch = buf(i)
out append ch
ch match {
case '*' =>
@@ -127,7 +127,7 @@ private[html] object SyntaxHigh {
if (i == buf.length) i
else if (i > j+6) { out setLength 0; j }
else {
- val ch = buf(i).toChar
+ val ch = buf(i)
out append ch
ch match {
case '\\' =>
@@ -148,7 +148,7 @@ private[html] object SyntaxHigh {
val out = new StringBuilder("\"")
def strlit0(i: Int, bslash: Boolean): Int = {
if (i == buf.length) return i
- val ch = buf(i).toChar
+ val ch = buf(i)
out append ch
ch match {
case '\\' =>
@@ -167,7 +167,7 @@ private[html] object SyntaxHigh {
val out = new StringBuilder
def intg(i: Int): Int = {
if (i == buf.length) return i
- val ch = buf(i).toChar
+ val ch = buf(i)
ch match {
case '.' =>
out append ch
@@ -181,7 +181,7 @@ private[html] object SyntaxHigh {
}
def frac(i: Int): Int = {
if (i == buf.length) return i
- val ch = buf(i).toChar
+ val ch = buf(i)
ch match {
case 'e' | 'E' =>
out append ch
@@ -195,7 +195,7 @@ private[html] object SyntaxHigh {
}
def expo(i: Int, signed: Boolean): Int = {
if (i == buf.length) return i
- val ch = buf(i).toChar
+ val ch = buf(i)
ch match {
case '+' | '-' if !signed =>
out append ch
@@ -222,7 +222,7 @@ private[html] object SyntaxHigh {
case '&' =>
parse("&amp;", i+1)
case '<' if i+1 < buf.length =>
- val ch = buf(i+1).toChar
+ val ch = buf(i+1)
if (ch == '-' || ch == ':' || ch == '%')
parse("<span class=\"kw\">&lt;"+ch+"</span>", i+2)
else
@@ -236,19 +236,19 @@ private[html] object SyntaxHigh {
if (i+1 < buf.length && buf(i+1) == '>')
parse("<span class=\"kw\">=&gt;</span>", i+2)
else
- parse(buf(i).toChar.toString, i+1)
+ parse(buf(i).toString, i+1)
case '/' =>
if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
val c = comment(i+1)
parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
} else
- parse(buf(i).toChar.toString, i+1)
+ parse(buf(i).toString, i+1)
case '\'' =>
val s = charlit(i+1)
if (s.length > 0)
parse("<span class=\"lit\">"+s+"</span>", i+s.length)
else
- parse(buf(i).toChar.toString, i+1)
+ parse(buf(i).toString, i+1)
case '"' =>
val s = strlit(i+1)
parse("<span class=\"lit\">"+s+"</span>", i+s.length)
@@ -257,9 +257,9 @@ private[html] object SyntaxHigh {
if (k >= 0)
parse("<span class=\"ano\">@"+annotations(k)+"</span>", i+annotations(k).length+1)
else
- parse(buf(i).toChar.toString, i+1)
+ parse(buf(i).toString, i+1)
case _ =>
- if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
+ if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1)))) {
if (Character.isDigit(buf(i).toInt) ||
(buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1).toInt))) {
val s = numlit(i)
@@ -273,11 +273,11 @@ private[html] object SyntaxHigh {
if (k >= 0)
parse("<span class=\"std\">"+standards(k)+"</span>", i+standards(k).length)
else
- parse(buf(i).toChar.toString, i+1)
+ parse(buf(i).toString, i+1)
}
}
} else
- parse(buf(i).toChar.toString, i+1)
+ parse(buf(i).toString, i+1)
}
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
new file mode 100755
index 0000000000..f257153bd7
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/DeprecatedIndex.scala
@@ -0,0 +1,58 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ */
+
+package scala
+package tools
+package nsc
+package doc
+package html
+package page
+
+import doc.model._
+
+class DeprecatedIndex(universe: Universe, index: doc.Index) extends HtmlPage {
+
+ def path = List("deprecated-list.html")
+
+ def title = {
+ val s = universe.settings
+ ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+ }
+
+ def headers =
+ <xml:group>
+ <link href={ relativeLinkTo(List("ref-index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ </xml:group>
+
+
+ private def entry(name: String, methods: Iterable[MemberEntity]) = {
+ val occurrences = methods.filter(_.deprecation.isDefined).map(method =>
+ templateToHtml(method.inDefinitionTemplates.head)
+ ).toList.distinct
+
+ <div class="entry">
+ <div class="name">{ name }</div>
+ <div class="occurrences">{
+ for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
+ }</div>
+ </div>
+ }
+
+ def deprecatedEntries = {
+ val available = ('_' +: ('a' to 'z')).flatMap(index.firstLetterIndex.get)
+
+ for (group <- available;
+ value <- group if value._2.find(_.deprecation.isDefined).isDefined)
+ yield value
+ }
+
+ def body =
+ <body>{
+ for(value <- deprecatedEntries) yield
+ entry(value._1, value._2.view)
+ }</body>
+
+}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index ce3a5eb1fc..6bfe480e33 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -61,12 +61,17 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
}
}
+ def deprecated: NodeSeq = if (index.hasDeprecatedMembers)
+ <a target="template" href="deprecated-list.html">deprecated</a>
+ else
+ <span>deprecated</span>
+
def browser =
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
<div id="filter">
<div id="textfilter"></div>
- <div id="letters">{ letters }</div>
+ <div id="letters">{ letters } &#8211; { deprecated }</div>
</div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
deleted file mode 100644
index 37145756d9..0000000000
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2013 LAMP/EPFL
- * @author David Bernard, Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package html
-package page
-
-import scala.xml.NodeSeq
-import java.io.File
-
-class Source(sourceFile: File) extends HtmlPage {
-
- val path = List("source.html")
-
- val title = "Scaladoc: page source"
-
- val headers =
- NodeSeq.Empty
-
- val body =
- <body>
- <h1>Page source is not implemented yet</h1>
- </body>
-
- /*
-
-
- def readTextFromSrcDir(subPath: String) :Option[String] = {
- readTextFromFile(new File(sourceDir, subPath))
- }
-
- def readTextFromFile(f : File) :Option[String] = {
- if (f.exists) {
- Some(Source.fromFile(f)(Codec.default).getLines().mkString(""))
- } else {
- None
- }
- }
-
-
- def writeTextToFile(f : File, txt : String, header: Option[String], footer: Option[String]) {
- val out = new FileOutputStream(f)
- try {
- val enc = "UTF-8"
- header.foreach(s => out.write(s.getBytes(enc)))
- out.write(txt.getBytes(enc))
- footer.foreach(s => out.write(s.getBytes(enc)))
- } finally {
- try {
- out.close()
- } catch {
- case _ => //ignore
- }
- }
- }
-
- trait SourceHtmlizer {
- def scalaToHtml(src :File) : Option[File]
- }
-
- lazy val sourceHtmlizer : SourceHtmlizer = {
- if (cfg.htmlizeSource) {
- new SourceHtmlizer {
-
- val inDir: File = cfg.sourcedir
- val outDir: File = cfg.outputdir
-
- private def relativize(uri: URI, from: URI) = linkHelper.relativize(uri, from).getOrElse("__notFound__" + uri.getPath)
-
- def header(dest: URI) = Some("""
- <html>
- <head>
- <link href='""" + relativize(new URI("site:/_highlighter/SyntaxHighlighter.css"), dest) + """' rel='stylesheet' type='text/css'/>
- <script language='javascript' src='""" + relativize(new URI("site:/_highlighter/shAll.js"), dest) + """'></script>
- </head>
- <body>
- <pre name="code" class="scala" style="width:100%">
- """)
-
- def footer(dest: URI) = Some("""</pre>
- <script language='javascript'>
- dp.SyntaxHighlighter.ClipboardSwf = '""" + relativize(new URI("site:/_highlighter/clipboard.swf"), dest) + """';
- dp.SyntaxHighlighter.HighlightAll('code');
- </script>
- </body>
- </html>
- """)
-
- //TODO: escape the source code
- def scalaToHtml(src :File) = {
- val dest = new File(outDir, fileHelper.relativePathUnderDir(src, inDir) + ".html")
- if (!dest.exists || dest.lastModified < src.lastModified) {
-
- //we need to verify whether the directory we are trying to write to has already been created or not
- if(!dest.getParentFile.exists) dest.getParentFile.mkdirs
-
- val uri = linkHelper.uriFor(dest).get
- var txt = fileHelper.readTextFromFile(src).getOrElse("")
- txt = txt.replace("<", "&lt;")
- fileHelper.writeTextToFile(dest, txt, header(uri), footer(uri))
- }
- Some(dest)
- }
-
- def copyResources() {
- val loader = this.getClass().getClassLoader()
- val buf = new Array[Byte](1024)
- def copyResource(name: String) = fileHelper.copyResource("/scala/tools/nsc/doc/html/resource/", name, outDir, loader, buf)
- copyResource("_highlighter/clipboard.swf")
- copyResource("_highlighter/shAll.js")
- copyResource("_highlighter/SyntaxHighlighter.css")
- }
-
- copyResources()
- }
- } else {
- new SourceHtmlizer {
- def scalaToHtml(src :File) = None
- }
- }
- }
- */
-
-}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 26ee005d3e..c384ed7034 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -15,7 +15,7 @@ import base.comment._
import model._
import model.diagram._
-import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
+import scala.xml.{Elem, NodeSeq, Text, UnprefixedAttribute}
import scala.language.postfixOps
import scala.collection.mutable. { Set, HashSet }
@@ -89,7 +89,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val templateName = if (tpl.isRootPackage) "root package" else tpl.name
val displayName = tpl.companion match {
case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
- <a href={relativeLinkTo(companion)} title="Go to companion">{ templateName }</a>
+ <a href={relativeLinkTo(companion)} title={docEntityKindToCompanionTitle(tpl)}>{ templateName }</a>
case _ =>
templateName
}
@@ -103,14 +103,18 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<body class={ if (tpl.isType) "type" else "value" }>
<div id="definition">
{
+ val (src, alt) = docEntityKindToBigImage(tpl)
+
tpl.companion match {
case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
- <a href={relativeLinkTo(companion)} title="Go to companion"><img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/></a>
+ <a href={relativeLinkTo(companion)} title={docEntityKindToCompanionTitle(tpl)}><img alt={alt} src={ relativeLinkTo(List(src, "lib")) }/></a>
case _ =>
- <img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
+ <img alt={alt} src={ relativeLinkTo(List(src, "lib")) }/>
}}
{ owner }
- <h1>{ displayName }</h1>
+ <h1>{ displayName }</h1>{
+ if (tpl.isPackage) NodeSeq.Empty else <h3>{companionAndPackage(tpl)}</h3>
+ }{ permalink(tpl) }
</div>
{ signature(tpl, isSelf = true) }
@@ -306,9 +310,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<xml:group>
<div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }</div>
</xml:group>
- case dte: DocTemplateEntity if mbr.comment.isDefined =>
- // comment of inner, documented class (only short comment, full comment is on the class' own page)
- memberToInlineCommentHtml(mbr, isSelf)
case _ =>
 // comment of non-class member or non-documented inner class
val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false)
@@ -608,7 +609,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<dd>{
val exceptionsXml: List[NodeSeq] =
for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield
- <span class="cmt">{Text(name) ++ bodyToHtml(body)}</span>
+ <span class="cmt">{bodyToHtml(body)}</span>
exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
}</dd>
}
@@ -723,6 +724,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
/** name, tparams, params, result */
def signature(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+
def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
<xml:group>
<span class="modifier_kind">
@@ -833,11 +835,11 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</xml:group>
mbr match {
case dte: DocTemplateEntity if !isSelf =>
- <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4>
+ <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4> ++ permalink(dte, isSelf)
case _ if isSelf =>
<h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
case _ =>
- <h4 class="signature">{ inside(hasLinks = true) }</h4>
+ <h4 class="signature">{ inside(hasLinks = true) }</h4> ++ permalink(mbr)
}
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
index 61c1819d11..cf65de4151 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
@@ -25,29 +25,3 @@ trait DiagramGenerator {
*/
def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage):NodeSeq
}
-
-object DiagramGenerator {
-
- // TODO: This is tailored towards the dot generator, since it's the only generator. In the future it should be more
- // general.
-
- private[this] var dotRunner: DotRunner = null
- private[this] var settings: doc.Settings = null
-
- def initialize(s: doc.Settings) =
- settings = s
-
- def getDotRunner() = {
- if (dotRunner == null)
- dotRunner = new DotRunner(settings)
- dotRunner
- }
-
- def cleanup() = {
- DiagramStats.printStats(settings)
- if (dotRunner != null) {
- dotRunner.cleanup()
- dotRunner = null
- }
- }
-} \ No newline at end of file
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 4ff436bdc6..b541cf721b 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -15,7 +15,7 @@ import scala.collection.immutable._
import model._
import model.diagram._
-class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
+class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends DiagramGenerator {
// the page where the diagram will be embedded
private var page: HtmlPage = null
@@ -317,7 +317,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
* Calls dot with a given dot string and returns the SVG output.
*/
private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
- val dotOutput = DiagramGenerator.getDotRunner().feedToDot(dotInput, template)
+ val dotOutput = dotRunner.feedToDot(dotInput, template)
var tSVG = -System.currentTimeMillis
val result = if (dotOutput != null) {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
index 478f2e38ac..680ead7a59 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
@@ -25,7 +25,7 @@ $(document).ready(function()
$(".diagram-container").css("display", "block");
$(".diagram").each(function() {
- // store inital dimensions
+ // store initial dimensions
$(this).data("width", $("svg", $(this)).width());
$(this).data("height", $("svg", $(this)).height());
// store unscaled clone of SVG element
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
index 55fb370a41..3e352a95b3 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -310,6 +310,7 @@ h1 {
position: fixed;
margin-left: 300px;
display: block;
+ -webkit-overflow-scrolling: touch;
}
#content > iframe {
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index c201b324e7..3d9cf8d465 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -1,5 +1,5 @@
// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
+// code by Gilles Dubochet with contributions by Johannes Rudolph, "spiros" and Marcin Kubala
var topLevelTemplates = undefined;
var topLevelPackages = undefined;
@@ -11,7 +11,7 @@ var focusFilterState = undefined;
var title = $(document).attr('title');
-var lastHash = "";
+var lastFragment = "";
$(document).ready(function() {
$('body').layout({
@@ -24,9 +24,13 @@ $(document).ready(function() {
,north__paneSelector: ".ui-west-north"
});
$('iframe').bind("load", function(){
- var subtitle = $(this).contents().find('title').text();
- $(document).attr('title', (title ? title + " - " : "") + subtitle);
-
+ try {
+ var subtitle = $(this).contents().find('title').text();
+ $(document).attr('title', (title ? title + " - " : "") + subtitle);
+ } catch (e) {
+ // Chrome doesn't allow reading the iframe's contents when
+ // used on the local file system.
+ }
setUrlFragmentFromFrameSrc();
});
@@ -64,21 +68,43 @@ $(document).ready(function() {
// Set the iframe's src according to the fragment of the current url.
// fragment = "#scala.Either" => iframe url = "scala/Either.html"
// fragment = "#scala.Either@isRight:Boolean" => iframe url = "scala/Either.html#isRight:Boolean"
+// fragment = "#scalaz.iteratee.package@>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" => iframe url = "scalaz/iteratee/package.html#>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]"
function setFrameSrcFromUrlFragment() {
- var fragment = location.hash.slice(1);
- if(fragment) {
- var loc = fragment.split("@")[0].replace(/\./g, "/");
- if(loc.indexOf(".html") < 0) loc += ".html";
- if(fragment.indexOf('@') > 0) loc += ("#" + fragment.split("@", 2)[1]);
- frames["template"].location.replace(loc);
- }
- else
- frames["template"].location.replace("package.html");
+
+ function extractLoc(fragment) {
+ var loc = fragment.split('@')[0].replace(/\./g, "/");
+ if (loc.indexOf(".html") < 0) {
+ loc += ".html";
+ }
+ return loc;
+ }
+
+ function extractMemberSig(fragment) {
+ var splitIdx = fragment.indexOf('@');
+ if (splitIdx < 0) {
+ return;
+ }
+ return fragment.substr(splitIdx + 1);
+ }
+
+ var fragment = location.hash.slice(1);
+ if (fragment) {
+ var locWithMemberSig = extractLoc(fragment);
+ var memberSig = extractMemberSig(fragment);
+ if (memberSig) {
+ locWithMemberSig += "#" + memberSig;
+ }
+ frames["template"].location.replace(location.protocol + locWithMemberSig);
+ } else {
+ console.log("empty fragment detected");
+ frames["template"].location.replace("package.html");
+ }
}
// Set the url fragment according to the src of the iframe "template".
// iframe url = "scala/Either.html" => url fragment = "#scala.Either"
// iframe url = "scala/Either.html#isRight:Boolean" => url fragment = "#scala.Either@isRight:Boolean"
+// iframe url = "scalaz/iteratee/package.html#>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]" => fragment = "#scalaz.iteratee.package@>@>[E,A]=scalaz.iteratee.package.Iteratee[E,A]"
function setUrlFragmentFromFrameSrc() {
try {
var commonLength = location.pathname.lastIndexOf("/");
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png
new file mode 100644
index 0000000000..d54bc93f6a
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/permalink.png
Binary files differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index b066027f04..e84d7c1ca6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -210,6 +210,7 @@ dl.attributes > dd {
display: block;
padding-left: 10em;
margin-bottom: 5px;
+ min-height: 15px;
}
#template .values > h3 {
@@ -397,6 +398,49 @@ div.members > ol > li:last-child {
margin-bottom: 5px;
}
+#definition .morelinks {
+ text-align: right;
+ position: absolute;
+ top: 40px;
+ right: 10px;
+ width: 450px;
+}
+
+#definition .morelinks a {
+ color: #EBEBEB;
+}
+
+#template .members li .permalink {
+ position: absolute;
+ top: 5px;
+ right: 5px;
+}
+
+#definition .permalink {
+ position: absolute;
+ top: 10px;
+ right: 15px;
+}
+
+#definition .permalink a {
+ color: #EBEBEB;
+}
+
+#template .members li .permalink,
+#definition .permalink a {
+ display: none;
+}
+
+#template .members li:hover .permalink,
+#definition:hover .permalink a {
+ display: block;
+}
+
+#template .members li .permalink a,
+#definition .permalink a {
+ text-decoration: none;
+ font-weight: bold;
+}
/* Comments text formating */
@@ -626,6 +670,7 @@ div.fullcomment dl.paramcmts > dd {
padding-left: 10px;
margin-bottom: 5px;
margin-left: 70px;
+ min-height: 15px;
}
/* Members filter tool */
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 6d1caf6d50..798a2d430b 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -1,23 +1,57 @@
// © 2009–2010 EPFL/LAMP
-// code by Gilles Dubochet with contributions by Pedro Furlanetto
+// code by Gilles Dubochet with contributions by Pedro Furlanetto and Marcin Kubala
$(document).ready(function(){
+ var controls = {
+ visibility: {
+ publicOnly: $("#visbl").find("> ol > li.public"),
+ all: $("#visbl").find("> ol > li.all")
+ }
+ };
+
// Escapes special characters and returns a valid jQuery selector
function escapeJquery(str){
- return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1');
+ return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=<>\|])/g, '\\$1');
}
- // highlight and jump to selected member
- if (window.location.hash) {
- var temp = window.location.hash.replace('#', '');
- var elem = '#'+escapeJquery(temp);
+ function toggleVisibilityFilter(ctrlToEnable, ctrToDisable) {
+ if (ctrlToEnable.hasClass("out")) {
+ ctrlToEnable.removeClass("out").addClass("in");
+ ctrToDisable.removeClass("in").addClass("out");
+ filter();
+ }
+ }
+
+ controls.visibility.publicOnly.click(function () {
+ toggleVisibilityFilter(controls.visibility.publicOnly, controls.visibility.all);
+ });
- window.scrollTo(0, 0);
- $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000);
- $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000);
+ controls.visibility.all.click(function () {
+ toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly);
+ });
+
+ function exposeMember(jqElem) {
+ var jqElemParent = jqElem.parent(),
+ parentName = jqElemParent.attr("name"),
+ linearizationName = /^([^#]*)(#.*)?$/gi.exec(parentName)[1];
+
+ // switch visibility filter if necessary
+ if (jqElemParent.attr("visbl") == "prt") {
+ toggleVisibilityFilter(controls.visibility.all, controls.visibility.publicOnly);
+ }
+
+ // toggle appropriate linearization buttons
+ if (linearizationName) {
+ $("#linearization li.out[name='" + linearizationName + "']").removeClass("out").addClass("in");
+ }
+
+ filter();
+ window.scrollTo(0, 0);
+ jqElemParent.effect("highlight", {color: "#FFCC85"}, 3000);
+ $('html,body').animate({scrollTop: jqElemParent.offset().top}, 1000);
}
-
+
var isHiddenClass = function (name) {
return name == 'scala.Any' ||
name == 'scala.AnyRef';
@@ -97,7 +131,7 @@ $(document).ready(function(){
else if ($(this).hasClass("out")) {
$(this).removeClass("out");
$(this).addClass("in");
- };
+ }
filter();
});
@@ -109,7 +143,7 @@ $(document).ready(function(){
else if ($(this).hasClass("out")) {
$(this).removeClass("out");
$(this).addClass("in");
- };
+ }
filter();
});
@@ -145,34 +179,20 @@ $(document).ready(function(){
filter();
});
- $("#visbl > ol > li.public").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#visbl > ol > li.all").removeClass("in").addClass("out");
- filter();
- };
- })
- $("#visbl > ol > li.all").click(function() {
- if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#visbl > ol > li.public").removeClass("in").addClass("out");
- filter();
- };
- });
$("#order > ol > li.alpha").click(function() {
if ($(this).hasClass("out")) {
orderAlpha();
- };
+ }
})
$("#order > ol > li.inherit").click(function() {
if ($(this).hasClass("out")) {
orderInherit();
- };
+ }
});
$("#order > ol > li.group").click(function() {
if ($(this).hasClass("out")) {
orderGroup();
- };
+ }
});
$("#groupedMembers").hide();
@@ -181,7 +201,7 @@ $(document).ready(function(){
// Create tooltips
$(".extype").add(".defval").tooltip({
tip: "#tooltip",
- position:"top center",
+ position: "top center",
predelay: 500,
onBeforeShow: function(ev) {
$(this.getTip()).text(this.getTrigger().attr("name"));
@@ -233,6 +253,20 @@ $(document).ready(function(){
windowTitle();
if ($("#order > ol > li.group").length == 1) { orderGroup(); };
+
+ function findElementByHash(locationHash) {
+ var temp = locationHash.replace('#', '');
+ var memberSelector = '#' + escapeJquery(temp);
+ return $(memberSelector);
+ }
+
+ // highlight and jump to selected member
+ if (window.location.hash) {
+ var jqElem = findElementByHash(window.location.hash);
+ if (jqElem.length > 0) {
+ exposeMember(jqElem);
+ }
+ }
});
function orderAlpha() {
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index 6932f01e9a..7fe8903c76 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -117,7 +117,7 @@ trait MemberEntity extends Entity {
def toRoot: List[MemberEntity]
/** The templates in which this member has been declared. The first element of the list is the template that contains
- * the currently active declaration of this member, subsequent elements are declarations that have been overriden. If
+ * the currently active declaration of this member, subsequent elements are declarations that have been overridden. If
* the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All
* elements of this list are in the linearization of `inTemplate`. */
def inDefinitionTemplates: List[TemplateEntity]
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 643a089aae..ad53dc6bfa 100755
--- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -14,10 +14,12 @@ object IndexModelFactory {
def makeIndex(universe: Universe): Index = new Index {
- lazy val firstLetterIndex: Map[Char, SymbolMap] = {
+ lazy val (firstLetterIndex, hasDeprecatedMembers): (Map[Char, SymbolMap], Boolean) = {
object result extends mutable.HashMap[Char,SymbolMap] {
+ var deprecated = false
+
/* symbol name ordering */
implicit def orderingMap = math.Ordering.String
@@ -32,6 +34,8 @@ object IndexModelFactory {
val members = letter.get(d.name).getOrElse {
SortedSet.empty[MemberEntity](Ordering.by { _.toString })
} + d
+ if (!deprecated && members.find(_.deprecation.isDefined).isDefined)
+ deprecated = true
this(firstLetter) = letter + (d.name -> members)
}
}
@@ -50,7 +54,7 @@ object IndexModelFactory {
gather(universe.rootPackage)
- result.toMap
+ (result.toMap, result.deprecated)
}
}
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 339129bdbc..64eb1adbea 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -45,7 +45,7 @@ trait MemberLookup extends base.MemberLookupBase {
sym.info.member(newTermName("package"))
else sym
Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
- val path = src.path
+ val path = src.canonicalPath
settings.extUrlMapping get path map { url =>
LinkToExternal(name, url + "#" + name)
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index ef84ac42ba..03d71f15a3 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -9,8 +9,11 @@ import base.comment._
import diagram._
import scala.collection._
+import scala.tools.nsc.doc.html.HtmlPage
+import scala.tools.nsc.doc.html.page.diagram.{DotRunner}
import scala.util.matching.Regex
import scala.reflect.macros.internal.macroImpl
+import scala.xml.NodeSeq
import symtab.Flags
import io._
@@ -47,6 +50,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
thisFactory.universe = thisUniverse
val settings = thisFactory.settings
val rootPackage = modelCreation.createRootPackage
+ lazy val dotRunner = new DotRunner(settings)
}
_modelFinished = true
  // complete the links between model entities, everything that couldn't have been done before
@@ -313,7 +317,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* Subclass cache */
private lazy val subClassesCache = (
- if (sym == AnyRefClass) null
+ if (sym == AnyRefClass || sym == AnyClass) null
else mutable.ListBuffer[DocTemplateEntity]()
)
def registerSubClass(sc: DocTemplateEntity): Unit = {
@@ -753,8 +757,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
})
}
else if (bSym.isConstructor)
- if (conversion.isDefined)
- None // don't list constructors inherted by implicit conversion
+ if (conversion.isDefined || (bSym.enclClass.isAbstract && (bSym.enclClass.isSealed || bSym.enclClass.isFinal)))
+ // don't list constructors inherited by implicit conversion
+ // and don't list constructors of abstract sealed types (they cannot be accessed anyway)
+ None
else
Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor {
override def isConstructor = true
diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
index 70423cc7dc..fa3e8ff5cb 100644
--- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
@@ -182,14 +182,16 @@ abstract class ScaladocModelTest extends DirectTest {
}
}
- def countLinks(c: Comment, p: EntityLink => Boolean) = {
- def countLinks(body: Any): Int = body match {
+ def countLinks(c: Comment, p: EntityLink => Boolean): Int = countLinksInBody(c.body, p)
+
+ def countLinksInBody(body: Body, p: EntityLink => Boolean): Int = {
+ def countLinks(b: Any): Int = b match {
case el: EntityLink if p(el) => 1
case s: Seq[_] => s.toList.map(countLinks(_)).sum
case p: Product => p.productIterator.toList.map(countLinks(_)).sum
case _ => 0
}
- countLinks(c.body)
+ countLinks(body)
}
def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = {
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index c375a5bac4..de9c30b8af 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -9,7 +9,7 @@
package scala.tools.scalap
import scala.collection.mutable
-import mutable.{ Buffer, ListBuffer }
+import mutable.ListBuffer
object Arguments {
case class Parser(optionPrefix: Char) {
@@ -47,7 +47,7 @@ object Arguments {
}
def parseBinding(str: String, separator: Char): (String, String) = (str indexOf separator) match {
- case -1 => argumentError("missing '" + separator + "' in binding '" + str + "'") ; ("", "")
+ case -1 => argumentError(s"missing '$separator' in binding '$str'") ; ("", "")
case idx => ((str take idx).trim, (str drop (idx + 1)).trim)
}
@@ -71,7 +71,7 @@ object Arguments {
i += 1
} else if (optionalArgs contains args(i)) {
if ((i + 1) == args.length) {
- argumentError("missing argument for '" + args(i) + "'")
+ argumentError(s"missing argument for '${args(i)}'")
i += 1
} else {
res.addArgument(args(i), args(i + 1))
@@ -79,11 +79,11 @@ object Arguments {
}
} else if (optionalBindings contains args(i)) {
if ((i + 1) == args.length) {
- argumentError("missing argument for '" + args(i) + "'")
+ argumentError(s"missing argument for '${args(i)}'")
i += 1
} else {
res.addBinding(args(i),
- parseBinding(args(i + 1), optionalBindings(args(i))));
+ parseBinding(args(i + 1), optionalBindings(args(i))))
i += 2
}
} else {
@@ -92,23 +92,23 @@ object Arguments {
while ((i == j) && iter.hasNext) {
val prefix = iter.next
if (args(i) startsWith prefix) {
- res.addPrefixed(prefix, args(i).substring(prefix.length()).trim());
+ res.addPrefixed(prefix, args(i).substring(prefix.length()).trim())
i += 1
}
}
if (i == j) {
- val iter = prefixedBindings.keysIterator;
+ val iter = prefixedBindings.keysIterator
while ((i == j) && iter.hasNext) {
val prefix = iter.next
if (args(i) startsWith prefix) {
val arg = args(i).substring(prefix.length()).trim()
i = i + 1
res.addBinding(prefix,
- parseBinding(arg, prefixedBindings(prefix)));
+ parseBinding(arg, prefixedBindings(prefix)))
}
}
if (i == j) {
- argumentError("unknown option '" + args(i) + "'")
+ argumentError(s"unknown option '${args(i)}'")
i = i + 1
}
}
@@ -119,7 +119,7 @@ object Arguments {
def parse(options: String*)(args: Array[String]): Arguments = {
val parser = new Parser('-')
- options foreach (parser withOption _)
+ options foreach parser.withOption
parser parse args
}
}
@@ -142,7 +142,7 @@ class Arguments {
if (key.length > 0)
bindings.getOrElseUpdate(tag, new mutable.HashMap)(key) = value
- def addBinding(tag: String, binding: Tuple2[String, String]): Unit =
+ def addBinding(tag: String, binding: (String, String)): Unit =
addBinding(tag, binding._1, binding._2)
def addOther(arg: String): Unit = others += arg
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index c72f416a89..3d2bfd7251 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -10,11 +10,16 @@ package tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
import scala.reflect.NameTransformer
-import scalax.rules.scalasig._
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
-import scala.tools.util.PathResolver
-import ClassPath.DefaultJavaContext
+import scala.tools.nsc.Settings
+import scala.tools.nsc.classpath.AggregateFlatClassPath
+import scala.tools.nsc.classpath.FlatClassPathFactory
import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.ClassFileLookup
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.util.JavaClassPath
+import scala.tools.util.PathResolverFactory
+import scalax.rules.scalasig._
/**The main object used to execute scalap on the command-line.
*
@@ -23,6 +28,7 @@ import scala.tools.nsc.io.AbstractFile
class Main {
val SCALA_SIG = "ScalaSig"
val SCALA_SIG_ANNOTATION = "Lscala/reflect/ScalaSignature;"
+ val SCALA_LONG_SIG_ANNOTATION = "Lscala/reflect/ScalaLongSignature;"
val BYTES_VALUE = "bytes"
val versionMsg = "Scala classfile decoder %s -- %s\n".format(Properties.versionString, Properties.copyrightString)
@@ -42,12 +48,12 @@ class Main {
*
* @param clazz the class file to be processed.
*/
- def processJavaClassFile(clazz: Classfile) {
+ def processJavaClassFile(clazz: Classfile): Unit = {
// construct a new output stream writer
val out = new OutputStreamWriter(Console.out)
val writer = new JavaWriter(clazz, out)
// print the class
- writer.printClass
+ writer.printClass()
out.flush()
}
@@ -60,21 +66,20 @@ class Main {
syms.head.parent match {
// Partial match
- case Some(p) if (p.name != "<empty>") => {
+ case Some(p) if p.name != "<empty>" =>
val path = p.path
if (!isPackageObject) {
- stream.print("package ");
- stream.print(path);
+ stream.print("package ")
+ stream.print(path)
stream.print("\n")
} else {
val i = path.lastIndexOf(".")
if (i > 0) {
- stream.print("package ");
+ stream.print("package ")
stream.print(path.substring(0, i))
stream.print("\n")
}
}
- }
case _ =>
}
// Print classes
@@ -96,7 +101,7 @@ class Main {
/** Executes scalap with the given arguments and classpath for the
* class denoted by `classname`.
*/
- def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = {
+ def process(args: Arguments, path: ClassFileLookup[AbstractFile])(classname: String): Unit = {
// find the classfile
val encName = classname match {
case "scala.AnyRef" => "java.lang.Object"
@@ -106,92 +111,115 @@ class Main {
// we can afford allocations because this is not performance-critical code
classname.split('.').map(NameTransformer.encode).mkString(".")
}
- val cls = path.findClass(encName)
- if (cls.isDefined && cls.get.binary.isDefined) {
- val cfile = cls.get.binary.get
- if (verbose) {
- Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + cfile.path)
- }
- val bytes = cfile.toByteArray
- if (isScalaFile(bytes)) {
- Console.println(decompileScala(bytes, isPackageObjectFile(encName)))
- } else {
- // construct a reader for the classfile content
- val reader = new ByteArrayReader(cfile.toByteArray)
- // parse the classfile
- val clazz = new Classfile(reader)
- processJavaClassFile(clazz)
- }
- // if the class corresponds to the artificial class scala.Any.
- // (see member list in class scala.tool.nsc.symtab.Definitions)
- }
- else
- Console.println("class/object " + classname + " not found.")
- }
- object EmptyClasspath extends ClassPath[AbstractFile] {
- /**
- * The short name of the package (without prefix)
- */
- def name = ""
- def asURLs = Nil
- def asClasspathString = ""
-
- val context = DefaultJavaContext
- val classes = IndexedSeq()
- val packages = IndexedSeq()
- val sourcepaths = IndexedSeq()
+ path.findClassFile(encName) match {
+ case Some(classFile) =>
+ if (verbose) {
+ Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + classFile.path)
+ }
+ val bytes = classFile.toByteArray
+ if (isScalaFile(bytes)) {
+ Console.println(decompileScala(bytes, isPackageObjectFile(encName)))
+ } else {
+ // construct a reader for the classfile content
+ val reader = new ByteArrayReader(classFile.toByteArray)
+ // parse the classfile
+ val clazz = new Classfile(reader)
+ processJavaClassFile(clazz)
+ }
+ // if the class corresponds to the artificial class scala.Any.
+ // (see member list in class scala.tool.nsc.symtab.Definitions)
+ case _ =>
+ Console.println(s"class/object $classname not found.")
+ }
}
}
object Main extends Main {
+
+ private object opts {
+ val cp = "-cp"
+ val help = "-help"
+ val classpath = "-classpath"
+ val showPrivateDefs = "-private"
+ val verbose = "-verbose"
+ val version = "-version"
+
+ val classPathImplType = "-YclasspathImpl"
+ val disableFlatClassPathCaching = "-YdisableFlatCpCaching"
+ val logClassPath = "-Ylog-classpath"
+ }
+
/** Prints usage information for scalap. */
- def usage() {
- Console println """
+ def usage(): Unit = {
+ Console println s"""
|Usage: scalap {<option>} <name>
|where <name> is fully-qualified class name or <package_name>.package for package objects
|and <option> is
- | -private print private definitions
- | -verbose print out additional information
- | -version print out the version number of scalap
- | -help display this usage message
- | -classpath <path> specify where to find user class files
- | -cp <path> specify where to find user class files
+ | ${opts.showPrivateDefs} print private definitions
+ | ${opts.verbose} print out additional information
+ | ${opts.version} print out the version number of scalap
+ | ${opts.help} display this usage message
+ | ${opts.classpath} <path> specify where to find user class files
+ | ${opts.cp} <path> specify where to find user class files
""".stripMargin.trim
}
- def main(args: Array[String]) {
- // print usage information if there is no command-line argument
- if (args.isEmpty)
- return usage()
-
- val arguments = Arguments.Parser('-')
- .withOption("-private")
- .withOption("-verbose")
- .withOption("-version")
- .withOption("-help")
- .withOptionalArg("-classpath")
- .withOptionalArg("-cp")
- .parse(args);
-
- if (arguments contains "-version")
- Console.println(versionMsg)
- if (arguments contains "-help")
- usage()
-
- verbose = arguments contains "-verbose"
- printPrivates = arguments contains "-private"
- // construct a custom class path
- val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
- val path = cparg match {
- case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
+ def main(args: Array[String]): Unit =
+ // print usage information if there is no command-line argument
+ if (args.isEmpty) usage()
+ else {
+ val arguments = parseArguments(args)
+
+ if (arguments contains opts.version)
+ Console.println(versionMsg)
+ if (arguments contains opts.help)
+ usage()
+
+ verbose = arguments contains opts.verbose
+ printPrivates = arguments contains opts.showPrivateDefs
+ // construct a custom class path
+ val cpArg = List(opts.classpath, opts.cp) map arguments.getArgument reduceLeft (_ orElse _)
+
+ val settings = new Settings()
+
+ arguments getArgument opts.classPathImplType foreach settings.YclasspathImpl.tryToSetFromPropertyValue
+ settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching
+ settings.Ylogcp.value = arguments contains opts.logClassPath
+
+ val path = createClassPath(cpArg, settings)
+
+ // print the classpath if output is verbose
+ if (verbose)
+ Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString)
+
+ // process all given classes
+ arguments.getOthers foreach process(arguments, path)
}
- // print the classpath if output is verbose
- if (verbose)
- Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path)
- // process all given classes
- arguments.getOthers foreach process(arguments, path)
+ private def parseArguments(args: Array[String]) =
+ Arguments.Parser('-')
+ .withOption(opts.showPrivateDefs)
+ .withOption(opts.verbose)
+ .withOption(opts.version)
+ .withOption(opts.help)
+ .withOptionalArg(opts.classpath)
+ .withOptionalArg(opts.cp)
+ // TODO three temporary, hidden options to be able to test different classpath representations
+ .withOptionalArg(opts.classPathImplType)
+ .withOption(opts.disableFlatClassPathCaching)
+ .withOption(opts.logClassPath)
+ .parse(args)
+
+ private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match {
+ case Some(cp) => settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat =>
+ AggregateFlatClassPath(new FlatClassPathFactory(settings).classesInExpandedPath(cp))
+ case ClassPathRepresentationType.Recursive =>
+ new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
+ }
+ case _ =>
+ settings.classpath.value = "." // include '.' in the default classpath SI-6669
+ PathResolverFactory.create(settings).result
}
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index e3076322dd..c36fdd02cd 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -14,20 +14,32 @@ package scalasig
import scala.language.postfixOps
import scala.language.implicitConversions
-import ClassFileParser.{ ConstValueIndex, Annotation }
+import ClassFileParser._
import scala.reflect.internal.pickling.ByteCodecs
object ScalaSigParser {
- import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
+ import Main.{ BYTES_VALUE, SCALA_LONG_SIG_ANNOTATION, SCALA_SIG, SCALA_SIG_ANNOTATION }
+ // TODO SI-9296 duplicated code, refactor
def scalaSigFromAnnotation(classFile: ClassFile): Option[ScalaSig] = {
import classFile._
- classFile.annotation(SCALA_SIG_ANNOTATION) map {
+ def getBytes(bytesElem: AnnotationElement): Array[Byte] = bytesElem.elementValue match {
+ case ConstValueIndex(index) => bytesForIndex(index)
+ case ArrayValue(signatureParts) => mergedLongSignatureBytes(signatureParts)
+ }
+
+ def mergedLongSignatureBytes(signatureParts: Seq[ElementValue]): Array[Byte] = signatureParts.flatMap {
+ case ConstValueIndex(index) => bytesForIndex(index)
+ }(collection.breakOut)
+
+ def bytesForIndex(index: Int) = constantWrapped(index).asInstanceOf[StringBytesPair].bytes
+
+ classFile.annotation(SCALA_SIG_ANNOTATION)
+ .orElse(classFile.annotation(SCALA_LONG_SIG_ANNOTATION)).map {
case Annotation(_, elements) =>
val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == BYTES_VALUE).get
- val bytes = ((bytesElem.elementValue match {case ConstValueIndex(index) => constantWrapped(index)})
- .asInstanceOf[StringBytesPair].bytes)
+ val bytes = getBytes(bytesElem)
val length = ByteCodecs.decode(bytes)
ScalaSigAttributeParsers.parse(ByteCode(bytes.take(length)))
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
index e8dfe0ac50..c20bbaeef1 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/Bench.scala
@@ -78,7 +78,7 @@ trait Bench extends Benchmark {
}
/**
- * Prints results of the benchmark. May be overidden in benchmarks.
+ * Prints results of the benchmark. May be overridden in benchmarks.
*/
def printResults {}
diff --git a/test/disabled/run/t8946.scala b/test/disabled/run/t8946.scala
new file mode 100644
index 0000000000..a248a20501
--- /dev/null
+++ b/test/disabled/run/t8946.scala
@@ -0,0 +1,29 @@
+// Tests to assert that references to threads are not strongly held when scala-reflection is used inside of them.
+object Test {
+ import scala.ref.WeakReference
+
+ def forceGc() = {
+ var obj = new Object
+ val ref = new WeakReference(obj)
+ obj = null;
+ while(ref.get.nonEmpty)
+ Array.ofDim[Byte](16 * 1024 * 1024)
+ }
+
+ def main(args: Array[String]): Unit = {
+ val threads = for (i <- (1 to 16)) yield {
+ val t = new Thread {
+ override def run(): Unit = {
+ import reflect.runtime.universe._
+ typeOf[List[String]] <:< typeOf[Seq[_]]
+ }
+ }
+ t.start()
+ t.join()
+ WeakReference(t)
+ }
+ forceGc()
+ val nonGCdThreads = threads.filter(_.get.nonEmpty).length
+ assert(nonGCdThreads == 0, s"${nonGCdThreads} threads were retained; expected 0.")
+ }
+}
diff --git a/test/files/jvm/beanInfo.check b/test/files/jvm/beanInfo.check
new file mode 100644
index 0000000000..d74e127711
--- /dev/null
+++ b/test/files/jvm/beanInfo.check
@@ -0,0 +1,6 @@
+property descriptors
+x -- int -- public int p.C.x() -- null
+y -- class java.lang.String -- public java.lang.String p.C.y() -- public void p.C.y_$eq(java.lang.String)
+z -- class scala.collection.immutable.List -- public scala.collection.immutable.List p.C.z() -- public void p.C.z_$eq(scala.collection.immutable.List)
+method descriptors
+f -- public p.C p.C.f()
diff --git a/test/files/jvm/beanInfo/C_1.scala b/test/files/jvm/beanInfo/C_1.scala
new file mode 100644
index 0000000000..a338abea1d
--- /dev/null
+++ b/test/files/jvm/beanInfo/C_1.scala
@@ -0,0 +1,9 @@
+package p
+
+@scala.beans.BeanInfo
+class C {
+ val x: Int = 0
+ var y: String = ""
+ var z: List[_] = Nil
+ def f: C = ???
+}
diff --git a/test/files/jvm/beanInfo/Test_2.scala b/test/files/jvm/beanInfo/Test_2.scala
new file mode 100644
index 0000000000..fa9b6e1391
--- /dev/null
+++ b/test/files/jvm/beanInfo/Test_2.scala
@@ -0,0 +1,17 @@
+object Test extends App {
+ val info = java.beans.Introspector.getBeanInfo(classOf[p.C])
+
+ println("property descriptors")
+
+ val pds = info.getPropertyDescriptors
+ for (pd <- pds) {
+ println(s"${pd.getName} -- ${pd.getPropertyType} -- ${pd.getReadMethod} -- ${pd.getWriteMethod}")
+ }
+
+ println("method descriptors")
+
+ val mds = info.getMethodDescriptors
+ for (md <- mds) {
+ println(s"${md.getName} -- ${md.getMethod}")
+ }
+}
diff --git a/test/files/jvm/deprecation.check b/test/files/jvm/deprecation.check
index d116778d3f..d57b6b55a5 100644
--- a/test/files/jvm/deprecation.check
+++ b/test/files/jvm/deprecation.check
@@ -1,3 +1,3 @@
-warning: there were 4 deprecation warning(s); re-run with -deprecation for details
+warning: there were four deprecation warnings; re-run with -deprecation for details
Note: deprecation/Use_2.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala
index 3bc8a2c100..7db6c49964 100644
--- a/test/files/jvm/duration-tck.scala
+++ b/test/files/jvm/duration-tck.scala
@@ -61,6 +61,11 @@ object Test extends App {
minf - inf mustBe minf
minf + minf mustBe minf
+ for (i <- Seq(zero, one, two, three)) {
+ i - inf mustBe minf
+ i - minf mustBe inf
+ }
+
inf.compareTo(inf) mustBe 0
inf.compareTo(one) mustBe 1
inf.compareTo(minf) mustBe 1
diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check
index 844ca54682..df1629dd7e 100644
--- a/test/files/jvm/future-spec.check
+++ b/test/files/jvm/future-spec.check
@@ -1 +1 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala
index 12b9168c5d..67c8c542ba 100644
--- a/test/files/jvm/future-spec/PromiseTests.scala
+++ b/test/files/jvm/future-spec/PromiseTests.scala
@@ -44,20 +44,79 @@ class PromiseTests extends MinimalScalaTest {
}.getMessage mustBe ("br0ken")
}
+ "be completable with a completed Promise" in {
+ {
+ val p = Promise[String]()
+ p.tryCompleteWith(Promise[String]().success("foo").future)
+ Await.result(p.future, defaultTimeout) mustBe ("foo")
+ }
+ {
+ val p = Promise[String]()
+ p.completeWith(Promise[String]().success("foo").future)
+ Await.result(p.future, defaultTimeout) mustBe ("foo")
+ }
+ {
+ val p = Promise[String]()
+ p.tryCompleteWith(Promise[String]().failure(new RuntimeException("br0ken")).future)
+ intercept[RuntimeException] {
+ Await.result(p.future, defaultTimeout)
+ }.getMessage mustBe ("br0ken")
+ }
+ {
+ val p = Promise[String]()
+ p.tryCompleteWith(Promise[String]().failure(new RuntimeException("br0ken")).future)
+ intercept[RuntimeException] {
+ Await.result(p.future, defaultTimeout)
+ }.getMessage mustBe ("br0ken")
+ }
+ }
}
"A successful Promise" should {
- val result = "test value"
- val promise = Promise[String]().complete(Success(result))
- promise.isCompleted mustBe (true)
- futureWithResult(_(promise.future, result))
+ "be completed" in {
+ val result = "test value"
+ val promise = Promise[String]().complete(Success(result))
+ promise.isCompleted mustBe (true)
+ futureWithResult(_(promise.future, result))
+ }
+
+ "not be completable with a completed Promise" in {
+ {
+ val p = Promise.successful("bar")
+ p.tryCompleteWith(Promise[String]().success("foo").future)
+ Await.result(p.future, defaultTimeout) mustBe ("bar")
+ }
+ {
+ val p = Promise.successful("bar")
+ p.completeWith(Promise[String]().success("foo").future)
+ Await.result(p.future, defaultTimeout) mustBe ("bar")
+ }
+ }
}
"A failed Promise" should {
- val message = "Expected Exception"
- val promise = Promise[String]().complete(Failure(new RuntimeException(message)))
- promise.isCompleted mustBe (true)
- futureWithException[RuntimeException](_(promise.future, message))
+ "be completed" in {
+ val message = "Expected Exception"
+ val promise = Promise[String]().complete(Failure(new RuntimeException(message)))
+ promise.isCompleted mustBe (true)
+ futureWithException[RuntimeException](_(promise.future, message))
+ }
+ "not be completable with a completed Promise" in {
+ {
+ val p = Promise[String]().failure(new RuntimeException("unbr0ken"))
+ p.tryCompleteWith(Promise[String].failure(new Exception("br0ken")).future)
+ intercept[RuntimeException] {
+ Await.result(p.future, defaultTimeout)
+ }.getMessage mustBe ("unbr0ken")
+ }
+ {
+ val p = Promise[String]().failure(new RuntimeException("unbr0ken"))
+ p.completeWith(Promise[String]().failure(new Exception("br0ken")).future)
+ intercept[RuntimeException] {
+ Await.result(p.future, defaultTimeout)
+ }.getMessage mustBe ("unbr0ken")
+ }
+ }
}
"An interrupted Promise" should {
diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala
index c05e803449..dc01b124c5 100644
--- a/test/files/jvm/inner.scala
+++ b/test/files/jvm/inner.scala
@@ -77,7 +77,7 @@ object Scalatest {
def java(cname: String) =
exec(javacmd, "-cp", classpath, cname)
- /** Execute cmd, wait for the process to end and pipe it's output to stdout */
+ /** Execute cmd, wait for the process to end and pipe its output to stdout */
private def exec(args: String*) {
val proc = Runtime.getRuntime().exec(args.toArray)
val inp = new BufferedReader(new InputStreamReader(proc.getInputStream))
diff --git a/test/files/jvm/innerClassAttribute.check b/test/files/jvm/innerClassAttribute.check
new file mode 100644
index 0000000000..bb532e4f36
--- /dev/null
+++ b/test/files/jvm/innerClassAttribute.check
@@ -0,0 +1,54 @@
+#partest !-Ydelambdafy:method
+-- A4 --
+A4$$anonfun$f$1 / null / null / 17
+A4$$anonfun$f$1 / null / null / 17
+A4 / f / (Lscala/collection/immutable/List;)Lscala/collection/immutable/List;
+-- A19 --
+A19$$anonfun$1 / null / null / 17
+A19$$anonfun$2 / null / null / 17
+A19$$anonfun$3 / null / null / 17
+A19$$anonfun$1 / null / null / 17
+A19$$anonfun$2 / null / null / 17
+A19$$anonfun$3 / null / null / 17
+A19 / null / null
+A19 / null / null
+A19 / null / null
+-- A20 --
+A20$$anonfun$6 / null / null / 17
+fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`
+A20$$anonfun$6 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$1 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$3 / null / null / 17
+fun2 () => (): itself and the outer closure
+A20$$anonfun$6 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$1 / null / null / 17
+fun3 () => () => (): itself, the outer closure and its child closure
+A20$$anonfun$6 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$3 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$3$$anonfun$apply$2 / null / null / 17
+fun4: () => 1: itself and the two outer closures
+A20$$anonfun$6 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$3 / null / null / 17
+A20$$anonfun$6$$anonfun$apply$3$$anonfun$apply$2 / null / null / 17
+enclosing: nested closures have outer class defined, but no outer method
+A20 / null / null
+A20$$anonfun$6 / null / null
+A20$$anonfun$6 / null / null
+A20$$anonfun$6$$anonfun$apply$3 / null / null
+#partest -Ydelambdafy:method
+-- A4 --
+null / null / null
+-- A19 --
+null / null / null
+null / null / null
+null / null / null
+-- A20 --
+fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`
+fun2 () => (): itself and the outer closure
+fun3 () => () => (): itself, the outer closure and its child closure
+fun4: () => 1: itself and the two outer closures
+enclosing: nested closures have outer class defined, but no outer method
+null / null / null
+null / null / null
+null / null / null
+null / null / null
diff --git a/test/files/jvm/innerClassAttribute/Classes_1.scala b/test/files/jvm/innerClassAttribute/Classes_1.scala
new file mode 100644
index 0000000000..fb1f32aa3d
--- /dev/null
+++ b/test/files/jvm/innerClassAttribute/Classes_1.scala
@@ -0,0 +1,297 @@
+class A1 {
+ class B
+}
+
+class A2 {
+ object B
+}
+
+object A3 {
+ class B1
+ object B2
+}
+
+class A4 {
+ def f(l: List[Int]): List[Int] = {
+ l map (_ + 1)
+ }
+}
+
+class A5 {
+ def f(): Object = {
+ object B
+ B
+ }
+}
+
+trait A6 {
+ def hui = -6
+ trait TT
+}
+
+class A7 extends A6
+
+abstract class A8 extends A6 {
+ def fish: TT
+}
+
+class A9 {
+ class brick extends annotation.StaticAnnotation
+}
+
+class A10 {
+ val a9 = new A9()
+ // there's no reference to brick in the bytecode (only in the pickle), so there's no InnerClass attribute for it.
+ @a9.brick def f = -7
+}
+
+class A11 {
+ @JavaAnnot_1.Ann def f = -8
+}
+
+object A12 {
+ object B {
+ class C
+ }
+}
+
+class A13 {
+ def oak: A12.B.C = new A12.B.C
+}
+
+class A14 {
+ def f = {
+ val x: Object = {
+ class K
+ new K
+ }
+ x
+ }
+ def g = {
+ val x: Object = new A6 { }
+ }
+}
+
+object A15 {
+ def f = {
+ class B { // non-static, even though it doesn't have an outer pointer
+ class C // non-static
+ }
+ }
+}
+
+class A16 {
+ val x: A6 = {
+ class U extends A6
+ new A6 { }
+ }
+
+ {
+ class V extends A6
+ new A6 { }
+ }
+
+ new A6 { }
+}
+
+class A17 {
+ object B {
+ class C // not static, also has an outer pointer.
+ }
+}
+
+class A18 {
+ def f = {
+ def g = {
+ class A
+ new A6 { }
+ val y = {
+ if ((new Object).hashCode() == 1) {class B {} ; new B} else 2
+ if ((new Object).hashCode() == 1) new A6 { } else "haifish"
+ }
+ }
+ }
+}
+
+class A19 {
+ ((x: Int) => x + 3)
+
+ val x = {
+ ((x: Int) => x + 1)
+ }
+
+ {
+ ((x: Int) => x + 2)
+ }
+}
+
+class A20 {
+ () => {
+ {() => ()}
+ {() => () => 1}
+ }
+}
+
+class A21 {
+ class I1
+ def f = { class J1 }
+}
+object A21 {
+ class I2
+ object I3 {
+ class J2 // static
+ }
+ def g = { class J3 } // non-static
+ val x = { class J4 } // non-static
+ {
+ class J5 // non-static (!)
+ new J5
+ }
+}
+
+class A22 {
+ class C
+ object C {
+ class D // inner class of C$, not of C. Not added to the inner class table of C, only to C$
+ }
+}
+
+class A23 {
+ def f = {
+ val a = new Java_A_1()
+ val c = new Java_A_1.C()
+ val d = new Java_A_1.C.D()
+ val e = new c.E()
+ val f = new a.F()
+ val g = new f.G()
+ }
+}
+
+trait A24Sym
+
+trait A24Base {
+ // trait with concrete members: interface plus (abstract) impl class
+ trait DefinitionsApi {
+ def Abs: A24Sym
+ def Conc: A24Sym = new A24Sym { }
+ }
+}
+
+trait A24 extends A24Base {
+ class DefinitionsClass extends DefinitionsApi {
+ // bridge methods are generated for Abs and Conc. there used to be a bug: the bridge symbol was a ModuleSymbol,
+ // calling companionClass would return NoSymbol. I changed erasure so that the bridge symbol is a MethodSymbol.
+ object Abs extends A24Sym
+ override object Conc extends A24Sym
+ }
+}
+
+class SI_9105 {
+ // the EnclosingMethod attributes depend on the delambdafy strategy (inline vs method)
+
+ // outerClass-inline enclMeth-inline outerClass-method enclMeth-method
+ val fun = () => {
+ class A // closure null (*) SI_9105 null
+ def m: Object = { class B; new B } // closure m$1 SI_9105 m$1
+ val f: Object = { class C; new C } // closure null (*) SI_9105 null
+ }
+ def met = () => {
+ class D // closure null (*) SI_9105 met
+ def m: Object = { class E; new E } // closure m$1 SI_9105 m$1
+ val f: Object = { class F; new F } // closure null (*) SI_9105 met
+ }
+
+ // (*) the originalOwner chain of A (similar for D) is: SI_9105.fun.$anonfun-value.A
+ // we can get to the anonfun-class (created by uncurry), but not to the apply method.
+ //
+ // for C and F, the originalOwner chain is fun.$anonfun-value.f.C. at later phases, the rawowner of f is
+ // an apply$sp method of the closure class. we could use that as enclosing method, but it would be unsystematic
+ // (A / D don't have an encl meth either), and also strange to use the $sp, which is a compilation artifact.
+ // So using `null` looks more like the situation in the source code: C / F are nested classes of the anon-fun, and
+ // there's no method in between.
+
+ def byName[T](op: => T) = 0
+
+ val bnV = byName {
+ class G // closure null (*) SI_9105 null
+ def m: Object = { class H; new H } // closure m$1 SI_9105 m$1
+ val f: Object = { class I; new I } // closure null (*) SI_9105 null
+ }
+ def bnM = byName {
+ class J // closure null (*) SI_9105 bnM
+ def m: Object = { class K; new K } // closure m$1 SI_9105 m$1
+ val f: Object = { class L; new L } // closure null (*) SI_9105 bnM
+ }
+}
+
+trait SI_9124 {
+ trait A // member class, no enclosing method attribute
+
+ new A { def f1 = 0 } // nested class, enclosing class SI_9124, no encl meth
+
+ def f = new A { def f2 = 0 } // enclosing method is f in the interface SI_9124
+
+ private def g = new A { def f3 = 0 } // only encl class (SI_9124), encl meth is null because the interface SI_9124 doesn't have a method g
+
+ object O { // member, no encl meth attribute
+ new A { def f4 = 0 } // enclosing class is O$, no enclosing method
+ }
+
+ val f1 = { new A { def f5 = 0 }; 1 } // encl class SI_9124, no encl meth
+ private val f2 = { new A { def f6 = 0 }; 1 } // like above
+}
+
+trait ImplClassesAreTopLevel {
+ // all impl classes are top-level, so they don't appear in any InnerClass entry, and none of them have an EnclosingMethod attr
+ trait B1 { def f = 1 }
+ { trait B2 { def f = 1 }; new B2 {} }
+ val m = {
+ trait B3 { def f = 1 }
+ new B3 {}
+ }
+ def n = {
+ trait B4 { def f = 1 }
+ new B4 {}
+ }
+}
+
+class SpecializedClassesAreTopLevel {
+ // all specialized classes are top-level
+ class A[@specialized(Int) T]; new A[Int]
+
+ object T {
+ class B[@specialized(Int) T]; new B[Int]
+ }
+
+ // these crash the compiler, SI-7625
+
+ // { class B[@specialized(Int) T]; new B[Int] }
+
+ // val m: Object = {
+ // class C[@specialized(Int) T]
+ // new C[Int]
+ // }
+
+ // def n: Object = {
+ // class D[@specialized(Int) T]
+ // new D[Int]
+ // }
+}
+
+object NestedInValueClass {
+ // note that we can only test anonymous functions, nested classes are not allowed inside value classes
+ class A(val arg: String) extends AnyVal {
+ // A has InnerClass entries for the two closures (and for A and A$). not for B / C
+ def f = {
+ def g = List().map(x => (() => x)) // outer class A, no outer method (g is moved to the companion, doesn't exist in A)
+ g.map(x => (() => x)) // outer class A, outer method f
+ }
+ // statements and field declarations are not allowed in value classes
+ }
+
+ object A {
+ // A$ has InnerClass entries for B, C, A, A$. Also for the closures above, because they are referenced in A$'s bytecode.
+ class B // member class of A$
+ def f = { class C; new C } // outer class A$, outer method f
+ }
+}
diff --git a/test/files/jvm/innerClassAttribute/JavaAnnot_1.java b/test/files/jvm/innerClassAttribute/JavaAnnot_1.java
new file mode 100644
index 0000000000..27c4d4e5d3
--- /dev/null
+++ b/test/files/jvm/innerClassAttribute/JavaAnnot_1.java
@@ -0,0 +1,3 @@
+public class JavaAnnot_1 {
+ public static @interface Ann {}
+}
diff --git a/test/files/jvm/innerClassAttribute/Java_A_1.java b/test/files/jvm/innerClassAttribute/Java_A_1.java
new file mode 100644
index 0000000000..3357d05e2b
--- /dev/null
+++ b/test/files/jvm/innerClassAttribute/Java_A_1.java
@@ -0,0 +1,10 @@
+public class Java_A_1 {
+ public static class C {
+ public static class D { }
+ public class E { }
+ }
+
+ public class F {
+ public class G { }
+ }
+}
diff --git a/test/files/jvm/innerClassAttribute/Test.scala b/test/files/jvm/innerClassAttribute/Test.scala
new file mode 100644
index 0000000000..bc9aa2376a
--- /dev/null
+++ b/test/files/jvm/innerClassAttribute/Test.scala
@@ -0,0 +1,592 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.{ClassNode, InnerClassNode}
+import asm.{Opcodes => Flags}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def assertSame(a: Any, b: Any) = {
+ assert(a == b, s"\na: $a\nb: $b")
+ }
+
+ val publicStatic = Flags.ACC_PUBLIC | Flags.ACC_STATIC
+ val publicAbstractInterface = Flags.ACC_PUBLIC | Flags.ACC_ABSTRACT | Flags.ACC_INTERFACE
+
+ def innerClassNodes(className: String): List[InnerClassNode] = {
+ loadClassNode(className).innerClasses.asScala.toList.sortBy(_.name)
+ }
+
+ def ownInnerClassNode(n: String) = innerClassNodes(n).filter(_.name == n).head
+
+ def testInner(cls: String, fs: (InnerClassNode => Unit)*) = {
+ val ns = innerClassNodes(cls)
+ assert(ns.length == fs.length, ns)
+ (ns zip fs.toList) foreach { case (n, f) => f(n) }
+ }
+
+
+
+ final case class EnclosingMethod(name: String, descriptor: String, outerClass: String)
+ def enclosingMethod(className: String) = {
+ val n = loadClassNode(className)
+ EnclosingMethod(n.outerMethod, n.outerMethodDesc, n.outerClass)
+ }
+
+ def assertMember(node: InnerClassNode, outer: String, inner: String, name: Option[String] = None, flags: Int = Flags.ACC_PUBLIC) = {
+ assertSame(node.name, name.getOrElse(s"$outer$$$inner"))
+ assertSame(node.outerName, outer)
+ assertSame(node.innerName, inner)
+ assertSame(node.access, flags)
+ }
+
+ def assertAnonymous(node: InnerClassNode, name: String, flags: Int = Flags.ACC_PUBLIC | Flags.ACC_FINAL) = {
+ assertSame(node.name, name)
+ assertSame(node.outerName, null)
+ assertSame(node.innerName, null)
+ assertSame(node.access, flags)
+ }
+
+ def assertLocal(node: InnerClassNode, name: String, inner: String, flags: Int = Flags.ACC_PUBLIC) = {
+ assertSame(node.name, name)
+ assertSame(node.outerName, null)
+ assertSame(node.innerName, inner)
+ assertSame(node.access, flags)
+ }
+
+ def assertEnclosingMethod(className: String, outerClass: String, name: String, descriptor: String) = {
+ val encl = enclosingMethod(className)
+ assertSame(encl.outerClass, outerClass)
+ assertSame(encl.name, name)
+ assertSame(encl.descriptor, descriptor)
+ }
+
+ def assertNoEnclosingMethod(className: String) = {
+ assertSame(enclosingMethod(className).outerClass, null)
+ }
+
+ def printInnerClassNodes(className: String) = {
+ for (n <- innerClassNodes(className)) {
+ println(s"${n.name} / ${n.outerName} / ${n.innerName} / ${n.access}")
+ }
+ }
+
+ def printEnclosingMethod(className: String) = {
+ val e = enclosingMethod(className)
+ println(s"${e.outerClass} / ${e.name} / ${e.descriptor}")
+ }
+
+ def lambdaClass(anonfunName: String, lambdaName: String): String = {
+ if (classpath.findClass(anonfunName).isDefined) anonfunName else lambdaName
+ }
+
+ def testA1() = {
+ val List(b1) = innerClassNodes("A1")
+ assertMember(b1, "A1", "B")
+ val List(b2) = innerClassNodes("A1$B")
+ assertMember(b2, "A1", "B")
+ }
+
+ def testA2() = {
+ val List(b1) = innerClassNodes("A2")
+ assertMember(b1, "A2", "B$")
+ val List(b2) = innerClassNodes("A2$B$")
+ assertMember(b2, "A2", "B$")
+ }
+
+ def testA3() = {
+ // the inner class entries for top-level object members are in the companion class, so nothing in the module class
+ val List() = innerClassNodes("A3$")
+
+ // inner class entries in the companion class (a backend-generated mirror class in this case)
+ val List(b1, b2) = innerClassNodes("A3")
+ // the outer class for classes nested inside top-level modules is not the module class, but the mirror class.
+ // this is a hack for java interop, handled in the backend. see BTypes.scala, comment on "Java Compatibility".
+ assertMember(b1, "A3", "B1", flags = publicStatic)
+ assertMember(b2, "A3", "B2$", flags = publicStatic)
+ }
+
+ def testA4() = {
+ println("-- A4 --")
+ printInnerClassNodes("A4")
+ val fun = lambdaClass("A4$$anonfun$f$1", "A4$lambda$$f$1")
+ printInnerClassNodes(fun)
+ printEnclosingMethod(fun)
+ }
+
+ def testA5() = {
+ val List(b1) = innerClassNodes("A5")
+ assertLocal(b1, "A5$B$2$", "B$2$")
+ val List(b2) = innerClassNodes("A5$B$2$")
+ assertLocal(b2, "A5$B$2$", "B$2$")
+ assertEnclosingMethod(
+ "A5$B$2$",
+ "A5", "f", "()Ljava/lang/Object;")
+ }
+
+ def testA6() = {
+ val List(tt1) = innerClassNodes("A6")
+ assertMember(tt1, "A6", "TT", flags = publicAbstractInterface)
+ val List() = innerClassNodes("A6$class")
+ val List(tt2) = innerClassNodes("A6$TT")
+ assertMember(tt2, "A6", "TT", flags = publicAbstractInterface)
+ }
+
+ def testA7() = {
+ val List() = innerClassNodes("A7")
+ }
+
+ def testA8() = {
+ val List(tt) = innerClassNodes("A8")
+ assertMember(tt, "A6", "TT", flags = publicAbstractInterface)
+ }
+
+ def testA10() = {
+ val List() = innerClassNodes("A10")
+ }
+
+ def testA11() = {
+ val List(ann) = innerClassNodes("A11")
+ // in the java class file, the INNERCLASS attribute has more flags (public | static | abstract | interface | annotation)
+ // the scala compiler has its own interpretation of java annotations and their flags; it only emits publicStatic.
+ assertMember(ann, "JavaAnnot_1", "Ann", flags = publicStatic)
+ }
+
+ def testA13() = {
+ val List(b, c) = innerClassNodes("A13")
+ assertMember(b, "A12", "B$", flags = publicStatic)
+ assertMember(c, "A12$B$", "C", name = Some("A12$B$C"), flags = publicStatic)
+ }
+
+ def testA14() = {
+ val List(anon, k) = innerClassNodes("A14")
+
+ assertLocal(k, "A14$K$1", "K$1")
+ assertEnclosingMethod(
+ "A14$K$1",
+ "A14", "f", "()Ljava/lang/Object;")
+
+ assertAnonymous(anon, "A14$$anon$1")
+ assertEnclosingMethod(
+ "A14$$anon$1",
+ "A14", "g", "()V")
+ }
+
+ def testA15() = {
+ // no member classes, only anonymous / local. these are nested in the module class, not the companion.
+ val List() = innerClassNodes("A15")
+
+ val List(b) = innerClassNodes("A15$")
+ assertLocal(b, "A15$B$3", "B$3")
+
+ val List(_, c) = innerClassNodes("A15$B$3")
+ assertMember(c, "A15$B$3", "C")
+
+ assertEnclosingMethod(
+ "A15$B$3",
+ "A15$", "f", "()V")
+ assertNoEnclosingMethod("A15$B$3$C")
+ }
+
+ def testA16() = {
+ val List(anon1, anon2, anon3, u, v) = innerClassNodes("A16")
+ assertAnonymous(anon1, "A16$$anon$2")
+ assertAnonymous(anon2, "A16$$anon$3")
+ assertAnonymous(anon3, "A16$$anon$4")
+
+ assertLocal(u, "A16$U$1", "U$1")
+ assertLocal(v, "A16$V$1", "V$1")
+
+ assertEnclosingMethod(
+ "A16$$anon$2",
+ "A16", null, null)
+ assertEnclosingMethod(
+ "A16$$anon$3",
+ "A16", null, null)
+ assertEnclosingMethod(
+ "A16$$anon$4",
+ "A16", null, null)
+
+ assertEnclosingMethod(
+ "A16$U$1",
+ "A16", null, null)
+ assertEnclosingMethod(
+ "A16$V$1",
+ "A16", null, null)
+ }
+
+ def testA17() = {
+ val List(b, c) = innerClassNodes("A17$B$")
+ assertMember(b, "A17", "B$")
+ assertMember(c, "A17$B$", "C", name = Some("A17$B$C")) // not static, has an outer pointer.
+ }
+
+ def testA18() = {
+ val List(anon1, anon2, a, b) = innerClassNodes("A18")
+ assertAnonymous(anon1, "A18$$anon$5")
+ assertAnonymous(anon2, "A18$$anon$6")
+
+ assertLocal(a, "A18$A$2", "A$2")
+ assertLocal(b, "A18$B$4", "B$4")
+
+ assertEnclosingMethod(
+ "A18$$anon$5",
+ "A18", "g$1", "()V")
+ assertEnclosingMethod(
+ "A18$$anon$6",
+ "A18", "g$1", "()V")
+
+ assertEnclosingMethod(
+ "A18$A$2",
+ "A18", "g$1", "()V")
+ assertEnclosingMethod(
+ "A18$B$4",
+ "A18", "g$1", "()V")
+ }
+
+ def testA19() = {
+ println("-- A19 --")
+
+ printInnerClassNodes("A19")
+
+ val fun1 = lambdaClass("A19$$anonfun$1", "A19$lambda$1")
+ val fun2 = lambdaClass("A19$$anonfun$2", "A19$lambda$2")
+ val fun3 = lambdaClass("A19$$anonfun$3", "A19$lambda$3")
+
+ printInnerClassNodes(fun1)
+ printInnerClassNodes(fun2)
+ printInnerClassNodes(fun3)
+
+ printEnclosingMethod(fun1)
+ printEnclosingMethod(fun2)
+ printEnclosingMethod(fun3)
+ }
+
+ def testA20() = {
+ println("-- A20 --")
+
+ printInnerClassNodes("A20")
+
+ val fun1 = lambdaClass("A20$$anonfun$6", "A20$lambda$1")
+ val fun2 = lambdaClass("A20$$anonfun$6$$anonfun$apply$1", "A20$lambda$$$nestedInAnonfun$5$1")
+ val fun3 = lambdaClass("A20$$anonfun$6$$anonfun$apply$3", "A20$lambda$$$nestedInAnonfun$5$2")
+ val fun4 = lambdaClass("A20$$anonfun$6$$anonfun$apply$3$$anonfun$apply$2", "A20$lambda$$$nestedInAnonfun$7$1")
+
+ println("fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`")
+ printInnerClassNodes(fun1)
+ println("fun2 () => (): itself and the outer closure")
+ printInnerClassNodes(fun2)
+ println("fun3 () => () => (): itself, the outer closure and its child closure")
+ printInnerClassNodes(fun3)
+ println("fun4: () => 1: itself and the two outer closures")
+ printInnerClassNodes(fun4)
+
+ println("enclosing: nested closures have outer class defined, but no outer method")
+ printEnclosingMethod(fun1)
+ printEnclosingMethod(fun2)
+ printEnclosingMethod(fun3)
+ printEnclosingMethod(fun4)
+ }
+
+ def testA21() = {
+ val List(i1c, i2c, i3c, j1) = innerClassNodes("A21")
+ assertMember(i1c, "A21", "I1")
+ assertMember(i2c, "A21", "I2", flags = publicStatic)
+ assertMember(i3c, "A21", "I3$", flags = publicStatic)
+ assertLocal(j1, "A21$J1$1", "J1$1")
+
+ val List(j3, j4, j5) = innerClassNodes("A21$")
+ assertLocal(j3, "A21$J3$1", "J3$1")
+ assertLocal(j4, "A21$J4$1", "J4$1")
+ assertLocal(j5, "A21$J5$1", "J5$1") // non-static!
+
+ val List(i3x, j2x) = innerClassNodes("A21$I3$J2")
+ assertMember(j2x, "A21$I3$", "J2", name = Some("A21$I3$J2"), flags = publicStatic)
+
+ assertNoEnclosingMethod("A21$I3$J2")
+ assertEnclosingMethod("A21$J3$1", "A21$", "g", "()V")
+ assertEnclosingMethod("A21$J4$1", "A21$", null, null)
+ assertEnclosingMethod("A21$J5$1", "A21$", null, null)
+ }
+
+ def testA22() = {
+ val List(cc) = innerClassNodes("A22$C")
+ assertMember(cc, "A22", "C")
+ val List(cm, d) = innerClassNodes("A22$C$")
+ assertMember(cm, "A22", "C$")
+ assertMember(d, "A22$C$", "D", name = Some("A22$C$D"))
+ }
+
+ def testA23() {
+ val List(c, d, e, f, g) = innerClassNodes("A23")
+ assertMember(c, "Java_A_1", "C", flags = publicStatic)
+ assertMember(d, "Java_A_1$C", "D", flags = publicStatic)
+ assertMember(e, "Java_A_1$C", "E")
+ assertMember(f, "Java_A_1", "F")
+ assertMember(g, "Java_A_1$F", "G")
+ }
+
+ def testA24() {
+ val List(defsCls, abs, conc, defsApi) = innerClassNodes("A24$DefinitionsClass")
+ assertMember(defsCls, "A24", "DefinitionsClass")
+ assertMember(abs, "A24$DefinitionsClass", "Abs$")
+ assertMember(conc, "A24$DefinitionsClass", "Conc$")
+ assertMember(defsApi, "A24Base", "DefinitionsApi", flags = publicAbstractInterface)
+ }
+
+ def testSI_9105() {
+ val isDelambdafyMethod = classpath.findClass("SI_9105$lambda$1").isDefined
+ if (isDelambdafyMethod) {
+ assertEnclosingMethod ("SI_9105$A$3" , "SI_9105", null , null)
+ assertEnclosingMethod ("SI_9105$B$5" , "SI_9105", "m$1", "()Ljava/lang/Object;")
+ assertEnclosingMethod ("SI_9105$C$1" , "SI_9105", null , null)
+ assertEnclosingMethod ("SI_9105$D$1" , "SI_9105", "met", "()Lscala/Function0;")
+ assertEnclosingMethod ("SI_9105$E$1" , "SI_9105", "m$3", "()Ljava/lang/Object;")
+ assertEnclosingMethod ("SI_9105$F$1" , "SI_9105", "met", "()Lscala/Function0;")
+ assertNoEnclosingMethod("SI_9105$lambda$$met$1")
+ assertNoEnclosingMethod("SI_9105$lambda$1")
+ assertNoEnclosingMethod("SI_9105")
+
+ assertLocal(innerClassNodes("SI_9105$A$3").head, "SI_9105$A$3", "A$3")
+ assertLocal(innerClassNodes("SI_9105$B$5").head, "SI_9105$B$5", "B$5")
+ assertLocal(innerClassNodes("SI_9105$C$1").head, "SI_9105$C$1", "C$1")
+ assertLocal(innerClassNodes("SI_9105$D$1").head, "SI_9105$D$1", "D$1")
+ assertLocal(innerClassNodes("SI_9105$E$1").head, "SI_9105$E$1", "E$1")
+ assertLocal(innerClassNodes("SI_9105$F$1").head, "SI_9105$F$1", "F$1")
+
+ // by-name
+ assertEnclosingMethod("SI_9105$G$1", "SI_9105", null , null)
+ assertEnclosingMethod("SI_9105$H$1", "SI_9105", "m$2", "()Ljava/lang/Object;")
+ assertEnclosingMethod("SI_9105$I$1", "SI_9105", null , null)
+ assertEnclosingMethod("SI_9105$J$1", "SI_9105", "bnM", "()I")
+ assertEnclosingMethod("SI_9105$K$2", "SI_9105", "m$4", "()Ljava/lang/Object;")
+ assertEnclosingMethod("SI_9105$L$1", "SI_9105", "bnM", "()I")
+
+ assert(innerClassNodes("SI_9105$lambda$$met$1").isEmpty)
+ assert(innerClassNodes("SI_9105$lambda$1").isEmpty)
+ assert(innerClassNodes("SI_9105").length == 12) // the 12 local classes
+ } else {
+ // comment in innerClassAttribute/Classes_1.scala explains the difference between A / C and D / F.
+ assertEnclosingMethod ("SI_9105$$anonfun$4$A$3" , "SI_9105$$anonfun$4" , null , null)
+ assertEnclosingMethod ("SI_9105$$anonfun$4$B$5" , "SI_9105$$anonfun$4" , "m$1" , "()Ljava/lang/Object;")
+ assertEnclosingMethod ("SI_9105$$anonfun$4$C$1" , "SI_9105$$anonfun$4" , null , null)
+ assertEnclosingMethod ("SI_9105$$anonfun$met$1$D$1", "SI_9105$$anonfun$met$1", null , null)
+ assertEnclosingMethod ("SI_9105$$anonfun$met$1$E$1", "SI_9105$$anonfun$met$1", "m$3" , "()Ljava/lang/Object;")
+ assertEnclosingMethod ("SI_9105$$anonfun$met$1$F$1", "SI_9105$$anonfun$met$1", null , null)
+ assertEnclosingMethod ("SI_9105$$anonfun$4" , "SI_9105" , null , null)
+ assertEnclosingMethod ("SI_9105$$anonfun$met$1" , "SI_9105" , "met" , "()Lscala/Function0;")
+ assertNoEnclosingMethod("SI_9105")
+
+ assertLocal(ownInnerClassNode("SI_9105$$anonfun$4$A$3"), "SI_9105$$anonfun$4$A$3" , "A$3")
+ assertLocal(ownInnerClassNode("SI_9105$$anonfun$4$B$5"), "SI_9105$$anonfun$4$B$5" , "B$5")
+ assertLocal(ownInnerClassNode("SI_9105$$anonfun$4$C$1"), "SI_9105$$anonfun$4$C$1" , "C$1")
+ assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$D$1"), "SI_9105$$anonfun$met$1$D$1", "D$1")
+ assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$E$1"), "SI_9105$$anonfun$met$1$E$1", "E$1")
+ assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$F$1"), "SI_9105$$anonfun$met$1$F$1", "F$1")
+
+ // by-name
+ assertEnclosingMethod("SI_9105$$anonfun$5$G$1", "SI_9105$$anonfun$5", null, null)
+ assertEnclosingMethod("SI_9105$$anonfun$5$H$1", "SI_9105$$anonfun$5", "m$2", "()Ljava/lang/Object;")
+ assertEnclosingMethod("SI_9105$$anonfun$5$I$1", "SI_9105$$anonfun$5", null, null)
+ assertEnclosingMethod("SI_9105$$anonfun$bnM$1$J$1", "SI_9105$$anonfun$bnM$1", null, null)
+ assertEnclosingMethod("SI_9105$$anonfun$bnM$1$K$2", "SI_9105$$anonfun$bnM$1", "m$4", "()Ljava/lang/Object;")
+ assertEnclosingMethod("SI_9105$$anonfun$bnM$1$L$1", "SI_9105$$anonfun$bnM$1", null, null)
+
+ assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$4"), "SI_9105$$anonfun$4")
+ assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$met$1"), "SI_9105$$anonfun$met$1")
+
+ assert(innerClassNodes("SI_9105$$anonfun$4").length == 4) // itself and three of the local classes
+ assert(innerClassNodes("SI_9105$$anonfun$met$1").length == 4) // itself and three of the local classes
+ assert(innerClassNodes("SI_9105").length == 4) // the four anon funs
+ }
+ }
+
+ def testSI_9124() {
+ val classes: Map[String, String] = {
+ List("SI_9124$$anon$10",
+ "SI_9124$$anon$11",
+ "SI_9124$$anon$12",
+ "SI_9124$$anon$8",
+ "SI_9124$$anon$9",
+ "SI_9124$O$$anon$13").map({ name =>
+ val node = loadClassNode(name)
+ val fMethod = node.methods.asScala.find(_.name.startsWith("f")).get.name
+ (fMethod, node.name)
+ }).toMap
+ }
+
+ // println(classes)
+
+ assertNoEnclosingMethod("SI_9124$A")
+ assertEnclosingMethod(classes("f1"), "SI_9124", null, null)
+ assertEnclosingMethod(classes("f2"), "SI_9124", "f", "()LSI_9124$A;")
+ assertEnclosingMethod(classes("f3"), "SI_9124", null, null)
+ assertEnclosingMethod(classes("f4"), "SI_9124$O$", null, null)
+ assertEnclosingMethod(classes("f5"), "SI_9124", null, null)
+ assertEnclosingMethod(classes("f6"), "SI_9124", null, null)
+ assertNoEnclosingMethod("SI_9124$O$")
+
+ assertMember(ownInnerClassNode("SI_9124$A"), "SI_9124", "A", flags = publicAbstractInterface)
+ classes.values.foreach(n => assertAnonymous(ownInnerClassNode(n), n))
+ assertMember(ownInnerClassNode("SI_9124$O$"), "SI_9124", "O$")
+ }
+
+ def testImplClassesTopLevel() {
+ val classes = List(
+ "ImplClassesAreTopLevel$$anon$14",
+ "ImplClassesAreTopLevel$$anon$15",
+ "ImplClassesAreTopLevel$$anon$16",
+ "ImplClassesAreTopLevel$B1$class",
+ "ImplClassesAreTopLevel$B1",
+ "ImplClassesAreTopLevel$B2$1$class",
+ "ImplClassesAreTopLevel$B2$1",
+ "ImplClassesAreTopLevel$B3$1$class",
+ "ImplClassesAreTopLevel$B3$1",
+ "ImplClassesAreTopLevel$B4$class",
+ "ImplClassesAreTopLevel$B4$1",
+ "ImplClassesAreTopLevel$class",
+ "ImplClassesAreTopLevel")
+
+ classes.filter(_.endsWith("$class")).foreach(assertNoEnclosingMethod)
+ classes.flatMap(innerClassNodes).foreach(icn => assert(!icn.name.endsWith("$class"), icn))
+
+ assertNoEnclosingMethod("ImplClassesAreTopLevel$B1") // member, no encl meth attr
+
+ // no encl meth, but encl class
+ List("ImplClassesAreTopLevel$B2$1", "ImplClassesAreTopLevel$B3$1",
+ "ImplClassesAreTopLevel$$anon$14", "ImplClassesAreTopLevel$$anon$15").foreach(assertEnclosingMethod(_, "ImplClassesAreTopLevel", null, null))
+
+ // encl meth n
+ List("ImplClassesAreTopLevel$B4$1", "ImplClassesAreTopLevel$$anon$16").foreach(assertEnclosingMethod(_, "ImplClassesAreTopLevel", "n", "()Ljava/lang/Object;"))
+
+ val an14 = assertAnonymous(_: InnerClassNode, "ImplClassesAreTopLevel$$anon$14")
+ val an15 = assertAnonymous(_: InnerClassNode, "ImplClassesAreTopLevel$$anon$15")
+ val an16 = assertAnonymous(_: InnerClassNode, "ImplClassesAreTopLevel$$anon$16")
+ val b1 = assertMember(_: InnerClassNode, "ImplClassesAreTopLevel", "B1", flags = publicAbstractInterface)
+ val b2 = assertLocal(_ : InnerClassNode, "ImplClassesAreTopLevel$B2$1", "B2$1", flags = publicAbstractInterface)
+ val b3 = assertLocal(_ : InnerClassNode, "ImplClassesAreTopLevel$B3$1", "B3$1", flags = publicAbstractInterface)
+ val b4 = assertLocal(_ : InnerClassNode, "ImplClassesAreTopLevel$B4$1", "B4$1", flags = publicAbstractInterface)
+
+ testInner("ImplClassesAreTopLevel$$anon$14", an14, b3)
+ testInner("ImplClassesAreTopLevel$$anon$15", an15, b2)
+ testInner("ImplClassesAreTopLevel$$anon$16", an16, b4)
+
+ testInner("ImplClassesAreTopLevel$B1$class", b1)
+ testInner("ImplClassesAreTopLevel$B2$1$class", b2)
+ testInner("ImplClassesAreTopLevel$B3$1$class", b3)
+ testInner("ImplClassesAreTopLevel$B4$class", b4)
+
+ testInner("ImplClassesAreTopLevel$B1", b1)
+ testInner("ImplClassesAreTopLevel$B2$1", b2)
+ testInner("ImplClassesAreTopLevel$B3$1", b3)
+ testInner("ImplClassesAreTopLevel$B4$1", b4)
+
+ testInner("ImplClassesAreTopLevel$class", an14, an15, an16)
+ testInner("ImplClassesAreTopLevel", an14, an15, an16, b1, b2, b3, b4)
+ }
+
+ def testSpecializedClassesTopLevel() {
+ val cls = List(
+ "SpecializedClassesAreTopLevel$A$mcI$sp",
+ "SpecializedClassesAreTopLevel$A",
+ "SpecializedClassesAreTopLevel$T$",
+ "SpecializedClassesAreTopLevel$T$B$mcI$sp",
+ "SpecializedClassesAreTopLevel$T$B",
+ "SpecializedClassesAreTopLevel")
+
+ // all classes are members, no local (can't test local, they crash in specialize)
+ cls.foreach(assertNoEnclosingMethod)
+ cls.flatMap(innerClassNodes).foreach(icn => assert(!icn.name.endsWith("$sp"), icn))
+
+ val a = assertMember(_: InnerClassNode, "SpecializedClassesAreTopLevel", "A")
+ val t = assertMember(_: InnerClassNode, "SpecializedClassesAreTopLevel", "T$")
+ val b = assertMember(_: InnerClassNode, "SpecializedClassesAreTopLevel$T$", "B", Some("SpecializedClassesAreTopLevel$T$B"))
+
+ List("SpecializedClassesAreTopLevel$A$mcI$sp", "SpecializedClassesAreTopLevel$A").foreach(testInner(_, a))
+ testInner("SpecializedClassesAreTopLevel", a, t)
+ List("SpecializedClassesAreTopLevel$T$", "SpecializedClassesAreTopLevel$T$B$mcI$sp", "SpecializedClassesAreTopLevel$T$B").foreach(testInner(_, t, b))
+ }
+
+ def testNestedInValueClass() {
+ List(
+ "NestedInValueClass",
+ "NestedInValueClass$",
+ "NestedInValueClass$A",
+ "NestedInValueClass$A$",
+ "NestedInValueClass$A$B").foreach(assertNoEnclosingMethod)
+
+ assertEnclosingMethod("NestedInValueClass$A$C$2", "NestedInValueClass$A$", "f", "()Ljava/lang/Object;")
+
+ type I = InnerClassNode
+ val a = assertMember(_: I, "NestedInValueClass", "A", flags = publicStatic | Flags.ACC_FINAL)
+ val am = assertMember(_: I, "NestedInValueClass", "A$", flags = publicStatic)
+ val b = assertMember(_: I, "NestedInValueClass$A$", "B", Some("NestedInValueClass$A$B"), flags = publicStatic)
+ val c = assertLocal(_: I, "NestedInValueClass$A$C$2", "C$2")
+
+ testInner("NestedInValueClass$")
+ testInner("NestedInValueClass", a, am)
+ testInner("NestedInValueClass$A$B", am, b)
+ testInner("NestedInValueClass$A$C$2", am, c)
+
+ val isDelambdafyMethod = classpath.findClass("NestedInValueClass$A$lambda$$f$extension$1").isDefined
+ if (isDelambdafyMethod) {
+ List(
+ "NestedInValueClass$A$lambda$$g$2$1",
+ "NestedInValueClass$A$lambda$$f$extension$1",
+ "NestedInValueClass$A$lambda$$$nestedInAnonfun$13$1",
+ "NestedInValueClass$A$lambda$$$nestedInAnonfun$15$1").foreach(assertNoEnclosingMethod)
+ testInner("NestedInValueClass$A", a, am)
+ testInner("NestedInValueClass$A$", a, am, b, c)
+ testInner("NestedInValueClass$A$lambda$$g$2$1", am)
+ testInner("NestedInValueClass$A$lambda$$f$extension$1", am)
+ testInner("NestedInValueClass$A$lambda$$$nestedInAnonfun$13$1", am)
+ testInner("NestedInValueClass$A$lambda$$$nestedInAnonfun$15$1", am)
+ } else {
+ assertEnclosingMethod("NestedInValueClass$A$$anonfun$g$2$1" , "NestedInValueClass$A" , null, null)
+ assertEnclosingMethod("NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4" , "NestedInValueClass$A$$anonfun$g$2$1" , null, null)
+ assertEnclosingMethod("NestedInValueClass$A$$anonfun$f$extension$1" , "NestedInValueClass$A" , "f", "()Lscala/collection/immutable/List;")
+ assertEnclosingMethod("NestedInValueClass$A$$anonfun$f$extension$1$$anonfun$apply$5", "NestedInValueClass$A$$anonfun$f$extension$1", null, null)
+
+ val gfun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$g$2$1")
+ val ffun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$f$extension$1")
+ val gfunfun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4")
+ val ffunfun = assertAnonymous(_: I, "NestedInValueClass$A$$anonfun$f$extension$1$$anonfun$apply$5")
+
+ testInner("NestedInValueClass$A", a, am, ffun, gfun)
+ testInner("NestedInValueClass$A$", a, am, ffun, gfun, b, c)
+ testInner("NestedInValueClass$A$$anonfun$g$2$1", a, am, gfun, gfunfun)
+ testInner("NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4", am, gfun, gfunfun)
+ testInner("NestedInValueClass$A$$anonfun$f$extension$1", a, am, ffun, ffunfun)
+ testInner("NestedInValueClass$A$$anonfun$f$extension$1$$anonfun$apply$5", am, ffun, ffunfun)
+ }
+ }
+
+ def show(): Unit = {
+ testA1()
+ testA2()
+ testA3()
+ testA4()
+ testA5()
+ testA6()
+ testA7()
+ testA8()
+ testA10()
+ testA11()
+ testA13()
+ testA14()
+ testA15()
+ testA16()
+ testA17()
+ testA18()
+ testA19()
+ testA20()
+ testA21()
+ testA22()
+ testA23()
+ testA24()
+ testSI_9105()
+ testSI_9124()
+ testImplClassesTopLevel()
+ testSpecializedClassesTopLevel()
+ testNestedInValueClass()
+ }
+}
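The assertions in the test above ultimately inspect the InnerClasses attribute of the generated class files. For readers who want to reproduce that inspection outside the partest harness, here is a minimal standalone sketch using the plain org.objectweb.asm artifact (the test itself uses the compiler's bundled ASM fork and its own helpers; the class name "Example" below is a made-up placeholder):

    import org.objectweb.asm.ClassReader
    import org.objectweb.asm.tree.ClassNode
    import scala.collection.JavaConverters._

    object InnerClassDump {
      def main(args: Array[String]): Unit = {
        // ClassReader(String) reads the class file bytes for the named class from the classpath
        val cn = new ClassNode
        new ClassReader("Example").accept(cn, 0)
        // each entry mirrors one InnerClasses table row: name, outerName, innerName, access flags
        cn.innerClasses.asScala.foreach { icn =>
          println(s"${icn.name} outer=${icn.outerName} inner=${icn.innerName} access=${icn.access}")
        }
      }
    }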
diff --git a/test/files/jvm/innerClassEnclMethodJavaReflection.scala b/test/files/jvm/innerClassEnclMethodJavaReflection.scala
new file mode 100644
index 0000000000..ee39cb43bf
--- /dev/null
+++ b/test/files/jvm/innerClassEnclMethodJavaReflection.scala
@@ -0,0 +1,65 @@
+import scala.reflect.io._
+import java.net.URLClassLoader
+
+object Test extends App {
+ val jarsOrDirectories = Set("partest.lib", "partest.reflect", "partest.comp") map sys.props
+
+ object AllowedMissingClass {
+ // Some classes in scala-compiler.jar have references to jline / ant classes, which do not seem
+ // to be on the classpath. We just skip over those classes.
+ // PENDING: for now we also allow missing $anonfun classes: the optimizer may eliminate some closures
+ // that are referred to in EnclosingClass attributes. SI-9136
+ val allowedMissingPackages = Set("jline", "org.apache.tools.ant", "$anonfun")
+
+ def ok(t: Throwable) = {
+ allowedMissingPackages.exists(p => t.getMessage.replace('/', '.').contains(p))
+ }
+
+ def unapply(t: Throwable): Option[Throwable] = t match {
+ case _: NoClassDefFoundError | _: ClassNotFoundException | _: TypeNotPresentException if ok(t) => Some(t)
+ case _ => None
+ }
+ }
+
+ jarsOrDirectories foreach testClasses
+
+ def testClasses(jarOrDirectory: String): Unit = {
+ val classPath = AbstractFile.getDirectory(new java.io.File(jarOrDirectory))
+
+ def flatten(f: AbstractFile): Iterator[AbstractFile] =
+ if (f.isClassContainer) f.iterator.flatMap(flatten)
+ else Iterator(f)
+
+ val classFullNames = flatten(classPath).filter(_.hasExtension("class")).map(_.path.replace("/", ".").replaceAll(".class$", ""))
+
+ // it seems that Class objects can only be GC'd together with their class loader
+ // (http://stackoverflow.com/questions/2433261/when-and-how-are-classes-garbage-collected-in-java)
+ // if we just use the same class loader for the entire test (Class.forName), we run out of PermGen.
+ // Even with one loader per classpath entry, we still need a PermGen of 90M or so; the default 64M is not enough. I tried
+ // using one class loader per 100 classes, but that didn't help: the classes didn't get GC'd.
+ val classLoader = new URLClassLoader(Array(classPath.toURL))
+
+ val faulty = new collection.mutable.ListBuffer[(String, Throwable)]
+
+ def tryGetClass(name: String) = try {
+ Some[Class[_]](classLoader.loadClass(name))
+ } catch {
+ case AllowedMissingClass(_) => None
+ }
+
+ for (name <- classFullNames; cls <- tryGetClass(name)) {
+ try {
+ cls.getEnclosingMethod
+ cls.getEnclosingClass
+ cls.getEnclosingConstructor
+ cls.getDeclaredClasses
+ } catch {
+ case AllowedMissingClass(_) =>
+ case t: Throwable => faulty += ((name, t))
+ }
+ }
+
+ if (faulty.nonEmpty)
+ println(faulty.toList mkString "\n")
+ }
+}
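As the comments in testClasses explain, the test creates a throwaway URLClassLoader per classpath entry so that the loaded Class objects can be garbage collected together with their loader, and it tolerates classes whose dependencies are absent. A minimal sketch of that pattern, with a made-up jar path and class name rather than the values used by the test:

    import java.io.File
    import java.net.URLClassLoader

    object LoadAndInspect {
      def main(args: Array[String]): Unit = {
        // one fresh loader per jar: dropping the loader afterwards lets its classes be GC'd
        val loader = new URLClassLoader(Array(new File("example.jar").toURI.toURL))
        try {
          val cls = loader.loadClass("p.Example")
          // these calls resolve referenced classes and may fail if a dependency is missing
          println(cls.getEnclosingClass)
          println(cls.getDeclaredClasses.length)
        } catch {
          case _: NoClassDefFoundError | _: ClassNotFoundException =>
            () // skip classes whose dependencies (e.g. jline / ant) are not on this classpath
        }
      }
    }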
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index b55ecc10e6..d03edb638c 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -95,7 +95,7 @@ scala> case class Bar(n: Int)
defined class Bar
scala> implicit def foo2bar(foo: Foo) = Bar(foo.n)
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
foo2bar: (foo: Foo)Bar
scala> val bar: Bar = Foo(3)
@@ -269,7 +269,7 @@ scala> xs map (x => x)
res6: Array[_] = Array(1, 2)
scala> xs map (x => (x, x))
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2))
scala>
@@ -361,7 +361,7 @@ It would fail on the following inputs: Exp(), Term()
^
f: (e: Exp)Int
-scala>
+scala> :quit
plusOne: (x: Int)Int
res0: Int = 6
res0: String = after reset
diff --git a/test/files/jvm/javaReflection.check b/test/files/jvm/javaReflection.check
new file mode 100644
index 0000000000..8180ecff8a
--- /dev/null
+++ b/test/files/jvm/javaReflection.check
@@ -0,0 +1,259 @@
+#partest !-Ydelambdafy:method
+A$$anonfun$$lessinit$greater$1 / null (canon) / $anonfun$$lessinit$greater$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anonfun$$lessinit$greater$1$$anonfun$apply$1 / null (canon) / $anonfun$apply$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A$$anonfun$$lessinit$greater$1 (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anonfun$2 / null (canon) / $anonfun$2 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anonfun$3 / null (canon) / $anonfun$3 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anonfun$4 / null (canon) / $anonfun$4 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anonfun$f$1 / null (canon) / $anonfun$f$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$$anonfun$f$2 / null (canon) / $anonfun$f$2 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$D$$anonfun$1 / null (canon) / anonfun$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+AO$$anonfun$5 / null (canon) / anonfun$5 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class AO$ (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+AT$$anonfun$6 / null (canon) / $anonfun$6 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / interface AT (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+#partest -Ydelambdafy:method
+A$D$lambda$1 / A$D$lambda$1 (canon) / A$D$lambda$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$$$lessinit$greater$1 / A$lambda$$$lessinit$greater$1 (canon) / A$lambda$$$lessinit$greater$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$$$nestedInAnonfun$7$1 / A$lambda$$$nestedInAnonfun$7$1 (canon) / A$lambda$$$nestedInAnonfun$7$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$$f$1 / A$lambda$$f$1 (canon) / A$lambda$$f$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$$f$2 / A$lambda$$f$2 (canon) / A$lambda$$f$2 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$1 / A$lambda$1 (canon) / A$lambda$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$2 / A$lambda$2 (canon) / A$lambda$2 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$lambda$3 / A$lambda$3 (canon) / A$lambda$3 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+AO$lambda$1 / AO$lambda$1 (canon) / AO$lambda$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+AT$class$lambda$1 / AT$class$lambda$1 (canon) / AT$class$lambda$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+#partest
+A / A (canon) / A (simple)
+- declared cls: List(class A$B, interface A$C, class A$D$)
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+A$$anon$1 / null (canon) / $anon$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anon$3 / null (canon) / $anon$3 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anon$4 / null (canon) / $anon$4 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$$anon$5 / null (canon) / $anon$5 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$$anon$6 / null (canon) / $anon$6 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$$anon$7 / null (canon) / $anon$7 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth)
+- properties : true (local) / false (member)
+A$B / A.B (canon) / B (simple)
+- declared cls: List()
+- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+A$C / A.C (canon) / C (simple)
+- declared cls: List()
+- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+A$D$ / A.D$ (canon) / D$ (simple)
+- declared cls: List(class A$D$B, interface A$D$C, class A$D$D$)
+- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+A$D$$anon$2 / null (canon) / anon$2 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$D$B / Malformed class name (canon) / Malformed class name (simple)
+- declared cls: List()
+- enclosing : class A$D$ (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+A$D$C / Malformed class name (canon) / Malformed class name (simple)
+- declared cls: List()
+- enclosing : class A$D$ (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+A$D$D$ / Malformed class name (canon) / Malformed class name (simple)
+- declared cls: List()
+- enclosing : class A$D$ (declaring cls) / class A$D$ (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+A$D$KB$1 / null (canon) / Malformed class name (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / public void A$D$.f() (meth)
+- properties : Malformed class name (local) / false (member)
+A$E$1 / null (canon) / E$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$F$1 / null (canon) / F$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$G$2$ / null (canon) / G$2$ (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$H$1 / null (canon) / H$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$I$1 / null (canon) / I$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$J$2$ / null (canon) / J$2$ (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth)
+- properties : true (local) / false (member)
+A$K$1 / null (canon) / K$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$L$1 / null (canon) / L$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$M$2$ / null (canon) / M$2$ (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$N$1 / null (canon) / N$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$O$1 / null (canon) / O$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$P$2$ / null (canon) / P$2$ (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+A$Q$1 / null (canon) / Q$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth)
+- properties : true (local) / false (member)
+A$R$1 / null (canon) / R$1 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth)
+- properties : true (local) / false (member)
+A$S$2$ / null (canon) / S$2$ (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth)
+- properties : true (local) / false (member)
+AO / AO (canon) / AO (simple)
+- declared cls: List(class AO$B, interface AO$C, class AO$D$)
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+AO$ / AO$ (canon) / AO$ (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+AO$$anon$8 / null (canon) / anon$8 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / class AO$ (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+AO$B / AO.B (canon) / B (simple)
+- declared cls: List()
+- enclosing : class AO (declaring cls) / class AO (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+AO$C / AO.C (canon) / C (simple)
+- declared cls: List()
+- enclosing : class AO (declaring cls) / class AO (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+AO$D$ / AO.D$ (canon) / D$ (simple)
+- declared cls: List()
+- enclosing : class AO (declaring cls) / class AO (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+AT / AT (canon) / AT (simple)
+- declared cls: List(class AT$B, interface AT$C, class AT$D$)
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+AT$$anon$9 / null (canon) / $anon$9 (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / interface AT (cls) / null (constr) / null (meth)
+- properties : true (local) / false (member)
+AT$B / AT.B (canon) / B (simple)
+- declared cls: List()
+- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+AT$C / AT.C (canon) / C (simple)
+- declared cls: List()
+- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+AT$D$ / AT.D$ (canon) / D$ (simple)
+- declared cls: List()
+- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth)
+- properties : false (local) / true (member)
+AT$class / AT$class (canon) / AT$class (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+T / T (canon) / T (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
+T$class / T$class (canon) / T$class (simple)
+- declared cls: List()
+- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth)
+- properties : false (local) / false (member)
diff --git a/test/files/jvm/javaReflection/Classes_1.scala b/test/files/jvm/javaReflection/Classes_1.scala
new file mode 100644
index 0000000000..11963e2770
--- /dev/null
+++ b/test/files/jvm/javaReflection/Classes_1.scala
@@ -0,0 +1,84 @@
+// See Test.scala for comments
+
+trait T { def f = 1 }
+
+class A {
+ // member class
+ class B
+ // member trait
+ trait C
+ // member object
+ object D {
+ class B
+ trait C
+ object D
+ new T { }
+ (() => -1)
+ def f = { class KB }
+ }
+
+ // anonymous class, not a member
+ new T { }
+
+ // anonymous function, not a member
+ (() => 1)
+
+ def f = {
+ class E
+ trait F
+ object G
+ new T { }
+ (() => 2)
+
+ if (new Object().hashCode == 1) {
+ class H
+ trait I
+ object J
+ new T { }
+ (() => 3)
+ } else {
+ ()
+ }
+ }
+
+ {
+ class K
+ trait L
+ object M
+ new T { }
+ (() => 4)
+ }
+
+ val x = {
+ class N
+ trait O
+ object P
+ new T { }
+ (() => 5)
+ }
+
+ def this(x: Int) {
+ this()
+ class Q
+ trait R
+ object S
+ new T { }
+ (() => () => 5)
+ }
+}
+
+object AO {
+ class B
+ trait C
+ object D
+ new T { }
+ (() => 1)
+}
+
+trait AT {
+ class B
+ trait C
+ object D
+ new T { }
+ (() => 1)
+}
diff --git a/test/files/jvm/javaReflection/Test.scala b/test/files/jvm/javaReflection/Test.scala
new file mode 100644
index 0000000000..ae5a36eeb2
--- /dev/null
+++ b/test/files/jvm/javaReflection/Test.scala
@@ -0,0 +1,137 @@
+/**
+Interesting aspects of Java reflection applied to Scala classes. TL;DR: you should not use
+getSimpleName / getCanonicalName / isAnonymousClass / isLocalClass / isSynthetic.
+
+ - Some methods in Java reflection assume a certain structure in the class names. Scalac
+ can produce class files that don't respect this structure. Certain methods in reflection
+ therefore give surprising answers or may even throw an exception.
+
+ In particular, the method "getSimpleName" assumes that classes are named after the Java spec
+ http://docs.oracle.com/javase/specs/jls/se8/html/jls-13.html#jls-13.1
+
+ Consider the following Scala example:
+ class A { object B { class C } }
+
+ The classfile for C has the name "A$B$C", while the classfile for the module B has the
+ name "A$B$".
+
+ For "cClass.getSimpleName, the implementation first strips the name of the enclosing class,
+ which produces "C". The implementation then expects a "$" character, which is missing, and
+ throws an InternalError.
+
+ Consider another example:
+ trait T
+ class A { val x = new T {} }
+ object B { val x = new T {} }
+
+ The anonymous classes are named "A$$anon$1" and "B$$anon$2". If you call "getSimpleName",
+ you get "$anon$1" (leading $) and "anon$2" (no leading $).
+
+ - There are certain other methods in the Java reflection API that depend on getSimpleName.
+ These should be avoided; they yield unexpected results:
+
+ - isAnonymousClass is always false. Scala-defined classes are never anonymous for Java
+ reflection. Java reflection inspects the class name to decide whether a class is
+ anonymous, based on the name spec referenced above.
+ Also, the implementation of "isAnonymousClass" calls "getSimpleName", which may throw.
+
+ - isLocalClass: should be true for local classes (nested classes that are not
+ members), but not for anonymous classes. Since "isAnonymousClass" is always false,
+ Java reflection thinks that all Scala-defined anonymous classes are local.
+ The implementation may also throw, since it uses "isAnonymousClass":
+ class A { object B { def f = { class KB; new KB } } }
+ (new A).B.f.getClass.isLocalClass // boom
+
+ - getCanonicalName: uses "getSimpleName" in the implementation. In the first example,
+ cClass.getCanonicalName also fails with an InternalError.
+
+ - Scala-defined classes are never synthetic for Java reflection. The implementation
+ checks for the SYNTHETIC flag, which does not seem to be added by scalac (maybe this
+ will change some day).
+*/
+
+object Test {
+
+ def tr[T](m: => T): String = try {
+ val r = m
+ if (r == null) "null"
+ else r.toString
+ } catch { case e: InternalError => e.getMessage }
+
+ def assertNotAnonymous(c: Class[_]) = {
+ val an = try {
+ c.isAnonymousClass
+ } catch {
+ // isAnonymousClass is implemented using getSimpleName, which may throw.
+ case e: InternalError => false
+ }
+ assert(!an, c)
+ }
+
+ def ruleMemberOrLocal(c: Class[_]) = {
+ // if it throws, then it's because of the call from isLocalClass to isAnonymousClass.
+ // we know that isAnonymousClass is always false, so it has to be a local class.
+ val loc = try { c.isLocalClass } catch { case e: InternalError => true }
+ if (loc)
+ assert(!c.isMemberClass, c)
+ if (c.isMemberClass)
+ assert(!loc, c)
+ }
+
+ def ruleMemberDeclaring(c: Class[_]) = {
+ if (c.isMemberClass)
+ assert(c.getDeclaringClass.getDeclaredClasses.toList.map(_.getName) contains c.getName)
+ }
+
+ def ruleScalaAnonClassIsLocal(c: Class[_]) = {
+ if (c.getName contains "$anon$")
+ assert(c.isLocalClass, c)
+ }
+
+ def ruleScalaAnonFunInlineIsLocal(c: Class[_]) = {
+ // exclude lambda classes generated by delambdafy:method. Nested closures have both "anonfun" and "lambda" in their names.
+ if (c.getName.contains("$anonfun$") && !c.getName.contains("$lambda$"))
+ assert(c.isLocalClass, c)
+ }
+
+ def ruleScalaAnonFunMethodIsToplevel(c: Class[_]) = {
+ if (c.getName.contains("$lambda$"))
+ assert(c.getEnclosingClass == null, c)
+ }
+
+ def showClass(name: String) = {
+ val c = Class.forName(name)
+
+ println(s"${c.getName} / ${tr(c.getCanonicalName)} (canon) / ${tr(c.getSimpleName)} (simple)")
+ println( "- declared cls: "+ c.getDeclaredClasses.toList.sortBy(_.getName))
+ println(s"- enclosing : ${c.getDeclaringClass} (declaring cls) / ${c.getEnclosingClass} (cls) / ${c.getEnclosingConstructor} (constr) / ${c.getEnclosingMethod} (meth)")
+ println(s"- properties : ${tr(c.isLocalClass)} (local) / ${c.isMemberClass} (member)")
+
+ assertNotAnonymous(c)
+ assert(!c.isSynthetic, c)
+
+ ruleMemberOrLocal(c)
+ ruleMemberDeclaring(c)
+ ruleScalaAnonClassIsLocal(c)
+ ruleScalaAnonFunInlineIsLocal(c)
+ ruleScalaAnonFunMethodIsToplevel(c)
+ }
+
+ def main(args: Array[String]): Unit = {
+ def isAnonFunClassName(s: String) = s.contains("$anonfun$") || s.contains("$lambda$")
+
+ val classfiles = new java.io.File(sys.props("partest.output")).listFiles().toList.map(_.getName).collect({
+ // exclude files from Test.scala, just take those from Classes_1.scala
+ case s if !s.startsWith("Test") && s.endsWith(".class") => s.substring(0, s.length - 6)
+ }).sortWith((a, b) => {
+ // sort so that all anonymous functions come first, then all other classes;
+ // within each category, sort lexically.
+ // This keeps the check file smaller: only the anonymous functions differ between -Ydelambdafy:inline/method,
+ // the other classes are the same.
+ if (isAnonFunClassName(a)) !isAnonFunClassName(b) || a < b
+ else !isAnonFunClassName(b) && a < b
+ })
+
+ classfiles foreach showClass
+ }
+} \ No newline at end of file
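The header comment of Test.scala claims that getSimpleName can throw for classes nested inside a module. A minimal self-contained reproduction of that claim (on the JVM versions this test targets; later JDKs changed the getSimpleName implementation), using an ad-hoc wrapper instead of the test's tr helper:

    class A { object B { class C } }

    object SimpleNameRepro {
      def main(args: Array[String]): Unit = {
        val a = new A
        val c = new a.B.C
        // the class file is named A$B$C and its enclosing module class A$B$;
        // stripping the enclosing name leaves "C" with no '$', so getSimpleName throws
        val simple =
          try c.getClass.getSimpleName
          catch { case e: InternalError => "InternalError: " + e.getMessage }
        println(simple)
      }
    }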
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index 47d7bfd920..cb26446f40 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -1,4 +1,4 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were three deprecation warnings; re-run with -deprecation for details
a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 47d7bfd920..cb26446f40 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -1,4 +1,4 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were three deprecation warnings; re-run with -deprecation for details
a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
diff --git a/test/files/jvm/t6941/test.scala b/test/files/jvm/t6941/test.scala
index 248617f71f..fceb54487f 100644
--- a/test/files/jvm/t6941/test.scala
+++ b/test/files/jvm/t6941/test.scala
@@ -1,4 +1,4 @@
-import scala.tools.partest.BytecodeTest
+import scala.tools.partest.{BytecodeTest, ASMConverters}
import scala.tools.nsc.util.JavaClassPath
import java.io.InputStream
@@ -10,6 +10,6 @@ import scala.collection.JavaConverters._
object Test extends BytecodeTest {
def show: Unit = {
val classNode = loadClassNode("SameBytecode")
- similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), equalsModuloVar)
+ similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), ASMConverters.equivalentBytecode(_, _))
}
}
diff --git a/test/files/jvm/t7253/test.scala b/test/files/jvm/t7253/test.scala
index 7fe08e8813..a3f1e86e65 100644
--- a/test/files/jvm/t7253/test.scala
+++ b/test/files/jvm/t7253/test.scala
@@ -1,4 +1,4 @@
-import scala.tools.partest.BytecodeTest
+import scala.tools.partest.{BytecodeTest, ASMConverters}
import scala.tools.nsc.util.JavaClassPath
import java.io.InputStream
@@ -8,10 +8,10 @@ import asm.tree.{ClassNode, InsnList}
import scala.collection.JavaConverters._
object Test extends BytecodeTest {
- import instructions._
+ import ASMConverters._
def show: Unit = {
- val instrBaseSeqs = Seq("ScalaClient_1", "JavaClient_1") map (name => instructions.fromMethod(getMethod(loadClassNode(name), "foo")))
+ val instrBaseSeqs = Seq("ScalaClient_1", "JavaClient_1") map (name => instructionsFromMethod(getMethod(loadClassNode(name), "foo")))
val instrSeqs = instrBaseSeqs map (_ filter isInvoke)
cmpInstructions(instrSeqs(0), instrSeqs(1))
}
diff --git a/test/files/jvm/t8582.check b/test/files/jvm/t8582.check
new file mode 100644
index 0000000000..e388366270
--- /dev/null
+++ b/test/files/jvm/t8582.check
@@ -0,0 +1,44 @@
+getClass on module gives module class
+ class p1.p2.Singleton$Singleton$
+
+Nested module classes are found through reflection
+ p1.p2.Singleton$Singleton$: List(class p1.p2.Singleton$Singleton$Singleton$)
+
+Reflection can find direct nested classes (A1-B1-C1)
+ A1: List(class A1$B1)
+ A1$B1: List(class A1$B1$C1)
+ A1$B1$C1: List()
+
+Reflection can find direct nested classes (A2-B2-C2)
+ A2: List(class A2$B2)
+ A2$B2: List(class A2$B2$C2)
+ A2$B2$C2: List()
+
+The InnerClass attribute of a mirror class contains the members of the module class:
+ className[p1/p2/Singleton$Singleton$] outerClassName[p1/p2/Singleton] innerName[Singleton$] access[9]
+The module members are not in the InnerClass table of the module class (unless referenced):
+
+
+An outer class has an InnerClass attribute for direct nested classes
+ className[A1$B1] outerClassName[A1] innerName[B1] access[1]
+A nested class has an InnerClass attribute for itself (and also for its nested classes)
+ className[A1$B1] outerClassName[A1] innerName[B1] access[1]
+ className[A1$B1$C1] outerClassName[A1$B1] innerName[C1] access[1]
+C1 is a nested class, so it has an InnerClass attribute for itself.
+Because that attribute leads to an entry for B1 in the constant pool, C1 needs an InnerClass attribute for B1.
+ className[A1$B1] outerClassName[A1] innerName[B1] access[1]
+ className[A1$B1$C1] outerClassName[A1$B1] innerName[C1] access[1]
+
+The BeanInfo class has the same InnerClass attributes as the corresponding bean
+ className[A1$B1] outerClassName[A1] innerName[B1] access[1]
+ className[A1$B1$C1] outerClassName[A1$B1] innerName[C1] access[1]
+
+Class A2 mentions class C2 in the constant pool (due to method f), therefore it needs an InnerClass attribute for C2
+ className[A2$B2] outerClassName[A2] innerName[B2] access[1]
+ className[A2$B2$C2] outerClassName[A2$B2] innerName[C2] access[1]
+B2
+ className[A2$B2] outerClassName[A2] innerName[B2] access[1]
+ className[A2$B2$C2] outerClassName[A2$B2] innerName[C2] access[1]
+C2
+ className[A2$B2] outerClassName[A2] innerName[B2] access[1]
+ className[A2$B2$C2] outerClassName[A2$B2] innerName[C2] access[1]
diff --git a/test/files/jvm/t8582.scala b/test/files/jvm/t8582.scala
new file mode 100644
index 0000000000..e9a01f9016
--- /dev/null
+++ b/test/files/jvm/t8582.scala
@@ -0,0 +1,81 @@
+import scala.tools.partest.BytecodeTest
+import scala.collection.JavaConverters._
+
+package p1 {
+ package p2 {
+ object Singleton {
+ object Singleton {
+ object Singleton
+ }
+ }
+ }
+}
+
+class A1 {
+ class B1 {
+ @scala.beans.BeanInfo
+ class C1
+ }
+}
+
+class A2 {
+ class B2 {
+ class C2
+ }
+ def f: B2#C2 = null
+}
+
+
+object Test extends BytecodeTest {
+ import p1.p2._
+
+ def nested(c: Class[_]) = s" ${c.getName}: ${c.getDeclaredClasses.toList}"
+
+ def nprintln(s: String) = println("\n"+s)
+ def printInner(cname: String): Unit = {
+ val cnode = loadClassNode(cname)
+ println(cnode.innerClasses.asScala.toList.map(i => s"className[${i.name}] outerClassName[${i.outerName}] innerName[${i.innerName}] access[${i.access}]").mkString(" ", "\n ", ""))
+ }
+
+ def show() {
+
+ println("getClass on module gives module class")
+ println(" " + Singleton.Singleton.getClass)
+
+ nprintln("Nested module classes are found through reflection")
+ println(nested(Singleton.Singleton.getClass))
+
+ nprintln("Reflection can find direct nested classes (A1-B1-C1)")
+ println(nested(classOf[A1]))
+ println(nested(classOf[A1#B1]))
+ println(nested(classOf[A1#B1#C1]))
+
+ nprintln("Reflection can find direct nested classes (A2-B2-C2)")
+ println(nested(classOf[A2]))
+ println(nested(classOf[A2#B2]))
+ println(nested(classOf[A2#B2#C2]))
+
+ nprintln("The InnerClass attribute of a mirror class contains the members of the module class:")
+ printInner("p1.p2.Singleton") // mirror class
+ println("The module members are not in the InnerClass table of the module class (unless referenced):")
+ printInner("p1.p2.Singleton$")
+
+ nprintln("An outer class has a InnerClass attribute for direct nested classes")
+ printInner("A1")
+ println("A nested class has an InnerClass attribute for itself (and also for its nested classes)")
+ printInner("A1$B1")
+ println("C1 is a nested class, so it has an InnerClass attribute for itself.\n"+
+ "Because that attribute leads to an entry for B1 in the constant pool, C1 needs an InnerClass attribute for B1.")
+ printInner("A1$B1$C1")
+
+ nprintln("The BeanInfo class has the same InnerClass attributes as the corresponding bean")
+ printInner("A1$B1$C1BeanInfo")
+
+ nprintln("Class A2 mentions class C2 in the constant pool (due to method f), therefore it needs an InnerClass attribute for C1")
+ printInner("A2")
+ println("B2")
+ printInner("A2$B2")
+ println("C2")
+ printInner("A2$B2$C2")
+ }
+}
diff --git a/test/files/jvm/t8689.check b/test/files/jvm/t8689.check
new file mode 100644
index 0000000000..2e9ba477f8
--- /dev/null
+++ b/test/files/jvm/t8689.check
@@ -0,0 +1 @@
+success
diff --git a/test/files/jvm/t8689.scala b/test/files/jvm/t8689.scala
new file mode 100644
index 0000000000..3ee20d711a
--- /dev/null
+++ b/test/files/jvm/t8689.scala
@@ -0,0 +1,18 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ import scala.concurrent._
+ import ExecutionContext.Implicits.global
+ val source1 = Promise[Int]()
+ val source2 = Promise[Int]()
+ val done = Promise[Unit]()
+ source2.completeWith(source1.future).future.onComplete {
+ case _ =>
+ print("success")
+ done.success(())
+ }
+ source2.tryFailure(new TimeoutException)
+ source1.success(123)
+ import duration._
+ Await.result(done.future, 120.seconds)
+ }
+}
diff --git a/test/files/jvm/t9044.scala b/test/files/jvm/t9044.scala
new file mode 100644
index 0000000000..b1073325e8
--- /dev/null
+++ b/test/files/jvm/t9044.scala
@@ -0,0 +1,6 @@
+trait A
+trait B
+object Test extends A with B with App {
+ val is = Test.getClass.getInterfaces.mkString(", ")
+ assert(is == "interface A, interface B, interface scala.App", is)
+}
diff --git a/test/files/jvm/t9105.check b/test/files/jvm/t9105.check
new file mode 100644
index 0000000000..34750833f1
--- /dev/null
+++ b/test/files/jvm/t9105.check
@@ -0,0 +1,18 @@
+#partest !-Ydelambdafy:method
+(class C$$anonfun$1$A$1,class C$$anonfun$1,null)
+(class C$$anonfun$1$B$1,class C$$anonfun$1,private final java.lang.Object C$$anonfun$1.m$1())
+(class C$$anonfun$1$C$1,class C$$anonfun$1,null)
+(class C$$anonfun$1$$anonfun$2$D$1,class C$$anonfun$1$$anonfun$2,null)
+(class C$$anonfun$met$1$E$1,class C$$anonfun$met$1,null)
+(class C$$anonfun$met$1$F$1,class C$$anonfun$met$1,private final java.lang.Object C$$anonfun$met$1.m$2())
+(class C$$anonfun$met$1$G$1,class C$$anonfun$met$1,null)
+(class C$$anonfun$met$1$$anonfun$3$H$1,class C$$anonfun$met$1$$anonfun$3,null)
+#partest -Ydelambdafy:method
+(class C$A$1,class C,null)
+(class C$B$1,class C,private final java.lang.Object C.m$1())
+(class C$C$1,class C,null)
+(class C$D$1,class C,null)
+(class C$E$1,class C,public scala.Function0 C.met())
+(class C$F$1,class C,private final java.lang.Object C.m$2())
+(class C$G$1,class C,public scala.Function0 C.met())
+(class C$H$1,class C,public scala.Function0 C.met())
diff --git a/test/files/jvm/t9105.scala b/test/files/jvm/t9105.scala
new file mode 100644
index 0000000000..636ee8a768
--- /dev/null
+++ b/test/files/jvm/t9105.scala
@@ -0,0 +1,22 @@
+class C {
+ val fun = () => {
+ class A
+ def m: Object = { class B; new B }
+ val f: Object = { class C; new C }
+ val g = () => { class D; new D }
+ List[Object](new A, m, f, g())
+ }
+ def met = () => {
+ class E
+ def m: Object = { class F; new F }
+ val f: Object = { class G; new G }
+ val g = () => { class H; new H }
+ List[Object](new E, m, f, g())
+ }
+}
+
+object Test extends App {
+ val x = new C().fun.apply() ::: new C().met.apply()
+ val results = x.map(_.getClass).map(cls => (cls, cls.getEnclosingClass, cls.getEnclosingMethod))
+ println(results.mkString("\n"))
+}
diff --git a/test/files/jvm/throws-annot-from-java.check b/test/files/jvm/throws-annot-from-java.check
index be3ba412f8..c541b26fcc 100644
--- a/test/files/jvm/throws-annot-from-java.check
+++ b/test/files/jvm/throws-annot-from-java.check
@@ -44,4 +44,4 @@ bar
tp.typeParams.isEmpty: true
throws[test.PolymorphicException[_]](classOf[test.PolymorphicException])
-scala>
+scala> :quit
diff --git a/test/files/jvm/varargs.check b/test/files/jvm/varargs.check
index 8379befe93..986f98896a 100644
--- a/test/files/jvm/varargs.check
+++ b/test/files/jvm/varargs.check
@@ -1,3 +1,4 @@
7
10
-19 \ No newline at end of file
+19
+a
diff --git a/test/files/jvm/varargs/JavaClass.java b/test/files/jvm/varargs/JavaClass.java
index 9851e1b78b..6928ee5adc 100644
--- a/test/files/jvm/varargs/JavaClass.java
+++ b/test/files/jvm/varargs/JavaClass.java
@@ -11,5 +11,6 @@ public class JavaClass {
va.vi(1, 2, 3, 4);
varargz(5, 1.0, 2.0, 3.0);
va.vt(16, "", "", "");
+ System.out.println(va.vt1(16, "a", "b", "c"));
}
} \ No newline at end of file
diff --git a/test/files/jvm/varargs/VaClass.scala b/test/files/jvm/varargs/VaClass.scala
index 6343f9c6f6..d83e63ace1 100644
--- a/test/files/jvm/varargs/VaClass.scala
+++ b/test/files/jvm/varargs/VaClass.scala
@@ -10,4 +10,6 @@ class VaClass {
@varargs def vi(a: Int, b: Int*) = println(a + b.sum)
@varargs def vt[T](a: Int, b: T*) = println(a + b.length)
+ // TODO remove type bound after fixing SI-8786, see also https://github.com/scala/scala/pull/3961
+ @varargs def vt1[T <: String](a: Int, b: T*): T = b.head
}
diff --git a/test/files/jvm/xml05.check b/test/files/jvm/xml05.check
index 92ea995350..cad907525d 100644
--- a/test/files/jvm/xml05.check
+++ b/test/files/jvm/xml05.check
@@ -4,4 +4,4 @@ Type :help for more information.
scala> <city name="San Jos&eacute;"/>
res0: scala.xml.Elem = <city name="San Jos&eacute;"/>
-scala>
+scala> :quit
diff --git a/test/files/neg/aladdin1055.check b/test/files/neg/aladdin1055.check
new file mode 100644
index 0000000000..41782ae987
--- /dev/null
+++ b/test/files/neg/aladdin1055.check
@@ -0,0 +1,7 @@
+Test_1.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: (_ : this.<local child>)
+ def foo(t: A.T) = t match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/aladdin1055.flags b/test/files/neg/aladdin1055.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/aladdin1055.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/aladdin1055/A.scala b/test/files/neg/aladdin1055/A.scala
new file mode 100644
index 0000000000..862336e30c
--- /dev/null
+++ b/test/files/neg/aladdin1055/A.scala
@@ -0,0 +1,6 @@
+object A {
+ sealed trait T { def f: Int }
+ class TT extends T { def f = 0 }
+
+ def foo = new T { def f = 1 } // local subclass of sealed trait T
+}
diff --git a/test/files/neg/aladdin1055/Test_1.scala b/test/files/neg/aladdin1055/Test_1.scala
new file mode 100644
index 0000000000..39d9b1dc98
--- /dev/null
+++ b/test/files/neg/aladdin1055/Test_1.scala
@@ -0,0 +1,5 @@
+object Test {
+ def foo(t: A.T) = t match {
+ case a: A.TT => 0
+ }
+}
diff --git a/test/files/neg/case-collision2.flags b/test/files/neg/case-collision2.flags
index 5bfa9da5c5..bea46902c9 100644
--- a/test/files/neg/case-collision2.flags
+++ b/test/files/neg/case-collision2.flags
@@ -1 +1 @@
--Ynooptimize -Ybackend:GenBCode -Xfatal-warnings
+-Ybackend:GenBCode -Xfatal-warnings
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index e5f1a38d96..7de22fef54 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -97,6 +97,7 @@ checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.C3 a
checksensible.scala:95: warning: comparing values of types Unit and Int using `!=' will always yield true
while ((c = in.read) != -1)
^
+warning: there were three deprecation warnings; re-run with -deprecation for details
error: No warnings can be incurred under -Xfatal-warnings.
-33 warnings found
+34 warnings found
one error found
diff --git a/test/files/neg/compile-time-only-a.check b/test/files/neg/compile-time-only-a.check
index 9bc96f6b9b..b1ed1d24c2 100644
--- a/test/files/neg/compile-time-only-a.check
+++ b/test/files/neg/compile-time-only-a.check
@@ -4,9 +4,6 @@ compile-time-only-a.scala:10: error: C3
compile-time-only-a.scala:12: error: C4
@compileTimeOnly("C4") case class C4(x: Int)
^
-compile-time-only-a.scala:17: error: C5
- implicit class C5(val x: Int) {
- ^
compile-time-only-a.scala:32: error: C1
new C1()
^
@@ -76,4 +73,4 @@ compile-time-only-a.scala:75: error: placebo
compile-time-only-a.scala:75: error: placebo
@placebo def x = (2: @placebo)
^
-26 errors found
+25 errors found
diff --git a/test/files/neg/double-def-top-level.check b/test/files/neg/double-def-top-level.check
new file mode 100644
index 0000000000..85b16e81e5
--- /dev/null
+++ b/test/files/neg/double-def-top-level.check
@@ -0,0 +1,7 @@
+D_3.scala:1: error: C is already defined as class C
+class C
+ ^
+D_3.scala:2: error: O is already defined as object O
+object O
+ ^
+two errors found
diff --git a/test/files/neg/double-def-top-level/A_1.scala b/test/files/neg/double-def-top-level/A_1.scala
new file mode 100644
index 0000000000..c3d68d9d05
--- /dev/null
+++ b/test/files/neg/double-def-top-level/A_1.scala
@@ -0,0 +1,4 @@
+package p
+
+class C
+object O
diff --git a/test/files/neg/double-def-top-level/B_2.scala b/test/files/neg/double-def-top-level/B_2.scala
new file mode 100644
index 0000000000..c328e8c964
--- /dev/null
+++ b/test/files/neg/double-def-top-level/B_2.scala
@@ -0,0 +1,2 @@
+class C /* noerror */
+object O /* noerror */ \ No newline at end of file
diff --git a/test/files/neg/double-def-top-level/C_3.scala b/test/files/neg/double-def-top-level/C_3.scala
new file mode 100644
index 0000000000..e1c327c15a
--- /dev/null
+++ b/test/files/neg/double-def-top-level/C_3.scala
@@ -0,0 +1,2 @@
+class C
+object O \ No newline at end of file
diff --git a/test/files/neg/double-def-top-level/D_3.scala b/test/files/neg/double-def-top-level/D_3.scala
new file mode 100644
index 0000000000..518e0d1c54
--- /dev/null
+++ b/test/files/neg/double-def-top-level/D_3.scala
@@ -0,0 +1,2 @@
+class C
+object O
diff --git a/test/files/neg/forgot-interpolator.check b/test/files/neg/forgot-interpolator.check
index 8988458982..8e75350518 100644
--- a/test/files/neg/forgot-interpolator.check
+++ b/test/files/neg/forgot-interpolator.check
@@ -1,25 +1,25 @@
-forgot-interpolator.scala:4: warning: `$bippy` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:4: warning: possible missing interpolator: detected interpolated identifier `$bippy`
def f = "Put the $bippy in the $bippy!" // warn 1
^
-forgot-interpolator.scala:14: warning: That looks like an interpolated expression! Did you forget the interpolator?
+forgot-interpolator.scala:14: warning: possible missing interpolator: detected an interpolated expression
def f = """Put the ${println("bippy")} in the bippy!""" // warn 2
^
-forgot-interpolator.scala:30: warning: `$beppo` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:30: warning: possible missing interpolator: detected interpolated identifier `$beppo`
def f = "$beppo was a marx bros who saw dollars." // warn 3
^
-forgot-interpolator.scala:34: warning: `$aleppo` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:34: warning: possible missing interpolator: detected interpolated identifier `$aleppo`
def f = "$aleppo is a pepper and a city." // warn 4
^
-forgot-interpolator.scala:47: warning: `$hippo` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:47: warning: possible missing interpolator: detected interpolated identifier `$hippo`
def h = "$hippo takes an implicit" // warn 6
^
-forgot-interpolator.scala:88: warning: `$groucho` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:88: warning: possible missing interpolator: detected interpolated identifier `$groucho`
def f2 = "I salute $groucho" // warn 7
^
-forgot-interpolator.scala:89: warning: `$dingo` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:89: warning: possible missing interpolator: detected interpolated identifier `$dingo`
def f3 = "I even salute $dingo" // warn 8
^
-forgot-interpolator.scala:90: warning: `$calico` looks like an interpolated identifier! Did you forget the interpolator?
+forgot-interpolator.scala:90: warning: possible missing interpolator: detected interpolated identifier `$calico`
def f4 = "I also salute $calico" // warn 9
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/forgot-interpolator.scala b/test/files/neg/forgot-interpolator.scala
index a53054d890..ca1ac30821 100644
--- a/test/files/neg/forgot-interpolator.scala
+++ b/test/files/neg/forgot-interpolator.scala
@@ -54,8 +54,8 @@ package test {
}
}
import annotation._
- @implicitNotFound("No Z in ${A}") // no warn
- class Z[A]
+ @implicitNotFound("No Z in ${T}") // no warn
+ class Z[T]
}
diff --git a/test/files/neg/inlineMaxSize.check b/test/files/neg/inlineMaxSize.check
new file mode 100644
index 0000000000..d218a8b6e2
--- /dev/null
+++ b/test/files/neg/inlineMaxSize.check
@@ -0,0 +1,9 @@
+inlineMaxSize.scala:7: warning: C::i()I is annotated @inline but could not be inlined:
+The size of the callsite method C::j()I
+would exceed the JVM method size limit after inlining C::i()I.
+
+ @inline final def j = i + i
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/inlineMaxSize.flags b/test/files/neg/inlineMaxSize.flags
new file mode 100644
index 0000000000..9c6b811622
--- /dev/null
+++ b/test/files/neg/inlineMaxSize.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode -Ydelambdafy:method -Yopt:l:classpath -Yopt-warnings -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/inlineMaxSize.scala b/test/files/neg/inlineMaxSize.scala
new file mode 100644
index 0000000000..16dc0d9538
--- /dev/null
+++ b/test/files/neg/inlineMaxSize.scala
@@ -0,0 +1,8 @@
+// not a JUnit test because of https://github.com/scala-opt/scala/issues/23
+class C {
+ @inline final def f = 0
+ @inline final def g = f + f + f + f + f + f + f + f + f + f
+ @inline final def h = g + g + g + g + g + g + g + g + g + g
+ @inline final def i = h + h + h + h + h + h + h + h + h + h
+ @inline final def j = i + i
+}
diff --git a/test/files/neg/literals.check b/test/files/neg/literals.check
new file mode 100644
index 0000000000..148a9346c5
--- /dev/null
+++ b/test/files/neg/literals.check
@@ -0,0 +1,40 @@
+literals.scala:6: error: missing integer number
+ def missingHex: Int = { 0x } // line 4: was: not reported, taken as zero
+ ^
+literals.scala:8: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
+ def leadingZeros: Int = { 01 } // line 6: no leading zero
+ ^
+literals.scala:10: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
+ def tooManyZeros: Int = { 00 } // line 8: no leading zero
+ ^
+literals.scala:12: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
+ def zeroOfNine: Int = { 09 } // line 10: no leading zero
+ ^
+literals.scala:16: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
+ def zeroOfNineDot: Int = { 09. } // line 14: malformed integer, ident expected
+ ^
+literals.scala:23: error: missing integer number
+ def missingHex: Int = 0x // line 22: was: not reported, taken as zero
+ ^
+literals.scala:27: error: Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)
+ def tooManyZeros: Int = 00 // line 26: no leading zero
+ ^
+literals.scala:14: error: identifier expected but '}' found.
+ def orphanDot: Int = { 9. } // line 12: ident expected
+ ^
+literals.scala:16: error: identifier expected but '}' found.
+ def zeroOfNineDot: Int = { 09. } // line 14: malformed integer, ident expected
+ ^
+literals.scala:18: error: ';' expected but double literal found.
+ def noHexFloat: Double = { 0x1.2 } // line 16: ';' expected but double literal found.
+ ^
+literals.scala:25: error: ';' expected but 'def' found.
+ def leadingZeros: Int = 01 // line 24: no leading zero
+ ^
+literals.scala:29: error: ';' expected but 'def' found.
+ def zeroOfNine: Int = 09 // line 28: no leading zero
+ ^
+literals.scala:33: error: identifier expected but 'def' found.
+ def zeroOfNineDot: Int = 09. // line 32: malformed integer, ident expected
+ ^
+13 errors found
diff --git a/test/files/neg/literals.scala b/test/files/neg/literals.scala
new file mode 100644
index 0000000000..3df7f0b408
--- /dev/null
+++ b/test/files/neg/literals.scala
@@ -0,0 +1,36 @@
+
+/* This took me literally all day.
+*/
+trait RejectedLiterals {
+
+ def missingHex: Int = { 0x } // line 4: was: not reported, taken as zero
+
+ def leadingZeros: Int = { 01 } // line 6: no leading zero
+
+ def tooManyZeros: Int = { 00 } // line 8: no leading zero
+
+ def zeroOfNine: Int = { 09 } // line 10: no leading zero
+
+ def orphanDot: Int = { 9. } // line 12: ident expected
+
+ def zeroOfNineDot: Int = { 09. } // line 14: malformed integer, ident expected
+
+ def noHexFloat: Double = { 0x1.2 } // line 16: ';' expected but double literal found.
+}
+
+trait Braceless {
+
+ def missingHex: Int = 0x // line 22: was: not reported, taken as zero
+
+ def leadingZeros: Int = 01 // line 24: no leading zero
+
+ def tooManyZeros: Int = 00 // line 26: no leading zero
+
+ def zeroOfNine: Int = 09 // line 28: no leading zero
+
+ def orphanDot: Int = 9. // line 30: ident expected
+
+ def zeroOfNineDot: Int = 09. // line 32: malformed integer, ident expected
+
+ def noHexFloat: Double = 0x1.2 // line 34: ';' expected but double literal found.
+}
diff --git a/test/files/neg/literate_existentials.scala b/test/files/neg/literate_existentials.scala
index 8580347bf9..5537c50b3a 100644
--- a/test/files/neg/literate_existentials.scala
+++ b/test/files/neg/literate_existentials.scala
@@ -187,7 +187,7 @@ object LiterateExistentials {
//
implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails
-// The preceeding line causes the compiler to generate an error message.
+// The preceding line causes the compiler to generate an error message.
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
index 61df5131cc..54743d4936 100644
--- a/test/files/neg/macro-basic-mamdmi.check
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -1,5 +1,13 @@
+Impls_Macros_Test_1.scala:33: error: macro implementation not found: foo
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+ ^
+Impls_Macros_Test_1.scala:33: error: macro implementation not found: bar
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+ ^
Impls_Macros_Test_1.scala:33: error: macro implementation not found: quux
(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
println(foo(2) + Macros.bar(2) * new Macros().quux(4))
^
-one error found
+three errors found
diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check
index 568cc7c570..ebdc8ec7da 100644
--- a/test/files/neg/macro-invalidret.check
+++ b/test/files/neg/macro-invalidret.check
@@ -19,8 +19,7 @@ Macros_Test_2.scala:7: warning: macro defs must have explicitly specified return
def foo6 = macro Impls.foo6
^
Macros_Test_2.scala:14: error: exception during macro expansion:
-scala.NotImplementedError: an implementation is missing
- at scala.Predef$.$qmark$qmark$qmark(Predef.scala:225)
+java.lang.NullPointerException
at Impls$.foo3(Impls_1.scala:7)
foo3
diff --git a/test/files/neg/macro-invalidret/Impls_1.scala b/test/files/neg/macro-invalidret/Impls_1.scala
index 434aeef10f..a52e8d8f39 100644
--- a/test/files/neg/macro-invalidret/Impls_1.scala
+++ b/test/files/neg/macro-invalidret/Impls_1.scala
@@ -4,7 +4,7 @@ import scala.reflect.runtime.{universe => ru}
object Impls {
def foo1(c: Context) = 2
def foo2(c: Context) = ru.Literal(ru.Constant(42))
- def foo3(c: Context) = ???
+ def foo3(c: Context) = throw null
def foo5(c: Context) = c.universe.Literal(c.universe.Constant(42))
def foo6(c: Context) = c.Expr[Int](c.universe.Literal(c.universe.Constant(42)))
}
diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check
index 4c1115418b..19ac6528d3 100644
--- a/test/files/neg/macro-invalidusage-badargs.check
+++ b/test/files/neg/macro-invalidusage-badargs.check
@@ -9,7 +9,8 @@ Macros_Test_2.scala:6: error: too few argument lists for macro invocation
Macros_Test_2.scala:7: error: Int does not take parameters
foo(4)(2)
^
-Macros_Test_2.scala:8: error: macro applications do not support named and/or default arguments
+Macros_Test_2.scala:8: error: not enough arguments for macro method foo: (x: Int)Int.
+Unspecified value parameter x.
foo()
^
Macros_Test_2.scala:9: error: too many arguments for macro method foo: (x: Int)Int
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index 20ddd55f1f..2db24b6f32 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -151,15 +151,15 @@ names-defaults-neg.scala:144: error: variable definition needs type because 'x'
names-defaults-neg.scala:147: error: variable definition needs type because 'x' is used as a named argument in its body.
object t6 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:147: warning: type-checking the invocation of method f checks if the named argument expression 'x = ...' is a valid assignment
-in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for x.
+names-defaults-neg.scala:147: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+an explicit type is required for the definition mentioned in the error message above.
object t6 { var x = t.f(x = 1) }
^
names-defaults-neg.scala:150: error: variable definition needs type because 'x' is used as a named argument in its body.
class t9 { var x = t.f(x = 1) }
^
-names-defaults-neg.scala:150: warning: type-checking the invocation of method f checks if the named argument expression 'x = ...' is a valid assignment
-in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for x.
+names-defaults-neg.scala:150: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+an explicit type is required for the definition mentioned in the error message above.
class t9 { var x = t.f(x = 1) }
^
names-defaults-neg.scala:164: error: variable definition needs type because 'x' is used as a named argument in its body.
@@ -174,8 +174,8 @@ names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both a m
names-defaults-neg.scala:177: error: variable definition needs type because 'x' is used as a named argument in its body.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:177: warning: type-checking the invocation of method f checks if the named argument expression 'x = ...' is a valid assignment
-in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for x.
+names-defaults-neg.scala:177: warning: failed to determine if 'x = ...' is a named argument or an assignment expression.
+an explicit type is required for the definition mentioned in the error message above.
class u15 { var x = u.f(x = 1) }
^
names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
diff --git a/test/files/neg/overloaded-implicit.check b/test/files/neg/overloaded-implicit.check
index ca0870705d..0e6617d904 100644
--- a/test/files/neg/overloaded-implicit.check
+++ b/test/files/neg/overloaded-implicit.check
@@ -4,6 +4,7 @@ overloaded-implicit.scala:2: warning: parameterized overloaded implicit methods
overloaded-implicit.scala:3: warning: parameterized overloaded implicit methods are not visible as view bounds
implicit def imp1[T](x: Set[T]): Map[T, T] = Map()
^
+warning: there were four feature warnings; re-run with -feature for details
error: No warnings can be incurred under -Xfatal-warnings.
-two warnings found
+three warnings found
one error found
diff --git a/test/files/neg/patmatexhaust-huge.check b/test/files/neg/patmatexhaust-huge.check
new file mode 100644
index 0000000000..66dbd42ef3
--- /dev/null
+++ b/test/files/neg/patmatexhaust-huge.check
@@ -0,0 +1,7 @@
+patmatexhaust-huge.scala:404: warning: match may not be exhaustive.
+It would fail on the following inputs: C392, C397
+ def f(c: C): Int = c match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/patmatexhaust-huge.flags b/test/files/neg/patmatexhaust-huge.flags
new file mode 100644
index 0000000000..591a950f83
--- /dev/null
+++ b/test/files/neg/patmatexhaust-huge.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked -Ypatmat-exhaust-depth off \ No newline at end of file
diff --git a/test/files/neg/patmatexhaust-huge.scala b/test/files/neg/patmatexhaust-huge.scala
new file mode 100644
index 0000000000..8f87655b7a
--- /dev/null
+++ b/test/files/neg/patmatexhaust-huge.scala
@@ -0,0 +1,806 @@
+sealed trait C
+case object C1 extends C
+case object C2 extends C
+case object C3 extends C
+case object C4 extends C
+case object C5 extends C
+case object C6 extends C
+case object C7 extends C
+case object C8 extends C
+case object C9 extends C
+case object C10 extends C
+case object C11 extends C
+case object C12 extends C
+case object C13 extends C
+case object C14 extends C
+case object C15 extends C
+case object C16 extends C
+case object C17 extends C
+case object C18 extends C
+case object C19 extends C
+case object C20 extends C
+case object C21 extends C
+case object C22 extends C
+case object C23 extends C
+case object C24 extends C
+case object C25 extends C
+case object C26 extends C
+case object C27 extends C
+case object C28 extends C
+case object C29 extends C
+case object C30 extends C
+case object C31 extends C
+case object C32 extends C
+case object C33 extends C
+case object C34 extends C
+case object C35 extends C
+case object C36 extends C
+case object C37 extends C
+case object C38 extends C
+case object C39 extends C
+case object C40 extends C
+case object C41 extends C
+case object C42 extends C
+case object C43 extends C
+case object C44 extends C
+case object C45 extends C
+case object C46 extends C
+case object C47 extends C
+case object C48 extends C
+case object C49 extends C
+case object C50 extends C
+case object C51 extends C
+case object C52 extends C
+case object C53 extends C
+case object C54 extends C
+case object C55 extends C
+case object C56 extends C
+case object C57 extends C
+case object C58 extends C
+case object C59 extends C
+case object C60 extends C
+case object C61 extends C
+case object C62 extends C
+case object C63 extends C
+case object C64 extends C
+case object C65 extends C
+case object C66 extends C
+case object C67 extends C
+case object C68 extends C
+case object C69 extends C
+case object C70 extends C
+case object C71 extends C
+case object C72 extends C
+case object C73 extends C
+case object C74 extends C
+case object C75 extends C
+case object C76 extends C
+case object C77 extends C
+case object C78 extends C
+case object C79 extends C
+case object C80 extends C
+case object C81 extends C
+case object C82 extends C
+case object C83 extends C
+case object C84 extends C
+case object C85 extends C
+case object C86 extends C
+case object C87 extends C
+case object C88 extends C
+case object C89 extends C
+case object C90 extends C
+case object C91 extends C
+case object C92 extends C
+case object C93 extends C
+case object C94 extends C
+case object C95 extends C
+case object C96 extends C
+case object C97 extends C
+case object C98 extends C
+case object C99 extends C
+case object C100 extends C
+case object C101 extends C
+case object C102 extends C
+case object C103 extends C
+case object C104 extends C
+case object C105 extends C
+case object C106 extends C
+case object C107 extends C
+case object C108 extends C
+case object C109 extends C
+case object C110 extends C
+case object C111 extends C
+case object C112 extends C
+case object C113 extends C
+case object C114 extends C
+case object C115 extends C
+case object C116 extends C
+case object C117 extends C
+case object C118 extends C
+case object C119 extends C
+case object C120 extends C
+case object C121 extends C
+case object C122 extends C
+case object C123 extends C
+case object C124 extends C
+case object C125 extends C
+case object C126 extends C
+case object C127 extends C
+case object C128 extends C
+case object C129 extends C
+case object C130 extends C
+case object C131 extends C
+case object C132 extends C
+case object C133 extends C
+case object C134 extends C
+case object C135 extends C
+case object C136 extends C
+case object C137 extends C
+case object C138 extends C
+case object C139 extends C
+case object C140 extends C
+case object C141 extends C
+case object C142 extends C
+case object C143 extends C
+case object C144 extends C
+case object C145 extends C
+case object C146 extends C
+case object C147 extends C
+case object C148 extends C
+case object C149 extends C
+case object C150 extends C
+case object C151 extends C
+case object C152 extends C
+case object C153 extends C
+case object C154 extends C
+case object C155 extends C
+case object C156 extends C
+case object C157 extends C
+case object C158 extends C
+case object C159 extends C
+case object C160 extends C
+case object C161 extends C
+case object C162 extends C
+case object C163 extends C
+case object C164 extends C
+case object C165 extends C
+case object C166 extends C
+case object C167 extends C
+case object C168 extends C
+case object C169 extends C
+case object C170 extends C
+case object C171 extends C
+case object C172 extends C
+case object C173 extends C
+case object C174 extends C
+case object C175 extends C
+case object C176 extends C
+case object C177 extends C
+case object C178 extends C
+case object C179 extends C
+case object C180 extends C
+case object C181 extends C
+case object C182 extends C
+case object C183 extends C
+case object C184 extends C
+case object C185 extends C
+case object C186 extends C
+case object C187 extends C
+case object C188 extends C
+case object C189 extends C
+case object C190 extends C
+case object C191 extends C
+case object C192 extends C
+case object C193 extends C
+case object C194 extends C
+case object C195 extends C
+case object C196 extends C
+case object C197 extends C
+case object C198 extends C
+case object C199 extends C
+case object C200 extends C
+case object C201 extends C
+case object C202 extends C
+case object C203 extends C
+case object C204 extends C
+case object C205 extends C
+case object C206 extends C
+case object C207 extends C
+case object C208 extends C
+case object C209 extends C
+case object C210 extends C
+case object C211 extends C
+case object C212 extends C
+case object C213 extends C
+case object C214 extends C
+case object C215 extends C
+case object C216 extends C
+case object C217 extends C
+case object C218 extends C
+case object C219 extends C
+case object C220 extends C
+case object C221 extends C
+case object C222 extends C
+case object C223 extends C
+case object C224 extends C
+case object C225 extends C
+case object C226 extends C
+case object C227 extends C
+case object C228 extends C
+case object C229 extends C
+case object C230 extends C
+case object C231 extends C
+case object C232 extends C
+case object C233 extends C
+case object C234 extends C
+case object C235 extends C
+case object C236 extends C
+case object C237 extends C
+case object C238 extends C
+case object C239 extends C
+case object C240 extends C
+case object C241 extends C
+case object C242 extends C
+case object C243 extends C
+case object C244 extends C
+case object C245 extends C
+case object C246 extends C
+case object C247 extends C
+case object C248 extends C
+case object C249 extends C
+case object C250 extends C
+case object C251 extends C
+case object C252 extends C
+case object C253 extends C
+case object C254 extends C
+case object C255 extends C
+case object C256 extends C
+case object C257 extends C
+case object C258 extends C
+case object C259 extends C
+case object C260 extends C
+case object C261 extends C
+case object C262 extends C
+case object C263 extends C
+case object C264 extends C
+case object C265 extends C
+case object C266 extends C
+case object C267 extends C
+case object C268 extends C
+case object C269 extends C
+case object C270 extends C
+case object C271 extends C
+case object C272 extends C
+case object C273 extends C
+case object C274 extends C
+case object C275 extends C
+case object C276 extends C
+case object C277 extends C
+case object C278 extends C
+case object C279 extends C
+case object C280 extends C
+case object C281 extends C
+case object C282 extends C
+case object C283 extends C
+case object C284 extends C
+case object C285 extends C
+case object C286 extends C
+case object C287 extends C
+case object C288 extends C
+case object C289 extends C
+case object C290 extends C
+case object C291 extends C
+case object C292 extends C
+case object C293 extends C
+case object C294 extends C
+case object C295 extends C
+case object C296 extends C
+case object C297 extends C
+case object C298 extends C
+case object C299 extends C
+case object C300 extends C
+case object C301 extends C
+case object C302 extends C
+case object C303 extends C
+case object C304 extends C
+case object C305 extends C
+case object C306 extends C
+case object C307 extends C
+case object C308 extends C
+case object C309 extends C
+case object C310 extends C
+case object C311 extends C
+case object C312 extends C
+case object C313 extends C
+case object C314 extends C
+case object C315 extends C
+case object C316 extends C
+case object C317 extends C
+case object C318 extends C
+case object C319 extends C
+case object C320 extends C
+case object C321 extends C
+case object C322 extends C
+case object C323 extends C
+case object C324 extends C
+case object C325 extends C
+case object C326 extends C
+case object C327 extends C
+case object C328 extends C
+case object C329 extends C
+case object C330 extends C
+case object C331 extends C
+case object C332 extends C
+case object C333 extends C
+case object C334 extends C
+case object C335 extends C
+case object C336 extends C
+case object C337 extends C
+case object C338 extends C
+case object C339 extends C
+case object C340 extends C
+case object C341 extends C
+case object C342 extends C
+case object C343 extends C
+case object C344 extends C
+case object C345 extends C
+case object C346 extends C
+case object C347 extends C
+case object C348 extends C
+case object C349 extends C
+case object C350 extends C
+case object C351 extends C
+case object C352 extends C
+case object C353 extends C
+case object C354 extends C
+case object C355 extends C
+case object C356 extends C
+case object C357 extends C
+case object C358 extends C
+case object C359 extends C
+case object C360 extends C
+case object C361 extends C
+case object C362 extends C
+case object C363 extends C
+case object C364 extends C
+case object C365 extends C
+case object C366 extends C
+case object C367 extends C
+case object C368 extends C
+case object C369 extends C
+case object C370 extends C
+case object C371 extends C
+case object C372 extends C
+case object C373 extends C
+case object C374 extends C
+case object C375 extends C
+case object C376 extends C
+case object C377 extends C
+case object C378 extends C
+case object C379 extends C
+case object C380 extends C
+case object C381 extends C
+case object C382 extends C
+case object C383 extends C
+case object C384 extends C
+case object C385 extends C
+case object C386 extends C
+case object C387 extends C
+case object C388 extends C
+case object C389 extends C
+case object C390 extends C
+case object C391 extends C
+case object C392 extends C
+case object C393 extends C
+case object C394 extends C
+case object C395 extends C
+case object C396 extends C
+case object C397 extends C
+case object C398 extends C
+case object C399 extends C
+case object C400 extends C
+
+object M {
+ def f(c: C): Int = c match {
+ case C1 => 1
+ case C2 => 2
+ case C3 => 3
+ case C4 => 4
+ case C5 => 5
+ case C6 => 6
+ case C7 => 7
+ case C8 => 8
+ case C9 => 9
+ case C10 => 10
+ case C11 => 11
+ case C12 => 12
+ case C13 => 13
+ case C14 => 14
+ case C15 => 15
+ case C16 => 16
+ case C17 => 17
+ case C18 => 18
+ case C19 => 19
+ case C20 => 20
+ case C21 => 21
+ case C22 => 22
+ case C23 => 23
+ case C24 => 24
+ case C25 => 25
+ case C26 => 26
+ case C27 => 27
+ case C28 => 28
+ case C29 => 29
+ case C30 => 30
+ case C31 => 31
+ case C32 => 32
+ case C33 => 33
+ case C34 => 34
+ case C35 => 35
+ case C36 => 36
+ case C37 => 37
+ case C38 => 38
+ case C39 => 39
+ case C40 => 40
+ case C41 => 41
+ case C42 => 42
+ case C43 => 43
+ case C44 => 44
+ case C45 => 45
+ case C46 => 46
+ case C47 => 47
+ case C48 => 48
+ case C49 => 49
+ case C50 => 50
+ case C51 => 51
+ case C52 => 52
+ case C53 => 53
+ case C54 => 54
+ case C55 => 55
+ case C56 => 56
+ case C57 => 57
+ case C58 => 58
+ case C59 => 59
+ case C60 => 60
+ case C61 => 61
+ case C62 => 62
+ case C63 => 63
+ case C64 => 64
+ case C65 => 65
+ case C66 => 66
+ case C67 => 67
+ case C68 => 68
+ case C69 => 69
+ case C70 => 70
+ case C71 => 71
+ case C72 => 72
+ case C73 => 73
+ case C74 => 74
+ case C75 => 75
+ case C76 => 76
+ case C77 => 77
+ case C78 => 78
+ case C79 => 79
+ case C80 => 80
+ case C81 => 81
+ case C82 => 82
+ case C83 => 83
+ case C84 => 84
+ case C85 => 85
+ case C86 => 86
+ case C87 => 87
+ case C88 => 88
+ case C89 => 89
+ case C90 => 90
+ case C91 => 91
+ case C92 => 92
+ case C93 => 93
+ case C94 => 94
+ case C95 => 95
+ case C96 => 96
+ case C97 => 97
+ case C98 => 98
+ case C99 => 99
+ case C100 => 100
+ case C101 => 101
+ case C102 => 102
+ case C103 => 103
+ case C104 => 104
+ case C105 => 105
+ case C106 => 106
+ case C107 => 107
+ case C108 => 108
+ case C109 => 109
+ case C110 => 110
+ case C111 => 111
+ case C112 => 112
+ case C113 => 113
+ case C114 => 114
+ case C115 => 115
+ case C116 => 116
+ case C117 => 117
+ case C118 => 118
+ case C119 => 119
+ case C120 => 120
+ case C121 => 121
+ case C122 => 122
+ case C123 => 123
+ case C124 => 124
+ case C125 => 125
+ case C126 => 126
+ case C127 => 127
+ case C128 => 128
+ case C129 => 129
+ case C130 => 130
+ case C131 => 131
+ case C132 => 132
+ case C133 => 133
+ case C134 => 134
+ case C135 => 135
+ case C136 => 136
+ case C137 => 137
+ case C138 => 138
+ case C139 => 139
+ case C140 => 140
+ case C141 => 141
+ case C142 => 142
+ case C143 => 143
+ case C144 => 144
+ case C145 => 145
+ case C146 => 146
+ case C147 => 147
+ case C148 => 148
+ case C149 => 149
+ case C150 => 150
+ case C151 => 151
+ case C152 => 152
+ case C153 => 153
+ case C154 => 154
+ case C155 => 155
+ case C156 => 156
+ case C157 => 157
+ case C158 => 158
+ case C159 => 159
+ case C160 => 160
+ case C161 => 161
+ case C162 => 162
+ case C163 => 163
+ case C164 => 164
+ case C165 => 165
+ case C166 => 166
+ case C167 => 167
+ case C168 => 168
+ case C169 => 169
+ case C170 => 170
+ case C171 => 171
+ case C172 => 172
+ case C173 => 173
+ case C174 => 174
+ case C175 => 175
+ case C176 => 176
+ case C177 => 177
+ case C178 => 178
+ case C179 => 179
+ case C180 => 180
+ case C181 => 181
+ case C182 => 182
+ case C183 => 183
+ case C184 => 184
+ case C185 => 185
+ case C186 => 186
+ case C187 => 187
+ case C188 => 188
+ case C189 => 189
+ case C190 => 190
+ case C191 => 191
+ case C192 => 192
+ case C193 => 193
+ case C194 => 194
+ case C195 => 195
+ case C196 => 196
+ case C197 => 197
+ case C198 => 198
+ case C199 => 199
+ case C200 => 200
+ case C201 => 201
+ case C202 => 202
+ case C203 => 203
+ case C204 => 204
+ case C205 => 205
+ case C206 => 206
+ case C207 => 207
+ case C208 => 208
+ case C209 => 209
+ case C210 => 210
+ case C211 => 211
+ case C212 => 212
+ case C213 => 213
+ case C214 => 214
+ case C215 => 215
+ case C216 => 216
+ case C217 => 217
+ case C218 => 218
+ case C219 => 219
+ case C220 => 220
+ case C221 => 221
+ case C222 => 222
+ case C223 => 223
+ case C224 => 224
+ case C225 => 225
+ case C226 => 226
+ case C227 => 227
+ case C228 => 228
+ case C229 => 229
+ case C230 => 230
+ case C231 => 231
+ case C232 => 232
+ case C233 => 233
+ case C234 => 234
+ case C235 => 235
+ case C236 => 236
+ case C237 => 237
+ case C238 => 238
+ case C239 => 239
+ case C240 => 240
+ case C241 => 241
+ case C242 => 242
+ case C243 => 243
+ case C244 => 244
+ case C245 => 245
+ case C246 => 246
+ case C247 => 247
+ case C248 => 248
+ case C249 => 249
+ case C250 => 250
+ case C251 => 251
+ case C252 => 252
+ case C253 => 253
+ case C254 => 254
+ case C255 => 255
+ case C256 => 256
+ case C257 => 257
+ case C258 => 258
+ case C259 => 259
+ case C260 => 260
+ case C261 => 261
+ case C262 => 262
+ case C263 => 263
+ case C264 => 264
+ case C265 => 265
+ case C266 => 266
+ case C267 => 267
+ case C268 => 268
+ case C269 => 269
+ case C270 => 270
+ case C271 => 271
+ case C272 => 272
+ case C273 => 273
+ case C274 => 274
+ case C275 => 275
+ case C276 => 276
+ case C277 => 277
+ case C278 => 278
+ case C279 => 279
+ case C280 => 280
+ case C281 => 281
+ case C282 => 282
+ case C283 => 283
+ case C284 => 284
+ case C285 => 285
+ case C286 => 286
+ case C287 => 287
+ case C288 => 288
+ case C289 => 289
+ case C290 => 290
+ case C291 => 291
+ case C292 => 292
+ case C293 => 293
+ case C294 => 294
+ case C295 => 295
+ case C296 => 296
+ case C297 => 297
+ case C298 => 298
+ case C299 => 299
+ case C300 => 300
+ case C301 => 301
+ case C302 => 302
+ case C303 => 303
+ case C304 => 304
+ case C305 => 305
+ case C306 => 306
+ case C307 => 307
+ case C308 => 308
+ case C309 => 309
+ case C310 => 310
+ case C311 => 311
+ case C312 => 312
+ case C313 => 313
+ case C314 => 314
+ case C315 => 315
+ case C316 => 316
+ case C317 => 317
+ case C318 => 318
+ case C319 => 319
+ case C320 => 320
+ case C321 => 321
+ case C322 => 322
+ case C323 => 323
+ case C324 => 324
+ case C325 => 325
+ case C326 => 326
+ case C327 => 327
+ case C328 => 328
+ case C329 => 329
+ case C330 => 330
+ case C331 => 331
+ case C332 => 332
+ case C333 => 333
+ case C334 => 334
+ case C335 => 335
+ case C336 => 336
+ case C337 => 337
+ case C338 => 338
+ case C339 => 339
+ case C340 => 340
+ case C341 => 341
+ case C342 => 342
+ case C343 => 343
+ case C344 => 344
+ case C345 => 345
+ case C346 => 346
+ case C347 => 347
+ case C348 => 348
+ case C349 => 349
+ case C350 => 350
+ case C351 => 351
+ case C352 => 352
+ case C353 => 353
+ case C354 => 354
+ case C355 => 355
+ case C356 => 356
+ case C357 => 357
+ case C358 => 358
+ case C359 => 359
+ case C360 => 360
+ case C361 => 361
+ case C362 => 362
+ case C363 => 363
+ case C364 => 364
+ case C365 => 365
+ case C366 => 366
+ case C367 => 367
+ case C368 => 368
+ case C369 => 369
+ case C370 => 370
+ case C371 => 371
+ case C372 => 372
+ case C373 => 373
+ case C374 => 374
+ case C375 => 375
+ case C376 => 376
+ case C377 => 377
+ case C378 => 378
+ case C379 => 379
+ case C380 => 380
+ case C381 => 381
+ case C382 => 382
+ case C383 => 383
+ case C384 => 384
+ case C385 => 385
+ case C386 => 386
+ case C387 => 387
+ case C388 => 388
+ case C389 => 389
+ case C390 => 390
+ case C391 => 391
+// case C392 => 392
+ case C393 => 393
+ case C394 => 394
+ case C395 => 395
+ case C396 => 396
+// case C397 => 397
+ case C398 => 398
+ case C399 => 399
+ case C400 => 400
+ }
+}
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 2dad608451..bbf5e9b528 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -12,7 +12,7 @@ It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult())
^
patmatexhaust.scala:49: warning: match may not be exhaustive.
It would fail on the following inputs: Gp(), Gu
- def ma4(x:Deep) = x match { // missing cases: Gu, Gp
+ def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included
^
patmatexhaust.scala:55: warning: unreachable code
case _ if 1 == 0 =>
diff --git a/test/files/neg/patmatexhaust.flags b/test/files/neg/patmatexhaust.flags
index 85d8eb2ba2..3b01ca062c 100644
--- a/test/files/neg/patmatexhaust.flags
+++ b/test/files/neg/patmatexhaust.flags
@@ -1 +1 @@
--Xfatal-warnings
+-Xfatal-warnings -Ypatmat-exhaust-depth off \ No newline at end of file
diff --git a/test/files/neg/patmatexhaust.scala b/test/files/neg/patmatexhaust.scala
index f937197829..26f0c12a91 100644
--- a/test/files/neg/patmatexhaust.scala
+++ b/test/files/neg/patmatexhaust.scala
@@ -46,7 +46,7 @@ class TestSealedExhaustive { // compile only
case _ =>
}
- def ma4(x:Deep) = x match { // missing cases: Gu, Gp
+ def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included
case Ga =>
}
diff --git a/test/files/neg/reflection-names-neg.check b/test/files/neg/reflection-names-neg.check
deleted file mode 100644
index f941ec8dc1..0000000000
--- a/test/files/neg/reflection-names-neg.check
+++ /dev/null
@@ -1,13 +0,0 @@
-reflection-names-neg.scala:5: error: type mismatch;
- found : String("abc")
- required: reflect.runtime.universe.Name
-Note that implicit conversions are not applicable because they are ambiguous:
- both method stringToTermName in trait Names of type (s: String)reflect.runtime.universe.TermName
- and method stringToTypeName in trait Names of type (s: String)reflect.runtime.universe.TypeName
- are possible conversion functions from String("abc") to reflect.runtime.universe.Name
- val x2 = ("abc": Name) drop 1 // error
- ^
-reflection-names-neg.scala:5: error: value drop is not a member of reflect.runtime.universe.Name
- val x2 = ("abc": Name) drop 1 // error
- ^
-two errors found
diff --git a/test/files/neg/reflection-names-neg.scala b/test/files/neg/reflection-names-neg.scala
deleted file mode 100644
index 7283d16db9..0000000000
--- a/test/files/neg/reflection-names-neg.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.reflect.runtime.universe._
-
-object Test {
- val x1 = "abc" drop 1 // "bc": String
- val x2 = ("abc": Name) drop 1 // error
-}
diff --git a/test/files/neg/sammy_error_exist_no_crash.check b/test/files/neg/sammy_error_exist_no_crash.check
new file mode 100644
index 0000000000..a0d2237ce0
--- /dev/null
+++ b/test/files/neg/sammy_error_exist_no_crash.check
@@ -0,0 +1,6 @@
+sammy_error_exist_no_crash.scala:5: error: Could not derive subclass of F[? >: String]
+ (with SAM `def method apply(s: String)Int`)
+ based on: ((x$1: String) => x$1.<parseInt: error>).
+ bar(_.parseInt)
+ ^
+one error found
diff --git a/test/files/run/t5530.flags b/test/files/neg/sammy_error_exist_no_crash.flags
index e1b37447c9..e1b37447c9 100644
--- a/test/files/run/t5530.flags
+++ b/test/files/neg/sammy_error_exist_no_crash.flags
diff --git a/test/files/neg/sammy_error_exist_no_crash.scala b/test/files/neg/sammy_error_exist_no_crash.scala
new file mode 100644
index 0000000000..da7e47206f
--- /dev/null
+++ b/test/files/neg/sammy_error_exist_no_crash.scala
@@ -0,0 +1,6 @@
+abstract class F[T] { def apply(s: T): Int }
+
+object NeedsNiceError {
+ def bar(x: F[_ >: String]) = ???
+ bar(_.parseInt)
+} \ No newline at end of file
diff --git a/test/files/neg/sammy_restrictions.scala b/test/files/neg/sammy_restrictions.scala
index 5f1a04cd20..d003cfaf36 100644
--- a/test/files/neg/sammy_restrictions.scala
+++ b/test/files/neg/sammy_restrictions.scala
@@ -1,28 +1,28 @@
-class NoAbstract
+abstract class NoAbstract
-class TwoAbstract { def ap(a: Int): Int; def pa(a: Int): Int }
+abstract class TwoAbstract { def ap(a: Int): Int; def pa(a: Int): Int }
-class Base // check that the super class constructor isn't considered.
-class NoEmptyConstructor(a: Int) extends Base { def this(a: String) = this(0); def ap(a: Int): Int }
+abstract class Base // check that the super class constructor isn't considered.
+abstract class NoEmptyConstructor(a: Int) extends Base { def this(a: String) = this(0); def ap(a: Int): Int }
-class OneEmptyConstructor() { def this(a: Int) = this(); def ap(a: Int): Int }
+abstract class OneEmptyConstructor() { def this(a: Int) = this(); def ap(a: Int): Int }
-class OneEmptySecondaryConstructor(a: Int) { def this() = this(0); def ap(a: Int): Int }
+abstract class OneEmptySecondaryConstructor(a: Int) { def this() = this(0); def ap(a: Int): Int }
-class MultipleConstructorLists()() { def ap(a: Int): Int }
+abstract class MultipleConstructorLists()() { def ap(a: Int): Int }
-class MultipleMethodLists()() { def ap(a: Int)(): Int }
+abstract class MultipleMethodLists()() { def ap(a: Int)(): Int }
-class ImplicitConstructorParam()(implicit a: String) { def ap(a: Int): Int }
+abstract class ImplicitConstructorParam()(implicit a: String) { def ap(a: Int): Int }
-class ImplicitMethodParam() { def ap(a: Int)(implicit b: String): Int }
+abstract class ImplicitMethodParam() { def ap(a: Int)(implicit b: String): Int }
-class PolyClass[T] { def ap(a: T): T }
+abstract class PolyClass[T] { def ap(a: T): T }
-class PolyMethod { def ap[T](a: T): T }
+abstract class PolyMethod { def ap[T](a: T): T }
-class OneAbstract { def ap(a: Any): Any }
-class DerivedOneAbstract extends OneAbstract
+abstract class OneAbstract { def ap(a: Int): Any }
+abstract class DerivedOneAbstract extends OneAbstract
object Test {
implicit val s: String = ""
diff --git a/test/files/neg/structural.scala b/test/files/neg/structural.scala
index d783399317..00459676a9 100644
--- a/test/files/neg/structural.scala
+++ b/test/files/neg/structural.scala
@@ -11,13 +11,13 @@ object Test extends App {
def f2[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: B): Object; val x: B }) = x.m[Tata](x.x) //fail
def f3[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: C): Object; val x: C }) = x.m[Tata](x.x) //fail
def f4[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: D): Object; val x: D }) = x.m[Tata](x.x) //fail
- def f5[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: E): Object; val x: Tata }) = x.m[Tata](x.x) //suceed
+ def f5[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: E): Object; val x: Tata }) = x.m[Tata](x.x) //succeeds
- def f6[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): A }) = x.m[Tata](null) //suceed
- def f7[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): B }) = x.m[Tata](null) //suceed
- def f8[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): C }) = x.m[Tata](null) //suceed
+ def f6[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): A }) = x.m[Tata](null) //succeeds
+ def f7[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): B }) = x.m[Tata](null) //succeeds
+ def f8[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): C }) = x.m[Tata](null) //succeeds
def f9[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): D }) = x.m[Tata](null) //fail
- def f0[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): E }) = x.m[Tata](null) //suceed
+ def f0[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): E }) = x.m[Tata](null) //succeeds
}
diff --git a/test/files/neg/t0899.check b/test/files/neg/t0899.check
index 8b71be8e0c..28cb06ae5a 100644
--- a/test/files/neg/t0899.check
+++ b/test/files/neg/t0899.check
@@ -1,10 +1,10 @@
-t0899.scala:9: error: super may be not be used on value o
+t0899.scala:9: error: super may not be used on value o
override val o = "Ha! " + super.o
^
-t0899.scala:11: error: super may be not be used on variable v
+t0899.scala:11: error: super may not be used on variable v
super.v = "aa"
^
-t0899.scala:12: error: super may be not be used on variable v
+t0899.scala:12: error: super may not be used on variable v
println(super.v)
^
three errors found
diff --git a/test/files/neg/t1909-object.check b/test/files/neg/t1909-object.check
index 401c1f7ebf..7141c84d4b 100644
--- a/test/files/neg/t1909-object.check
+++ b/test/files/neg/t1909-object.check
@@ -1,4 +1,6 @@
-t1909-object.scala:4: error: !!! SI-1909 Unable to STATICally lift object InnerTrouble$1, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely.
+t1909-object.scala:4: warning: !!! SI-1909 Unable to STATICally lift object InnerTrouble$1, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely.
object InnerTrouble
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t2866.check b/test/files/neg/t2866.check
new file mode 100644
index 0000000000..340fb8da22
--- /dev/null
+++ b/test/files/neg/t2866.check
@@ -0,0 +1,17 @@
+t2866.scala:30: warning: imported `one' is permanently hidden by definition of value one
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ ^
+t2866.scala:42: error: ambiguous implicit values:
+ both value two of type Int
+ and value one in object A of type => Int
+ match expected type Int
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ ^
+t2866.scala:50: error: ambiguous implicit values:
+ both value two of type Int
+ and value one in object A of type => Int
+ match expected type Int
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ ^
+one warning found
+two errors found
diff --git a/test/files/neg/t2866.scala b/test/files/neg/t2866.scala
new file mode 100644
index 0000000000..55ebff9710
--- /dev/null
+++ b/test/files/neg/t2866.scala
@@ -0,0 +1,59 @@
+// for 2.7.x compatibility
+
+object A {
+ implicit val one = 1
+}
+
+object Test {
+
+ locally {
+ import A._
+ locally {
+ // assert(implicitly[Int] == 1) // error: could not find implicit value for parameter e: Int.
+ // !!! Why one A.one?
+ // (I assume you mean: why _not_ A.one? A.one is shadowed by local one.
+ // but the local one cannot be used yet because it does not have an explicit type.
+ implicit val one = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+ }
+
+ locally {
+ import A._
+ implicit val one: Int = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ // !!! Really?
+ //assert(implicitly[Int] == 1)
+ implicit val one = 2
+ assert(implicitly[Int] == 2) // !!! why not 2?
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one
+ assert(implicitly[Int] == 1)
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ }
+
+ locally {
+ import A._
+ assert(implicitly[Int] == 1)
+ implicit val two = 2
+ import A.{one => _}
+ assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6
+ }
+
+ locally {
+ import A.{one => _, _}
+ implicit val two = 2
+ assert(implicitly[Int] == 2) // not ambiguous in 2.8.0 nor ambiguous in 2.7.6
+ }
+
+}
diff --git a/test/files/neg/t3240.check b/test/files/neg/t3240.check
deleted file mode 100644
index efae682c66..0000000000
--- a/test/files/neg/t3240.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3240.scala:3: error: only classes can have declared but undefined members
- type t
- ^
-one error found
diff --git a/test/files/neg/t3909.check b/test/files/neg/t3909.check
index 7da0195607..052b49f855 100644
--- a/test/files/neg/t3909.check
+++ b/test/files/neg/t3909.check
@@ -1,4 +1,5 @@
t3909.scala:1: error: in object DO, multiple overloaded alternatives of m1 define default arguments
+Error occurred in an application involving default arguments.
object DO {
^
one error found
diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check
index 132dd91b50..d5711a889b 100644
--- a/test/files/neg/t4851.check
+++ b/test/files/neg/t4851.check
@@ -29,13 +29,13 @@ S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not b
val y2 = new Some(1, 2, 3)
^
S.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
- signature: J2[T](x: T): J2[T]
+ signature: J2(x: T): J2[T]
given arguments: <none>
after adaptation: new J2((): Unit)
val z1 = new J2
^
S.scala:10: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want.
- signature: J2[T](x: T): J2[T]
+ signature: J2(x: T): J2[T]
given arguments: <none>
after adaptation: new J2((): Unit)
val z2 = new J2()
diff --git a/test/files/neg/t4851.flags b/test/files/neg/t4851.flags
index ca0d0a0ba3..044ce22c84 100644
--- a/test/files/neg/t4851.flags
+++ b/test/files/neg/t4851.flags
@@ -1 +1 @@
--Ywarn-adapted-args -Xfatal-warnings -deprecation
+-Xlint:adapted-args -Xfatal-warnings -deprecation
diff --git a/test/files/neg/t4851/J2.java b/test/files/neg/t4851/J2.java
index 82954d9489..a90f48e269 100644
--- a/test/files/neg/t4851/J2.java
+++ b/test/files/neg/t4851/J2.java
@@ -1,11 +1,11 @@
public class J2<T> {
T x;
- public <T> J(T x) {
+ public J2(T x) {
this.x = x;
}
public String toString() {
return "J2:" + x.getClass();
}
-} \ No newline at end of file
+}
diff --git a/test/files/neg/t5044.check b/test/files/neg/t5044.check
index 197da2a4e8..dc3708123f 100644
--- a/test/files/neg/t5044.check
+++ b/test/files/neg/t5044.check
@@ -1,8 +1,8 @@
t5044.scala:7: error: recursive value a needs type
val id = m(a)
^
-t5044.scala:6: warning: type-checking the invocation of method foo checks if the named argument expression 'id = ...' is a valid assignment
-in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for id.
+t5044.scala:6: warning: failed to determine if 'id = ...' is a named argument or an assignment expression.
+an explicit type is required for the definition mentioned in the error message above.
val a = foo(id = 1)
^
one warning found
diff --git a/test/files/neg/t5091.check b/test/files/neg/t5091.check
new file mode 100644
index 0000000000..156f695f41
--- /dev/null
+++ b/test/files/neg/t5091.check
@@ -0,0 +1,9 @@
+t5091.scala:8: error: recursive value xxx needs type
+ val param = bar(xxx)
+ ^
+t5091.scala:7: warning: failed to determine if 'param = ...' is a named argument or an assignment expression.
+an explicit type is required for the definition mentioned in the error message above.
+ val xxx = foo(param = null)
+ ^
+one warning found
+one error found
diff --git a/test/pending/pos/t5091.scala b/test/files/neg/t5091.scala
index 217e83f66d..217e83f66d 100644
--- a/test/pending/pos/t5091.scala
+++ b/test/files/neg/t5091.scala
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
index 8a667f4b88..286ed9e04a 100644
--- a/test/files/neg/t5148.check
+++ b/test/files/neg/t5148.check
@@ -1,5 +1,11 @@
-error: bad symbolic reference to scala.tools.nsc.interpreter.IMain.Request encountered in class file 'Imports.class'.
-Cannot access type Request in class scala.tools.nsc.interpreter.IMain. The current classpath may be
-missing a definition for scala.tools.nsc.interpreter.IMain.Request, or Imports.class may have been compiled against a version that's
-incompatible with the one found on the current classpath.
-one error found
+error: missing or invalid dependency detected while loading class file 'Imports.class'.
+Could not access type Wrapper in class scala.tools.nsc.interpreter.IMain.Request,
+because it (or its dependencies) are missing. Check your build definition for
+missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
+A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain.Request.
+error: missing or invalid dependency detected while loading class file 'Imports.class'.
+Could not access type Request in class scala.tools.nsc.interpreter.IMain,
+because it (or its dependencies) are missing. Check your build definition for
+missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
+A full rebuild may help if 'Imports.class' was compiled against an incompatible version of scala.tools.nsc.interpreter.IMain.
+two errors found
diff --git a/test/files/neg/t562.check b/test/files/neg/t562.check
index 8c3823642a..95be075af1 100644
--- a/test/files/neg/t562.check
+++ b/test/files/neg/t562.check
@@ -1,4 +1,4 @@
-t562.scala:10: error: super may be not be used on value y
+t562.scala:10: error: super may not be used on value y
override val y = super.y;
^
one error found
diff --git a/test/files/neg/t5639b.check b/test/files/neg/t5639b.check
new file mode 100644
index 0000000000..faa1766660
--- /dev/null
+++ b/test/files/neg/t5639b.check
@@ -0,0 +1,4 @@
+A_2.scala:6: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one error found
diff --git a/test/files/neg/t5639b/A_1.scala b/test/files/neg/t5639b/A_1.scala
new file mode 100644
index 0000000000..c5da10eae4
--- /dev/null
+++ b/test/files/neg/t5639b/A_1.scala
@@ -0,0 +1,17 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+ // This implicit was being ignored by `isQualifyingImplicit`
+ // if the classpath contained a class file for `class Baz`.
+ // This is because the package scope contains a speculative
+ // symbol for `object Baz` which is entered by `SymbolLoaders`
+ // before looking inside the class file. (A Java originated
+ // classfile results in the class/module symbol pair.)
+}
diff --git a/test/files/neg/t5639b/A_2.scala b/test/files/neg/t5639b/A_2.scala
new file mode 100644
index 0000000000..2bb36273e0
--- /dev/null
+++ b/test/files/neg/t5639b/A_2.scala
@@ -0,0 +1,11 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+}
diff --git a/test/files/neg/t5675.check b/test/files/neg/t5675.check
index da608a2b78..3b3b2fa04c 100644
--- a/test/files/neg/t5675.check
+++ b/test/files/neg/t5675.check
@@ -1,2 +1,4 @@
-error: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t5691.check b/test/files/neg/t5691.check
new file mode 100644
index 0000000000..a51ca98a10
--- /dev/null
+++ b/test/files/neg/t5691.check
@@ -0,0 +1,24 @@
+t5691.scala:7: warning: type parameter D defined in method foobar shadows trait D defined in class B. You may want to rename your type parameter, or possibly remove it.
+ def foobar[D](in: D) = in.toString
+ ^
+t5691.scala:10: warning: type parameter D defined in type MySeq shadows trait D defined in class B. You may want to rename your type parameter, or possibly remove it.
+ type MySeq[D] = Seq[D]
+ ^
+t5691.scala:15: warning: type parameter T defined in method bar shadows type T defined in class Foo. You may want to rename your type parameter, or possibly remove it.
+ def bar[T](w: T) = w.toString
+ ^
+t5691.scala:13: warning: type parameter T defined in class Foo shadows type T defined in class B. You may want to rename your type parameter, or possibly remove it.
+ class Foo[T](t: T) {
+ ^
+t5691.scala:19: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it.
+ class C[M[List[_]]]
+ ^
+t5691.scala:20: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it.
+ type E[M[List[_]]] = Int
+ ^
+t5691.scala:21: warning: type parameter List defined in type M shadows type List defined in package object scala. You may want to rename your type parameter, or possibly remove it.
+ def foo[N[M[List[_]]]] = ???
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+7 warnings found
+one error found
diff --git a/test/files/neg/t5691.flags b/test/files/neg/t5691.flags
new file mode 100644
index 0000000000..0e09b8575b
--- /dev/null
+++ b/test/files/neg/t5691.flags
@@ -0,0 +1 @@
+-Xlint:type-parameter-shadow -language:higherKinds -Xfatal-warnings
diff --git a/test/files/neg/t5691.scala b/test/files/neg/t5691.scala
new file mode 100644
index 0000000000..e6a9bdc16a
--- /dev/null
+++ b/test/files/neg/t5691.scala
@@ -0,0 +1,27 @@
+class B {
+
+ type T = Int
+ trait D
+
+ // method parameter shadows some other type
+ def foobar[D](in: D) = in.toString
+
+ // type member's parameter shadows some other type
+ type MySeq[D] = Seq[D]
+
+ // class parameter shadows some other type
+ class Foo[T](t: T) {
+ // a type parameter shadows another type parameter
+ def bar[T](w: T) = w.toString
+ }
+
+ // even deeply nested...
+ class C[M[List[_]]]
+ type E[M[List[_]]] = Int
+ def foo[N[M[List[_]]]] = ???
+
+ // ...but not between type parameters in the same list
+ class F[A, M[L[A]]] // no warning!
+ type G[A, M[L[A]]] = Int // no warning!
+ def bar[A, N[M[L[A]]]] = ??? // no warning!
+}
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
index 13c78030d9..c9f4ddaec1 100644
--- a/test/files/neg/t6162-inheritance.check
+++ b/test/files/neg/t6162-inheritance.check
@@ -7,12 +7,6 @@ object SubT extends T
usage.scala:8: warning: inheritance from trait S in package t6126 is deprecated
new S {
^
-usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
-class SubFoo extends Foo
- ^
-usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated
-object SubT extends T
- ^
error: No warnings can be incurred under -Xfatal-warnings.
-5 warnings found
+three warnings found
one error found
diff --git a/test/files/neg/t6289.check b/test/files/neg/t6289.check
index f6f43cabd3..989932750f 100644
--- a/test/files/neg/t6289.check
+++ b/test/files/neg/t6289.check
@@ -3,7 +3,7 @@ t6289/J.java:2: method does not override or implement a method from a supertype
@Override public void foo() { }
^
1 error
-#partest java7
+#partest !java6
t6289/J.java:2: error: method does not override or implement a method from a supertype
@Override public void foo() { }
^
diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check
index a733d75354..f42f157371 100644
--- a/test/files/neg/t6567.check
+++ b/test/files/neg/t6567.check
@@ -4,6 +4,7 @@ t6567.scala:8: warning: Suspicious application of an implicit view (Test.this.a2
t6567.scala:10: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
val b: Option[B] = Option(a)
^
+warning: there was one feature warning; re-run with -feature for details
error: No warnings can be incurred under -Xfatal-warnings.
-two warnings found
+three warnings found
one error found
diff --git a/test/files/neg/t6582_exhaust_big.check b/test/files/neg/t6582_exhaust_big.check
new file mode 100644
index 0000000000..9e2be038b5
--- /dev/null
+++ b/test/files/neg/t6582_exhaust_big.check
@@ -0,0 +1,7 @@
+t6582_exhaust_big.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: Z11()
+ def foo(z: Z) = z match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t6582_exhaust_big.flags b/test/files/neg/t6582_exhaust_big.flags
new file mode 100644
index 0000000000..b5a8748652
--- /dev/null
+++ b/test/files/neg/t6582_exhaust_big.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
diff --git a/test/files/neg/t6582_exhaust_big.scala b/test/files/neg/t6582_exhaust_big.scala
new file mode 100644
index 0000000000..dd639eb56e
--- /dev/null
+++ b/test/files/neg/t6582_exhaust_big.scala
@@ -0,0 +1,32 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+}
diff --git a/test/files/neg/t6675b.scala b/test/files/neg/t6675b.scala
index c86c9c3955..da27e1b91f 100644
--- a/test/files/neg/t6675b.scala
+++ b/test/files/neg/t6675b.scala
@@ -13,7 +13,7 @@ object NativelyTwo {
}
-class A {
+class E {
def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn
def f2 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b)) => a } // no warn
def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a } // fail
diff --git a/test/files/neg/t6771b.check b/test/files/neg/t6771b.check
index ba99e9178d..0c9fae533e 100644
--- a/test/files/neg/t6771b.check
+++ b/test/files/neg/t6771b.check
@@ -1,4 +1,4 @@
-t6771b.scala:14: error: type mismatch;
+t6771b.scala:12: error: type mismatch;
found : x.type (with underlying type String)
required: Test.a.type
b = b match { case x => x }
diff --git a/test/files/neg/t6771b.scala b/test/files/neg/t6771b.scala
index 78f11f7750..9723f70290 100644
--- a/test/files/neg/t6771b.scala
+++ b/test/files/neg/t6771b.scala
@@ -6,8 +6,6 @@
// But, to the intrepid hacker who works on this, a few notes:
// You'll have to look into places in the pattern matcher that
// call `dealias`, and see if they need to be `dealiasWiden`.
-// For example, if `checkableType` used only `dealias`, `pos/t6671.scala`
-// would fail.
object Test {
val a = ""; var b: a.type = a
diff --git a/test/files/neg/t6902.scala b/test/files/neg/t6902.scala
index ce5ff8b6fb..627c324279 100644
--- a/test/files/neg/t6902.scala
+++ b/test/files/neg/t6902.scala
@@ -16,7 +16,7 @@ object Test {
// at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
// at scala.tools.nsc.Global.abort(Global.scala:249)
// at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
- ((1: Byte): @unchecked @annotation.switch) match {
+ ((1: Byte): @unchecked) match {
case 1 => 2
case 1 => 3 // crash
}
diff --git a/test/files/neg/t6988.check b/test/files/neg/t6988.check
new file mode 100644
index 0000000000..acb7b3cb08
--- /dev/null
+++ b/test/files/neg/t6988.check
@@ -0,0 +1,7 @@
+t6988.scala:3: error: annotation argument needs to be a constant; found: 13.asInstanceOf[Long]
+@SerialVersionUID(13.asInstanceOf[Long]) case class IdentifyMessage1(userName: String, user: User, code: Int)
+ ^
+t6988.scala:8: error: annotation argument needs to be a constant; found: O.SerialUID
+@SerialVersionUID(O.SerialUID) case class IdentifyMessage3(userName: String, user: User, code: Int)
+ ^
+two errors found
diff --git a/test/files/neg/t6988.scala b/test/files/neg/t6988.scala
new file mode 100644
index 0000000000..8171dc9dd0
--- /dev/null
+++ b/test/files/neg/t6988.scala
@@ -0,0 +1,10 @@
+case class User()
+
+@SerialVersionUID(13.asInstanceOf[Long]) case class IdentifyMessage1(userName: String, user: User, code: Int)
+@SerialVersionUID(13l) case class IdentifyMessage2(userName: String, user: User, code: Int)
+object O {
+ val SerialUID = "13".toLong
+}
+@SerialVersionUID(O.SerialUID) case class IdentifyMessage3(userName: String, user: User, code: Int)
+
+
diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check
index c6a7af9a23..3988460d4b 100644
--- a/test/files/neg/t7157.check
+++ b/test/files/neg/t7157.check
@@ -7,7 +7,8 @@ Test_2.scala:6: error: too many arguments for macro method m1_0_0: ()Unit
Test_2.scala:7: error: too many arguments for macro method m1_0_0: ()Unit
m1_0_0(1, 2, 3)
^
-Test_2.scala:9: error: macro applications do not support named and/or default arguments
+Test_2.scala:9: error: not enough arguments for macro method m1_1_1: (x: Int)Unit.
+Unspecified value parameter x.
m1_1_1()
^
Test_2.scala:11: error: too many arguments for macro method m1_1_1: (x: Int)Unit
@@ -16,22 +17,27 @@ Test_2.scala:11: error: too many arguments for macro method m1_1_1: (x: Int)Unit
Test_2.scala:12: error: too many arguments for macro method m1_1_1: (x: Int)Unit
m1_1_1(1, 2, 3)
^
-Test_2.scala:14: error: macro applications do not support named and/or default arguments
+Test_2.scala:14: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit.
+Unspecified value parameters x, y.
m1_2_2()
^
-Test_2.scala:15: error: macro applications do not support named and/or default arguments
+Test_2.scala:15: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit.
+Unspecified value parameter y.
m1_2_2(1)
^
Test_2.scala:17: error: too many arguments for macro method m1_2_2: (x: Int, y: Int)Unit
m1_2_2(1, 2, 3)
^
-Test_2.scala:24: error: macro applications do not support named and/or default arguments
+Test_2.scala:24: error: not enough arguments for macro method m1_1_inf: (x: Int, y: Int*)Unit.
+Unspecified value parameters x, y.
m1_1_inf()
^
-Test_2.scala:29: error: macro applications do not support named and/or default arguments
+Test_2.scala:29: error: not enough arguments for macro method m1_2_inf: (x: Int, y: Int, z: Int*)Unit.
+Unspecified value parameters x, y, z.
m1_2_inf()
^
-Test_2.scala:30: error: macro applications do not support named and/or default arguments
+Test_2.scala:30: error: not enough arguments for macro method m1_2_inf: (x: Int, y: Int, z: Int*)Unit.
+Unspecified value parameters y, z.
m1_2_inf(1)
^
Test_2.scala:35: error: too many arguments for macro method m2_0_0: ()Unit
@@ -43,7 +49,8 @@ Test_2.scala:36: error: too many arguments for macro method m2_0_0: ()Unit
Test_2.scala:37: error: too many arguments for macro method m2_0_0: ()Unit
m2_0_0()(1, 2, 3)
^
-Test_2.scala:39: error: macro applications do not support named and/or default arguments
+Test_2.scala:39: error: not enough arguments for macro method m2_1_1: (x: Int)Unit.
+Unspecified value parameter x.
m2_1_1()()
^
Test_2.scala:41: error: too many arguments for macro method m2_1_1: (x: Int)Unit
@@ -52,22 +59,27 @@ Test_2.scala:41: error: too many arguments for macro method m2_1_1: (x: Int)Unit
Test_2.scala:42: error: too many arguments for macro method m2_1_1: (x: Int)Unit
m2_1_1()(1, 2, 3)
^
-Test_2.scala:44: error: macro applications do not support named and/or default arguments
+Test_2.scala:44: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit.
+Unspecified value parameters x, y.
m2_2_2()()
^
-Test_2.scala:45: error: macro applications do not support named and/or default arguments
+Test_2.scala:45: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit.
+Unspecified value parameter y.
m2_2_2()(1)
^
Test_2.scala:47: error: too many arguments for macro method m2_2_2: (x: Int, y: Int)Unit
m2_2_2()(1, 2, 3)
^
-Test_2.scala:54: error: macro applications do not support named and/or default arguments
+Test_2.scala:54: error: not enough arguments for macro method m2_1_inf: (x: Int, y: Int*)Unit.
+Unspecified value parameters x, y.
m2_1_inf()()
^
-Test_2.scala:59: error: macro applications do not support named and/or default arguments
+Test_2.scala:59: error: not enough arguments for macro method m2_2_inf: (x: Int, y: Int, z: Int*)Unit.
+Unspecified value parameters x, y, z.
m2_2_inf()()
^
-Test_2.scala:60: error: macro applications do not support named and/or default arguments
+Test_2.scala:60: error: not enough arguments for macro method m2_2_inf: (x: Int, y: Int, z: Int*)Unit.
+Unspecified value parameters y, z.
m2_2_inf()(1)
^
24 errors found
diff --git a/test/files/neg/t7602.check b/test/files/neg/t7602.check
new file mode 100644
index 0000000000..5bb1450d7d
--- /dev/null
+++ b/test/files/neg/t7602.check
@@ -0,0 +1,5 @@
+t7602.scala:16: error: method foo is defined twice
+ conflicting symbols both originated in file 't7602.scala'
+ def foo : Device
+ ^
+one error found
diff --git a/test/files/neg/t7602.scala b/test/files/neg/t7602.scala
new file mode 100644
index 0000000000..5a9444a1ab
--- /dev/null
+++ b/test/files/neg/t7602.scala
@@ -0,0 +1,26 @@
+trait Table[T]{
+ def foo : T
+}
+trait Computer
+trait Device
+
+object schema{
+ def lub[T]( a:T, b:T ) = ???
+ lub(null:Computers,null:Devices)
+}
+trait Computers extends Table[Computer]{
+ def foo : Computer
+}
+trait Devices extends Table[Device]{
+ def foo : Device
+ def foo : Device
+}
+/* Was:
+Exception in thread "main" java.lang.AssertionError: assertion failed: List(method foo, method foo)
+ at scala.Predef$.assert(Predef.scala:165)
+ at scala.reflect.internal.Symbols$Symbol.suchThat(Symbols.scala:1916)
+ at scala.reflect.internal.tpe.GlbLubs$$anonfun$23.apply(GlbLubs.scala:350)
+ at scala.reflect.internal.tpe.GlbLubs$$anonfun$23.apply(GlbLubs.scala:349)
+ at scala.collection.immutable.List.map(List.scala:272)
+ at scala.reflect.internal.tpe.GlbLubs$class.lubsym$1(GlbLubs.scala:349)
+*/ \ No newline at end of file
diff --git a/test/files/neg/t7623.check b/test/files/neg/t7623.check
new file mode 100644
index 0000000000..db368dd369
--- /dev/null
+++ b/test/files/neg/t7623.check
@@ -0,0 +1,21 @@
+t7623.scala:19: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def f = "" match { case X(s) => }
+ ^
+t7623.scala:21: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def g = "" match { case X(s, t) => }
+ ^
+t7623.scala:23: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def h = "" match { case X(s, t, u @ _*) => }
+ ^
+t7623.scala:9: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def f = C("") match { case C(s) => }
+ ^
+t7623.scala:11: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def g = C("") match { case C(s, t) => }
+ ^
+t7623.scala:13: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).
+ def h = C("") match { case C(s, t, u @ _*) => }
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t7623.flags b/test/files/neg/t7623.flags
new file mode 100644
index 0000000000..74c9e38323
--- /dev/null
+++ b/test/files/neg/t7623.flags
@@ -0,0 +1 @@
+-Xlint:stars-align -Xfatal-warnings
diff --git a/test/files/neg/t7623.scala b/test/files/neg/t7623.scala
new file mode 100644
index 0000000000..5c40f37bc1
--- /dev/null
+++ b/test/files/neg/t7623.scala
@@ -0,0 +1,38 @@
+
+
+case class C(s: String, xs: Int*)
+
+object X { def unapplySeq(a: Any): Option[(String, Seq[Int])] = Some("", List(1,2,3)) }
+
+// for case classes with varargs, avoid misaligned patterns
+trait Ctest {
+ def f = C("") match { case C(s) => }
+
+ def g = C("") match { case C(s, t) => }
+
+ def h = C("") match { case C(s, t, u @ _*) => }
+
+ def ok = C("") match { case C(s, u @ _*) => }
+}
+// for extractors that unapplySeq: Option[(Something, Seq[_])], avoid misaligned patterns
+trait Xtest {
+ def f = "" match { case X(s) => }
+
+ def g = "" match { case X(s, t) => }
+
+ def h = "" match { case X(s, t, u @ _*) => }
+
+ def ok = "" match { case X(s, u @ _*) => }
+}
+// for extractors that unapplySeq: Option[Seq[_]], anything goes
+trait Rtest {
+ val r = "(a+)".r
+
+ def f = "" match { case r(s) => }
+
+ def g = "" match { case r(s, t) => }
+
+ def h = "" match { case r(s, t, u @ _*) => }
+
+ def whatever = "" match { case r(u @ _*) => }
+}
diff --git a/test/files/neg/t7636.check b/test/files/neg/t7636.check
index f70d50bee3..12391cccc8 100644
--- a/test/files/neg/t7636.check
+++ b/test/files/neg/t7636.check
@@ -4,7 +4,7 @@ t7636.scala:3: error: illegal inheritance;
^
t7636.scala:3: error: type mismatch;
found : Either[_$2,_$3(in constructor C)] where type _$3(in constructor C), type _$2
- required: Either[_, _$3(in object Main)] where type _$3(in object Main)
+ required: Either[_, _$3(in value <local Main>)] where type _$3(in value <local Main>)
class C extends ResultTable(Left(5):Either[_,_])(5)
^
two errors found
diff --git a/test/files/neg/t7848-interp-warn.check b/test/files/neg/t7848-interp-warn.check
index b7df6d8ce2..637fc8941a 100644
--- a/test/files/neg/t7848-interp-warn.check
+++ b/test/files/neg/t7848-interp-warn.check
@@ -1,12 +1,15 @@
-t7848-interp-warn.scala:8: warning: `$foo` looks like an interpolated identifier! Did you forget the interpolator?
+t7848-interp-warn.scala:8: warning: possible missing interpolator: detected interpolated identifier `$foo`
"An important $foo message!"
^
-t7848-interp-warn.scala:12: warning: That looks like an interpolated expression! Did you forget the interpolator?
+t7848-interp-warn.scala:12: warning: possible missing interpolator: detected an interpolated expression
"A doubly important ${foo * 2} message!"
^
-t7848-interp-warn.scala:16: warning: `$bar` looks like an interpolated identifier! Did you forget the interpolator?
+t7848-interp-warn.scala:15: warning: possible missing interpolator: detected interpolated identifier `$bar`
+ def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test
+ ^
+t7848-interp-warn.scala:16: warning: possible missing interpolator: detected interpolated identifier `$bar`
def j = s"Try using '${ "something like $bar" }' instead." // warn
^
error: No warnings can be incurred under -Xfatal-warnings.
-three warnings found
+four warnings found
one error found
diff --git a/test/files/neg/t7848-interp-warn.flags b/test/files/neg/t7848-interp-warn.flags
index 7949c2afa2..b0d7bc25cb 100644
--- a/test/files/neg/t7848-interp-warn.flags
+++ b/test/files/neg/t7848-interp-warn.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
+-Xlint:missing-interpolator -Xfatal-warnings
diff --git a/test/files/neg/t7848-interp-warn.scala b/test/files/neg/t7848-interp-warn.scala
index 3887aff8de..a76141041d 100644
--- a/test/files/neg/t7848-interp-warn.scala
+++ b/test/files/neg/t7848-interp-warn.scala
@@ -12,7 +12,7 @@ object Test {
"A doubly important ${foo * 2} message!"
}
def h = s"Try using '$$bar' instead." // no warn
- def i = s"Try using '${ "$bar" }' instead." // no warn on space test
+ def i = s"Try using '${ "$bar" }' instead." // was: no warn on space test
def j = s"Try using '${ "something like $bar" }' instead." // warn
def k = f"Try using '$bar' instead." // no warn on other std interps
}
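
Minimal sketch (illustrative, not part of the patch): the reworded missing-interpolator lint above is normally addressed by adding the interpolator prefix, or by escaping a deliberate dollar sign; the names below are made up for the example.

    object InterpolatorFixSketch {
      val foo = 42
      def f = s"An important $foo message!"            // s prefix added, so $foo is interpolated
      def g = s"A doubly important ${foo * 2} message!"
      def h = s"Try using '$$foo' instead."             // $$ yields a literal $ inside an interpolator
    }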
diff --git a/test/files/neg/t8035-no-adapted-args.check b/test/files/neg/t8035-no-adapted-args.check
new file mode 100644
index 0000000000..43637b2c1f
--- /dev/null
+++ b/test/files/neg/t8035-no-adapted-args.check
@@ -0,0 +1,21 @@
+t8035-no-adapted-args.scala:4: warning: No automatic adaptation here: use explicit parentheses.
+ signature: Test.f[T](x: T): Int
+ given arguments: 1, 2, 3
+ after adaptation: Test.f((1, 2, 3): (Int, Int, Int))
+ f(1, 2, 3)
+ ^
+t8035-no-adapted-args.scala:4: error: too many arguments for method f: (x: (Int, Int, Int))Int
+ f(1, 2, 3)
+ ^
+t8035-no-adapted-args.scala:5: warning: No automatic adaptation here: use explicit parentheses.
+ signature: Test.f[T](x: T): Int
+ given arguments: <none>
+ after adaptation: Test.f((): Unit)
+ f()
+ ^
+t8035-no-adapted-args.scala:5: error: not enough arguments for method f: (x: Unit)Int.
+Unspecified value parameter x.
+ f()
+ ^
+two warnings found
+two errors found
diff --git a/test/files/neg/t8035-no-adapted-args.flags b/test/files/neg/t8035-no-adapted-args.flags
new file mode 100644
index 0000000000..b3e8c505e2
--- /dev/null
+++ b/test/files/neg/t8035-no-adapted-args.flags
@@ -0,0 +1 @@
+-Yno-adapted-args \ No newline at end of file
diff --git a/test/files/neg/t8035-no-adapted-args.scala b/test/files/neg/t8035-no-adapted-args.scala
new file mode 100644
index 0000000000..82690ebe94
--- /dev/null
+++ b/test/files/neg/t8035-no-adapted-args.scala
@@ -0,0 +1,6 @@
+object Test {
+ def f[T](x: T) = 0
+
+ f(1, 2, 3)
+ f()
+}
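
Minimal sketch (illustrative, not part of the patch): the explicit call forms that compile once -Yno-adapted-args rejects auto-tupling and Unit insertion, matching the adaptations named in the new check file above.

    object NoAdaptedArgsSketch {
      def f[T](x: T) = 0
      f((1, 2, 3))   // pass the tuple explicitly instead of relying on adaptation
      f(())          // pass the Unit value explicitly
    }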
diff --git a/test/files/neg/t8217-local-alias-requires-rhs.check b/test/files/neg/t8217-local-alias-requires-rhs.check
new file mode 100644
index 0000000000..0d4f0864ba
--- /dev/null
+++ b/test/files/neg/t8217-local-alias-requires-rhs.check
@@ -0,0 +1,10 @@
+t8217-local-alias-requires-rhs.scala:6: error: only classes can have declared but undefined members
+ type B
+ ^
+t8217-local-alias-requires-rhs.scala:3: error: only classes can have declared but undefined members
+ type A
+ ^
+t8217-local-alias-requires-rhs.scala:14: error: only classes can have declared but undefined members
+ def this(a: Any) = { this(); type C }
+ ^
+three errors found
diff --git a/test/files/neg/t8217-local-alias-requires-rhs.scala b/test/files/neg/t8217-local-alias-requires-rhs.scala
new file mode 100644
index 0000000000..12b7976835
--- /dev/null
+++ b/test/files/neg/t8217-local-alias-requires-rhs.scala
@@ -0,0 +1,15 @@
+trait Alias {
+ def foo = {
+ type A
+ }
+ val bar = {
+ type B
+ object O {
+ type OK
+ }
+ }
+}
+
+class C {
+ def this(a: Any) = { this(); type C }
+}
diff --git a/test/files/neg/t8266-invalid-interp.check b/test/files/neg/t8266-invalid-interp.check
index 70dd4081b0..bb2d44a80c 100644
--- a/test/files/neg/t8266-invalid-interp.check
+++ b/test/files/neg/t8266-invalid-interp.check
@@ -1,10 +1,10 @@
t8266-invalid-interp.scala:4: error: Trailing '\' escapes nothing.
f"a\",
^
-t8266-invalid-interp.scala:5: error: invalid escape character at index 1 in "a\xc"
+t8266-invalid-interp.scala:5: error: invalid escape '\x' not one of [\b, \t, \n, \f, \r, \\, \", \'] at index 1 in "a\xc". Use \\ for literal \.
f"a\xc",
^
-t8266-invalid-interp.scala:7: error: invalid escape character at index 1 in "a\vc"
+t8266-invalid-interp.scala:7: error: invalid escape '\v' not one of [\b, \t, \n, \f, \r, \\, \", \'] at index 1 in "a\vc". Use \\ for literal \.
f"a\vc"
^
three errors found
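
Minimal sketch (illustrative, not part of the patch): the escapes listed in the improved message are the ones the interpolators accept, and a doubled backslash is how a literal backslash is written.

    object EscapeSketch {
      val tab       = f"a\tc"    // \t is one of the accepted escapes
      val backslash = f"a\\xc"   // \\ produces a literal backslash, so the 'x' is ordinary text
    }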
diff --git a/test/files/neg/t8291.check b/test/files/neg/t8291.check
new file mode 100644
index 0000000000..c9972e5575
--- /dev/null
+++ b/test/files/neg/t8291.check
@@ -0,0 +1,7 @@
+t8291.scala:5: error: Could not find implicit for Int or String
+ implicitly[X[Int, String]]
+ ^
+t8291.scala:6: error: Could not find implicit for Int or String
+ implicitly[Z[String]]
+ ^
+two errors found
diff --git a/test/files/neg/t8291.scala b/test/files/neg/t8291.scala
new file mode 100644
index 0000000000..b344586a56
--- /dev/null
+++ b/test/files/neg/t8291.scala
@@ -0,0 +1,7 @@
+@scala.annotation.implicitNotFound("Could not find implicit for ${T} or ${U}") trait X[T, U]
+
+object Test {
+ type Z[U] = X[Int, U]
+ implicitly[X[Int, String]]
+ implicitly[Z[String]]
+}
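
Minimal sketch (illustrative, not part of the patch): the new check file verifies that the custom @implicitNotFound message survives the type alias Z; with an instance in scope, both summons compile.

    object ImplicitNotFoundSketch {
      @scala.annotation.implicitNotFound("Could not find implicit for ${T} or ${U}")
      trait X[T, U]
      type Z[U] = X[Int, U]
      implicit val xIntString: X[Int, String] = new X[Int, String] {}
      implicitly[X[Int, String]]   // found via the instance above
      implicitly[Z[String]]        // the alias expands to X[Int, String], same instance
    }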
diff --git a/test/files/neg/t8325-b.check b/test/files/neg/t8325-b.check
new file mode 100644
index 0000000000..ec80826dc0
--- /dev/null
+++ b/test/files/neg/t8325-b.check
@@ -0,0 +1,10 @@
+t8325-b.scala:3: error: Unmatched closing brace '}' ignored here
+ def k(is: Int*} = ???
+ ^
+t8325-b.scala:3: error: ';' expected but '=' found.
+ def k(is: Int*} = ???
+ ^
+t8325-b.scala:4: error: eof expected but '}' found.
+}
+^
+three errors found
diff --git a/test/files/neg/t8325-b.scala b/test/files/neg/t8325-b.scala
new file mode 100644
index 0000000000..6ac78708bb
--- /dev/null
+++ b/test/files/neg/t8325-b.scala
@@ -0,0 +1,4 @@
+
+trait Test {
+ def k(is: Int*} = ???
+}
diff --git a/test/files/neg/t8325-c.check b/test/files/neg/t8325-c.check
new file mode 100644
index 0000000000..51ea4988a6
--- /dev/null
+++ b/test/files/neg/t8325-c.check
@@ -0,0 +1,7 @@
+t8325-c.scala:3: error: identifier expected but ')' found.
+ def k(xx: Int`*`) = ???
+ ^
+t8325-c.scala:4: error: ')' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/files/neg/t8325-c.scala b/test/files/neg/t8325-c.scala
new file mode 100644
index 0000000000..076202df3f
--- /dev/null
+++ b/test/files/neg/t8325-c.scala
@@ -0,0 +1,4 @@
+
+trait Test {
+ def k(xx: Int`*`) = ???
+}
diff --git a/test/files/neg/t8325.check b/test/files/neg/t8325.check
new file mode 100644
index 0000000000..175a0db415
--- /dev/null
+++ b/test/files/neg/t8325.check
@@ -0,0 +1,15 @@
+t8325.scala:5: error: *-parameter must come last
+ def f(is: Int*, s: String) = ???
+ ^
+t8325.scala:7: error: *-parameter must come last
+ def h(is: Int * String *, s: String) = ???
+ ^
+t8325.scala:10: error: type mismatch;
+ found : Int(5)
+ required: Int*
+ def j(is: Int* = 5) = ???
+ ^
+t8325.scala:10: error: a parameter section with a `*'-parameter is not allowed to have default arguments
+ def j(is: Int* = 5) = ???
+ ^
+four errors found
diff --git a/test/files/neg/t8325.scala b/test/files/neg/t8325.scala
new file mode 100644
index 0000000000..3813797e83
--- /dev/null
+++ b/test/files/neg/t8325.scala
@@ -0,0 +1,11 @@
+
+trait Test {
+ type OK[A,B] = A Tuple2 B
+ type *[A,B] = A Tuple2 B
+ def f(is: Int*, s: String) = ???
+ def g(is: Int * String, s: String) = ??? // OK
+ def h(is: Int * String *, s: String) = ???
+ // won't recover from the following
+ //def i(is: Int OK) = ??? //error: identifier expected but ')' found.
+ def j(is: Int* = 5) = ???
+}
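
Minimal sketch (illustrative, not part of the patch): the forms the parser accepts — a repeated parameter must come last in its section and cannot take a default argument.

    trait VarargsSketch {
      def f(s: String, is: Int*) = ???   // ok: the Int* parameter comes last
      def g(s: String)(is: Int*) = ???   // ok: or give it its own parameter section
    }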
diff --git a/test/files/neg/t8430.check b/test/files/neg/t8430.check
new file mode 100644
index 0000000000..dbc0c70bba
--- /dev/null
+++ b/test/files/neg/t8430.check
@@ -0,0 +1,27 @@
+t8430.scala:15: warning: match may not be exhaustive.
+It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ ^
+t8430.scala:16: warning: match may not be exhaustive.
+It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ ^
+t8430.scala:17: warning: match may not be exhaustive.
+It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ ^
+t8430.scala:18: warning: match may not be exhaustive.
+It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ ^
+t8430.scala:19: warning: match may not be exhaustive.
+It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ ^
+t8430.scala:20: warning: match may not be exhaustive.
+It would fail on the following inputs: LetC, LetF, LetL(BooleanLit), LetL(IntLit), LetL(UnitLit), LetP
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t8430.flags b/test/files/neg/t8430.flags
new file mode 100644
index 0000000000..6f60189a8d
--- /dev/null
+++ b/test/files/neg/t8430.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ypatmat-exhaust-depth off
diff --git a/test/files/neg/t8430.scala b/test/files/neg/t8430.scala
new file mode 100644
index 0000000000..4166b08a0a
--- /dev/null
+++ b/test/files/neg/t8430.scala
@@ -0,0 +1,32 @@
+sealed trait CL3Literal
+case object IntLit extends CL3Literal
+case object CharLit extends CL3Literal
+case object BooleanLit extends CL3Literal
+case object UnitLit extends CL3Literal
+
+
+sealed trait Tree
+case class LetL(value: CL3Literal) extends Tree
+case object LetP extends Tree
+case object LetC extends Tree
+case object LetF extends Tree
+
+object Test {
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ (tree: Tree) => tree match {case LetL(CharLit) => ??? }
+ // After the first patch for SI-8430, we achieve stability: all of
+ // these get the same warning:
+ //
+ // ??, LetC, LetF, LetL(IntLit), LetP
+ //
+ // Before, it was non-deterministic.
+ //
+ // However, our list of counter examples is itself non-exhaustive.
+ // We need to rework counter example generation to fix that.
+ //
+ // That work is the subject of SI-7746
+}
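
Minimal sketch (illustrative, not part of the patch): covering the remaining constructors makes the match exhaustive, so it would not warn regardless of the -Ypatmat-exhaust-depth setting.

    object ExhaustiveSketch {
      sealed trait CL3Literal
      case object IntLit extends CL3Literal
      case object CharLit extends CL3Literal
      sealed trait Tree
      case class LetL(value: CL3Literal) extends Tree
      case object LetP extends Tree
      def describe(tree: Tree): String = tree match {
        case LetL(CharLit) => "char literal"
        case LetL(_)       => "other literal"
        case LetP          => "primitive"
      }
    }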
diff --git a/test/files/neg/t845.check b/test/files/neg/t845.check
deleted file mode 100644
index 07ed7e417b..0000000000
--- a/test/files/neg/t845.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t845.scala:4: error: only classes can have declared but undefined members
- type Bar;
- ^
-one error found
diff --git a/test/files/neg/t8450.check b/test/files/neg/t8450.check
new file mode 100644
index 0000000000..eeabb9730c
--- /dev/null
+++ b/test/files/neg/t8450.check
@@ -0,0 +1,6 @@
+t8450.scala:5: warning: implicit numeric widening
+ def elapsed: Foo = (System.nanoTime - 100L).foo
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8450.flags b/test/files/neg/t8450.flags
new file mode 100644
index 0000000000..9a1332d7af
--- /dev/null
+++ b/test/files/neg/t8450.flags
@@ -0,0 +1 @@
+-Ywarn-numeric-widen -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t8450.scala b/test/files/neg/t8450.scala
new file mode 100644
index 0000000000..f20ed2bc31
--- /dev/null
+++ b/test/files/neg/t8450.scala
@@ -0,0 +1,12 @@
+trait Foo
+
+class WarnWidening {
+ implicit class FooDouble(d: Double) { def foo = new Foo {} }
+ def elapsed: Foo = (System.nanoTime - 100L).foo
+}
+
+class NoWarnWidening {
+ implicit class FooLong(l: Long) { def foo = new Foo {} }
+ implicit class FooDouble(d: Double) { def foo = new Foo {} }
+ def elapsed: Foo = (System.nanoTime - 100L).foo
+}
diff --git a/test/files/neg/t8463.check b/test/files/neg/t8463.check
new file mode 100644
index 0000000000..9aaacf8391
--- /dev/null
+++ b/test/files/neg/t8463.check
@@ -0,0 +1,27 @@
+t8463.scala:5: error: type mismatch;
+ found : Long
+ required: ?T[Long]
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method longWrapper in class LowPriorityImplicits of type (x: Long)scala.runtime.RichLong
+ and method ArrowAssoc in object Predef of type [A](self: A)ArrowAssoc[A]
+ are possible conversion functions from Long to ?T[Long]
+ insertCell(Foo(5))
+ ^
+t8463.scala:5: error: no type parameters for method apply: (activity: T[Long])Test.Foo[T] in object Foo exist so that it can be applied to arguments (Long)
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : Long
+ required: ?T[Long]
+ insertCell(Foo(5))
+ ^
+t8463.scala:5: error: type mismatch;
+ found : Long(5L)
+ required: T[Long]
+ insertCell(Foo(5))
+ ^
+t8463.scala:5: error: type mismatch;
+ found : Test.Foo[T]
+ required: Test.Foo[Test.Cell]
+ insertCell(Foo(5))
+ ^
+four errors found
diff --git a/test/files/neg/t8463.scala b/test/files/neg/t8463.scala
new file mode 100644
index 0000000000..7c954fd834
--- /dev/null
+++ b/test/files/neg/t8463.scala
@@ -0,0 +1,38 @@
+object Test {
+ case class Foo[+T[_]](activity:T[Long])
+ type Cell[T] = T
+ def insertCell(u:Foo[Cell]) = ???
+ insertCell(Foo(5))
+}
+
+/* If SI-8230 is fixed, and `viewExists` is changed to no longer leak
+ ambiguity errors, you might expect the check file for this test to
+ change as follows:
+
+@@ -1,18 +1,10 @@
+-t8463.scala:5: error: no type parameters for method apply: (activity:
+- --- because ---
+-argument expression's type is not compatible with formal parameter ty
++t8463.scala:5: error: type mismatch;
+ found : Long
+ required: ?T[Long]
++Note that implicit conversions are not applicable because they are am
++ both method longWrapper in class LowPriorityImplicits of type (x: Lo
++ and method ArrowAssoc in object Predef of type [A](self: A)ArrowAsso
++ are possible conversion functions from Long to ?T[Long]
+ insertCell(Foo(5))
+- ^
+-t8463.scala:5: error: type mismatch;
+- found : Long(5L)
+- required: T[Long]
+- insertCell(Foo(5))
+- ^
+-t8463.scala:5: error: type mismatch;
+- found : Test.Foo[T]
+- required: Test.Foo[Test.Cell]
+- insertCell(Foo(5))
+- ^
+-three errors found
++ ^
++one error found
+*/
diff --git a/test/files/neg/t8525.check b/test/files/neg/t8525.check
new file mode 100644
index 0000000000..5287e43b7a
--- /dev/null
+++ b/test/files/neg/t8525.check
@@ -0,0 +1,15 @@
+t8525.scala:7: warning: Adapting argument list by creating a 2-tuple: this may not be what you want.
+ signature: X.f(p: (Int, Int)): Int
+ given arguments: 3, 4
+ after adaptation: X.f((3, 4): (Int, Int))
+ def g = f(3, 4) // adapted
+ ^
+t8525.scala:9: warning: private[this] value name in class X shadows mutable name inherited from class Named. Changes to name will not be visible within class X - you may want to give them distinct names.
+ override def toString = name // shadowing mutable var name
+ ^
+t8525.scala:8: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead
+ def u: Unit = () // unitarian universalist
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t8525.flags b/test/files/neg/t8525.flags
new file mode 100644
index 0000000000..53b2dfe7ec
--- /dev/null
+++ b/test/files/neg/t8525.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint:-missing-interpolator -Xlint
diff --git a/test/files/neg/t8525.scala b/test/files/neg/t8525.scala
new file mode 100644
index 0000000000..7bed04904f
--- /dev/null
+++ b/test/files/neg/t8525.scala
@@ -0,0 +1,10 @@
+
+class Named(var name: String)
+
+class X(name: String) extends Named(name) {
+ def x = "Hi, $name" // missing interp
+ def f(p: (Int, Int)): Int = p._1 * p._2
+ def g = f(3, 4) // adapted
+ def u: Unit = () // unitarian universalist
+ override def toString = name // shadowing mutable var name
+}
diff --git a/test/files/neg/t8534.check b/test/files/neg/t8534.check
new file mode 100644
index 0000000000..297e7c1beb
--- /dev/null
+++ b/test/files/neg/t8534.check
@@ -0,0 +1,4 @@
+t8534.scala:6: error: MyTrait is not an enclosing class
+ class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false}
+ ^
+one error found
diff --git a/test/files/neg/t8534.scala b/test/files/neg/t8534.scala
new file mode 100644
index 0000000000..f118d22b82
--- /dev/null
+++ b/test/files/neg/t8534.scala
@@ -0,0 +1,7 @@
+object line1 {
+ trait MyTrait
+}
+object line2 {
+ import line2._
+ class BugTest {def isTheBugHere(in: MyTrait.this.type#SomeData) = false}
+}
diff --git a/test/files/neg/t8534b.check b/test/files/neg/t8534b.check
new file mode 100644
index 0000000000..39ffa41194
--- /dev/null
+++ b/test/files/neg/t8534b.check
@@ -0,0 +1,4 @@
+t8534b.scala:3: error: stable identifier required, but foo.type found.
+ type T = foo.type#Foo
+ ^
+one error found
diff --git a/test/files/neg/t8534b.scala b/test/files/neg/t8534b.scala
new file mode 100644
index 0000000000..73b6703a9c
--- /dev/null
+++ b/test/files/neg/t8534b.scala
@@ -0,0 +1,4 @@
+object Test {
+ def foo = ""
+ type T = foo.type#Foo
+}
diff --git a/test/files/neg/t8597.check b/test/files/neg/t8597.check
new file mode 100644
index 0000000000..bc945f9191
--- /dev/null
+++ b/test/files/neg/t8597.check
@@ -0,0 +1,21 @@
+t8597.scala:2: warning: abstract type T in type pattern Some[T] is unchecked since it is eliminated by erasure
+ def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to SI-8597)
+ ^
+t8597.scala:5: warning: abstract type pattern T is unchecked since it is eliminated by erasure
+ def warn1[T] = (null: Any) match { case _: T => } // warn
+ ^
+t8597.scala:6: warning: non-variable type argument String in type pattern Some[String] is unchecked since it is eliminated by erasure
+ def warn2 = (null: Any) match { case _: Some[String] => } // warn
+ ^
+t8597.scala:7: warning: non-variable type argument Unchecked.this.C in type pattern Some[Unchecked.this.C] is unchecked since it is eliminated by erasure
+ (null: Any) match { case _: Some[C] => } // warn
+ ^
+t8597.scala:18: warning: abstract type T in type pattern Array[T] is unchecked since it is eliminated by erasure
+ def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to SI-8597)
+ ^
+t8597.scala:26: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+ def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/pos/switch-small.flags b/test/files/neg/t8597.flags
index 85d8eb2ba2..85d8eb2ba2 100644
--- a/test/files/pos/switch-small.flags
+++ b/test/files/neg/t8597.flags
diff --git a/test/files/neg/t8597.scala b/test/files/neg/t8597.scala
new file mode 100644
index 0000000000..068e87d91a
--- /dev/null
+++ b/test/files/neg/t8597.scala
@@ -0,0 +1,27 @@
+class Unchecked[C] {
+ def nowarn[T] = (null: Any) match { case _: Some[T] => } // warn (did not warn due to SI-8597)
+
+ // These warned before.
+ def warn1[T] = (null: Any) match { case _: T => } // warn
+ def warn2 = (null: Any) match { case _: Some[String] => } // warn
+ (null: Any) match { case _: Some[C] => } // warn
+
+ // These must remain without warnings. These are excerpts from
+ // related tests that are more exhaustive.
+ class C; class D extends C
+ def okay = (List(new D) : Seq[D]) match { case _: List[C] => case _ => } // nowarn
+ class B2[A, B]
+ class A2[X] extends B2[X, String]
+ def okay2(x: A2[Int]) = x match { case _: B2[Int, _] => true } // nowarn
+ def okay3(x: A2[Int]) = x match { case _: B2[Int, typeVar] => true } // nowarn
+
+ def warnArray[T] = (null: Any) match { case _: Array[T] => } // warn (did not warn due to SI-8597)
+ def nowarnArrayC = (null: Any) match { case _: Array[C] => } // nowarn
+
+ def nowarnArrayTypeVar[T] = (null: Any) match { case _: Array[t] => } // nowarn
+
+ def noWarnArrayErasure1 = (null: Any) match {case Some(_: Array[String]) => } // nowarn
+ def noWarnArrayErasure2 = (null: Any) match {case Some(_: Array[List[_]]) => } // nowarn
+ def noWarnArrayErasure3 = (null: Any) match {case Some(_: Array[Array[List[_]]]) => } // nowarn
+ def warnArrayErasure2 = (null: Any) match {case Some(_: Array[Array[List[String]]]) => } // warn
+}
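
Minimal sketch (illustrative, not part of the patch): an abstract type pattern stops being unchecked when a ClassTag is available, because the pattern matcher then has a runtime class to test against instead of relying on the erased type.

    import scala.reflect.ClassTag
    object ClassTagSketch {
      def checked[T: ClassTag](x: Any): Boolean = x match {
        case _: T => true    // no unchecked warning: the ClassTag supplies the runtime check
        case _    => false
      }
    }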
diff --git a/test/files/neg/t8597b.check b/test/files/neg/t8597b.check
new file mode 100644
index 0000000000..3c45a31337
--- /dev/null
+++ b/test/files/neg/t8597b.check
@@ -0,0 +1,6 @@
+t8597b.scala:18: warning: non-variable type argument T in type pattern Some[T] is unchecked since it is eliminated by erasure
+ case _: Some[T] => // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8597b.flags b/test/files/neg/t8597b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t8597b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t8597b.scala b/test/files/neg/t8597b.scala
new file mode 100644
index 0000000000..b29d591cb1
--- /dev/null
+++ b/test/files/neg/t8597b.scala
@@ -0,0 +1,21 @@
+object Unchecked {
+ (null: Any) match {
+ case _: Some[t] =>
+
+ // t is a fresh pattern type variable, despite our attempts to
+ // backtick our way to the enclosing `t`. Under this interpretation,
+ // the absence of an unchecked warning is expected.
+ (null: Any) match {
+ case _: Some[t] => // no warn
+ }
+ (null: Any) match {
+ case _: Some[`t`] => // no warn
+ }
+
+ // here we correctly issue an unchecked warning
+ type T = t
+ (null: Any) match {
+ case _: Some[T] => // warn
+ }
+ }
+}
diff --git a/test/files/neg/t8610-arg.check b/test/files/neg/t8610-arg.check
new file mode 100644
index 0000000000..d6fe207119
--- /dev/null
+++ b/test/files/neg/t8610-arg.check
@@ -0,0 +1,6 @@
+t8610-arg.scala:8: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead
+ def u: Unit = () // unitarian universalist
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8610-arg.flags b/test/files/neg/t8610-arg.flags
new file mode 100644
index 0000000000..f331ba9383
--- /dev/null
+++ b/test/files/neg/t8610-arg.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint nullary-unit
diff --git a/test/files/neg/t8610-arg.scala b/test/files/neg/t8610-arg.scala
new file mode 100644
index 0000000000..7bed04904f
--- /dev/null
+++ b/test/files/neg/t8610-arg.scala
@@ -0,0 +1,10 @@
+
+class Named(var name: String)
+
+class X(name: String) extends Named(name) {
+ def x = "Hi, $name" // missing interp
+ def f(p: (Int, Int)): Int = p._1 * p._2
+ def g = f(3, 4) // adapted
+ def u: Unit = () // unitarian universalist
+ override def toString = name // shadowing mutable var name
+}
diff --git a/test/files/neg/t8610.check b/test/files/neg/t8610.check
new file mode 100644
index 0000000000..334a947549
--- /dev/null
+++ b/test/files/neg/t8610.check
@@ -0,0 +1,18 @@
+t8610.scala:5: warning: possible missing interpolator: detected interpolated identifier `$name`
+ def x = "Hi, $name" // missing interp
+ ^
+t8610.scala:7: warning: Adapting argument list by creating a 2-tuple: this may not be what you want.
+ signature: X.f(p: (Int, Int)): Int
+ given arguments: 3, 4
+ after adaptation: X.f((3, 4): (Int, Int))
+ def g = f(3, 4) // adapted
+ ^
+t8610.scala:9: warning: private[this] value name in class X shadows mutable name inherited from class Named. Changes to name will not be visible within class X - you may want to give them distinct names.
+ override def toString = name // shadowing mutable var name
+ ^
+t8610.scala:8: warning: side-effecting nullary methods are discouraged: suggest defining as `def u()` instead
+ def u: Unit = () // unitarian universalist
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t8610.flags b/test/files/neg/t8610.flags
new file mode 100644
index 0000000000..954eaba352
--- /dev/null
+++ b/test/files/neg/t8610.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
diff --git a/test/files/neg/t8610.scala b/test/files/neg/t8610.scala
new file mode 100644
index 0000000000..7bed04904f
--- /dev/null
+++ b/test/files/neg/t8610.scala
@@ -0,0 +1,10 @@
+
+class Named(var name: String)
+
+class X(name: String) extends Named(name) {
+ def x = "Hi, $name" // missing interp
+ def f(p: (Int, Int)): Int = p._1 * p._2
+ def g = f(3, 4) // adapted
+ def u: Unit = () // unitarian universalist
+ override def toString = name // shadowing mutable var name
+}
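
Minimal sketch (illustrative, not part of the patch): a variant of the t8525/t8610 test class that avoids each -Xlint warning exercised above — interpolator supplied, tuple passed explicitly, nullary Unit method given parentheses, and the constructor parameter renamed so it no longer shadows the inherited var.

    class NamedFixed(var name: String)
    class XFixed(initialName: String) extends NamedFixed(initialName) {
      def x = s"Hi, $name"
      def f(p: (Int, Int)): Int = p._1 * p._2
      def g = f((3, 4))
      def u(): Unit = ()
      override def toString = name
    }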
diff --git a/test/files/neg/t8630.check b/test/files/neg/t8630.check
new file mode 100644
index 0000000000..98b084b153
--- /dev/null
+++ b/test/files/neg/t8630.check
@@ -0,0 +1,7 @@
+t8630.scala:1: error: '{' expected but 'abstract' found.
+package bobsdelights abstract class Fruit( val name: String, val color: String ) object Fruits { object Apple extends Fruit("apple", "red") object Orange extends Fruit("orange", "orange") object Pear extends Fruit("pear", "yellowish") val menu = List(Apple, Orange, Pear) }
+ ^
+t8630.scala:1: error: '}' expected but eof found.
+package bobsdelights abstract class Fruit( val name: String, val color: String ) object Fruits { object Apple extends Fruit("apple", "red") object Orange extends Fruit("orange", "orange") object Pear extends Fruit("pear", "yellowish") val menu = List(Apple, Orange, Pear) }
+ ^
+two errors found
diff --git a/test/files/neg/t8630.scala b/test/files/neg/t8630.scala
new file mode 100644
index 0000000000..ea25227452
--- /dev/null
+++ b/test/files/neg/t8630.scala
@@ -0,0 +1 @@
+package bobsdelights abstract class Fruit( val name: String, val color: String ) object Fruits { object Apple extends Fruit("apple", "red") object Orange extends Fruit("orange", "orange") object Pear extends Fruit("pear", "yellowish") val menu = List(Apple, Orange, Pear) } \ No newline at end of file
diff --git a/test/files/neg/t8675.check b/test/files/neg/t8675.check
new file mode 100644
index 0000000000..4e44fba918
--- /dev/null
+++ b/test/files/neg/t8675.check
@@ -0,0 +1,11 @@
+t8675.scala:13: error: type mismatch;
+ found : Boolean(true)
+ required: String
+ a.update(0, x[A]({new isString(true)})) // !!! allowed
+ ^
+t8675.scala:22: error: type mismatch;
+ found : Boolean(true)
+ required: String
+ new X().m(x[A]({new isString(true)})) // !!! allowed
+ ^
+two errors found
diff --git a/test/files/neg/t8675.scala b/test/files/neg/t8675.scala
new file mode 100644
index 0000000000..ca9bb57ffa
--- /dev/null
+++ b/test/files/neg/t8675.scala
@@ -0,0 +1,24 @@
+class A(s: String) {
+ def foo(x: A) = x
+}
+
+class isString(s: String)
+
+class Test {
+
+ def x[A](a: Any): A = ???
+
+ def test {
+ val a = Array[A]()
+ a.update(0, x[A]({new isString(true)})) // !!! allowed
+
+ // boils down to
+ class X {
+ def m(p: Any) {}
+ }
+ implicit class XOps(x: X) {
+ def m(p: Any) {}
+ }
+ new X().m(x[A]({new isString(true)})) // !!! allowed
+ }
+}
diff --git a/test/files/neg/t8675b.check b/test/files/neg/t8675b.check
new file mode 100644
index 0000000000..cb7ac8af59
--- /dev/null
+++ b/test/files/neg/t8675b.check
@@ -0,0 +1,6 @@
+t8675b.scala:19: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: List[Test.Reportable1[?,?]] => Boolean
+ for (path: List[Any] <- (null : Engine1).asRequirement.pathsIncludingSelf.toList) {
+ ^
+one error found
diff --git a/test/files/neg/t8675b.scala b/test/files/neg/t8675b.scala
new file mode 100644
index 0000000000..2c5015b1d0
--- /dev/null
+++ b/test/files/neg/t8675b.scala
@@ -0,0 +1,22 @@
+object Test {
+ trait Engine1
+
+ implicit class EngineTools1[Params, R](e: Engine1) {
+ def asRequirement: Requirement1[Params, R] = ???
+ }
+ trait Requirement1[Params, R] {
+ def pathsIncludingSelf: Traversable[List[Reportable1[Params, R]]]
+ }
+ trait Reportable1[Params, R]
+
+ // "missing paramater type" error was swallowed in 2.11.0 leading to a crash
+ // in the backend.
+ //
+ // This error is itself a regression (or at least a change) in 2.11.0-M7,
+ // specifically in SI-7944. The type parameters to the implicit view
+ // `EngineTools1` are undetermined, and are now treated as type variables
+ // in the expected type of the closure argument to `withFilter`.
+ for (path: List[Any] <- (null : Engine1).asRequirement.pathsIncludingSelf.toList) {
+ ???
+ }
+}
diff --git a/test/files/neg/t8731.check b/test/files/neg/t8731.check
new file mode 100644
index 0000000000..d47bd55b45
--- /dev/null
+++ b/test/files/neg/t8731.check
@@ -0,0 +1,6 @@
+t8731.scala:10: warning: could not emit switch for @switch annotated match
+ def g(x: Int) = (x: @annotation.switch) match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8731.flags b/test/files/neg/t8731.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t8731.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t8731.scala b/test/files/neg/t8731.scala
new file mode 100644
index 0000000000..d93fe706ad
--- /dev/null
+++ b/test/files/neg/t8731.scala
@@ -0,0 +1,15 @@
+class C {
+ // not a compile-time constant due to return type
+ final val K: Int = 20
+
+ def f(x: Int) = (x: @annotation.switch) match {
+ case K => 0
+ case 2 => 1
+ }
+
+ def g(x: Int) = (x: @annotation.switch) match {
+ case K => 0
+ case 2 => 1
+ case 3 => 2
+ }
+}
diff --git a/test/files/neg/t8736-c.check b/test/files/neg/t8736-c.check
new file mode 100644
index 0000000000..06b2228543
--- /dev/null
+++ b/test/files/neg/t8736-c.check
@@ -0,0 +1,11 @@
+t8736-c.scala:4: warning: higher-kinded type should be enabled
+by making the implicit value scala.language.higherKinds visible.
+This can be achieved by adding the import clause 'import scala.language.higherKinds'
+or by setting the compiler option -language:higherKinds.
+See the Scala docs for value scala.language.higherKinds for a discussion
+why the feature should be explicitly enabled.
+ def hk[M[_]] = ???
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t8736-c.flags b/test/files/neg/t8736-c.flags
new file mode 100644
index 0000000000..fde5313c96
--- /dev/null
+++ b/test/files/neg/t8736-c.flags
@@ -0,0 +1 @@
+-feature -language:-higherKinds,_ -Xfatal-warnings
diff --git a/test/files/neg/t8736-c.scala b/test/files/neg/t8736-c.scala
new file mode 100644
index 0000000000..8432775ae1
--- /dev/null
+++ b/test/files/neg/t8736-c.scala
@@ -0,0 +1,7 @@
+// scalac: -feature -language:-higherKinds,_ -Xfatal-warnings
+// showing that wildcard doesn't supersede explicit disablement
+class X {
+ def hk[M[_]] = ???
+
+ implicit def imp(x: X): Int = x.hashCode
+}
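
Minimal sketch (illustrative, not part of the patch): in ordinary builds the higher-kinded feature warning is silenced by importing the feature (or compiling with -language:higherKinds); the test deliberately disables it to show that the wildcard does not re-enable it.

    object HigherKindsSketch {
      import scala.language.higherKinds
      def hk[M[_]] = ???   // no feature warning with the import in scope
    }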
diff --git a/test/files/neg/t8764.check b/test/files/neg/t8764.check
new file mode 100644
index 0000000000..6d89ebe106
--- /dev/null
+++ b/test/files/neg/t8764.check
@@ -0,0 +1,6 @@
+t8764.scala:8: error: type mismatch;
+ found : AnyVal
+ required: Double
+ val d: Double = a.productElement(0)
+ ^
+one error found
diff --git a/test/files/run/t5614.flags b/test/files/neg/t8764.flags
index 48fd867160..48fd867160 100644
--- a/test/files/run/t5614.flags
+++ b/test/files/neg/t8764.flags
diff --git a/test/files/neg/t8764.scala b/test/files/neg/t8764.scala
new file mode 100644
index 0000000000..dc5bfb0160
--- /dev/null
+++ b/test/files/neg/t8764.scala
@@ -0,0 +1,9 @@
+object Main {
+
+ case class IntAndDouble(i: Int, d: Double)
+
+ // a.productElement used to be Int => Double
+ // now: Int => AnyVal
+ val a = IntAndDouble(1, 5.0)
+ val d: Double = a.productElement(0)
+}
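
Minimal sketch (illustrative, not part of the patch): productElement is untyped by design, so the typed alternatives are the case-class field or a pattern match.

    object ProductElementSketch {
      case class IntAndDouble(i: Int, d: Double)
      val a = IntAndDouble(1, 5.0)
      val d1: Double = a.d                                        // use the field directly
      val d2: Double = a match { case IntAndDouble(_, d) => d }   // or destructure
    }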
diff --git a/test/files/neg/t8841.check b/test/files/neg/t8841.check
new file mode 100644
index 0000000000..ad525dc3f8
--- /dev/null
+++ b/test/files/neg/t8841.check
@@ -0,0 +1,9 @@
+t8841.scala:13: error: recursive value c needs type
+ val ambiguousName = c.ambiguousName
+ ^
+t8841.scala:12: warning: failed to determine if 'ambiguousName = ...' is a named argument or an assignment expression.
+an explicit type is required for the definition mentioned in the error message above.
+ val c = new Cell(ambiguousName = Some("bla"))
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/t8841.scala b/test/files/neg/t8841.scala
new file mode 100644
index 0000000000..80430d997e
--- /dev/null
+++ b/test/files/neg/t8841.scala
@@ -0,0 +1,15 @@
+class Cell(val ambiguousName: Option[String])
+
+class Test {
+ def wrap(f: Any): Nothing = ???
+
+ wrap {
+ // the namer for these two ValDefs is created when typing the argument expression
+ // of wrap. This happens to be in a silent context (tryTypedApply). Therefore, the
+ // cyclic reference will not be thrown, but transformed into a NormalTypeError by
+ // `silent`. This requires different handling in NamesDefaults.
+
+ val c = new Cell(ambiguousName = Some("bla"))
+ val ambiguousName = c.ambiguousName
+ }
+}
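
Minimal sketch (illustrative, not part of the patch), following the advice in the new warning: giving the definition an explicit type breaks the cycle between classifying the named argument and typing the later val.

    class CellSketch(val ambiguousName: Option[String])
    class TestSketch {
      def wrap(f: Any): Nothing = ???
      wrap {
        val c: CellSketch = new CellSketch(ambiguousName = Some("bla"))   // explicit type on c
        val ambiguousName = c.ambiguousName
      }
    }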
diff --git a/test/files/neg/t8869.check b/test/files/neg/t8869.check
new file mode 100644
index 0000000000..40b8570f9f
--- /dev/null
+++ b/test/files/neg/t8869.check
@@ -0,0 +1,7 @@
+t8869.scala:5: error: class Option takes type parameters
+ def value: TC[({type l1[x] = Option})#l1] = ??? // error not reported!
+ ^
+t8869.scala:7: error: class Option takes type parameters
+ type l2[x] = Option // error correctly reported
+ ^
+two errors found
diff --git a/test/files/neg/t8869.scala b/test/files/neg/t8869.scala
new file mode 100644
index 0000000000..0c7f0c9451
--- /dev/null
+++ b/test/files/neg/t8869.scala
@@ -0,0 +1,10 @@
+class TC[T[_]] {
+ def identity[A](a: T[A]): T[A] = a
+}
+object Test {
+ def value: TC[({type l1[x] = Option})#l1] = ??? // error not reported!
+
+ type l2[x] = Option // error correctly reported
+ def value1: TC[l2] = ???
+}
+
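
Minimal sketch (illustrative, not part of the patch): the type lambda presumably intended in the test applies Option to its parameter, which is what the newly reported error points at.

    import scala.language.higherKinds
    class TCSketch[T[_]]
    object TypeLambdaSketch {
      def value: TCSketch[({ type l1[x] = Option[x] })#l1] = ???
    }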
diff --git a/test/files/neg/t8890.check b/test/files/neg/t8890.check
new file mode 100644
index 0000000000..1b69d6cf30
--- /dev/null
+++ b/test/files/neg/t8890.check
@@ -0,0 +1,4 @@
+t8890.scala:6: error: not found: type Str
+ def bar(x: Str): Unit = ???
+ ^
+one error found
diff --git a/test/files/neg/t8890.scala b/test/files/neg/t8890.scala
new file mode 100644
index 0000000000..cbdeb11d43
--- /dev/null
+++ b/test/files/neg/t8890.scala
@@ -0,0 +1,11 @@
+package foo
+
+class A {
+ /** The other */
+ def bar(x: Int): Unit = ???
+ def bar(x: Str): Unit = ???
+}
+
+class B {
+ (new A).bar(0)
+} \ No newline at end of file
diff --git a/test/files/neg/t9008.check b/test/files/neg/t9008.check
new file mode 100644
index 0000000000..c32bc41baf
--- /dev/null
+++ b/test/files/neg/t9008.check
@@ -0,0 +1,4 @@
+t9008.scala:2: error: type M takes type parameters
+ def x: List[M forSome { type M[_] }] = ???
+ ^
+one error found
diff --git a/test/files/neg/t9008.scala b/test/files/neg/t9008.scala
new file mode 100644
index 0000000000..c6a5389e42
--- /dev/null
+++ b/test/files/neg/t9008.scala
@@ -0,0 +1,3 @@
+object Test {
+ def x: List[M forSome { type M[_] }] = ???
+}
diff --git a/test/files/neg/t9008b.check b/test/files/neg/t9008b.check
new file mode 100644
index 0000000000..5e911fc138
--- /dev/null
+++ b/test/files/neg/t9008b.check
@@ -0,0 +1,4 @@
+t9008b.scala:2: error: type M takes type parameters
+ type T = M forSome { type M[_] }
+ ^
+one error found
diff --git a/test/files/neg/t9008b.scala b/test/files/neg/t9008b.scala
new file mode 100644
index 0000000000..58f9d0e8de
--- /dev/null
+++ b/test/files/neg/t9008b.scala
@@ -0,0 +1,3 @@
+object Test {
+ type T = M forSome { type M[_] }
+}
diff --git a/test/files/neg/t9041.check b/test/files/neg/t9041.check
new file mode 100644
index 0000000000..669e9434e0
--- /dev/null
+++ b/test/files/neg/t9041.check
@@ -0,0 +1,4 @@
+t9041.scala:11: error: could not find implicit value for parameter cellSetter: CellSetter[scala.math.BigDecimal]
+ def setCell(cell: Cell, data: math.BigDecimal) { cell.setCellValue(data) }
+ ^
+one error found
diff --git a/test/files/neg/t9041.scala b/test/files/neg/t9041.scala
new file mode 100644
index 0000000000..2bdef0d3ae
--- /dev/null
+++ b/test/files/neg/t9041.scala
@@ -0,0 +1,17 @@
+// False negative test, requires overloading in Cell.
+
+trait Cell { def setCellValue(i: Int) = () ; def setCellValue(d: Double) = () }
+
+trait Nope {
+ def f = {
+ trait CellSetter[A] {
+ def setCell(cell: Cell, data: A): Unit
+ }
+ implicit val bigDecimalCellSetter = new CellSetter[math.BigDecimal]() {
+ def setCell(cell: Cell, data: math.BigDecimal) { cell.setCellValue(data) }
+ }
+ implicit class RichCell(cell: Cell) {
+ def setCellValue[A](data: A)(implicit cellSetter: CellSetter[A]) = cellSetter.setCell(cell, data)
+ }
+ }
+}
diff --git a/test/files/neg/t9093.check b/test/files/neg/t9093.check
new file mode 100644
index 0000000000..085a433f0b
--- /dev/null
+++ b/test/files/neg/t9093.check
@@ -0,0 +1,6 @@
+t9093.scala:3: error: polymorphic expression cannot be instantiated to expected type;
+ found : [C](f: C)Null
+ required: Unit
+ val x: Unit = apply2(0)/*(0)*/
+ ^
+one error found
diff --git a/test/files/neg/t9093.scala b/test/files/neg/t9093.scala
new file mode 100644
index 0000000000..d9922ad70e
--- /dev/null
+++ b/test/files/neg/t9093.scala
@@ -0,0 +1,5 @@
+object Main {
+ def apply2[C](fa: Any)(f: C) = null
+ val x: Unit = apply2(0)/*(0)*/
+}
+
diff --git a/test/files/neg/t9127.check b/test/files/neg/t9127.check
new file mode 100644
index 0000000000..2ecf8af464
--- /dev/null
+++ b/test/files/neg/t9127.check
@@ -0,0 +1,12 @@
+t9127.scala:4: warning: possible missing interpolator: detected interpolated identifier `$s`
+ val t = "$s"
+ ^
+t9127.scala:5: warning: possible missing interpolator: detected an interpolated expression
+ val u = "a${s}b"
+ ^
+t9127.scala:6: warning: possible missing interpolator: detected interpolated identifier `$s`
+ val v = "a$s b"
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t9127.flags b/test/files/neg/t9127.flags
new file mode 100644
index 0000000000..b0d7bc25cb
--- /dev/null
+++ b/test/files/neg/t9127.flags
@@ -0,0 +1 @@
+-Xlint:missing-interpolator -Xfatal-warnings
diff --git a/test/files/neg/t9127.scala b/test/files/neg/t9127.scala
new file mode 100644
index 0000000000..c0746144eb
--- /dev/null
+++ b/test/files/neg/t9127.scala
@@ -0,0 +1,7 @@
+
+trait X {
+ val s = "hello"
+ val t = "$s"
+ val u = "a${s}b"
+ val v = "a$s b"
+}
diff --git a/test/files/neg/t9231.check b/test/files/neg/t9231.check
new file mode 100644
index 0000000000..43c14f53ca
--- /dev/null
+++ b/test/files/neg/t9231.check
@@ -0,0 +1,4 @@
+t9231.scala:8: error: not found: type DoesNotExist
+ foo[DoesNotExist]
+ ^
+one error found
diff --git a/test/files/neg/t9231.scala b/test/files/neg/t9231.scala
new file mode 100644
index 0000000000..05b1d24e9a
--- /dev/null
+++ b/test/files/neg/t9231.scala
@@ -0,0 +1,9 @@
+class M[A]
+class C {
+ implicit def M1: M[Int] = null
+ implicit def M2: M[String] = null
+
+ def foo[A](implicit M: M[A]) = null
+
+ foo[DoesNotExist]
+}
diff --git a/test/files/neg/t9273.check b/test/files/neg/t9273.check
new file mode 100644
index 0000000000..1dca63a736
--- /dev/null
+++ b/test/files/neg/t9273.check
@@ -0,0 +1,10 @@
+t9273.scala:2: error: class type required but ? found
+ val foo: Class[_] = classOf // error without position, line or file
+ ^
+t9273.scala:3: error: not found: type X
+ val foo1: Class[_] = classOf[X] // good error, all info contained
+ ^
+t9273.scala:7: error: not found: type X
+ val foo4: Class[_] = Predef.classOf[X] // good error, all info contained
+ ^
+three errors found
diff --git a/test/files/neg/t9273.scala b/test/files/neg/t9273.scala
new file mode 100644
index 0000000000..3f99dff17f
--- /dev/null
+++ b/test/files/neg/t9273.scala
@@ -0,0 +1,9 @@
+class MissingLineNumbers {
+ val foo: Class[_] = classOf // error without position, line or file
+ val foo1: Class[_] = classOf[X] // good error, all info contained
+ val foo2 = classOf // Infers T=Nothing
+
+ val foo3: Class[_] = Predef.classOf // Infers T=Nothing. Irregular wrt typedIdent.
+ val foo4: Class[_] = Predef.classOf[X] // good error, all info contained
+ val foo5 = Predef.classOf // Infers T=Nothing
+}
diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check
index 4dc202c7bd..483e53c77d 100644
--- a/test/files/neg/t963.check
+++ b/test/files/neg/t963.check
@@ -1,9 +1,9 @@
-t963.scala:14: error: stable identifier required, but Test.this.y3.x found.
+t963.scala:14: error: stable identifier required, but y3.x.type found.
val w3 : y3.x.type = y3.x
- ^
-t963.scala:17: error: stable identifier required, but Test.this.y4.x found.
+ ^
+t963.scala:17: error: stable identifier required, but y4.x.type found.
val w4 : y4.x.type = y4.x
- ^
+ ^
t963.scala:10: error: type mismatch;
found : AnyRef{def x: Integer}
required: AnyRef{val x: Integer}
diff --git a/test/files/neg/tailrec-4.check b/test/files/neg/tailrec-4.check
new file mode 100644
index 0000000000..3ec3274478
--- /dev/null
+++ b/test/files/neg/tailrec-4.check
@@ -0,0 +1,16 @@
+tailrec-4.scala:6: error: could not optimize @tailrec annotated method foo: it contains a recursive call not in tail position
+ @tailrec def foo: Int = foo + 1
+ ^
+tailrec-4.scala:11: error: could not optimize @tailrec annotated method foo: it contains a recursive call not in tail position
+ @tailrec def foo: Int = foo + 1
+ ^
+tailrec-4.scala:17: error: could not optimize @tailrec annotated method foo: it contains a recursive call not in tail position
+ @tailrec def foo: Int = foo + 1
+ ^
+tailrec-4.scala:23: error: could not optimize @tailrec annotated method foo: it contains a recursive call not in tail position
+ @tailrec def foo: Int = foo + 1
+ ^
+tailrec-4.scala:31: error: could not optimize @tailrec annotated method foo: it contains a recursive call not in tail position
+ @tailrec def foo: Int = foo + 1
+ ^
+5 errors found
diff --git a/test/files/neg/tailrec-4.scala b/test/files/neg/tailrec-4.scala
new file mode 100644
index 0000000000..4822799dfa
--- /dev/null
+++ b/test/files/neg/tailrec-4.scala
@@ -0,0 +1,35 @@
+import annotation._
+
+object Tail {
+ def tcInFunc: Unit = {
+ () => {
+ @tailrec def foo: Int = foo + 1
+ }
+ }
+ def tcInBooleanExprFirstOp(x: Int, v: Int): Boolean = {
+ {
+ @tailrec def foo: Int = foo + 1
+ foo
+ } == v && true
+ }
+ def tcInBooleanExprSecondOp(x: Int, v: Int): Boolean = {
+ true && {
+ @tailrec def foo: Int = foo + 1
+ foo
+ } == v
+ }
+ def tcInIfCond(x: Int, v: Int): Boolean = {
+ if ({
+ @tailrec def foo: Int = foo + 1
+ foo
+ } == v) true else false
+ }
+ def tcInPatternGuard(x: Int, v: Int): Boolean =
+ v match {
+ case _ if
+ {
+ @tailrec def foo: Int = foo + 1
+ foo == 42
+ } => true
+ }
+}
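
Minimal sketch (illustrative, not part of the patch): a call that really is in tail position, which @tailrec accepts and the compiler rewrites into a loop.

    import scala.annotation.tailrec
    object TailrecSketch {
      @tailrec def countDown(n: Int): Int =
        if (n <= 0) n
        else countDown(n - 1)   // the recursive call is the method's last action
    }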
diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check
index 72019082ac..703929dca8 100644
--- a/test/files/neg/unchecked-abstract.check
+++ b/test/files/neg/unchecked-abstract.check
@@ -4,6 +4,9 @@ unchecked-abstract.scala:16: warning: abstract type H in type Contravariant[M.th
unchecked-abstract.scala:21: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Contravariant[H]])
^
+unchecked-abstract.scala:22: warning: abstract type T in type Contravariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Contravariant[T]])
+ ^
unchecked-abstract.scala:27: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
@@ -22,6 +25,15 @@ unchecked-abstract.scala:36: warning: abstract type H in type Invariant[M.this.H
unchecked-abstract.scala:37: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
+unchecked-abstract.scala:42: warning: abstract type T in type Covariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[T]])
+ ^
+unchecked-abstract.scala:43: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ ^
+unchecked-abstract.scala:48: warning: abstract type L in type Covariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-8 warnings found
+12 warnings found
one error found
diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check
index e85a51f44d..0bb944621b 100644
--- a/test/files/neg/unchecked-refinement.check
+++ b/test/files/neg/unchecked-refinement.check
@@ -10,6 +10,7 @@ unchecked-refinement.scala:23: warning: a pattern match on a refinement type is
unchecked-refinement.scala:24: warning: a pattern match on a refinement type is unchecked
/* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn
^
+warning: there was one feature warning; re-run with -feature for details
error: No warnings can be incurred under -Xfatal-warnings.
-four warnings found
+5 warnings found
one error found
diff --git a/test/files/neg/virtpatmat_exhaust_big.check b/test/files/neg/virtpatmat_exhaust_big.check
new file mode 100644
index 0000000000..fddc85a362
--- /dev/null
+++ b/test/files/neg/virtpatmat_exhaust_big.check
@@ -0,0 +1,7 @@
+virtpatmat_exhaust_big.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: Z11()
+ def foo(z: Z) = z match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/virtpatmat_exhaust_big.flags b/test/files/neg/virtpatmat_exhaust_big.flags
new file mode 100644
index 0000000000..b5a8748652
--- /dev/null
+++ b/test/files/neg/virtpatmat_exhaust_big.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
diff --git a/test/files/neg/virtpatmat_exhaust_big.scala b/test/files/neg/virtpatmat_exhaust_big.scala
new file mode 100644
index 0000000000..dd639eb56e
--- /dev/null
+++ b/test/files/neg/virtpatmat_exhaust_big.scala
@@ -0,0 +1,32 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+}
diff --git a/test/files/neg/virtpatmat_exhaust_compound.check b/test/files/neg/virtpatmat_exhaust_compound.check
new file mode 100644
index 0000000000..72e0340682
--- /dev/null
+++ b/test/files/neg/virtpatmat_exhaust_compound.check
@@ -0,0 +1,15 @@
+virtpatmat_exhaust_compound.scala:14: warning: match may not be exhaustive.
+It would fail on the following inputs: O1, O2, O4
+ a match {
+ ^
+virtpatmat_exhaust_compound.scala:18: warning: match may not be exhaustive.
+It would fail on the following input: O4
+ def t1(a: Product with Base with Base2) = a match {
+ ^
+virtpatmat_exhaust_compound.scala:22: warning: match may not be exhaustive.
+It would fail on the following input: O2
+ def t2(a: Product with Base { def foo: Int }) = a match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/virtpatmat_exhaust_compound.flags b/test/files/neg/virtpatmat_exhaust_compound.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/virtpatmat_exhaust_compound.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/virtpatmat_exhaust_compound.scala b/test/files/neg/virtpatmat_exhaust_compound.scala
new file mode 100644
index 0000000000..386c7af98d
--- /dev/null
+++ b/test/files/neg/virtpatmat_exhaust_compound.scala
@@ -0,0 +1,29 @@
+sealed trait Base
+case object O1 extends Base
+case object O2 extends Base {
+ def foo: Int = 0
+}
+
+sealed trait Base2
+case object O3 extends Base2
+
+case object O4 extends Base with Base2
+
+object Test {
+ val a /*: Product with Serializable with Base */ = if (true) O1 else O2
+ a match {
+ case null =>
+ }
+
+ def t1(a: Product with Base with Base2) = a match {
+ case null => // O1..O3 should *not* be possible here
+ }
+
+ def t2(a: Product with Base { def foo: Int }) = a match {
+ case null => // O2 in the domain
+ }
+
+ def t3(a: Product with Base { def bar: Int }) = a match {
+ case null => // nothing in the domain
+ }
+}
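
Minimal sketch (illustrative, not part of the patch): once the checker enumerates counter-examples for compound scrutinee types, covering them silences the warning; here O4 is the only inhabitant of Product with Base with Base2.

    object CompoundSketch {
      sealed trait Base
      case object O1 extends Base
      sealed trait Base2
      case object O4 extends Base with Base2
      def t1(a: Product with Base with Base2) = a match {
        case O4 => ()
      }
    }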
diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check
index 4628033e55..8ad81d1529 100644
--- a/test/files/neg/warn-inferred-any.check
+++ b/test/files/neg/warn-inferred-any.check
@@ -7,6 +7,9 @@ warn-inferred-any.scala:16: warning: a type was inferred to be `AnyVal`; this ma
warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this may indicate a programming error.
{ 1l to 5l contains 5d }
^
+warn-inferred-any.scala:25: warning: a type was inferred to be `Any`; this may indicate a programming error.
+ def za = f(1, "one")
+ ^
error: No warnings can be incurred under -Xfatal-warnings.
-three warnings found
+four warnings found
one error found
diff --git a/test/files/neg/warn-inferred-any.flags b/test/files/neg/warn-inferred-any.flags
index a3127d392a..b580dfbbe3 100644
--- a/test/files/neg/warn-inferred-any.flags
+++ b/test/files/neg/warn-inferred-any.flags
@@ -1 +1 @@
--Xfatal-warnings -Ywarn-infer-any
+-Xfatal-warnings -Xlint:infer-any
diff --git a/test/files/neg/warn-inferred-any.scala b/test/files/neg/warn-inferred-any.scala
index b853e6e5a8..693c33e7be 100644
--- a/test/files/neg/warn-inferred-any.scala
+++ b/test/files/neg/warn-inferred-any.scala
@@ -17,3 +17,11 @@ trait Ys[+A] {
{ 1l to 5l contains 5d }
{ 1l to 5l contains 5l }
}
+
+trait Zs {
+ def f[A](a: A*) = 42
+ def g[A >: Any](a: A*) = 42 // don't warn
+
+ def za = f(1, "one")
+ def zu = g(1, "one")
+}
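
Minimal sketch (illustrative, not part of the patch): the lint fires only when Any is inferred; stating it explicitly, as the test's g does through its lower bound or as below via a type argument, is accepted silently.

    trait InferAnySketch {
      def f[A](a: A*) = 42
      def za = f[Any](1, "one")   // Any requested explicitly, not inferred
    }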
diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check
index d012869c93..4876ed8fc2 100644
--- a/test/files/neg/warn-unused-privates.check
+++ b/test/files/neg/warn-unused-privates.check
@@ -10,57 +10,60 @@ warn-unused-privates.scala:6: warning: private val in class Bippy is never used
warn-unused-privates.scala:13: warning: private val in object Bippy is never used
private val HEY_INSTANCE: Int = 1000 // warn
^
-warn-unused-privates.scala:35: warning: private val in class Boppy is never used
+warn-unused-privates.scala:14: warning: private val in object Bippy is never used
+ private lazy val BOOL: Boolean = true // warn
+ ^
+warn-unused-privates.scala:36: warning: private val in class Boppy is never used
private val hummer = "def" // warn
^
-warn-unused-privates.scala:42: warning: private var in trait Accessors is never used
+warn-unused-privates.scala:43: warning: private var in trait Accessors is never used
private var v1: Int = 0 // warn
^
-warn-unused-privates.scala:42: warning: private setter in trait Accessors is never used
+warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used
private var v1: Int = 0 // warn
^
-warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used
+warn-unused-privates.scala:44: warning: private setter in trait Accessors is never used
private var v2: Int = 0 // warn, never set
^
-warn-unused-privates.scala:44: warning: private var in trait Accessors is never used
+warn-unused-privates.scala:45: warning: private var in trait Accessors is never used
private var v3: Int = 0 // warn, never got
^
-warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used
+warn-unused-privates.scala:57: warning: private default argument in trait DefaultArgs is never used
private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
^
-warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used
+warn-unused-privates.scala:57: warning: private default argument in trait DefaultArgs is never used
private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
^
-warn-unused-privates.scala:67: warning: local var in method f0 is never used
+warn-unused-privates.scala:68: warning: local var in method f0 is never used
var x = 1 // warn
^
-warn-unused-privates.scala:74: warning: local val in method f1 is never used
+warn-unused-privates.scala:75: warning: local val in method f1 is never used
val b = new Outer // warn
^
-warn-unused-privates.scala:84: warning: private object in object Types is never used
+warn-unused-privates.scala:85: warning: private object in object Types is never used
private object Dongo { def f = this } // warn
^
-warn-unused-privates.scala:94: warning: local object in method l1 is never used
+warn-unused-privates.scala:95: warning: local object in method l1 is never used
object HiObject { def f = this } // warn
^
-warn-unused-privates.scala:78: warning: local var x in method f2 is never set - it could be a val
+warn-unused-privates.scala:79: warning: local var x in method f2 is never set - it could be a val
var x = 100 // warn about it being a var
^
-warn-unused-privates.scala:85: warning: private class Bar1 in object Types is never used
+warn-unused-privates.scala:86: warning: private class Bar1 in object Types is never used
private class Bar1 // warn
^
-warn-unused-privates.scala:87: warning: private type Alias1 in object Types is never used
+warn-unused-privates.scala:88: warning: private type Alias1 in object Types is never used
private type Alias1 = String // warn
^
-warn-unused-privates.scala:95: warning: local class Hi is never used
+warn-unused-privates.scala:96: warning: local class Hi is never used
class Hi { // warn
^
-warn-unused-privates.scala:99: warning: local class DingDongDoobie is never used
+warn-unused-privates.scala:100: warning: local class DingDongDoobie is never used
class DingDongDoobie // warn
^
-warn-unused-privates.scala:102: warning: local type OtherThing is never used
+warn-unused-privates.scala:103: warning: local type OtherThing is never used
type OtherThing = String // warn
^
error: No warnings can be incurred under -Xfatal-warnings.
-21 warnings found
+22 warnings found
one error found
diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala
index cb6e946a34..2faa07e759 100644
--- a/test/files/neg/warn-unused-privates.scala
+++ b/test/files/neg/warn-unused-privates.scala
@@ -11,6 +11,7 @@ object Bippy {
def hi(x: Bippy) = x.HI_COMPANION
private val HI_INSTANCE: Int = 500 // no warn, accessed from instance
private val HEY_INSTANCE: Int = 1000 // warn
+ private lazy val BOOL: Boolean = true // warn
}
class A(val msg: String)
diff --git a/test/files/pos/dotless-targs.flags b/test/files/pos/dotless-targs.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/pos/dotless-targs.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/pos/five-dot-f.flags b/test/files/pos/five-dot-f.flags
deleted file mode 100644
index 112fc720a0..0000000000
--- a/test/files/pos/five-dot-f.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfuture \ No newline at end of file
diff --git a/test/files/pos/jesper.scala b/test/files/pos/jesper.scala
new file mode 100644
index 0000000000..82623e4a24
--- /dev/null
+++ b/test/files/pos/jesper.scala
@@ -0,0 +1,30 @@
+object Pair {
+ sealed trait Pair {
+ type First
+ type Second <: Pair
+ }
+
+ case class End() extends Pair {
+ type First = Nothing
+ type Second = End
+
+ def ::[T](v : T) : Cons[T, End] = Cons(v, this)
+ }
+
+ object End extends End()
+
+ final case class Cons[T1, T2 <: Pair](_1 : T1, _2 : T2) extends Pair {
+ type First = T1
+ type Second = T2
+
+ def ::[T](v : T) : Cons[T, Cons[T1, T2]] = Cons(v, this)
+ def find[T](implicit finder : Cons[T1, T2] => T) = finder(this)
+ }
+
+ implicit def findFirst[T1, T2 <: Pair] : Cons[T1, T2] => T1 = (p : Cons[T1, T2]) => p._1
+ implicit def findSecond[T, T1, T2 <: Pair](implicit finder : T2 => T) : Cons[T1, T2] => T = (p : Cons[T1, T2]) => finder(p._2)
+
+ val p : Cons[Int, Cons[Boolean, End]] = 10 :: false :: End
+// val x : Boolean = p.find[Boolean](findSecond(findFirst))
+ val x2 : Boolean = p.find[Boolean] // Doesn't compile
+}
diff --git a/test/files/pos/macro-attachments/Macros_1.scala b/test/files/pos/macro-attachments/Macros_1.scala
new file mode 100644
index 0000000000..38d05d5b85
--- /dev/null
+++ b/test/files/pos/macro-attachments/Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.whitebox.Context
+
+trait Base
+class Att extends Base
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ import c.internal._
+ import decorators._
+ val dummy = q"x"
+ dummy.updateAttachment(new Att)
+ if (dummy.attachments.get[Base].isEmpty) c.abort(c.enclosingPosition, "that's not good")
+ q"()"
+ }
+
+ def foo: Any = macro impl
+} \ No newline at end of file
diff --git a/test/files/pos/macro-attachments/Test_2.scala b/test/files/pos/macro-attachments/Test_2.scala
new file mode 100644
index 0000000000..acfddae942
--- /dev/null
+++ b/test/files/pos/macro-attachments/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+} \ No newline at end of file
diff --git a/test/files/pos/patmat-suppress.flags b/test/files/pos/patmat-suppress.flags
new file mode 100644
index 0000000000..a988a5b807
--- /dev/null
+++ b/test/files/pos/patmat-suppress.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xno-patmat-analysis \ No newline at end of file
diff --git a/test/files/pos/patmat-suppress.scala b/test/files/pos/patmat-suppress.scala
new file mode 100644
index 0000000000..7c8aded690
--- /dev/null
+++ b/test/files/pos/patmat-suppress.scala
@@ -0,0 +1,159 @@
+// test that none of these warn due to -Xno-patmat-analysis
+// tests taken from test/files/neg/patmatexhaust.scala, test/files/neg/pat_unreachable.scala
+class TestSealedExhaustive { // compile only
+ sealed abstract class Foo
+
+ case class Bar(x:Int) extends Foo
+ case object Baz extends Foo
+
+ def ma1(x:Foo) = x match {
+ case Bar(_) => // not exhaustive
+ }
+
+ def ma2(x:Foo) = x match {
+ case Baz => // not exhaustive
+ }
+
+ sealed abstract class Mult
+ case class Kult(s:Mult) extends Mult
+ case class Qult() extends Mult
+
+ def ma33(x:Kult) = x match { // exhaustive
+ case Kult(_) => // exhaustive
+ }
+
+ def ma3(x:Mult) = (x,x) match { // not exhaustive
+ case (Kult(_), Qult()) => // Kult missing
+ //case (Kult(_), Kult(_)) =>
+ case (Qult(), Kult(_)) => // Qult missing
+ //case (Qult(), Qult()) =>
+ }
+
+ def ma3u(x:Mult) = ((x,x) : @unchecked) match { // not exhaustive, but not checked!
+ case (Kult(_), Qult()) =>
+ case (Qult(), Kult(_)) =>
+ }
+
+ sealed abstract class Deep
+
+ case object Ga extends Deep
+ sealed class Gp extends Deep
+ case object Gu extends Gp
+
+ def zma3(x:Deep) = x match { // exhaustive!
+ case _ =>
+ }
+ def zma4(x:Deep) = x match { // exhaustive!
+ case Ga =>
+ case _ =>
+ }
+
+ def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included
+ case Ga =>
+ }
+
+ def ma5(x:Deep) = x match {
+ case Gu =>
+ case _ if 1 == 0 =>
+ case Ga =>
+ }
+
+ def ma6() = List(1,2) match { // give up
+ case List(1,2) =>
+ case x :: xs =>
+ }
+
+ def ma7() = List(1,2) match { //exhaustive
+ case 1::2::Nil =>
+ case _ =>
+ }
+
+ sealed class B
+ case class B1() extends B
+ case object B2 extends B
+ def ma8(x: B) = x match {
+ case _: B => true
+ }
+ def ma9(x: B) = x match {
+ case B1() => true // missing B, which is not abstract so must be included
+ case B2 => true
+ }
+
+ object ob1 {
+ sealed abstract class C
+ sealed abstract class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // exhaustive: abstract sealed C1 is dead end.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+
+ object ob2 {
+ sealed abstract class C
+ abstract class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+ object ob3 {
+ sealed abstract class C
+ sealed abstract class C1 extends C
+ object D1 extends C1
+ case class D2() extends C1
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+ object ob4 {
+ sealed abstract class C
+ sealed class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+}
+
+object TestUnreachable extends App {
+ def unreachable1(xs:Seq[Char]) = xs match {
+ case Seq(x, y, _*) => x::y::Nil
+ case Seq(x, y, z, w) => List(z,w) // redundant!
+ }
+ def unreachable2(xs:Seq[Char]) = xs match {
+ case Seq(x, y, _*) => x::y::Nil
+ case Seq(x, y) => List(x, y)
+ }
+
+ def not_unreachable(xs:Seq[Char]) = xs match {
+ case Seq(x, y, _*) => x::y::Nil
+ case Seq(x) => List(x)
+ }
+ def not_unreachable2(xs:Seq[Char]) = xs match {
+ case Seq(x, y) => x::y::Nil
+ case Seq(x, y, z, _*) => List(x,y)
+ }
+
+ def contrivedExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case b => println("matched b")
+ case c => println("matched c")
+ case _ => println("matched neither")
+ }
+}
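
For contrast, a minimal sketch (not part of the patch) of what the pattern-matcher analysis normally reports, and of the per-expression `@unchecked` opt-out that `-Xno-patmat-analysis` effectively extends to the whole compilation unit:

sealed trait Color
case object Red extends Color
case object Blue extends Color

object ExhaustivenessDemo {
  // Without -Xno-patmat-analysis: "match may not be exhaustive.
  // It would fail on the following input: Blue"
  def name(c: Color) = c match {
    case Red => "red"
  }

  // The existing, local way to silence the analysis for a single expression.
  def nameUnchecked(c: Color) = (c: @unchecked) match {
    case Red => "red"
  }
}
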
diff --git a/test/files/pos/sammy_exist.flags b/test/files/pos/sammy_exist.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/pos/sammy_exist.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/pos/sammy_exist.scala b/test/files/pos/sammy_exist.scala
new file mode 100644
index 0000000000..f05ae20463
--- /dev/null
+++ b/test/files/pos/sammy_exist.scala
@@ -0,0 +1,17 @@
+// scala> typeOf[java.util.stream.Stream[_]].nonPrivateMember(TermName("map")).info
+// [R](x$1: java.util.function.Function[_ >: T, _ <: R])java.util.stream.Stream[R]
+
+// java.util.function.Function
+trait Fun[A, B] { def apply(x: A): B }
+
+// java.util.stream.Stream
+class S[T](x: T) { def map[R](f: Fun[_ >: T, _ <: R]): R = f(x) }
+
+class Bla { def foo: Bla = this }
+
+// NOTE: inferred types show unmoored skolems, should pack them to display properly as bounded wildcards
+object T {
+ val aBlaSAM = (new S(new Bla)).map(_.foo)
+ val fun: Fun[Bla, Bla] = (x: Bla) => x
+ val aBlaSAMX = (new S(new Bla)).map(fun)
+}
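
The test above depends on 2.11's `-Xexperimental` support for single-abstract-method types: a function literal may be adapted not just to `FunctionN` but to any compatible type with one abstract method. A minimal sketch of the simpler, non-existential case under the same flag (names are illustrative, not from the patch):

trait Callback[A] { def run(a: A): Unit }

object SamDemo {
  // With -Xexperimental the lambda is adapted to the user-defined SAM trait;
  // without it, this line needs an explicit `new Callback[String] { ... }`.
  val printer: Callback[String] = (s: String) => println(s)
}
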
diff --git a/test/files/pos/sammy_overload.flags b/test/files/pos/sammy_overload.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/pos/sammy_overload.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/pos/sammy_overload.scala b/test/files/pos/sammy_overload.scala
new file mode 100644
index 0000000000..5472248f4d
--- /dev/null
+++ b/test/files/pos/sammy_overload.scala
@@ -0,0 +1,9 @@
+trait Consumer[T] {
+ def consume(x: T): Unit
+}
+
+object Test {
+ def foo(x: String): Unit = ???
+ def foo(): Unit = ???
+ val f: Consumer[_ >: String] = foo
+} \ No newline at end of file
diff --git a/test/files/pos/sammy_override.flags b/test/files/pos/sammy_override.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/pos/sammy_override.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/pos/sammy_override.scala b/test/files/pos/sammy_override.scala
new file mode 100644
index 0000000000..a1d0651c39
--- /dev/null
+++ b/test/files/pos/sammy_override.scala
@@ -0,0 +1,8 @@
+trait IntConsumer {
+ def consume(x: Int): Unit
+}
+
+object Test {
+ def anyConsumer(x: Any): Unit = ???
+ val f: IntConsumer = anyConsumer
+} \ No newline at end of file
diff --git a/test/files/pos/switch-small.scala b/test/files/pos/switch-small.scala
deleted file mode 100644
index 9de9ca028e..0000000000
--- a/test/files/pos/switch-small.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import annotation._
-
-object Test {
- def f(x: Int) = (x: @switch) match {
- case 1 => 1
- case _ => 2
- }
-}
diff --git a/test/files/neg/t3240.scala b/test/files/pos/t3240.scala
index cf197a406d..cf197a406d 100644
--- a/test/files/neg/t3240.scala
+++ b/test/files/pos/t3240.scala
diff --git a/test/files/pos/t3368.flags b/test/files/pos/t3368.flags
new file mode 100644
index 0000000000..cb20509902
--- /dev/null
+++ b/test/files/pos/t3368.flags
@@ -0,0 +1 @@
+-Ystop-after:parser
diff --git a/test/files/pos/t3368.scala b/test/files/pos/t3368.scala
new file mode 100644
index 0000000000..c8e861a899
--- /dev/null
+++ b/test/files/pos/t3368.scala
@@ -0,0 +1,5 @@
+
+trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+}
diff --git a/test/files/pos/t3439.scala b/test/files/pos/t3439.scala
new file mode 100644
index 0000000000..ccc75cc4cf
--- /dev/null
+++ b/test/files/pos/t3439.scala
@@ -0,0 +1,26 @@
+class Base[M](i: Int)
+
+// was "implicit modifier not allowed on top level objects"
+class D1()(implicit i: Int) extends Base({println(i); 0})
+
+// was "no implicit value of type Int found"
+class D2()(implicit i: Int) extends Base(implicitly[Int])
+
+
+abstract class ParametricMessage[M: Manifest](msg: M) { def message = msg }
+case class ParametricMessage1[M: Manifest](msg: M, p1: Class[_]) extends ParametricMessage(msg)
+
+
+class Wrap {
+ class Base[M](i: Int)
+
+ // was "implicit modifier not allowed on top level objects"
+ class D1()(implicit i: Int) extends Base({println(i); 0})
+
+ // was "no implicit value of type Int found"
+ class D2()(implicit i: Int) extends Base(implicitly[Int])
+
+
+ abstract class ParametricMessage[M: Manifest](msg: M) { def message = msg }
+ case class ParametricMessage1[M: Manifest](msg: M, p1: Class[_]) extends ParametricMessage(msg)
+}
diff --git a/test/files/pos/t4070.scala b/test/files/pos/t4070.scala
index a9777f02ed..11af67a529 100644
--- a/test/files/pos/t4070.scala
+++ b/test/files/pos/t4070.scala
@@ -20,7 +20,7 @@ package b {
/*
-// With crash below the clasess:
+// With crash below the classes:
% scalac -Dscalac.debug.tvar ./a.scala
[ create] ?_$1 ( In Foo#crash )
[ setInst] tv[Int] ( In Foo#crash, _$1=tv[Int] )
diff --git a/test/files/pos/t5154.scala b/test/files/pos/t5154.scala
new file mode 100644
index 0000000000..2629308f00
--- /dev/null
+++ b/test/files/pos/t5154.scala
@@ -0,0 +1,9 @@
+
+trait Z {
+ // extra space made the pattern OK
+ def f = <z> {{3}}</z> match { case <z> {{3}}</z> => }
+
+ // lack of space: error: illegal start of simple pattern
+ def g = <z>{{3}}</z> match { case <z>{{3}}</z> => }
+}
+
diff --git a/test/files/pos/t5217.scala b/test/files/pos/t5217.scala
new file mode 100644
index 0000000000..1fe3f5696f
--- /dev/null
+++ b/test/files/pos/t5217.scala
@@ -0,0 +1,17 @@
+// private types and terms of companion module are
+// available in scope of ctor params.
+// before 2.10.1, class B in object A could not be accessed in object A
+object A {
+ private class B
+ private val b: B = new B
+ private type C = Int
+ def apply(): A = new A()
+}
+// if not private, then default arg results in:
+// private class B escapes its defining scope as part of type A.B
+class A private (b: A.B = A.b, c: A.C = 42)
+
+object C {
+ private class B
+}
+class C(b: C.B)
diff --git a/test/files/pos/t5413.scala b/test/files/pos/t5413.scala
new file mode 100644
index 0000000000..47af514a14
--- /dev/null
+++ b/test/files/pos/t5413.scala
@@ -0,0 +1,9 @@
+object Fail {
+ def nom (guard : => Boolean) (something : => Unit) { }
+ def main(args: Array[String]) {
+ nom {
+ val i = 0
+ (i != 3)
+ }()
+ }
+}
diff --git a/test/files/pos/t5454.scala b/test/files/pos/t5454.scala
new file mode 100644
index 0000000000..4045f3b57b
--- /dev/null
+++ b/test/files/pos/t5454.scala
@@ -0,0 +1,10 @@
+object IllegalInheritance {
+ trait A
+ implicit def a = new A {} // def => val
+ //val r = implicitly[A] // uncomment
+
+ class B[T](t : T)(implicit a : A) // remove implicit param block
+
+ class C extends B/*[Int]*/(23) // uncomment
+ val c = new C // comment
+}
diff --git a/test/files/pos/t5639.flags b/test/files/pos/t5639.flags
new file mode 100644
index 0000000000..0acce1e7ce
--- /dev/null
+++ b/test/files/pos/t5639.flags
@@ -0,0 +1 @@
+-Xsource:2.12
diff --git a/test/files/pos/t5639/A_1.scala b/test/files/pos/t5639/A_1.scala
new file mode 100644
index 0000000000..c5da10eae4
--- /dev/null
+++ b/test/files/pos/t5639/A_1.scala
@@ -0,0 +1,17 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+ // This implicit was being ignored by `isQualifyingImplicit`
+ // if the classpath contained a class file for `class Baz`.
+ // This is because the package scope contains a speculative
+ // symbol for `object Baz` which is entered by `SymbolLoaders`
+ // before looking inside the class file. (A Java originated
+ // classfile results in the class/module symbol pair.)
+}
diff --git a/test/files/pos/t5639/A_2.scala b/test/files/pos/t5639/A_2.scala
new file mode 100644
index 0000000000..2bb36273e0
--- /dev/null
+++ b/test/files/pos/t5639/A_2.scala
@@ -0,0 +1,11 @@
+import Implicits._
+
+class Baz
+
+object Test {
+ implicitly[Int]
+}
+
+object Implicits {
+ implicit val Baz: Int = 0
+}
diff --git a/test/files/pos/t5639/Bar.scala b/test/files/pos/t5639/Bar.scala
deleted file mode 100644
index f577500acd..0000000000
--- a/test/files/pos/t5639/Bar.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-import pack.age.Implicits._
-
-object Quux {
- def baz : Baz = 1
-}
diff --git a/test/files/pos/t5639/Foo.scala b/test/files/pos/t5639/Foo.scala
deleted file mode 100644
index 1a07734a8e..0000000000
--- a/test/files/pos/t5639/Foo.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package pack.age
-
-class Baz
-
-object Implicits {
- implicit def Baz(n: Int): Baz = new Baz
-}
diff --git a/test/files/pos/t6051.scala b/test/files/pos/t6051.scala
new file mode 100644
index 0000000000..854524feb9
--- /dev/null
+++ b/test/files/pos/t6051.scala
@@ -0,0 +1,19 @@
+object Foo1 {
+ def foo(x: Int, y: Int = 10) = x*y
+ lazy val y = foo(x = 20)
+}
+
+object Foo2 {
+ def foo(x: Int, y: Int = 10) = x*y
+ val y = foo(x = 20)
+}
+
+object Foo3 {
+ def foo(x: Int, y: Int = 10) = x*y
+ def y = foo(x = 20)
+}
+
+object Foo4 {
+ def foo(x: Int, y: Int = 10) = x*y
+ var y = foo(x = 20)
+}
diff --git a/test/files/pos/t6582_exhaust_big.scala b/test/files/pos/t6582_exhaust_big.scala
new file mode 100644
index 0000000000..7bb8879805
--- /dev/null
+++ b/test/files/pos/t6582_exhaust_big.scala
@@ -0,0 +1,33 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+// drop any case and it will report an error
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z11() | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+}
diff --git a/test/files/pos/t6942.flags b/test/files/pos/t6942.flags
index e8fb65d50c..0f96f1f872 100644
--- a/test/files/pos/t6942.flags
+++ b/test/files/pos/t6942.flags
@@ -1 +1 @@
--Xfatal-warnings \ No newline at end of file
+-nowarn \ No newline at end of file
diff --git a/test/files/pos/t7459a.scala b/test/files/pos/t7459a.scala
new file mode 100644
index 0000000000..5107715e06
--- /dev/null
+++ b/test/files/pos/t7459a.scala
@@ -0,0 +1,18 @@
+trait SpecialException extends Throwable
+
+object Test {
+ def run() {
+ try {
+ ???
+ } catch {
+ case e: SpecialException => e.isInstanceOf[SpecialException]
+ case e =>
+ }
+
+ // OKAY
+ // (null: Throwable) match {
+ // case e: SpecialException => e.isInstanceOf[SpecialException]
+ // case e =>
+ // }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7459b.scala b/test/files/pos/t7459b.scala
new file mode 100644
index 0000000000..a4b4fd07a9
--- /dev/null
+++ b/test/files/pos/t7459b.scala
@@ -0,0 +1,12 @@
+import scala.concurrent._
+import scala.util._
+
+
+class Test {
+ (null: Any) match {
+ case s @ Some(_) => ???
+ case f @ _ =>
+ () => f
+ ???
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7459c.scala b/test/files/pos/t7459c.scala
new file mode 100644
index 0000000000..dc2605abe6
--- /dev/null
+++ b/test/files/pos/t7459c.scala
@@ -0,0 +1,18 @@
+object Test {
+ trait Universe {
+ type Type
+ type TypeTag[A] >: Null <: TypeTagApi[A]
+ trait TypeTagApi[A] { def tpe: Type }
+ }
+ trait JavaUniverse extends Universe
+
+ trait Mirror[U <: Universe] {
+ def universe: U
+ }
+ (null: Mirror[_]).universe match {
+ case ju: JavaUniverse =>
+ val ju1 = ju
+ val f = {() => (null: ju.TypeTag[Nothing]).tpe }
+ }
+ trait M[A]
+}
diff --git a/test/files/pos/t7459d.scala b/test/files/pos/t7459d.scala
new file mode 100644
index 0000000000..7843156885
--- /dev/null
+++ b/test/files/pos/t7459d.scala
@@ -0,0 +1,8 @@
+class Test {
+ (null: Any) match {
+ case s @ Some(_) => ???
+ case f @ _ =>
+ () => f
+ ???
+ }
+}
diff --git a/test/files/pos/t7596/A_1.scala b/test/files/pos/t7596/A_1.scala
new file mode 100644
index 0000000000..6303c6d132
--- /dev/null
+++ b/test/files/pos/t7596/A_1.scala
@@ -0,0 +1,10 @@
+trait Driver {
+ abstract class Table
+}
+
+object Config {
+ val driver : Driver = ???
+ def driver(a: Any) = ???
+}
+
+object Sites extends Config.driver.Table
diff --git a/test/files/pos/t7596/B_2.scala b/test/files/pos/t7596/B_2.scala
new file mode 100644
index 0000000000..977e5c8bd1
--- /dev/null
+++ b/test/files/pos/t7596/B_2.scala
@@ -0,0 +1,19 @@
+object Test {
+ locally {
+ Sites: Config.driver.Table
+ }
+}
+
+// Under separate compilation, the pickler is foiled by the
+// overloaded term `Config.driver`, which results in:
+
+// qbin/scalac test/files/pos/t7596/A_1.scala && qbin/scalac -explaintypes test/files/pos/t7596/B_2.scala
+// test/files/pos/t7596/B_2.scala:3: error: type mismatch;
+// found : Sites.type
+// required: Config.driver.Table
+// Sites: Config.driver.Table
+// ^
+// Sites.type <: Config.driver.Table?
+// Driver.this.type = Config.driver.type?
+// false
+// false \ No newline at end of file
diff --git a/test/files/pos/t7596b/A.scala b/test/files/pos/t7596b/A.scala
new file mode 100644
index 0000000000..65c1bc56ef
--- /dev/null
+++ b/test/files/pos/t7596b/A.scala
@@ -0,0 +1,10 @@
+trait H2Driver{
+ abstract class Table[T]
+}
+
+object Config {
+ val driver : H2Driver = ???
+ def driver(app: Any): H2Driver = ???
+}
+
+class Sites extends Config.driver.Table[String]
diff --git a/test/files/pos/t7596b/B.scala b/test/files/pos/t7596b/B.scala
new file mode 100644
index 0000000000..cbcf149c23
--- /dev/null
+++ b/test/files/pos/t7596b/B.scala
@@ -0,0 +1,6 @@
+class DAOBase[E]{
+ type TableType <: Config.driver.Table[E]
+}
+class SitesDAO extends DAOBase[String]{
+ type TableType = Sites
+}
diff --git a/test/files/pos/t7596c/A_1.scala b/test/files/pos/t7596c/A_1.scala
new file mode 100644
index 0000000000..3e366df477
--- /dev/null
+++ b/test/files/pos/t7596c/A_1.scala
@@ -0,0 +1,11 @@
+trait Driver {
+ abstract class Table
+}
+
+object Config {
+ val driver : Driver = ???
+ val driverUniqueName: driver.type = driver
+ def driver(a: Any) = ???
+}
+
+object Sites extends Config.driver.Table
diff --git a/test/files/pos/t7596c/B_2.scala b/test/files/pos/t7596c/B_2.scala
new file mode 100644
index 0000000000..33da68c1ff
--- /dev/null
+++ b/test/files/pos/t7596c/B_2.scala
@@ -0,0 +1,9 @@
+object Test {
+ locally {
+ Sites: Config.driver.Table
+ }
+}
+
+// This variation worked by avoiding referring to the
+// overloaded term `Config.driver` in the parent type of
+// Sites \ No newline at end of file
diff --git a/test/files/pos/t7683-stop-after-parser/ThePlugin.scala b/test/files/pos/t7683-stop-after-parser/ThePlugin.scala
new file mode 100644
index 0000000000..cd800781dc
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/ThePlugin.scala
@@ -0,0 +1,31 @@
+package scala.test.plugins
+
+import scala.tools.nsc
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class ThePlugin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "timebomb"
+ val description = "Explodes if run. Maybe I haven't implemented it yet."
+ val components = List[PluginComponent](thePhase1)
+
+ private object thePhase1 extends PluginComponent {
+ val global = ThePlugin.this.global
+
+ val runsAfter = List[String]("parser")
+ override val runsBefore = List[String]("namer")
+ val phaseName = ThePlugin.this.name
+
+ def newPhase(prev: Phase) = new ThePhase(prev)
+ }
+
+ private class ThePhase(prev: Phase) extends Phase(prev) {
+ override def name = ThePlugin.this.name
+ override def run = ???
+ }
+}
+
diff --git a/test/files/pos/t7683-stop-after-parser/sample_2.flags b/test/files/pos/t7683-stop-after-parser/sample_2.flags
new file mode 100644
index 0000000000..99672cdfd3
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-require:timebomb -Ystop-after:parser
diff --git a/test/files/pos/t7683-stop-after-parser/sample_2.scala b/test/files/pos/t7683-stop-after-parser/sample_2.scala
new file mode 100644
index 0000000000..7eb11b8204
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the explosive plugin disabled
+object Sample extends App {
+}
diff --git a/test/files/pos/t7683-stop-after-parser/scalac-plugin.xml b/test/files/pos/t7683-stop-after-parser/scalac-plugin.xml
new file mode 100644
index 0000000000..2558d6fd03
--- /dev/null
+++ b/test/files/pos/t7683-stop-after-parser/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<plugin>
+ <name>ignored</name>
+ <classname>scala.test.plugins.ThePlugin</classname>
+</plugin>
+
diff --git a/test/files/pos/t7704.scala b/test/files/pos/t7704.scala
new file mode 100644
index 0000000000..cae88d3324
--- /dev/null
+++ b/test/files/pos/t7704.scala
@@ -0,0 +1,10 @@
+class Attr { type V ; class Val }
+class StrAttr extends Attr { type V = String }
+class BoolAttr extends Attr { type V = Boolean }
+
+object Main {
+ def f(x: Attr) = x match {
+ case v: StrAttr => new v.Val
+ case v: BoolAttr => new v.Val
+ }
+}
diff --git a/test/files/pos/t7750.flags b/test/files/pos/t7750.flags
new file mode 100644
index 0000000000..b216e74c97
--- /dev/null
+++ b/test/files/pos/t7750.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -feature
diff --git a/test/files/pos/t7750.scala b/test/files/pos/t7750.scala
new file mode 100644
index 0000000000..befec76949
--- /dev/null
+++ b/test/files/pos/t7750.scala
@@ -0,0 +1,8 @@
+trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing]
+trait Growable[T]
+trait Sizing
+
+
+object Test {
+ null.isInstanceOf[LazyCombiner[_, _, _]] // issued an existential feature warning
+}
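
The point of the test is that the wildcards in `LazyCombiner[_, _, _]` no longer trip the existential-types feature warning, so `-Xfatal-warnings -feature` compiles cleanly. For contrast, a sketch (not part of the patch) of a type that does require enabling the feature explicitly:

import scala.language.existentials

object ExistentialDemo {
  // An explicit forSome type still needs the import above (or -language:existentials)
  // to avoid a feature warning under -feature.
  type Boxed = List[T] forSome { type T }
}
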
diff --git a/test/files/pos/t7815.scala b/test/files/pos/t7815.scala
index 12a434c5b0..0a126f9faa 100644
--- a/test/files/pos/t7815.scala
+++ b/test/files/pos/t7815.scala
@@ -21,7 +21,7 @@ object Foo {
object Main extends App {
def mapWithFoo[A <: AnyRef, B](as: List[A])(implicit foo: Foo.Aux[A, B]) = {
// Should be Eta expandable because the result type of `f` is not
- // dependant on the value, it is just `B`.
+ // dependent on the value, it is just `B`.
as map foo.f
as map foo.g
as map foo.m
diff --git a/test/files/pos/t8013.flags b/test/files/pos/t8013.flags
index 954eaba352..3955bb6710 100644
--- a/test/files/pos/t8013.flags
+++ b/test/files/pos/t8013.flags
@@ -1 +1 @@
--Xfatal-warnings -Xlint
+-Xfatal-warnings -Xlint:-infer-any,_
diff --git a/test/files/pos/t8157-2.10.flags b/test/files/pos/t8157-2.10.flags
new file mode 100644
index 0000000000..94c8056747
--- /dev/null
+++ b/test/files/pos/t8157-2.10.flags
@@ -0,0 +1 @@
+-Xsource:2.10
diff --git a/test/files/pos/t8157-2.10.scala b/test/files/pos/t8157-2.10.scala
new file mode 100644
index 0000000000..597585a96d
--- /dev/null
+++ b/test/files/pos/t8157-2.10.scala
@@ -0,0 +1,5 @@
+object Test { // Poly-typed function default-arg uniqueness check,
+ // fails in 2.11, allowed under -Xsource:2.10
+ def foo(printer: Any, question: => String, show: Boolean = false)(op: => Any): Any = ???
+ def foo[T](question: => String, show: Boolean)(op: => Any = ()): Any = ???
+}
diff --git a/test/files/pos/t8267.scala b/test/files/pos/t8267.scala
new file mode 100644
index 0000000000..37b498fe3e
--- /dev/null
+++ b/test/files/pos/t8267.scala
@@ -0,0 +1,33 @@
+class Bippy { trait Foo[A] }
+
+final class RichBippy[C <: Bippy with Singleton](val c1: C) {
+ def f: Int = 1
+ def f[A](x: A)(ev: c1.Foo[A]): Int = 2
+
+ def g[A <: Nothing](x: A): Int = 1
+ def g[A](x: A)(ev: c1.Foo[A]): Int = 2
+
+ def h[A](x: A)(ev: c1.Foo[A]): Int = 1
+
+ def i(x: Nothing): Int = 1
+ def i(x: AnyRef)(ev: c1.Foo[x.type]): Int = 2
+}
+
+object p {
+
+ val c = new Bippy
+ val d0 = new RichBippy[c.type](c)
+ def d1 = new RichBippy[c.type](c)
+
+ d0.f[Int](5)(null: c.Foo[Int]) // ok
+ d1.f[Int](5)(null: c.Foo[Int]) // fails
+
+ d0.g[Int](5)(null: c.Foo[Int]) // ok
+ d1.g[Int](5)(null: c.Foo[Int]) // fails
+
+ d0.h[Int](5)(null: c.Foo[Int]) // ok
+ d1.h[Int](5)(null: c.Foo[Int]) // ok
+
+ d0.i("")(null) // ok
+ d1.i("")(null) // ok
+}
diff --git a/test/files/pos/t8310.flags b/test/files/pos/t8310.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/pos/t8310.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/pos/t8310.scala b/test/files/pos/t8310.scala
new file mode 100644
index 0000000000..874caf4d3b
--- /dev/null
+++ b/test/files/pos/t8310.scala
@@ -0,0 +1,22 @@
+trait Comparinator[T] { def compare(a: T, b: T): Int }
+
+object TestOkay {
+ def sort(x: Comparinator[_ >: String]) = ()
+ sort((a: String, b: String) => a.compareToIgnoreCase(b))
+}
+
+object TestOkay2 {
+ def sort[T](x: Comparinator[_ >: T]) = ()
+ sort((a: String, b: String) => a.compareToIgnoreCase(b))
+}
+
+object TestOkay3 {
+ def sort[T](xs: Option[T], x: Comparinator[_ >: T]) = ()
+ sort(Some(""), (a: String, b: String) => a.compareToIgnoreCase(b))
+}
+
+object TestKoOverloaded {
+ def sort[T](xs: Option[T]) = ()
+ def sort[T](xs: Option[T], x: Comparinator[_ >: T]) = ()
+ sort(Some(""), (a: String, b: String) => a.compareToIgnoreCase(b))
+}
diff --git a/test/files/pos/t8325.scala b/test/files/pos/t8325.scala
new file mode 100644
index 0000000000..af33ee7bb3
--- /dev/null
+++ b/test/files/pos/t8325.scala
@@ -0,0 +1,9 @@
+
+trait Test {
+ type +[A, B] = (A, B)
+ type *[A, B] = (A, B)
+
+ type X[A, B] = A + B
+ type Y[A, B] = A * B
+ type Z[A, B] = A `*` B
+}
diff --git a/test/files/pos/t8329.scala b/test/files/pos/t8329.scala
new file mode 100644
index 0000000000..fcd5e50b37
--- /dev/null
+++ b/test/files/pos/t8329.scala
@@ -0,0 +1,29 @@
+object Test {
+ def pf(pf: PartialFunction[Any, Unit]) = ()
+ def f1(pf: Function[Any, Unit]) = ()
+
+ class A1; class B1
+ def test1(x: String, x1: String, default: String) = pf {
+ case _ if (
+ x.isEmpty
+ && default.isEmpty // was binding to synthetic param
+ && x1.isEmpty // was binding to synthetic param
+ ) =>
+ x.isEmpty
+ default.isEmpty // was binding to synthetic param
+ x1.isEmpty // was binding to synthetic param
+ new A1; new B1
+ }
+
+ def test2(x: String, x1: String, default: String) = f1 {
+ case _ if (
+ x.isEmpty
+ && default.isEmpty
+ && x1.isEmpty
+ ) =>
+ x.isEmpty
+ default.isEmpty
+ x1.isEmpty
+ new A1; new B1
+ }
+}
diff --git a/test/files/pos/t8359-closelim-crash.flags b/test/files/pos/t8359-closelim-crash.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/pos/t8359-closelim-crash.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/pos/t8359-closelim-crash.scala b/test/files/pos/t8359-closelim-crash.scala
new file mode 100644
index 0000000000..1413694d10
--- /dev/null
+++ b/test/files/pos/t8359-closelim-crash.scala
@@ -0,0 +1,23 @@
+package test
+
+// This is a minimization of code that crashed the compiler during bootstrapping
+// in the first iteration of https://github.com/scala/scala/pull/4373, the PR
+// that adjusted the order of free and declared params in LambdaLift.
+
+// Was:
+// java.lang.AssertionError: assertion failed:
+// Record Record(<$anon: Function1>,Map(value a$1 -> Deref(LocalVar(value b)))) does not contain a field value b$1
+// at scala.tools.nsc.Global.assert(Global.scala:262)
+// at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:113)
+// at scala.tools.nsc.backend.icode.analysis.CopyPropagation$copyLattice$State.getFieldNonRecordValue(CopyPropagation.scala:122)
+// at scala.tools.nsc.backend.opt.ClosureElimination$ClosureElim$$anonfun$analyzeMethod$1$$anonfun$apply$2.replaceFieldAccess$1(ClosureElimination.scala:124)
+class Typer {
+ def bar(a: Boolean, b: Boolean): Unit = {
+ @inline
+ def baz(): Unit = {
+ ((_: Any) => (Typer.this, a, b)).apply("")
+ }
+ ((_: Any) => baz()).apply("")
+ }
+}
+
diff --git a/test/files/pos/t8410.flags b/test/files/pos/t8410.flags
new file mode 100644
index 0000000000..dcd5943c2f
--- /dev/null
+++ b/test/files/pos/t8410.flags
@@ -0,0 +1 @@
+-optimise -Xfatal-warnings -deprecation:false -Yinline-warnings:false
diff --git a/test/files/pos/t8410.scala b/test/files/pos/t8410.scala
new file mode 100644
index 0000000000..4d862311fa
--- /dev/null
+++ b/test/files/pos/t8410.scala
@@ -0,0 +1,15 @@
+
+object Test extends App {
+ @deprecated("","") def f = 42
+ @deprecated("","") def z = f
+ def g = { @deprecated("","") def _f = f ; _f } // warns in 2.11.0-M8
+ def x = { @deprecated("","") class X { def x = f } ; new X().x } // warns in 2.11.0-M8
+ Console println g
+ Console println f // warns
+
+ @deprecated("","") trait T
+ object T extends T { def t = f }
+ Console println T.t
+
+ def k = List(0).dropWhile(_ < 1) // inlining warns doubly
+}
diff --git a/test/files/neg/t845.scala b/test/files/pos/t845.scala
index ddf6a16f32..ddf6a16f32 100644
--- a/test/files/neg/t845.scala
+++ b/test/files/pos/t845.scala
diff --git a/test/files/pos/t8497/A_1.scala b/test/files/pos/t8497/A_1.scala
new file mode 100644
index 0000000000..6a76b0ee99
--- /dev/null
+++ b/test/files/pos/t8497/A_1.scala
@@ -0,0 +1,13 @@
+package p {
+ object Crash {
+ def e(s: (String @java.lang.Deprecated)): Unit = ()
+ def f(s: (String @nonStatic)): Unit = ()
+ }
+ object Ok {
+ def g(s: (String @nonStatic @static)): Unit = ()
+ def h(s: (String @static)): Unit = ()
+ }
+}
+
+class nonStatic extends scala.annotation.Annotation
+class static extends scala.annotation.StaticAnnotation
diff --git a/test/files/pos/t8497/B_2.scala b/test/files/pos/t8497/B_2.scala
new file mode 100644
index 0000000000..efe2edf2c3
--- /dev/null
+++ b/test/files/pos/t8497/B_2.scala
@@ -0,0 +1 @@
+package p { object Test { Crash } }
diff --git a/test/files/pos/t8498.scala b/test/files/pos/t8498.scala
new file mode 100644
index 0000000000..6808c89051
--- /dev/null
+++ b/test/files/pos/t8498.scala
@@ -0,0 +1,6 @@
+import scala.annotation.compileTimeOnly
+
+class C(val s: String) extends AnyVal {
+ @compileTimeOnly("error")
+ def error = ???
+}
diff --git a/test/files/pos/t8523.flags b/test/files/pos/t8523.flags
new file mode 100644
index 0000000000..c7d406c649
--- /dev/null
+++ b/test/files/pos/t8523.flags
@@ -0,0 +1 @@
+-Ywarn-dead-code -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t8523.scala b/test/files/pos/t8523.scala
new file mode 100644
index 0000000000..dfcb35404d
--- /dev/null
+++ b/test/files/pos/t8523.scala
@@ -0,0 +1,10 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+class Impl(val c: Context) {
+ def impl: c.Tree = ???
+}
+
+object Macros {
+ def foo: Any = macro Impl.impl
+} \ No newline at end of file
diff --git a/test/files/pos/t8531/MyEnum.java b/test/files/pos/t8531/MyEnum.java
new file mode 100644
index 0000000000..06cc128a79
--- /dev/null
+++ b/test/files/pos/t8531/MyEnum.java
@@ -0,0 +1,5 @@
+
+package foobar;
+public enum MyEnum {
+A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22, A23, A24, A25, A26, A27, A28, A29, A30, A31, A32, A33, A34, A35, A36, A37, A38, A39, A40, A41, A42, A43, A44, A45, A46, A47, A48, A49, A50, A51, A52, A53, A54, A55, A56, A57, A58, A59, A60, A61, A62, A63, A64, A65, A66, A67, A68, A69, A70, A71, A72, A73, A74, A75, A76, A77, A78, A79, A80, A81, A82, A83, A84, A85, A86, A87, A88, A89, A90, A91, A92, A93, A94, A95, A96, A97, A98, A99, A100, A101, A102, A103, A104, A105, A106, A107, A108, A109, A110, A111, A112, A113, A114, A115, A116, A117, A118, A119, A120, A121, A122, A123, A124, A125, A126, A127, A128, A129, A130, A131, A132, A133, A134, A135, A136, A137, A138, A139, A140, A141, A142, A143, A144, A145, A146, A147, A148, A149, A150, A151, A152, A153, A154, A155, A156, A157, A158, A159, A160, A161, A162, A163, A164, A165, A166, A167, A168, A169, A170, A171, A172, A173, A174, A175, A176, A177, A178, A179, A180, A181, A182, A183, A184, A185, A186, A187, A188, A189, A190, A191, A192, A193;
+}
diff --git a/test/files/pos/t8531/Test.scala b/test/files/pos/t8531/Test.scala
new file mode 100644
index 0000000000..59861435a6
--- /dev/null
+++ b/test/files/pos/t8531/Test.scala
@@ -0,0 +1,24 @@
+package test
+
+// takes > 50s and > 800M heap to compile under 2.11.0
+import foobar._
+class `SI-8531` {
+ //https://issues.scala-lang.org/browse/SI-8531
+
+ import MyEnum._
+ def foo(e1: MyEnum, e2: MyEnum) = (e1, e2) match {
+ case (A1, x) => "a1"
+ case (x, A1) => "a1"
+ case (A2, x) => "a2"
+ case (x, A2) => "a2"
+ case (A3, x) => "a3"
+ case (x, A3) => "a3"
+ case (A4, x) => "a4"
+ case (x, A4) => "a4"
+ case (A5, x) => "a5"
+ case (x, A5) => "a5"
+ case (A6, x) => "a6"
+ case (x, A6) => "a6"
+ case (a, b) => "ab"
+ }
+}
diff --git a/test/files/pos/t8546.flags b/test/files/pos/t8546.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t8546.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t8546.scala b/test/files/pos/t8546.scala
new file mode 100644
index 0000000000..c39d749b4c
--- /dev/null
+++ b/test/files/pos/t8546.scala
@@ -0,0 +1,49 @@
+package test
+
+class F1() {
+ private sealed abstract class T
+ private case class A(m: Int) extends T
+ private case class B() extends T
+ private case object C extends T
+
+ // No warnings here
+ private def foo(t: T) = t match {
+ case A(m) => println("A:" + m)
+ case B() => println("B")
+ case C => println("C")
+ }
+
+ def test(m: Int): Unit = {
+ foo(A(m))
+ foo(B())
+ foo(C)
+ }
+}
+
+class F2[M]() {
+ private sealed abstract class T
+ private case class A(m: M) extends T
+ private case class B() extends T
+ private case object C extends T
+
+ // match may not be exhaustive. It would fail on the following input: C
+ private def foo(t: T) = t match {
+ case A(m) => println("A:" + m)
+ case B() => println("B")
+ case C => println("C")
+ }
+
+ def test(m: M): Unit = {
+ foo(A(m))
+ foo(B())
+ foo(C)
+ }
+
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new F1().test(1)
+ new F2[Int]().test(1)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t8578.flags b/test/files/pos/t8578.flags
new file mode 100644
index 0000000000..48b438ddf8
--- /dev/null
+++ b/test/files/pos/t8578.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/pos/t8578.scala b/test/files/pos/t8578.scala
new file mode 100644
index 0000000000..879b5f5550
--- /dev/null
+++ b/test/files/pos/t8578.scala
@@ -0,0 +1,18 @@
+class DuplicateClassName {
+ () => {
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => ()}
+ {() => () => ()}
+ {() => ()}
+ }
+}
+// Was:
+// Different class symbols have the same bytecode-level internal name:
+// name: DuplicateClassName$lambda$$$anonfun$111 \ No newline at end of file
diff --git a/test/files/pos/t8596.flags b/test/files/pos/t8596.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/t8596.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t8596.scala b/test/files/pos/t8596.scala
new file mode 100644
index 0000000000..bfed58eadf
--- /dev/null
+++ b/test/files/pos/t8596.scala
@@ -0,0 +1,7 @@
+class TypeTreeObjects {
+ class Container {
+ def typeParamAndDefaultArg[C](name: String = ""): String = ""
+ }
+ // crashed under -Yrangepos
+ new Container().typeParamAndDefaultArg[Any]()
+}
diff --git a/test/files/pos/t8617.flags b/test/files/pos/t8617.flags
new file mode 100644
index 0000000000..281f0a10cd
--- /dev/null
+++ b/test/files/pos/t8617.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t8617.scala b/test/files/pos/t8617.scala
new file mode 100644
index 0000000000..fc825bbcba
--- /dev/null
+++ b/test/files/pos/t8617.scala
@@ -0,0 +1,10 @@
+object Test {
+ def foo[A] = implicitly[OptManifest[A]] // was "unpositioned tree" under -Yrangepos
+
+ // These did not crash, but testing for good measure.
+ implicitly[OptManifest[String]]
+ implicitly[Manifest[String]]
+
+ implicitly[reflect.ClassTag[String]]
+ implicitly[reflect.runtime.universe.TypeTag[String]]
+}
diff --git a/test/files/pos/t8625.scala b/test/files/pos/t8625.scala
new file mode 100644
index 0000000000..95c4b0dbcd
--- /dev/null
+++ b/test/files/pos/t8625.scala
@@ -0,0 +1,5 @@
+object Test {
+ def f1(a: Boolean, b: Boolean) = (a || ???) && (b || ???)
+ def f2(a: Boolean, b: Boolean) = (a || ???) && b
+ def f3(a: Boolean, b: Boolean) = (a && ???) || b
+}
diff --git a/test/files/pos/t8708/Either_1.scala b/test/files/pos/t8708/Either_1.scala
new file mode 100644
index 0000000000..000ed6e7c2
--- /dev/null
+++ b/test/files/pos/t8708/Either_1.scala
@@ -0,0 +1,6 @@
+sealed trait \/[+A, +B]
+
+sealed trait EitherT[F[+_], +A, +B]
+object EitherT {
+ def apply[F[+_], A, B](a: F[A \/ B]): EitherT[F, A, B] = new EitherT[F, A, B] { val run = a }
+}
diff --git a/test/files/pos/t8708/Test_2.scala b/test/files/pos/t8708/Test_2.scala
new file mode 100644
index 0000000000..d0e56b9a37
--- /dev/null
+++ b/test/files/pos/t8708/Test_2.scala
@@ -0,0 +1,13 @@
+import scala.language.higherKinds
+
+trait ClientTypes[M[+_]] {
+ final type Context[+A] = EitherT[M, String, A]
+ object Context {
+ def apply[A](ca: M[String \/ A]): Context[A] = EitherT[M, String, A](ca)
+ }
+
+ final type StatefulContext[+A] = EitherT[Context, String, A]
+ object StatefulContext {
+ def apply[A](state: Context[String \/ A]): StatefulContext[A] = ???
+ }
+}
diff --git a/test/files/pos/t8719.check b/test/files/pos/t8719.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/pos/t8719.check
diff --git a/test/files/pos/t8719/Macros_1.scala b/test/files/pos/t8719/Macros_1.scala
new file mode 100644
index 0000000000..152c92f254
--- /dev/null
+++ b/test/files/pos/t8719/Macros_1.scala
@@ -0,0 +1,21 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.TypecheckException
+import scala.reflect.macros.whitebox.Context
+
+object Macros {
+ def typecheck_impl(c: Context)(code: c.Expr[String]): c.Expr[Option[String]] = {
+ import c.universe._
+
+ val Expr(Literal(Constant(codeStr: String))) = code
+
+ try {
+ c.typecheck(c.parse(codeStr))
+ c.Expr(q"None: Option[String]")
+ } catch {
+ case e: TypecheckException =>
+ c.Expr(q"Some(${ e.toString }): Option[String]")
+ }
+ }
+
+ def typecheck(code: String): Option[String] = macro typecheck_impl
+} \ No newline at end of file
diff --git a/test/files/pos/t8719/Test_2.scala b/test/files/pos/t8719/Test_2.scala
new file mode 100644
index 0000000000..997eb2f236
--- /dev/null
+++ b/test/files/pos/t8719/Test_2.scala
@@ -0,0 +1,10 @@
+case class Foo(i: Int, c: Char)
+
+object Bar {
+ def unapply(foo: Foo): Option[(Int, Char)] = Some((foo.i, foo.c))
+}
+
+object Test extends App {
+ println(Macros.typecheck("val Foo(x, y, z) = Foo(1, 'a')"))
+ println(Macros.typecheck("val Bar(x, y, z) = Foo(1, 'a')"))
+} \ No newline at end of file
diff --git a/test/files/pos/t8736-b.flags b/test/files/pos/t8736-b.flags
new file mode 100644
index 0000000000..1ad4eabe0f
--- /dev/null
+++ b/test/files/pos/t8736-b.flags
@@ -0,0 +1 @@
+-feature -language:_ -Xfatal-warnings
diff --git a/test/files/pos/t8736-b.scala b/test/files/pos/t8736-b.scala
new file mode 100644
index 0000000000..903292d232
--- /dev/null
+++ b/test/files/pos/t8736-b.scala
@@ -0,0 +1,7 @@
+// scalac: -feature -language:_ -Xfatal-warnings
+// showing that all are set
+class X {
+ def hk[M[_]] = ???
+
+ implicit def imp(x: X): Int = x.hashCode
+}
diff --git a/test/files/pos/t8736.flags b/test/files/pos/t8736.flags
new file mode 100644
index 0000000000..7fe42f7340
--- /dev/null
+++ b/test/files/pos/t8736.flags
@@ -0,0 +1 @@
+-feature -language:implicitConversions -language:higherKinds -language:-implicitConversions -Xfatal-warnings
diff --git a/test/files/pos/t8736.scala b/test/files/pos/t8736.scala
new file mode 100644
index 0000000000..46c0cdfd00
--- /dev/null
+++ b/test/files/pos/t8736.scala
@@ -0,0 +1,7 @@
+// scalac: -feature -language:implicitConversions -language:higherKinds -language:-implicitConversions -Xfatal-warnings
+// showing that multiple settings are respected, and explicit enablement has precedence
+class X {
+ def hk[M[_]] = ???
+
+ implicit def imp(x: X): Int = x.hashCode
+}
diff --git a/test/files/pos/t8743.scala b/test/files/pos/t8743.scala
new file mode 100644
index 0000000000..03b0cd7044
--- /dev/null
+++ b/test/files/pos/t8743.scala
@@ -0,0 +1,15 @@
+import annotation.varargs
+
+object VarArgs {
+ @varargs
+ def foo[A](x: A, xs: String*): A = ???
+
+ @varargs
+ def bar[A](x: List[A], xs: String*): A = ???
+
+ @varargs
+ def baz[A](x: List[A], xs: String*): A = ???
+
+ @varargs
+ def boz[A](x: A, xs: String*): Nothing = ???
+}
diff --git a/test/files/pos/t8781/Macro_1.scala b/test/files/pos/t8781/Macro_1.scala
new file mode 100644
index 0000000000..ecd9c5e8d5
--- /dev/null
+++ b/test/files/pos/t8781/Macro_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val name = TypeName(c.freshName())
+ q"class $name extends T; new $name"
+ }
+ def fresh: Any = macro impl
+}
+
+trait T
diff --git a/test/files/pos/t8781/Test_2.flags b/test/files/pos/t8781/Test_2.flags
new file mode 100644
index 0000000000..24e2109690
--- /dev/null
+++ b/test/files/pos/t8781/Test_2.flags
@@ -0,0 +1 @@
+-Ymacro-expand:discard -Ystop-after:typer
diff --git a/test/files/pos/t8781/Test_2.scala b/test/files/pos/t8781/Test_2.scala
new file mode 100644
index 0000000000..3ca6406599
--- /dev/null
+++ b/test/files/pos/t8781/Test_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ implicit class RichT(t: T) { def augmented = "" }
+
+ Macros.fresh.augmented
+}
diff --git a/test/files/pos/t8793.scala b/test/files/pos/t8793.scala
new file mode 100644
index 0000000000..1276155675
--- /dev/null
+++ b/test/files/pos/t8793.scala
@@ -0,0 +1,15 @@
+package regr
+
+trait F[A]
+
+class G(val a: F[_], val b: F[_])
+
+object G {
+ def unapply(g: G) = Option((g.a, g.b))
+}
+
+object H {
+ def unapply(g: G) = g match {
+ case G(a, _) => Option(a)
+ }
+}
diff --git a/test/files/pos/t8801.scala b/test/files/pos/t8801.scala
new file mode 100644
index 0000000000..695b456e12
--- /dev/null
+++ b/test/files/pos/t8801.scala
@@ -0,0 +1,21 @@
+sealed trait Nat {
+ type Prev <: Nat { type Succ = Nat.this.type }
+ type Succ <: Nat { type Prev = Nat.this.type }
+}
+
+object Nat {
+ object Zero extends Nat {
+ type Prev = Nothing
+ }
+
+ type _0 = Zero.type
+ type _1 = _0#Succ
+ type _2 = _1#Succ
+ type _3 = _2#Succ
+ type _4 = _3#Succ
+ type _5 = _4#Succ
+ type _6 = _5#Succ
+ type _7 = _6#Succ
+ type _8 = _7#Succ
+ type _9 = _8#Succ
+}
diff --git a/test/files/pos/t8828.flags b/test/files/pos/t8828.flags
new file mode 100644
index 0000000000..e68991f643
--- /dev/null
+++ b/test/files/pos/t8828.flags
@@ -0,0 +1 @@
+-Xlint:inaccessible -Xfatal-warnings
diff --git a/test/files/pos/t8828.scala b/test/files/pos/t8828.scala
new file mode 100644
index 0000000000..182aba54c0
--- /dev/null
+++ b/test/files/pos/t8828.scala
@@ -0,0 +1,20 @@
+
+package outer
+
+package inner {
+
+ private[inner] class A
+
+ // the class is final: no warning
+ private[outer] final class B {
+ def doWork(a: A): A = a
+ }
+
+ // the trait is sealed and doWork is not
+ // and cannot be overridden: no warning
+ private[outer] sealed trait C {
+ def doWork(a: A): A = a
+ }
+
+ private[outer] final class D extends C
+}
diff --git a/test/files/pos/t8844.scala b/test/files/pos/t8844.scala
new file mode 100644
index 0000000000..d33c520d1b
--- /dev/null
+++ b/test/files/pos/t8844.scala
@@ -0,0 +1,4 @@
+object Example {
+ type S[A] = String
+ def foo(s: S[_]): String = s
+}
diff --git a/test/files/pos/t8861.flags b/test/files/pos/t8861.flags
new file mode 100644
index 0000000000..99a6391058
--- /dev/null
+++ b/test/files/pos/t8861.flags
@@ -0,0 +1 @@
+-Xlint:infer-any -Xfatal-warnings
diff --git a/test/files/pos/t8861.scala b/test/files/pos/t8861.scala
new file mode 100644
index 0000000000..816d15700e
--- /dev/null
+++ b/test/files/pos/t8861.scala
@@ -0,0 +1,11 @@
+
+trait Test {
+ type R = PartialFunction[Any, Unit]
+
+ val x: R = { case "" => }
+ val y: R = { case "" => }
+
+ val z: R = x orElse y
+ val zz = x orElse y
+}
+
diff --git a/test/files/pos/t8868a/Sub_2.scala b/test/files/pos/t8868a/Sub_2.scala
new file mode 100644
index 0000000000..a19b529c88
--- /dev/null
+++ b/test/files/pos/t8868a/Sub_2.scala
@@ -0,0 +1 @@
+class Sub extends T
diff --git a/test/files/pos/t8868a/T_1.scala b/test/files/pos/t8868a/T_1.scala
new file mode 100644
index 0000000000..9fb97b1413
--- /dev/null
+++ b/test/files/pos/t8868a/T_1.scala
@@ -0,0 +1,6 @@
+class C
+
+trait T {
+ @deprecated(since = "", message = "")
+ class X
+}
diff --git a/test/files/pos/t8868b/Sub_2.scala b/test/files/pos/t8868b/Sub_2.scala
new file mode 100644
index 0000000000..58b44db2b3
--- /dev/null
+++ b/test/files/pos/t8868b/Sub_2.scala
@@ -0,0 +1,2 @@
+class Sub extends T
+
diff --git a/test/files/pos/t8868b/T_1.scala b/test/files/pos/t8868b/T_1.scala
new file mode 100644
index 0000000000..0b71cfdaa3
--- /dev/null
+++ b/test/files/pos/t8868b/T_1.scala
@@ -0,0 +1,4 @@
+@deprecated(since = "2.4.0", message = "")
+trait T {
+ class X
+}
diff --git a/test/files/pos/t8868c/Sub_2.scala b/test/files/pos/t8868c/Sub_2.scala
new file mode 100644
index 0000000000..58b44db2b3
--- /dev/null
+++ b/test/files/pos/t8868c/Sub_2.scala
@@ -0,0 +1,2 @@
+class Sub extends T
+
diff --git a/test/files/pos/t8868c/T_1.scala b/test/files/pos/t8868c/T_1.scala
new file mode 100644
index 0000000000..dc541950d8
--- /dev/null
+++ b/test/files/pos/t8868c/T_1.scala
@@ -0,0 +1,9 @@
+class C(a: Any) extends annotation.StaticAnnotation
+
+@C({val x = 0; x})
+trait T {
+ class X
+
+ @C({val x = 0; x})
+ def foo = 42
+}
diff --git a/test/files/pos/t8893.scala b/test/files/pos/t8893.scala
new file mode 100644
index 0000000000..b87c8bdd3c
--- /dev/null
+++ b/test/files/pos/t8893.scala
@@ -0,0 +1,129 @@
+// Took > 10 minutes to run the tail call phase.
+object Test {
+ def a(): Option[String] = Some("a")
+
+ def main(args: Array[String]) {
+ a() match {
+ case Some(b1) =>
+ a() match {
+ case Some(b2) =>
+ a() match {
+ case Some(b3) =>
+ a() match {
+ case Some(b4) =>
+ a() match {
+ case Some(b5) =>
+ a() match {
+ case Some(b6) =>
+ a() match {
+ case Some(b7) =>
+ a() match {
+ case Some(b8) =>
+ a() match {
+ case Some(b9) =>
+ a() match {
+ case Some(b10) =>
+ a() match {
+ case Some(b11) =>
+ a() match {
+ case Some(b12) =>
+ a() match {
+ case Some(b13) =>
+ a() match {
+ case Some(b14) =>
+ a() match {
+ case Some(b15) =>
+ a() match {
+ case Some(b16) =>
+ a() match {
+ case Some(b17) =>
+ a() match {
+ case Some(b18) =>
+ a() match {
+ case Some(b19) =>
+ a() match {
+ case Some(b20) =>
+ a() match {
+ case Some(b21) =>
+ a() match {
+ case Some(b22) =>
+ a() match {
+ case Some(b23) =>
+ a() match {
+ case Some(b24) =>
+ a() match {
+ case Some(b25) =>
+ a() match {
+ case Some(b26) =>
+ a() match {
+ case Some(b27) =>
+ a() match {
+ case Some(b28) =>
+ a() match {
+ case Some(b29) =>
+ a() match {
+ case Some(b30) =>
+ println("yay")
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ case None => None
+ }
+ }
+}
+
diff --git a/test/files/pos/t8894.scala b/test/files/pos/t8894.scala
new file mode 100644
index 0000000000..3b26f1ae7e
--- /dev/null
+++ b/test/files/pos/t8894.scala
@@ -0,0 +1,12 @@
+class CC(val i: Int, val s: String)
+object CC extends {
+ type P = (Int, String)
+
+ //def unapply(c: CC): Option[(Int, String)] = Some((c.i, c.s)) // OK
+ def unapply(c: CC): Option[P] = Some((c.i, c.s)) // fails (because of the type alias)
+}
+
+class Test {
+ val cc = new CC(23, "foo")
+ val CC(i, s) = cc
+} \ No newline at end of file
diff --git a/test/files/pos/t8900.scala b/test/files/pos/t8900.scala
new file mode 100644
index 0000000000..376bd786f2
--- /dev/null
+++ b/test/files/pos/t8900.scala
@@ -0,0 +1,11 @@
+package foo
+package lambdaking
+
+class Test {
+ def byname(b: => Any) = ???
+ def foo: Any = {
+ def bar: Any = {
+ byname(bar)
+ }
+ }
+}
diff --git a/test/files/pos/t8934a/A_1.scala b/test/files/pos/t8934a/A_1.scala
new file mode 100644
index 0000000000..6c1f29d030
--- /dev/null
+++ b/test/files/pos/t8934a/A_1.scala
@@ -0,0 +1,18 @@
+import language.experimental.macros
+import reflect.macros.whitebox.Context
+
+object Unapply {
+ def impl1(c: Context)(a: c.Tree): c.Tree = {
+ import c.universe._
+ q"(new { def unapply[T](a: String): Option[(Int, String)] = ??? }).unapply($a)"
+ }
+ def unapply(a: Any): Any = macro impl1
+}
+
+object UnapplySeq {
+ def impl1(c: Context)(a: c.Tree): c.Tree = {
+ import c.universe._
+ q"(new { def unapplySeq[T](a: String): Option[(Int, Seq[String])] = ??? }).unapplySeq($a)"
+ }
+ def unapplySeq(a: Any): Any = macro impl1
+}
diff --git a/test/files/pos/t8934a/Test_2.flags b/test/files/pos/t8934a/Test_2.flags
new file mode 100644
index 0000000000..618dfe2b75
--- /dev/null
+++ b/test/files/pos/t8934a/Test_2.flags
@@ -0,0 +1 @@
+-Ystop-after:typer -Ymacro-expand:discard -nowarn
diff --git a/test/files/pos/t8934a/Test_2.scala b/test/files/pos/t8934a/Test_2.scala
new file mode 100644
index 0000000000..e1792ed3c5
--- /dev/null
+++ b/test/files/pos/t8934a/Test_2.scala
@@ -0,0 +1,12 @@
+object Test {
+ "" match {
+ case Unapply(a, b) =>
+ a: Int
+ b: String
+ case UnapplySeq(a, b1, b2) =>
+ a: Int
+ b1: String
+ b2: String
+ }
+}
+// These used to fail `too many patterns` under -Ymacro-expand:discard
diff --git a/test/files/pos/t8947/Client_2.scala b/test/files/pos/t8947/Client_2.scala
new file mode 100644
index 0000000000..1a5082a2f9
--- /dev/null
+++ b/test/files/pos/t8947/Client_2.scala
@@ -0,0 +1 @@
+object Test { X.extractor } \ No newline at end of file
diff --git a/test/files/pos/t8947/Macro_1.scala b/test/files/pos/t8947/Macro_1.scala
new file mode 100644
index 0000000000..4a5de3decb
--- /dev/null
+++ b/test/files/pos/t8947/Macro_1.scala
@@ -0,0 +1,41 @@
+import language.experimental.macros
+import scala.reflect.macros._
+import blackbox.Context
+
+object X {
+
+ def classTagOrNull[T](implicit t: reflect.ClassTag[T] = null) = t
+ // the failed search for ClassTag[T] does not issue a visible
+ // error as we fall back to the default argument. But, the
+ // macro engine thinks we have expanded the macro `materializeClassTag[D]()`
+ // to `EmptyTree`, and then attaches a backreference from the expansion
+ // to the expandee. This is the `MacroExpansionAttachment` tree attachment.
+ def foo[D] = classTagOrNull[D]
+
+ def extractor: Any = macro X.extractorMacro
+ def extractorMacro(c: Context): c.Expr[Any] = {
+ // Later, in reify, an unrelated use of `EmptyTree` in the AST representing
+ // the argument is now treated as a macro expansion which should be rolled
+ // back in the tree we reify! This ends up generating a call to `implicitly`
+ // which leads to an ambiguous error.
+ //
+ // Any macro call that expands to EmptyTree could have triggered this problem.
+ c.universe.reify(new { def something(data: Any) = ??? })
+ }
+
+ // Workarounds:
+ //
+ // 1. Use quasiquotes rather than `reify`. (But be sure to fully qualify all references, e.g. `_root_.scala.Predef.???`)
+ // 2. Avoid failed ClassTag lookups (e.g. in the original bug report, annotate the type argument to `map`)
+ // 3. In the macro implementation, just before calling the `reify` macro, you could call another macro
+ //
+ // def prepareReify = macro prepareReifyImpl
+ // def prepareReifyImpl(c: Context) = {
+ // val symtab = c.universe.asInstanceOf[reflect.internal.SymbolTable]
+ // symtab.EmptyTree.setAttachments(symtab.NoPosition)
+ // }
+ //
+ // To make this visible to the macro implementation, it will need to be compiled in an earlier stage,
+ // e.g. a separate SBT sub-project.
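+ //
+ // As a rough sketch (the project names below are hypothetical), an sbt build
+ // definition with such an earlier-compiled sub-project could look like:
+ //
+ //   lazy val macroSupport = project.in(file("macro-support"))   // compiled first; would host prepareReify
+ //   lazy val macros       = project.in(file("macros")).dependsOn(macroSupport)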
+
+}
diff --git a/test/files/pos/t8954.flags b/test/files/pos/t8954.flags
new file mode 100644
index 0000000000..7de3c0f3ee
--- /dev/null
+++ b/test/files/pos/t8954.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
diff --git a/test/files/pos/t8954/t1.scala b/test/files/pos/t8954/t1.scala
new file mode 100644
index 0000000000..3986d9f3b5
--- /dev/null
+++ b/test/files/pos/t8954/t1.scala
@@ -0,0 +1,13 @@
+package scala.foo
+
+// 1. a class about to be made final
+@deprecatedInheritance class A {
+ def foo(): Unit = ???
+}
+
+// 1.1:
+// - no inheritance warning because same file
+// - no "override non-deprecated member" because @deprecatedInheritance
+class B2 extends A {
+ @deprecated("","") override def foo(): Unit = ???
+}
diff --git a/test/files/pos/t8954/t2.scala b/test/files/pos/t8954/t2.scala
new file mode 100644
index 0000000000..4def127832
--- /dev/null
+++ b/test/files/pos/t8954/t2.scala
@@ -0,0 +1,39 @@
+package scala.foo
+
+// 1.2 deprecated children should be fine...
+@deprecated("", "") class B extends A {
+
+ // 1.3 and shouldn't trigger the
+ // "overriding non-deprecated parent" warning
+ override def foo(): Unit = ???
+}
+
+@deprecated("","") class F {
+ // 1.4 a class inside a deprecated class should work too
+ class G extends A
+}
+
+// 2. a method about to be made final
+class C {
+ @deprecatedOverriding def foo(): Unit = ???
+}
+
+// 2.1 overriding with a deprecated def should be fine
+ // and also shouldn't trigger the "deprecation is useless"
+// warning
+class D extends C {
+ @deprecated("","") override def foo(): Unit = ???
+}
+
+// 2.2 overriding from a deprecated class should be fine
+@deprecated("","") class E extends C {
+ override def foo(): Unit = ???
+}
+
+// 2.3 overriding from deeper inside a deprecated class
+// should work too
+@deprecated("","") class H {
+ class I extends C {
+ override def foo(): Unit = ???
+ }
+}
diff --git a/test/files/pos/t8962.scala b/test/files/pos/t8962.scala
new file mode 100644
index 0000000000..4331c154ba
--- /dev/null
+++ b/test/files/pos/t8962.scala
@@ -0,0 +1,31 @@
+package test.nestedcov
+
+sealed abstract class Outer[+A]
+case class Let[+A](expr: Outer[Inner[A]]) extends Outer[A]
+
+sealed abstract class Inner[+A]
+
+sealed abstract class Outer2[+A, +B]
+case class Let2[+A](expr: Outer2[Inner2[A], A]) extends Outer2[A, A]
+
+sealed abstract class Inner2[+A]
+
+sealed abstract class Outer3[+A, +B]
+case class Let3[+A](expr: Outer3[A, A]) extends Outer3[A, A]
+
+object NestedCov {
+ def run[A](nc: Outer[A]) = nc match {
+ case Let(expr) =>
+ expr : Outer[Inner[A]]
+ }
+
+ def run2[A](nc: Outer2[A, A]) = nc match {
+ case Let2(expr) =>
+ expr : Outer2[Inner2[A], A]
+ }
+
+ def run3[A](nc: Outer3[A, A]) = nc match {
+ case Let3(expr) =>
+ expr : Outer3[A, A]
+ }
+}
diff --git a/test/files/pos/t8965.flags b/test/files/pos/t8965.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t8965.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t8965.scala b/test/files/pos/t8965.scala
new file mode 100644
index 0000000000..4f39330f4e
--- /dev/null
+++ b/test/files/pos/t8965.scala
@@ -0,0 +1,7 @@
+class A {
+ def f(x: Any with AnyRef, y: Any with AnyRef) = x eq y
+ // a.scala:2: warning: Any and Any are unrelated: they will most likely never compare equal
+ // def f(x: Any with AnyRef, y: Any with AnyRef) = x eq y
+ // ^
+ // one warning found
+}
diff --git a/test/files/pos/t8999.flags b/test/files/pos/t8999.flags
new file mode 100644
index 0000000000..0f96f1f872
--- /dev/null
+++ b/test/files/pos/t8999.flags
@@ -0,0 +1 @@
+-nowarn \ No newline at end of file
diff --git a/test/files/pos/t8999.scala b/test/files/pos/t8999.scala
new file mode 100644
index 0000000000..99c4b2ad84
--- /dev/null
+++ b/test/files/pos/t8999.scala
@@ -0,0 +1,271 @@
+object Types {
+
+ abstract sealed class Type
+
+ case object AnyType extends Type
+
+ case object NothingType extends Type
+
+ case object UndefType extends Type
+
+ case object BooleanType extends Type
+
+ case object IntType extends Type
+
+ case object LongType extends Type
+
+ case object FloatType extends Type
+
+ case object DoubleType extends Type
+
+ case object StringType extends Type
+
+ case object NullType extends Type
+
+ sealed abstract class ReferenceType extends Type
+
+ final case class ClassType(className: String) extends ReferenceType
+
+ final case class ArrayType(baseClassName: String, dimensions: Int) extends ReferenceType
+
+ final case class RecordType(fields: List[RecordType.Field]) extends Type
+
+ object RecordType {
+ final case class Field(name: String, originalName: Option[String],
+ tpe: Type, mutable: Boolean)
+ }
+
+ case object NoType extends Type
+
+}
+
+
+sealed abstract class ClassKind
+
+object ClassKind {
+
+ case object Class extends ClassKind
+
+ case object ModuleClass extends ClassKind
+
+ case object Interface extends ClassKind
+
+ case object RawJSType extends ClassKind
+
+ case object HijackedClass extends ClassKind
+
+ case object TraitImpl extends ClassKind
+
+}
+
+object Trees {
+
+ import Types._
+
+ abstract sealed class Tree
+
+ case object EmptyTree extends Tree
+
+ sealed trait PropertyName
+ case class Ident(name: String, originalName: Option[String]) extends PropertyName
+ object Ident {
+ def apply(name: String): Ident =
+ new Ident(name, Some(name))
+ }
+
+ case class VarDef(name: Ident, vtpe: Type, mutable: Boolean, rhs: Tree) extends Tree
+
+ case class ParamDef(name: Ident, ptpe: Type, mutable: Boolean) extends Tree
+
+ case class Skip() extends Tree
+
+ class Block private(val stats: List[Tree]) extends Tree
+
+ object Block {
+ def unapply(block: Block): Some[List[Tree]] = Some(block.stats)
+ }
+
+ case class Labeled(label: Ident, tpe: Type, body: Tree) extends Tree
+
+ case class Assign(lhs: Tree, rhs: Tree) extends Tree
+
+ case class Return(expr: Tree, label: Option[Ident] = None) extends Tree
+
+ case class If(cond: Tree, thenp: Tree, elsep: Tree) extends Tree
+
+ case class While(cond: Tree, body: Tree, label: Option[Ident] = None) extends Tree
+
+ case class DoWhile(body: Tree, cond: Tree, label: Option[Ident] = None) extends Tree
+
+ case class Try(block: Tree, errVar: Ident, handler: Tree, finalizer: Tree) extends Tree
+
+ case class Throw(expr: Tree) extends Tree
+
+ case class Continue(label: Option[Ident] = None) extends Tree
+
+ case class Match(selector: Tree, cases: List[(List[Literal], Tree)], default: Tree) extends Tree
+
+ case class Debugger() extends Tree
+
+ case class New(cls: ClassType, ctor: Ident, args: List[Tree]) extends Tree
+
+ case class LoadModule(cls: ClassType) extends Tree
+
+ case class StoreModule(cls: ClassType, value: Tree) extends Tree
+
+ case class Select(qualifier: Tree, item: Ident, mutable: Boolean) extends Tree
+
+ case class Apply(receiver: Tree, method: Ident, args: List[Tree]) extends Tree
+
+ case class StaticApply(receiver: Tree, cls: ClassType, method: Ident, args: List[Tree]) extends Tree
+
+ case class TraitImplApply(impl: ClassType, method: Ident, args: List[Tree]) extends Tree
+
+ case class UnaryOp(op: Int, lhs: Tree) extends Tree
+
+ case class BinaryOp(op: Int, lhs: Tree, rhs: Tree) extends Tree
+
+ case class NewArray(tpe: ArrayType, lengths: List[Tree]) extends Tree
+
+ case class ArrayValue(tpe: ArrayType, elems: List[Tree]) extends Tree
+
+ case class ArrayLength(array: Tree) extends Tree
+
+ case class ArraySelect(array: Tree, index: Tree) extends Tree
+
+ case class RecordValue(tpe: RecordType, elems: List[Tree]) extends Tree
+
+ case class IsInstanceOf(expr: Tree, cls: ReferenceType) extends Tree
+
+ case class AsInstanceOf(expr: Tree, cls: ReferenceType) extends Tree
+
+ case class Unbox(expr: Tree, charCode: Char) extends Tree
+
+ case class GetClass(expr: Tree) extends Tree
+
+ case class CallHelper(helper: String, args: List[Tree]) extends Tree
+
+ case class JSNew(ctor: Tree, args: List[Tree]) extends Tree
+
+ case class JSDotSelect(qualifier: Tree, item: Ident) extends Tree
+
+ case class JSBracketSelect(qualifier: Tree, item: Tree) extends Tree
+
+ case class JSFunctionApply(fun: Tree, args: List[Tree]) extends Tree
+
+ case class JSDotMethodApply(receiver: Tree, method: Ident, args: List[Tree]) extends Tree
+
+ case class JSBracketMethodApply(receiver: Tree, method: Tree, args: List[Tree]) extends Tree
+
+ case class JSDelete(prop: Tree) extends Tree
+
+ case class JSUnaryOp(op: String, lhs: Tree) extends Tree
+
+ case class JSBinaryOp(op: String, lhs: Tree, rhs: Tree) extends Tree
+
+ case class JSArrayConstr(items: List[Tree]) extends Tree
+
+ case class JSObjectConstr(fields: List[(PropertyName, Tree)]) extends Tree
+
+ case class JSEnvInfo() extends Tree
+
+ sealed trait Literal extends Tree
+
+ case class Undefined() extends Literal
+
+ case class UndefinedParam() extends Literal
+
+ case class Null() extends Literal
+
+ case class BooleanLiteral(value: Boolean) extends Literal
+
+ case class IntLiteral(value: Int) extends Literal
+
+ case class LongLiteral(value: Long) extends Literal
+
+ case class FloatLiteral(value: Float) extends Literal
+
+ case class DoubleLiteral(value: Double) extends Literal
+
+ case class StringLiteral(value: String) extends Literal with PropertyName
+
+ case class ClassOf(cls: ReferenceType) extends Literal
+
+ case class VarRef(ident: Ident, mutable: Boolean) extends Tree
+
+ case class This() extends Tree
+
+ case class Closure(captureParams: List[ParamDef], params: List[ParamDef],
+ body: Tree, captureValues: List[Tree]) extends Tree
+
+ case class ClassDef(name: Ident, kind: ClassKind, parent: Option[Ident], ancestors: List[Ident], defs: List[Tree]) extends Tree
+
+ case class MethodDef(name: PropertyName, args: List[ParamDef], resultType: Type, body: Tree) extends Tree
+
+ case class PropertyDef(name: PropertyName, getterBody: Tree, setterArg: ParamDef, setterBody: Tree) extends Tree
+
+ case class ConstructorExportDef(name: String, args: List[ParamDef], body: Tree) extends Tree
+
+ case class ModuleExportDef(fullName: String) extends Tree
+
+ final class TreeHash(val treeHash: Array[Byte], val posHash: Array[Byte])
+}
+
+object Main {
+ import Trees._
+ import Types._
+
+ private def transform(tree: Tree) = {
+ val ObjectClass = "O"
+ tree match {
+ case VarDef(_, _, _, rhs) =>
+ case tree: Block =>
+ case Labeled(ident@Ident(label, _), tpe, body) =>
+ case Assign(lhs, rhs) =>
+ case Return(expr, optLabel) =>
+ case If(cond, thenp, elsep) =>
+ case While(cond, body, optLabel) =>
+ case DoWhile(body, cond, None) =>
+ case Try(block, errVar, EmptyTree, finalizer) =>
+ case Try(block, errVar@Ident(name, originalName), handler, finalizer) =>
+ case Throw(expr) =>
+ case Continue(optLabel) =>
+ case Match(selector, cases, default) =>
+ case New(cls, ctor, args) =>
+ case StoreModule(cls, value) =>
+ case tree: Select =>
+ case tree: Apply =>
+ case tree: StaticApply =>
+ case tree: TraitImplApply =>
+ case tree@UnaryOp(_, arg) =>
+ case tree@BinaryOp(op, lhs, rhs) =>
+ case NewArray(tpe, lengths) =>
+ case ArrayValue(tpe, elems) =>
+ case ArrayLength(array) =>
+ case ArraySelect(array, index) =>
+ case RecordValue(tpe, elems) =>
+ case IsInstanceOf(expr, ClassType(ObjectClass)) =>
+ case IsInstanceOf(expr, tpe) =>
+ case AsInstanceOf(expr, ClassType(ObjectClass)) =>
+ case AsInstanceOf(expr, cls) =>
+ case Unbox(arg, charCode) =>
+ case GetClass(expr) =>
+ case JSNew(ctor, args) =>
+ case JSDotSelect(qualifier, item) =>
+ case JSBracketSelect(qualifier, item) =>
+ case tree: JSFunctionApply =>
+ case JSDotMethodApply(receiver, method, args) =>
+ case JSBracketMethodApply(receiver, method, args) =>
+ case JSDelete(JSDotSelect(obj, prop)) =>
+ case JSDelete(JSBracketSelect(obj, prop)) =>
+ case JSUnaryOp(op, lhs) =>
+ case JSBinaryOp(op, lhs, rhs) =>
+ case JSArrayConstr(items) =>
+ case JSObjectConstr(fields) =>
+ case _: VarRef | _: This =>
+ case Closure(captureParams, params, body, captureValues) =>
+ case _: Skip | _: Debugger | _: LoadModule |
+ _: JSEnvInfo | _: Literal | EmptyTree =>
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t9008.scala b/test/files/pos/t9008.scala
new file mode 100644
index 0000000000..d11b8604f2
--- /dev/null
+++ b/test/files/pos/t9008.scala
@@ -0,0 +1,5 @@
+trait Monad[M[_]]
+
+object Test {
+ def x: Monad[M forSome { type M[_] }] = ???
+}
diff --git a/test/files/pos/t9018.scala b/test/files/pos/t9018.scala
new file mode 100644
index 0000000000..7fb4cf21b3
--- /dev/null
+++ b/test/files/pos/t9018.scala
@@ -0,0 +1,16 @@
+object TestObject {
+
+ def m(i: Int): AnyRef = i match {
+ case 0 => new C()
+ case 1 => Some(E.A).getOrElse("")
+ }
+
+ class C extends Ordered[C] {
+ def compare(that: C): Int = ???
+ }
+
+ object E extends Enumeration {
+ type CharacterClass = Value
+ val A = Value
+ }
+}
diff --git a/test/files/pos/t9020.flags b/test/files/pos/t9020.flags
new file mode 100644
index 0000000000..efb2dd3e6f
--- /dev/null
+++ b/test/files/pos/t9020.flags
@@ -0,0 +1 @@
+-Ywarn-value-discard -Xfatal-warnings
diff --git a/test/files/pos/t9020.scala b/test/files/pos/t9020.scala
new file mode 100644
index 0000000000..16e31e2572
--- /dev/null
+++ b/test/files/pos/t9020.scala
@@ -0,0 +1,10 @@
+trait ValueDiscard[@specialized U] {
+ def u: U
+}
+/* Was:
+scalac-hash v2.11.5 -Ywarn-value-discard test/files/pos/t9020.scala
+test/files/pos/t9020.scala:2: warning: discarded non-Unit value
+ def u: U
+ ^
+one warning found
+*/
diff --git a/test/files/pos/t9050.scala b/test/files/pos/t9050.scala
new file mode 100644
index 0000000000..b1ab09f901
--- /dev/null
+++ b/test/files/pos/t9050.scala
@@ -0,0 +1,13 @@
+final class Mu[F](val value: Any) extends AnyVal {
+ def cata(f: F) {
+ // crash
+ ((y: Mu[F]) => y.cata(f))
+ // crash
+ def foo(x : Mu[F]) = x.cata(f)
+
+ // okay
+ def x: Mu[F] = ???
+ (() => x.cata(f))
+ assert(true, cata(f))
+ }
+}
diff --git a/test/files/pos/t9086.scala b/test/files/pos/t9086.scala
new file mode 100644
index 0000000000..fba34ee226
--- /dev/null
+++ b/test/files/pos/t9086.scala
@@ -0,0 +1,8 @@
+class X[A](a: A)
+object Test {
+ implicit val ImplicitBoolean: Boolean = true
+ def local = {
+ implicit object X extends X({ implicitly[Boolean] ; "" })
+ implicitly[X[String]] // failed in 2.11.5
+ }
+}
diff --git a/test/files/pos/t9111-inliner-workaround.flags b/test/files/pos/t9111-inliner-workaround.flags
new file mode 100644
index 0000000000..63b5558cfd
--- /dev/null
+++ b/test/files/pos/t9111-inliner-workaround.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode -Yopt:l:classpath \ No newline at end of file
diff --git a/test/files/pos/t9111-inliner-workaround/A_1.java b/test/files/pos/t9111-inliner-workaround/A_1.java
new file mode 100644
index 0000000000..bc60b68ea6
--- /dev/null
+++ b/test/files/pos/t9111-inliner-workaround/A_1.java
@@ -0,0 +1,13 @@
+public class A_1 {
+ public static class T { }
+
+ public static class Inner {
+ public static class T { }
+
+ public void foo(T t) { }
+
+ public T t = null;
+
+ public class Deeper extends T { }
+ }
+}
diff --git a/test/files/pos/t9111-inliner-workaround/Test_1.scala b/test/files/pos/t9111-inliner-workaround/Test_1.scala
new file mode 100644
index 0000000000..1a00fff833
--- /dev/null
+++ b/test/files/pos/t9111-inliner-workaround/Test_1.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ println(new A_1.Inner())
+
+ // Accessing foo or Deeper triggers the error of SI-9111.
+ // However, when those definitions are not referred to, compilation should
+ // succeed even when the inliner is enabled.
+
+ // println(i.foo(null))
+ // new i.Deeper()
+}
diff --git a/test/files/pos/t9116.scala b/test/files/pos/t9116.scala
new file mode 100644
index 0000000000..16b04c2e6b
--- /dev/null
+++ b/test/files/pos/t9116.scala
@@ -0,0 +1,7 @@
+
+trait X {
+ List(1, 2, 3).toSet.subsets.map(_.toList) // ok now
+
+ List(1, 2, 3).toSet.subsets().map(_.toList) // now also
+ List(1, 2, 3).toSet.subsets(2).map(_.toList) // still ok
+}
diff --git a/test/files/pos/t9123.flags b/test/files/pos/t9123.flags
new file mode 100644
index 0000000000..c16e2f71dc
--- /dev/null
+++ b/test/files/pos/t9123.flags
@@ -0,0 +1 @@
+-optimize -Ydelambdafy:method
diff --git a/test/files/pos/t9123.scala b/test/files/pos/t9123.scala
new file mode 100644
index 0000000000..22d55b4351
--- /dev/null
+++ b/test/files/pos/t9123.scala
@@ -0,0 +1,10 @@
+trait Setting {
+ type T
+ def value: T
+}
+
+object Test {
+ def test(x: Some[Setting]) = x match {
+ case Some(dep) => Some(dep.value) map (_ => true)
+ }
+}
diff --git a/test/files/pos/t9135.scala b/test/files/pos/t9135.scala
new file mode 100644
index 0000000000..1e2c97baf9
--- /dev/null
+++ b/test/files/pos/t9135.scala
@@ -0,0 +1,16 @@
+
+class Free[A] {
+
+
+ this match {
+ case a @ Gosub() => gosub(a.a)(x => gosub(???)(???))
+ }
+ def gosub[A, B](a0: Free[A])(f0: A => Any): Free[B] = ???
+}
+
+
+
+ case class Gosub[B]() extends Free[B] {
+ type C
+ def a: Free[C] = ???
+ }
diff --git a/test/files/pos/t9157.scala b/test/files/pos/t9157.scala
new file mode 100644
index 0000000000..e178b5d84d
--- /dev/null
+++ b/test/files/pos/t9157.scala
@@ -0,0 +1,13 @@
+trait Flow[-In, +Out] {
+ type Repr[+O] <: Flow[In, O]
+ def map: Repr[String]
+}
+
+class Test {
+ // typechecking was exponentially slow with respect to the number of projections here.
+ def slowFlow(
+ f: Flow[String,String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]#Repr[String]
+ ) = {
+ f.map
+ }
+}
diff --git a/test/files/pos/t9181.flags b/test/files/pos/t9181.flags
new file mode 100644
index 0000000000..0f96f1f872
--- /dev/null
+++ b/test/files/pos/t9181.flags
@@ -0,0 +1 @@
+-nowarn \ No newline at end of file
diff --git a/test/files/pos/t9181.scala b/test/files/pos/t9181.scala
new file mode 100644
index 0000000000..2edf6fe4a3
--- /dev/null
+++ b/test/files/pos/t9181.scala
@@ -0,0 +1,806 @@
+sealed trait C
+case object C1 extends C
+case object C2 extends C
+case object C3 extends C
+case object C4 extends C
+case object C5 extends C
+case object C6 extends C
+case object C7 extends C
+case object C8 extends C
+case object C9 extends C
+case object C10 extends C
+case object C11 extends C
+case object C12 extends C
+case object C13 extends C
+case object C14 extends C
+case object C15 extends C
+case object C16 extends C
+case object C17 extends C
+case object C18 extends C
+case object C19 extends C
+case object C20 extends C
+case object C21 extends C
+case object C22 extends C
+case object C23 extends C
+case object C24 extends C
+case object C25 extends C
+case object C26 extends C
+case object C27 extends C
+case object C28 extends C
+case object C29 extends C
+case object C30 extends C
+case object C31 extends C
+case object C32 extends C
+case object C33 extends C
+case object C34 extends C
+case object C35 extends C
+case object C36 extends C
+case object C37 extends C
+case object C38 extends C
+case object C39 extends C
+case object C40 extends C
+case object C41 extends C
+case object C42 extends C
+case object C43 extends C
+case object C44 extends C
+case object C45 extends C
+case object C46 extends C
+case object C47 extends C
+case object C48 extends C
+case object C49 extends C
+case object C50 extends C
+case object C51 extends C
+case object C52 extends C
+case object C53 extends C
+case object C54 extends C
+case object C55 extends C
+case object C56 extends C
+case object C57 extends C
+case object C58 extends C
+case object C59 extends C
+case object C60 extends C
+case object C61 extends C
+case object C62 extends C
+case object C63 extends C
+case object C64 extends C
+case object C65 extends C
+case object C66 extends C
+case object C67 extends C
+case object C68 extends C
+case object C69 extends C
+case object C70 extends C
+case object C71 extends C
+case object C72 extends C
+case object C73 extends C
+case object C74 extends C
+case object C75 extends C
+case object C76 extends C
+case object C77 extends C
+case object C78 extends C
+case object C79 extends C
+case object C80 extends C
+case object C81 extends C
+case object C82 extends C
+case object C83 extends C
+case object C84 extends C
+case object C85 extends C
+case object C86 extends C
+case object C87 extends C
+case object C88 extends C
+case object C89 extends C
+case object C90 extends C
+case object C91 extends C
+case object C92 extends C
+case object C93 extends C
+case object C94 extends C
+case object C95 extends C
+case object C96 extends C
+case object C97 extends C
+case object C98 extends C
+case object C99 extends C
+case object C100 extends C
+case object C101 extends C
+case object C102 extends C
+case object C103 extends C
+case object C104 extends C
+case object C105 extends C
+case object C106 extends C
+case object C107 extends C
+case object C108 extends C
+case object C109 extends C
+case object C110 extends C
+case object C111 extends C
+case object C112 extends C
+case object C113 extends C
+case object C114 extends C
+case object C115 extends C
+case object C116 extends C
+case object C117 extends C
+case object C118 extends C
+case object C119 extends C
+case object C120 extends C
+case object C121 extends C
+case object C122 extends C
+case object C123 extends C
+case object C124 extends C
+case object C125 extends C
+case object C126 extends C
+case object C127 extends C
+case object C128 extends C
+case object C129 extends C
+case object C130 extends C
+case object C131 extends C
+case object C132 extends C
+case object C133 extends C
+case object C134 extends C
+case object C135 extends C
+case object C136 extends C
+case object C137 extends C
+case object C138 extends C
+case object C139 extends C
+case object C140 extends C
+case object C141 extends C
+case object C142 extends C
+case object C143 extends C
+case object C144 extends C
+case object C145 extends C
+case object C146 extends C
+case object C147 extends C
+case object C148 extends C
+case object C149 extends C
+case object C150 extends C
+case object C151 extends C
+case object C152 extends C
+case object C153 extends C
+case object C154 extends C
+case object C155 extends C
+case object C156 extends C
+case object C157 extends C
+case object C158 extends C
+case object C159 extends C
+case object C160 extends C
+case object C161 extends C
+case object C162 extends C
+case object C163 extends C
+case object C164 extends C
+case object C165 extends C
+case object C166 extends C
+case object C167 extends C
+case object C168 extends C
+case object C169 extends C
+case object C170 extends C
+case object C171 extends C
+case object C172 extends C
+case object C173 extends C
+case object C174 extends C
+case object C175 extends C
+case object C176 extends C
+case object C177 extends C
+case object C178 extends C
+case object C179 extends C
+case object C180 extends C
+case object C181 extends C
+case object C182 extends C
+case object C183 extends C
+case object C184 extends C
+case object C185 extends C
+case object C186 extends C
+case object C187 extends C
+case object C188 extends C
+case object C189 extends C
+case object C190 extends C
+case object C191 extends C
+case object C192 extends C
+case object C193 extends C
+case object C194 extends C
+case object C195 extends C
+case object C196 extends C
+case object C197 extends C
+case object C198 extends C
+case object C199 extends C
+case object C200 extends C
+case object C201 extends C
+case object C202 extends C
+case object C203 extends C
+case object C204 extends C
+case object C205 extends C
+case object C206 extends C
+case object C207 extends C
+case object C208 extends C
+case object C209 extends C
+case object C210 extends C
+case object C211 extends C
+case object C212 extends C
+case object C213 extends C
+case object C214 extends C
+case object C215 extends C
+case object C216 extends C
+case object C217 extends C
+case object C218 extends C
+case object C219 extends C
+case object C220 extends C
+case object C221 extends C
+case object C222 extends C
+case object C223 extends C
+case object C224 extends C
+case object C225 extends C
+case object C226 extends C
+case object C227 extends C
+case object C228 extends C
+case object C229 extends C
+case object C230 extends C
+case object C231 extends C
+case object C232 extends C
+case object C233 extends C
+case object C234 extends C
+case object C235 extends C
+case object C236 extends C
+case object C237 extends C
+case object C238 extends C
+case object C239 extends C
+case object C240 extends C
+case object C241 extends C
+case object C242 extends C
+case object C243 extends C
+case object C244 extends C
+case object C245 extends C
+case object C246 extends C
+case object C247 extends C
+case object C248 extends C
+case object C249 extends C
+case object C250 extends C
+case object C251 extends C
+case object C252 extends C
+case object C253 extends C
+case object C254 extends C
+case object C255 extends C
+case object C256 extends C
+case object C257 extends C
+case object C258 extends C
+case object C259 extends C
+case object C260 extends C
+case object C261 extends C
+case object C262 extends C
+case object C263 extends C
+case object C264 extends C
+case object C265 extends C
+case object C266 extends C
+case object C267 extends C
+case object C268 extends C
+case object C269 extends C
+case object C270 extends C
+case object C271 extends C
+case object C272 extends C
+case object C273 extends C
+case object C274 extends C
+case object C275 extends C
+case object C276 extends C
+case object C277 extends C
+case object C278 extends C
+case object C279 extends C
+case object C280 extends C
+case object C281 extends C
+case object C282 extends C
+case object C283 extends C
+case object C284 extends C
+case object C285 extends C
+case object C286 extends C
+case object C287 extends C
+case object C288 extends C
+case object C289 extends C
+case object C290 extends C
+case object C291 extends C
+case object C292 extends C
+case object C293 extends C
+case object C294 extends C
+case object C295 extends C
+case object C296 extends C
+case object C297 extends C
+case object C298 extends C
+case object C299 extends C
+case object C300 extends C
+case object C301 extends C
+case object C302 extends C
+case object C303 extends C
+case object C304 extends C
+case object C305 extends C
+case object C306 extends C
+case object C307 extends C
+case object C308 extends C
+case object C309 extends C
+case object C310 extends C
+case object C311 extends C
+case object C312 extends C
+case object C313 extends C
+case object C314 extends C
+case object C315 extends C
+case object C316 extends C
+case object C317 extends C
+case object C318 extends C
+case object C319 extends C
+case object C320 extends C
+case object C321 extends C
+case object C322 extends C
+case object C323 extends C
+case object C324 extends C
+case object C325 extends C
+case object C326 extends C
+case object C327 extends C
+case object C328 extends C
+case object C329 extends C
+case object C330 extends C
+case object C331 extends C
+case object C332 extends C
+case object C333 extends C
+case object C334 extends C
+case object C335 extends C
+case object C336 extends C
+case object C337 extends C
+case object C338 extends C
+case object C339 extends C
+case object C340 extends C
+case object C341 extends C
+case object C342 extends C
+case object C343 extends C
+case object C344 extends C
+case object C345 extends C
+case object C346 extends C
+case object C347 extends C
+case object C348 extends C
+case object C349 extends C
+case object C350 extends C
+case object C351 extends C
+case object C352 extends C
+case object C353 extends C
+case object C354 extends C
+case object C355 extends C
+case object C356 extends C
+case object C357 extends C
+case object C358 extends C
+case object C359 extends C
+case object C360 extends C
+case object C361 extends C
+case object C362 extends C
+case object C363 extends C
+case object C364 extends C
+case object C365 extends C
+case object C366 extends C
+case object C367 extends C
+case object C368 extends C
+case object C369 extends C
+case object C370 extends C
+case object C371 extends C
+case object C372 extends C
+case object C373 extends C
+case object C374 extends C
+case object C375 extends C
+case object C376 extends C
+case object C377 extends C
+case object C378 extends C
+case object C379 extends C
+case object C380 extends C
+case object C381 extends C
+case object C382 extends C
+case object C383 extends C
+case object C384 extends C
+case object C385 extends C
+case object C386 extends C
+case object C387 extends C
+case object C388 extends C
+case object C389 extends C
+case object C390 extends C
+case object C391 extends C
+case object C392 extends C
+case object C393 extends C
+case object C394 extends C
+case object C395 extends C
+case object C396 extends C
+case object C397 extends C
+case object C398 extends C
+case object C399 extends C
+case object C400 extends C
+
+object M {
+ def f(c: C): Int = c match {
+ case C1 => 1
+ case C2 => 2
+ case C3 => 3
+ case C4 => 4
+ case C5 => 5
+ case C6 => 6
+ case C7 => 7
+ case C8 => 8
+ case C9 => 9
+ case C10 => 10
+ case C11 => 11
+ case C12 => 12
+ case C13 => 13
+ case C14 => 14
+ case C15 => 15
+ case C16 => 16
+ case C17 => 17
+ case C18 => 18
+ case C19 => 19
+ case C20 => 20
+ case C21 => 21
+ case C22 => 22
+ case C23 => 23
+ case C24 => 24
+ case C25 => 25
+ case C26 => 26
+ case C27 => 27
+ case C28 => 28
+ case C29 => 29
+ case C30 => 30
+ case C31 => 31
+ case C32 => 32
+ case C33 => 33
+ case C34 => 34
+ case C35 => 35
+ case C36 => 36
+ case C37 => 37
+ case C38 => 38
+ case C39 => 39
+ case C40 => 40
+ case C41 => 41
+ case C42 => 42
+ case C43 => 43
+ case C44 => 44
+ case C45 => 45
+ case C46 => 46
+ case C47 => 47
+ case C48 => 48
+ case C49 => 49
+ case C50 => 50
+ case C51 => 51
+ case C52 => 52
+ case C53 => 53
+ case C54 => 54
+ case C55 => 55
+ case C56 => 56
+ case C57 => 57
+ case C58 => 58
+ case C59 => 59
+ case C60 => 60
+ case C61 => 61
+ case C62 => 62
+ case C63 => 63
+ case C64 => 64
+ case C65 => 65
+ case C66 => 66
+ case C67 => 67
+ case C68 => 68
+ case C69 => 69
+ case C70 => 70
+ case C71 => 71
+ case C72 => 72
+ case C73 => 73
+ case C74 => 74
+ case C75 => 75
+ case C76 => 76
+ case C77 => 77
+ case C78 => 78
+ case C79 => 79
+ case C80 => 80
+ case C81 => 81
+ case C82 => 82
+ case C83 => 83
+ case C84 => 84
+ case C85 => 85
+ case C86 => 86
+ case C87 => 87
+ case C88 => 88
+ case C89 => 89
+ case C90 => 90
+ case C91 => 91
+ case C92 => 92
+ case C93 => 93
+ case C94 => 94
+ case C95 => 95
+ case C96 => 96
+ case C97 => 97
+ case C98 => 98
+ case C99 => 99
+ case C100 => 100
+ case C101 => 101
+ case C102 => 102
+ case C103 => 103
+ case C104 => 104
+ case C105 => 105
+ case C106 => 106
+ case C107 => 107
+ case C108 => 108
+ case C109 => 109
+ case C110 => 110
+ case C111 => 111
+ case C112 => 112
+ case C113 => 113
+ case C114 => 114
+ case C115 => 115
+ case C116 => 116
+ case C117 => 117
+ case C118 => 118
+ case C119 => 119
+ case C120 => 120
+ case C121 => 121
+ case C122 => 122
+ case C123 => 123
+ case C124 => 124
+ case C125 => 125
+ case C126 => 126
+ case C127 => 127
+ case C128 => 128
+ case C129 => 129
+ case C130 => 130
+ case C131 => 131
+ case C132 => 132
+ case C133 => 133
+ case C134 => 134
+ case C135 => 135
+ case C136 => 136
+ case C137 => 137
+ case C138 => 138
+ case C139 => 139
+ case C140 => 140
+ case C141 => 141
+ case C142 => 142
+ case C143 => 143
+ case C144 => 144
+ case C145 => 145
+ case C146 => 146
+ case C147 => 147
+ case C148 => 148
+ case C149 => 149
+ case C150 => 150
+ case C151 => 151
+ case C152 => 152
+ case C153 => 153
+ case C154 => 154
+ case C155 => 155
+ case C156 => 156
+ case C157 => 157
+ case C158 => 158
+ case C159 => 159
+ case C160 => 160
+ case C161 => 161
+ case C162 => 162
+ case C163 => 163
+ case C164 => 164
+ case C165 => 165
+ case C166 => 166
+ case C167 => 167
+ case C168 => 168
+ case C169 => 169
+ case C170 => 170
+ case C171 => 171
+ case C172 => 172
+ case C173 => 173
+ case C174 => 174
+ case C175 => 175
+ case C176 => 176
+ case C177 => 177
+ case C178 => 178
+ case C179 => 179
+ case C180 => 180
+ case C181 => 181
+ case C182 => 182
+ case C183 => 183
+ case C184 => 184
+ case C185 => 185
+ case C186 => 186
+ case C187 => 187
+ case C188 => 188
+ case C189 => 189
+ case C190 => 190
+ case C191 => 191
+ case C192 => 192
+ case C193 => 193
+ case C194 => 194
+ case C195 => 195
+ case C196 => 196
+ case C197 => 197
+ case C198 => 198
+ case C199 => 199
+ case C200 => 200
+ case C201 => 201
+ case C202 => 202
+ case C203 => 203
+ case C204 => 204
+ case C205 => 205
+ case C206 => 206
+ case C207 => 207
+ case C208 => 208
+ case C209 => 209
+ case C210 => 210
+ case C211 => 211
+ case C212 => 212
+ case C213 => 213
+ case C214 => 214
+ case C215 => 215
+ case C216 => 216
+ case C217 => 217
+ case C218 => 218
+ case C219 => 219
+ case C220 => 220
+ case C221 => 221
+ case C222 => 222
+ case C223 => 223
+ case C224 => 224
+ case C225 => 225
+ case C226 => 226
+ case C227 => 227
+ case C228 => 228
+ case C229 => 229
+ case C230 => 230
+ case C231 => 231
+ case C232 => 232
+ case C233 => 233
+ case C234 => 234
+ case C235 => 235
+ case C236 => 236
+ case C237 => 237
+ case C238 => 238
+ case C239 => 239
+ case C240 => 240
+ case C241 => 241
+ case C242 => 242
+ case C243 => 243
+ case C244 => 244
+ case C245 => 245
+ case C246 => 246
+ case C247 => 247
+ case C248 => 248
+ case C249 => 249
+ case C250 => 250
+ case C251 => 251
+ case C252 => 252
+ case C253 => 253
+ case C254 => 254
+ case C255 => 255
+ case C256 => 256
+ case C257 => 257
+ case C258 => 258
+ case C259 => 259
+ case C260 => 260
+ case C261 => 261
+ case C262 => 262
+ case C263 => 263
+ case C264 => 264
+ case C265 => 265
+ case C266 => 266
+ case C267 => 267
+ case C268 => 268
+ case C269 => 269
+ case C270 => 270
+ case C271 => 271
+ case C272 => 272
+ case C273 => 273
+ case C274 => 274
+ case C275 => 275
+ case C276 => 276
+ case C277 => 277
+ case C278 => 278
+ case C279 => 279
+ case C280 => 280
+ case C281 => 281
+ case C282 => 282
+ case C283 => 283
+ case C284 => 284
+ case C285 => 285
+ case C286 => 286
+ case C287 => 287
+ case C288 => 288
+ case C289 => 289
+ case C290 => 290
+ case C291 => 291
+ case C292 => 292
+ case C293 => 293
+ case C294 => 294
+ case C295 => 295
+ case C296 => 296
+ case C297 => 297
+ case C298 => 298
+ case C299 => 299
+ case C300 => 300
+ case C301 => 301
+ case C302 => 302
+ case C303 => 303
+ case C304 => 304
+ case C305 => 305
+ case C306 => 306
+ case C307 => 307
+ case C308 => 308
+ case C309 => 309
+ case C310 => 310
+ case C311 => 311
+ case C312 => 312
+ case C313 => 313
+ case C314 => 314
+ case C315 => 315
+ case C316 => 316
+ case C317 => 317
+ case C318 => 318
+ case C319 => 319
+ case C320 => 320
+ case C321 => 321
+ case C322 => 322
+ case C323 => 323
+ case C324 => 324
+ case C325 => 325
+ case C326 => 326
+ case C327 => 327
+ case C328 => 328
+ case C329 => 329
+ case C330 => 330
+ case C331 => 331
+ case C332 => 332
+ case C333 => 333
+ case C334 => 334
+ case C335 => 335
+ case C336 => 336
+ case C337 => 337
+ case C338 => 338
+ case C339 => 339
+ case C340 => 340
+ case C341 => 341
+ case C342 => 342
+ case C343 => 343
+ case C344 => 344
+ case C345 => 345
+ case C346 => 346
+ case C347 => 347
+ case C348 => 348
+ case C349 => 349
+ case C350 => 350
+ case C351 => 351
+ case C352 => 352
+ case C353 => 353
+ case C354 => 354
+ case C355 => 355
+ case C356 => 356
+ case C357 => 357
+ case C358 => 358
+ case C359 => 359
+ case C360 => 360
+ case C361 => 361
+ case C362 => 362
+ case C363 => 363
+ case C364 => 364
+ case C365 => 365
+ case C366 => 366
+ case C367 => 367
+ case C368 => 368
+ case C369 => 369
+ case C370 => 370
+ case C371 => 371
+ case C372 => 372
+ case C373 => 373
+ case C374 => 374
+ case C375 => 375
+ case C376 => 376
+ case C377 => 377
+ case C378 => 378
+ case C379 => 379
+ case C380 => 380
+ case C381 => 381
+ case C382 => 382
+ case C383 => 383
+ case C384 => 384
+ case C385 => 385
+ case C386 => 386
+ case C387 => 387
+ case C388 => 388
+ case C389 => 389
+ case C390 => 390
+ case C391 => 391
+ case C392 => 392
+ case C393 => 393
+ case C394 => 394
+ case C395 => 395
+ case C396 => 396
+ case C397 => 397
+ case C398 => 398
+ case C399 => 399
+ case C400 => 400
+ }
+}
diff --git a/test/files/pos/t9239/Declaration.scala b/test/files/pos/t9239/Declaration.scala
new file mode 100644
index 0000000000..452dcc1e77
--- /dev/null
+++ b/test/files/pos/t9239/Declaration.scala
@@ -0,0 +1,3 @@
+class Foo[A]
+trait Bar[A] extends Foo[A]
+class Baz[A] extends Bar[A]
diff --git a/test/files/pos/t9239/Usage.java b/test/files/pos/t9239/Usage.java
new file mode 100644
index 0000000000..d1e3fb0c3e
--- /dev/null
+++ b/test/files/pos/t9239/Usage.java
@@ -0,0 +1,15 @@
+/**
+ * Used to fail with:
+ *
+ * Usage.java:5: error: incompatible types: Baz<String> cannot be converted to Foo<String>
+ * foo(f);
+ * ^
+ */
+public class Usage {
+ public Usage() {
+ Baz<String> f = null;
+ foo(f);
+ }
+
+ public void foo(Foo<String> f) { };
+}
diff --git a/test/files/pos/t9285.flags b/test/files/pos/t9285.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t9285.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t9285.scala b/test/files/pos/t9285.scala
new file mode 100644
index 0000000000..b7146cdf1c
--- /dev/null
+++ b/test/files/pos/t9285.scala
@@ -0,0 +1 @@
+case class C(placeholder: Unit)
diff --git a/test/files/pos/virtpatmat_exhaust_big.scala b/test/files/pos/virtpatmat_exhaust_big.scala
new file mode 100644
index 0000000000..9850933540
--- /dev/null
+++ b/test/files/pos/virtpatmat_exhaust_big.scala
@@ -0,0 +1,33 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+// drop any case and it will report an error
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z11() | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+} \ No newline at end of file
diff --git a/test/disabled/presentation/doc.check b/test/files/presentation/doc.check
index 5a3ff13151..5a3ff13151 100644
--- a/test/disabled/presentation/doc.check
+++ b/test/files/presentation/doc.check
diff --git a/test/disabled/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index f2233f1828..f2233f1828 100755
--- a/test/disabled/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
diff --git a/test/disabled/presentation/doc/src/Class.scala b/test/files/presentation/doc/src/Class.scala
index a974bd6f5c..a974bd6f5c 100755
--- a/test/disabled/presentation/doc/src/Class.scala
+++ b/test/files/presentation/doc/src/Class.scala
diff --git a/test/disabled/presentation/doc/src/p/Base.scala b/test/files/presentation/doc/src/p/Base.scala
index d91632b6f6..d91632b6f6 100755
--- a/test/disabled/presentation/doc/src/p/Base.scala
+++ b/test/files/presentation/doc/src/p/Base.scala
diff --git a/test/disabled/presentation/doc/src/p/Derived.scala b/test/files/presentation/doc/src/p/Derived.scala
index 1a9c9a26d1..1a9c9a26d1 100755
--- a/test/disabled/presentation/doc/src/p/Derived.scala
+++ b/test/files/presentation/doc/src/p/Derived.scala
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index d8c7a369f7..12eafcd6de 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -1,111 +1,24 @@
-reload: CrashOnLoad.scala
+reload: CrashOnLoad.scala, TestIterable.java
-askTypeCompletion at CrashOnLoad.scala(6,12)
+askTypeCompletion at CrashOnLoad.scala(6,11)
================================================================================
-[response] askTypeCompletion at (6,12)
-retrieved 117 members
+[response] askTypeCompletion at (6,11)
+retrieved 30 members
[inaccessible] protected[package lang] def clone(): Object
[inaccessible] protected[package lang] def finalize(): Unit
-[inaccessible] protected[this] def reversed: List[B]
-class GroupedIterator[B >: A] extends AbstractIterator[Seq[B]] with Iterator[Seq[B]]
def +(other: String): String
-def ++[B >: B](that: => scala.collection.GenTraversableOnce[B]): Iterator[B]
-def ->[B](y: B): (java.util.Iterator[B], B)
-def /:[B](z: B)(op: (B, B) => B): B
-def :\[B](z: B)(op: (B, B) => B): B
-def addString(b: StringBuilder): StringBuilder
-def addString(b: StringBuilder,sep: String): StringBuilder
-def addString(b: StringBuilder,start: String,sep: String,end: String): StringBuilder
-def aggregate[B](z: => B)(seqop: (B, B) => B,combop: (B, B) => B): B
-def buffered: scala.collection.BufferedIterator[B]
-def collectFirst[B](pf: PartialFunction[B,B]): Option[B]
-def collect[B](pf: PartialFunction[B,B]): Iterator[B]
-def contains(elem: Any): Boolean
-def copyToArray[B >: B](xs: Array[B]): Unit
-def copyToArray[B >: B](xs: Array[B],start: Int): Unit
-def copyToArray[B >: B](xs: Array[B],start: Int,len: Int): Unit
-def copyToBuffer[B >: B](dest: scala.collection.mutable.Buffer[B]): Unit
-def corresponds[B](that: scala.collection.GenTraversableOnce[B])(p: (B, B) => Boolean): Boolean
-def count(p: B => Boolean): Int
-def drop(n: Int): Iterator[B]
-def dropWhile(p: B => Boolean): Iterator[B]
-def duplicate: (Iterator[B], Iterator[B])
-def ensuring(cond: Boolean): java.util.Iterator[B]
-def ensuring(cond: Boolean,msg: => Any): java.util.Iterator[B]
-def ensuring(cond: java.util.Iterator[B] => Boolean): java.util.Iterator[B]
-def ensuring(cond: java.util.Iterator[B] => Boolean,msg: => Any): java.util.Iterator[B]
+def ->[B](y: B): (other.TestIterator[Nothing], B)
+def ensuring(cond: Boolean): other.TestIterator[Nothing]
+def ensuring(cond: Boolean,msg: => Any): other.TestIterator[Nothing]
+def ensuring(cond: other.TestIterator[Nothing] => Boolean): other.TestIterator[Nothing]
+def ensuring(cond: other.TestIterator[Nothing] => Boolean,msg: => Any): other.TestIterator[Nothing]
def equals(x$1: Any): Boolean
-def exists(p: B => Boolean): Boolean
-def filter(p: B => Boolean): Iterator[B]
-def filterNot(p: B => Boolean): Iterator[B]
-def find(p: B => Boolean): Option[B]
-def flatMap[B](f: B => scala.collection.GenTraversableOnce[B]): Iterator[B]
-def foldLeft[B](z: B)(op: (B, B) => B): B
-def foldRight[B](z: B)(op: (B, B) => B): B
-def fold[A1 >: B](z: A1)(op: (A1, A1) => A1): A1
-def forall(p: B => Boolean): Boolean
-def foreach[U](f: B => U): Unit
def formatted(fmtstr: String): String
-def grouped[B >: B](size: Int): Iterator[B]#GroupedIterator[B]
-def hasDefiniteSize: Boolean
-def hasNext(): Boolean
+def hasNext: Boolean
def hashCode(): Int
-def indexOf[B >: B](elem: B): Int
-def indexWhere(p: B => Boolean): Int
-def isEmpty: Boolean
-def isTraversableAgain: Boolean
-def length: Int
-def map[B](f: B => B): Iterator[B]
-def maxBy[B](f: B => B)(implicit cmp: Ordering[B]): B
-def max[B >: B](implicit cmp: Ordering[B]): B
-def minBy[B](f: B => B)(implicit cmp: Ordering[B]): B
-def min[B >: B](implicit cmp: Ordering[B]): B
-def mkString(sep: String): String
-def mkString(start: String,sep: String,end: String): String
-def mkString: String
-def next(): B
-def nonEmpty: Boolean
-def padTo[A1 >: B](len: Int,elem: A1): Iterator[A1]
-def partition(p: B => Boolean): (Iterator[B], Iterator[B])
-def patch[B >: B](from: Int,patchElems: Iterator[B],replaced: Int): Iterator[B]
-def product[B >: B](implicit num: Numeric[B]): B
-def reduceLeftOption[B >: B](op: (B, B) => B): Option[B]
-def reduceLeft[B >: B](op: (B, B) => B): B
-def reduceOption[A1 >: B](op: (A1, A1) => A1): Option[A1]
-def reduceRightOption[B >: B](op: (B, B) => B): Option[B]
-def reduceRight[B >: B](op: (B, B) => B): B
-def reduce[A1 >: B](op: (A1, A1) => A1): A1
-def remove(): Unit
-def sameElements(that: Iterator[_]): Boolean
-def scanLeft[B](z: B)(op: (B, B) => B): Iterator[B]
-def scanRight[B](z: B)(op: (B, B) => B): Iterator[B]
-def seq: Iterator[B]
-def size: Int
-def slice(from: Int,until: Int): Iterator[B]
-def sliding[B >: B](size: Int,step: Int): Iterator[B]#GroupedIterator[B]
-def span(p: B => Boolean): (Iterator[B], Iterator[B])
-def sum[B >: B](implicit num: Numeric[B]): B
-def take(n: Int): Iterator[B]
-def takeWhile(p: B => Boolean): Iterator[B]
-def toArray[B >: B](implicit evidence$1: scala.reflect.ClassTag[B]): Array[B]
-def toBuffer[B >: B]: scala.collection.mutable.Buffer[B]
-def toIndexedSeq: scala.collection.immutable.IndexedSeq[B]
-def toIterable: Iterable[B]
-def toIterator: Iterator[B]
-def toList: List[B]
-def toMap[T, U](implicit ev: <:<[B,(T, U)]): scala.collection.immutable.Map[T,U]
-def toSeq: Seq[B]
-def toSet[B >: B]: scala.collection.immutable.Set[B]
-def toStream: scala.collection.immutable.Stream[B]
+def next: T
def toString(): String
-def toTraversable: Traversable[B]
-def toVector: Vector[B]
-def to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]]): Col[B]
-def withFilter(p: B => Boolean): Iterator[B]
-def zipAll[B, A1 >: B, B1 >: B](that: Iterator[B],thisElem: A1,thatElem: B1): Iterator[(A1, B1)]
-def zipWithIndex: Iterator[(B, Int)]
-def zip[B](that: Iterator[B]): Iterator[(B, B)]
-def →[B](y: B): (java.util.Iterator[B], B)
+def →[B](y: B): (other.TestIterator[Nothing], B)
final def !=(x$1: Any): Boolean
final def ##(): Int
final def ==(x$1: Any): Boolean
diff --git a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
index 878bbfa19e..3f59282083 100644
--- a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
+++ b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
@@ -1,7 +1,14 @@
 /** When this file is opened within the IDE, a typing error is reported. */
-class A[B] extends java.lang.Iterable[B] {
+class A[B] extends TestIterable[B] {
import scala.collection.JavaConversions._
- def iterator = Iterator.empty
+ def iterator: other.TestIterator[Nothing] = ???
- iterator. /*!*/
-} \ No newline at end of file
+ iterator./*!*/
+}
+
+object other {
+ trait TestIterator[T] {
+ def hasNext: Boolean
+ def next: T
+ }
+}
diff --git a/test/files/presentation/ide-bug-1000531/src/TestIterable.java b/test/files/presentation/ide-bug-1000531/src/TestIterable.java
new file mode 100644
index 0000000000..84a6fe77f1
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000531/src/TestIterable.java
@@ -0,0 +1,7 @@
+public abstract class TestIterable<T> {
+ public abstract TestIterator<T> iterator();
+ public static abstract class TestIterator<T> {
+ public abstract T next();
+ public abstract boolean hasNext();
+ }
+}
diff --git a/test/files/presentation/infix-completion.check b/test/files/presentation/infix-completion.check
new file mode 100644
index 0000000000..f62dc81d34
--- /dev/null
+++ b/test/files/presentation/infix-completion.check
@@ -0,0 +1,193 @@
+reload: Snippet.scala
+
+askTypeCompletion at Snippet.scala(1,34)
+================================================================================
+[response] askTypeCompletion at (1,34)
+retrieved 192 members
+[inaccessible] protected def integralNum: math.Numeric.DoubleAsIfIntegral.type
+[inaccessible] protected def num: math.Numeric.DoubleIsFractional.type
+[inaccessible] protected def ord: math.Ordering.Double.type
+[inaccessible] protected def unifiedPrimitiveEquals(x: Any): Boolean
+[inaccessible] protected def unifiedPrimitiveHashcode(): Int
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def !=(x: Byte): Boolean
+def !=(x: Char): Boolean
+def !=(x: Double): Boolean
+def !=(x: Float): Boolean
+def !=(x: Int): Boolean
+def !=(x: Long): Boolean
+def !=(x: Short): Boolean
+def %(x: Byte): Int
+def %(x: Char): Int
+def %(x: Double): Double
+def %(x: Float): Float
+def %(x: Int): Int
+def %(x: Long): Long
+def %(x: Short): Int
+def &(x: Byte): Int
+def &(x: Char): Int
+def &(x: Int): Int
+def &(x: Long): Long
+def &(x: Short): Int
+def *(x: Byte): Int
+def *(x: Char): Int
+def *(x: Double): Double
+def *(x: Float): Float
+def *(x: Int): Int
+def *(x: Long): Long
+def *(x: Short): Int
+def +(x: Byte): Int
+def +(x: Char): Int
+def +(x: Double): Double
+def +(x: Float): Float
+def +(x: Int): Int
+def +(x: Long): Long
+def +(x: Short): Int
+def +(x: String): String
+def -(x: Byte): Int
+def -(x: Char): Int
+def -(x: Double): Double
+def -(x: Float): Float
+def -(x: Int): Int
+def -(x: Long): Long
+def -(x: Short): Int
+def ->[B](y: B): (Int, B)
+def /(x: Byte): Int
+def /(x: Char): Int
+def /(x: Double): Double
+def /(x: Float): Float
+def /(x: Int): Int
+def /(x: Long): Long
+def /(x: Short): Int
+def <(x: Byte): Boolean
+def <(x: Char): Boolean
+def <(x: Double): Boolean
+def <(x: Float): Boolean
+def <(x: Int): Boolean
+def <(x: Long): Boolean
+def <(x: Short): Boolean
+def <<(x: Int): Int
+def <<(x: Long): Int
+def <=(x: Byte): Boolean
+def <=(x: Char): Boolean
+def <=(x: Double): Boolean
+def <=(x: Float): Boolean
+def <=(x: Int): Boolean
+def <=(x: Long): Boolean
+def <=(x: Short): Boolean
+def ==(x: Byte): Boolean
+def ==(x: Char): Boolean
+def ==(x: Double): Boolean
+def ==(x: Float): Boolean
+def ==(x: Int): Boolean
+def ==(x: Long): Boolean
+def ==(x: Short): Boolean
+def >(x: Byte): Boolean
+def >(x: Char): Boolean
+def >(x: Double): Boolean
+def >(x: Float): Boolean
+def >(x: Int): Boolean
+def >(x: Long): Boolean
+def >(x: Short): Boolean
+def >=(x: Byte): Boolean
+def >=(x: Char): Boolean
+def >=(x: Double): Boolean
+def >=(x: Float): Boolean
+def >=(x: Int): Boolean
+def >=(x: Long): Boolean
+def >=(x: Short): Boolean
+def >>(x: Int): Int
+def >>(x: Long): Int
+def >>>(x: Int): Int
+def >>>(x: Long): Int
+def ^(x: Byte): Int
+def ^(x: Char): Int
+def ^(x: Int): Int
+def ^(x: Long): Long
+def ^(x: Short): Int
+def byteValue(): Byte
+def ceil: Double
+def compare(y: Double): Int
+def compare(y: Long): Int
+def compareTo(that: Double): Int
+def compareTo(that: Long): Int
+def compareTo(x$1: Double): Int
+def compareTo(x$1: Long): Int
+def doubleValue(): Double
+def ensuring(cond: Boolean): Int
+def ensuring(cond: Boolean,msg: => Any): Int
+def ensuring(cond: Int => Boolean): Int
+def ensuring(cond: Int => Boolean,msg: => Any): Int
+def equals(x$1: Any): Boolean
+def floatValue(): Float
+def floor: Double
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def intValue(): Int
+def isInfinite(): Boolean
+def isInfinity: Boolean
+def isNaN(): Boolean
+def isNegInfinity: Boolean
+def isPosInfinity: Boolean
+def isValidLong: Boolean
+def longValue(): Long
+def round: Long
+def shortValue(): Short
+def to(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]
+def to(end: Double,step: Double): scala.collection.immutable.NumericRange.Inclusive[Double]
+def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long]
+def to(end: Long,step: Long): scala.collection.immutable.NumericRange.Inclusive[Long]
+def toBinaryString: String
+def toByte: Byte
+def toChar: Char
+def toDegrees: Double
+def toDouble: Double
+def toFloat: Float
+def toHexString: String
+def toInt: Int
+def toLong: Long
+def toOctalString: String
+def toRadians: Double
+def toShort: Short
+def toString(): String
+def unary_+: Int
+def unary_-: Int
+def unary_~: Int
+def underlying(): AnyRef
+def until(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]
+def until(end: Double,step: Double): scala.collection.immutable.NumericRange.Exclusive[Double]
+def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long]
+def until(end: Long,step: Long): scala.collection.immutable.NumericRange.Exclusive[Long]
+def |(x: Byte): Int
+def |(x: Char): Int
+def |(x: Int): Int
+def |(x: Long): Long
+def |(x: Short): Int
+def →[B](y: B): (Int, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+override def abs: Double
+override def isValidByte: Boolean
+override def isValidChar: Boolean
+override def isValidInt: Boolean
+override def isValidShort: Boolean
+override def isWhole(): Boolean
+override def max(that: Double): Double
+override def max(that: Long): Long
+override def min(that: Double): Double
+override def min(that: Long): Long
+override def signum: Int
+private[this] val self: Double
+================================================================================
diff --git a/test/files/presentation/infix-completion/Runner.scala b/test/files/presentation/infix-completion/Runner.scala
new file mode 100644
index 0000000000..1c03e3d5ba
--- /dev/null
+++ b/test/files/presentation/infix-completion/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
diff --git a/test/files/presentation/infix-completion/src/Snippet.scala b/test/files/presentation/infix-completion/src/Snippet.scala
new file mode 100644
index 0000000000..7e03c486ba
--- /dev/null
+++ b/test/files/presentation/infix-completion/src/Snippet.scala
@@ -0,0 +1 @@
+object Snippet{val x = 123; 1 + 1./*!*/}
diff --git a/test/files/presentation/infix-completion2.check b/test/files/presentation/infix-completion2.check
new file mode 100644
index 0000000000..5c69cd84cb
--- /dev/null
+++ b/test/files/presentation/infix-completion2.check
@@ -0,0 +1,211 @@
+reload: Snippet.scala
+
+askTypeCompletion at Snippet.scala(1,34)
+================================================================================
+[response] askTypeCompletion at (1,34)
+retrieved 211 members
+[inaccessible] protected def integralNum: math.Numeric.DoubleAsIfIntegral.type
+[inaccessible] protected def num: math.Numeric.DoubleIsFractional.type
+[inaccessible] protected def ord: math.Ordering.Double.type
+[inaccessible] protected def unifiedPrimitiveEquals(x: Any): Boolean
+[inaccessible] protected def unifiedPrimitiveHashcode(): Int
+[inaccessible] protected[package lang] def clone(): Object
+[inaccessible] protected[package lang] def finalize(): Unit
+def !=(x: Byte): Boolean
+def !=(x: Char): Boolean
+def !=(x: Double): Boolean
+def !=(x: Float): Boolean
+def !=(x: Int): Boolean
+def !=(x: Long): Boolean
+def !=(x: Short): Boolean
+def %(x: Byte): Int
+def %(x: Char): Int
+def %(x: Double): Double
+def %(x: Float): Float
+def %(x: Int): Int
+def %(x: Long): Long
+def %(x: Short): Int
+def &(x: Byte): Int
+def &(x: Char): Int
+def &(x: Int): Int
+def &(x: Long): Long
+def &(x: Short): Int
+def *(x: Byte): Int
+def *(x: Char): Int
+def *(x: Double): Double
+def *(x: Float): Float
+def *(x: Int): Int
+def *(x: Long): Long
+def *(x: Short): Int
+def +(x: Byte): Int
+def +(x: Char): Int
+def +(x: Double): Double
+def +(x: Float): Float
+def +(x: Int): Int
+def +(x: Long): Long
+def +(x: Short): Int
+def +(x: String): String
+def -(x: Byte): Int
+def -(x: Char): Int
+def -(x: Double): Double
+def -(x: Float): Float
+def -(x: Int): Int
+def -(x: Long): Long
+def -(x: Short): Int
+def ->[B](y: B): (Int, B)
+def /(x: Byte): Int
+def /(x: Char): Int
+def /(x: Double): Double
+def /(x: Float): Float
+def /(x: Int): Int
+def /(x: Long): Long
+def /(x: Short): Int
+def <(x: Byte): Boolean
+def <(x: Char): Boolean
+def <(x: Double): Boolean
+def <(x: Float): Boolean
+def <(x: Int): Boolean
+def <(x: Long): Boolean
+def <(x: Short): Boolean
+def <<(x: Int): Int
+def <<(x: Long): Int
+def <=(x: Byte): Boolean
+def <=(x: Char): Boolean
+def <=(x: Double): Boolean
+def <=(x: Float): Boolean
+def <=(x: Int): Boolean
+def <=(x: Long): Boolean
+def <=(x: Short): Boolean
+def ==(x: Byte): Boolean
+def ==(x: Char): Boolean
+def ==(x: Double): Boolean
+def ==(x: Float): Boolean
+def ==(x: Int): Boolean
+def ==(x: Long): Boolean
+def ==(x: Short): Boolean
+def >(x: Byte): Boolean
+def >(x: Char): Boolean
+def >(x: Double): Boolean
+def >(x: Float): Boolean
+def >(x: Int): Boolean
+def >(x: Long): Boolean
+def >(x: Short): Boolean
+def >=(x: Byte): Boolean
+def >=(x: Char): Boolean
+def >=(x: Double): Boolean
+def >=(x: Float): Boolean
+def >=(x: Int): Boolean
+def >=(x: Long): Boolean
+def >=(x: Short): Boolean
+def >>(x: Int): Int
+def >>(x: Long): Int
+def >>>(x: Int): Int
+def >>>(x: Long): Int
+def ^(x: Byte): Int
+def ^(x: Char): Int
+def ^(x: Int): Int
+def ^(x: Long): Long
+def ^(x: Short): Int
+def byteValue(): Byte
+def ceil: Double
+def compare(y: Double): Int
+def compare(y: Float): Int
+def compare(y: Int): Int
+def compare(y: Long): Int
+def compareTo(that: Double): Int
+def compareTo(that: Float): Int
+def compareTo(that: Int): Int
+def compareTo(that: Long): Int
+def compareTo(x$1: Double): Int
+def compareTo(x$1: Float): Int
+def compareTo(x$1: Integer): Int
+def compareTo(x$1: Long): Int
+def doubleValue(): Double
+def ensuring(cond: Boolean): Int
+def ensuring(cond: Boolean,msg: => Any): Int
+def ensuring(cond: Int => Boolean): Int
+def ensuring(cond: Int => Boolean,msg: => Any): Int
+def equals(x$1: Any): Boolean
+def floatValue(): Float
+def floor: Double
+def formatted(fmtstr: String): String
+def hashCode(): Int
+def intValue(): Int
+def isInfinite(): Boolean
+def isInfinity: Boolean
+def isNaN(): Boolean
+def isNegInfinity: Boolean
+def isPosInfinity: Boolean
+def isValidLong: Boolean
+def longValue(): Long
+def round: Long
+def shortValue(): Short
+def to(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]
+def to(end: Double,step: Double): scala.collection.immutable.NumericRange.Inclusive[Double]
+def to(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]]
+def to(end: Float,step: Float): scala.collection.immutable.NumericRange.Inclusive[Float]
+def to(end: Int): scala.collection.immutable.Range.Inclusive
+def to(end: Int,step: Int): scala.collection.immutable.Range.Inclusive
+def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long]
+def to(end: Long,step: Long): scala.collection.immutable.NumericRange.Inclusive[Long]
+def toBinaryString: String
+def toByte: Byte
+def toChar: Char
+def toDegrees: Double
+def toDouble: Double
+def toFloat: Float
+def toHexString: String
+def toInt: Int
+def toLong: Long
+def toOctalString: String
+def toRadians: Double
+def toShort: Short
+def toString(): String
+def unary_+: Int
+def unary_-: Int
+def unary_~: Int
+def underlying(): AnyRef
+def until(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]
+def until(end: Double,step: Double): scala.collection.immutable.NumericRange.Exclusive[Double]
+def until(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]]
+def until(end: Float,step: Float): scala.collection.immutable.NumericRange.Exclusive[Float]
+def until(end: Int): scala.collection.immutable.Range
+def until(end: Int,step: Int): scala.collection.immutable.Range
+def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long]
+def until(end: Long,step: Long): scala.collection.immutable.NumericRange.Exclusive[Long]
+def |(x: Byte): Int
+def |(x: Char): Int
+def |(x: Int): Int
+def |(x: Long): Long
+def |(x: Short): Int
+def →[B](y: B): (Int, B)
+final def !=(x$1: Any): Boolean
+final def ##(): Int
+final def ==(x$1: Any): Boolean
+final def asInstanceOf[T0]: T0
+final def eq(x$1: AnyRef): Boolean
+final def isInstanceOf[T0]: Boolean
+final def ne(x$1: AnyRef): Boolean
+final def notify(): Unit
+final def notifyAll(): Unit
+final def synchronized[T0](x$1: T0): T0
+final def wait(): Unit
+final def wait(x$1: Long): Unit
+final def wait(x$1: Long,x$2: Int): Unit
+override def abs: Double
+override def isValidByte: Boolean
+override def isValidChar: Boolean
+override def isValidInt: Boolean
+override def isValidShort: Boolean
+override def isWhole(): Boolean
+override def max(that: Double): Double
+override def max(that: Float): Float
+override def max(that: Int): Int
+override def max(that: Long): Long
+override def min(that: Double): Double
+override def min(that: Float): Float
+override def min(that: Int): Int
+override def min(that: Long): Long
+override def signum: Int
+private[this] val self: Double
+================================================================================
diff --git a/test/files/presentation/infix-completion2/Runner.scala b/test/files/presentation/infix-completion2/Runner.scala
new file mode 100644
index 0000000000..1c03e3d5ba
--- /dev/null
+++ b/test/files/presentation/infix-completion2/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
diff --git a/test/files/presentation/infix-completion2/src/Snippet.scala b/test/files/presentation/infix-completion2/src/Snippet.scala
new file mode 100644
index 0000000000..4eb8c24a2e
--- /dev/null
+++ b/test/files/presentation/infix-completion2/src/Snippet.scala
@@ -0,0 +1 @@
+object Snippet{val x = 123; 1 + x./*!*/}
diff --git a/test/files/presentation/private-case-class-members.check b/test/files/presentation/private-case-class-members.check
new file mode 100644
index 0000000000..678f9a34e6
--- /dev/null
+++ b/test/files/presentation/private-case-class-members.check
@@ -0,0 +1 @@
+Test OK
diff --git a/test/files/presentation/private-case-class-members/Test.scala b/test/files/presentation/private-case-class-members/Test.scala
new file mode 100644
index 0000000000..e64c8238ea
--- /dev/null
+++ b/test/files/presentation/private-case-class-members/Test.scala
@@ -0,0 +1,34 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+ override def execute(): Unit = {
+ val source = loadSourceAndWaitUntilTypechecked("State.scala")
+ checkErrors(source)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ compiler.askToDoFirst(sourceFile)
+ val res = new Response[Unit]
+ compiler.askReload(List(sourceFile), res)
+ res.get
+ askLoadedTyped(sourceFile).get
+ // the second round of type-checking makes it fail
+ compiler.askReload(List(sourceFile), res)
+ res.get
+ askLoadedTyped(sourceFile).get
+
+ sourceFile
+ }
+
+ private def checkErrors(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) =>
+ val problems = unit.problems.toList
+ if(problems.isEmpty) reporter.println("Test OK")
+ else problems.foreach(problem => reporter.println(problem.msg))
+
+ case None => reporter.println("No compilation unit found for " + source.file.name)
+ }
+}
diff --git a/test/files/presentation/private-case-class-members/src/State.scala b/test/files/presentation/private-case-class-members/src/State.scala
new file mode 100644
index 0000000000..c31817076c
--- /dev/null
+++ b/test/files/presentation/private-case-class-members/src/State.scala
@@ -0,0 +1,5 @@
+object State
+case class State(private val foo: Int)
+
+case class State2(private val foo: Int)
+object State2
diff --git a/test/files/presentation/quasiquotes.flags b/test/files/presentation/quasiquotes.flags
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/presentation/quasiquotes.flags
diff --git a/test/files/presentation/t7915.check b/test/files/presentation/t7915.check
index b18b4ddb55..0849aaa82b 100644
--- a/test/files/presentation/t7915.check
+++ b/test/files/presentation/t7915.check
@@ -9,3 +9,23 @@ askHyperlinkPos for `bar` at (7,22) Foo.scala
================================================================================
[response] found askHyperlinkPos for `bar` at (2,7) Foo.scala
================================================================================
+
+askHyperlinkPos for `Bar` at (8,11) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `Bar` at (1,7) Foo.scala
+================================================================================
+
+askHyperlinkPos for `baz` at (8,22) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `baz` at (2,31) Foo.scala
+================================================================================
+
+askHyperlinkPos for `Bar` at (9,11) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `Bar` at (1,7) Foo.scala
+================================================================================
+
+askHyperlinkPos for `baz` at (9,22) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `baz` at (2,31) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t7915/src/Foo.scala b/test/files/presentation/t7915/src/Foo.scala
index a4166ae5b4..5c9ca36a6e 100644
--- a/test/files/presentation/t7915/src/Foo.scala
+++ b/test/files/presentation/t7915/src/Foo.scala
@@ -1,9 +1,11 @@
class Bar {
- def bar(b: Int = 2) {}
+ def bar(b: Int = 2) {}; def baz[X](b: Int = 2) {}
}
class Foo {
def foo() {
new Bar/*#*/().bar/*#*/()
+ new Bar/*#*/().baz/*#*/[Any]()
+ new Bar/*#*/().baz/*#*/()
}
}
diff --git a/test/files/presentation/t8459.check b/test/files/presentation/t8459.check
new file mode 100644
index 0000000000..336c147141
--- /dev/null
+++ b/test/files/presentation/t8459.check
@@ -0,0 +1,14 @@
+reload: IncompleteDynamicSelect.scala
+
+askType at IncompleteDynamicSelect.scala(12,2)
+================================================================================
+[response] askTypeAt (12,2)
+scala.AnyRef {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ private[this] val bar: F = new F();
+ Foo.this.bar.<selectDynamic: error>("<error>")
+}
+================================================================================
diff --git a/test/files/presentation/t8459/Test.scala b/test/files/presentation/t8459/Test.scala
new file mode 100644
index 0000000000..bec1131c4c
--- /dev/null
+++ b/test/files/presentation/t8459/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest \ No newline at end of file
diff --git a/test/files/presentation/t8459/src/IncompleteDynamicSelect.scala b/test/files/presentation/t8459/src/IncompleteDynamicSelect.scala
new file mode 100644
index 0000000000..61976fe2f9
--- /dev/null
+++ b/test/files/presentation/t8459/src/IncompleteDynamicSelect.scala
@@ -0,0 +1,14 @@
+import scala.language.dynamics
+
+class F extends Dynamic {
+ def applyDynamic(name: String)(args: Any*) =
+ s"method '$name' called with arguments ${args.mkString("'", "', '", "'")}"
+}
+
+class Foo {
+ val bar = new F
+
+ bar. //note whitespace after dot
+ /*?*/ //force typechecking
+}
+
diff --git a/test/files/presentation/t8934.check b/test/files/presentation/t8934.check
new file mode 100644
index 0000000000..0ece87f808
--- /dev/null
+++ b/test/files/presentation/t8934.check
@@ -0,0 +1,2 @@
+reload: Source.scala
+Test OK
diff --git a/test/files/presentation/t8934/Runner.scala b/test/files/presentation/t8934/Runner.scala
new file mode 100644
index 0000000000..944f458391
--- /dev/null
+++ b/test/files/presentation/t8934/Runner.scala
@@ -0,0 +1,27 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+
+ override def execute(): Unit = {
+ val src = loadSourceAndWaitUntilTypechecked("Source.scala")
+ checkErrors(src)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ askReload(List(sourceFile)).get
+ askLoadedTyped(sourceFile).get
+ sourceFile
+ }
+
+ private def checkErrors(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) =>
+ val problems = unit.problems.toList
+ if(problems.isEmpty) reporter.println("Test OK")
+ else problems.foreach(problem => reporter.println(problem.msg))
+
+ case None => reporter.println("No compilation unit found for " + source.file.name)
+ }
+}
diff --git a/test/files/presentation/t8934/src/Source.scala b/test/files/presentation/t8934/src/Source.scala
new file mode 100644
index 0000000000..769c8fd38b
--- /dev/null
+++ b/test/files/presentation/t8934/src/Source.scala
@@ -0,0 +1,10 @@
+class Quasi {
+ import reflect.runtime.universe._
+
+ def test: Unit = {
+ (null: Any) match {
+ case q"$foo($bar)" =>
+ }
+ ()
+ }
+}
diff --git a/test/files/presentation/t8941.check b/test/files/presentation/t8941.check
new file mode 100644
index 0000000000..341804903a
--- /dev/null
+++ b/test/files/presentation/t8941.check
@@ -0,0 +1,7 @@
+reload: Source.scala
+
+askType at Source.scala(6,7)
+================================================================================
+[response] askTypeAt (6,7)
+scala.this.Predef.???
+================================================================================
diff --git a/test/files/presentation/t8941/Runner.scala b/test/files/presentation/t8941/Runner.scala
new file mode 100644
index 0000000000..0a8923a583
--- /dev/null
+++ b/test/files/presentation/t8941/Runner.scala
@@ -0,0 +1,11 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override def runDefaultTests() {
+ // make sure typer is done. The virtual pattern matcher might translate
+ // some trees and mess up positions. But we'll catch it red-handed!
+ // sourceFiles foreach (src => askLoadedTyped(src).get)
+ super.runDefaultTests()
+ }
+
+}
diff --git a/test/files/presentation/t8941/src/Source.scala b/test/files/presentation/t8941/src/Source.scala
new file mode 100644
index 0000000000..7438cccb03
--- /dev/null
+++ b/test/files/presentation/t8941/src/Source.scala
@@ -0,0 +1,8 @@
+object Foo {
+ implicit class MatCreator(val ctx: StringContext) extends AnyVal {
+ def m(args: Any*): Unit = {
+ ctx.checkLengths(args)
+ }
+ ???/*?*/
+ }
+}
diff --git a/test/files/presentation/t8941b/IdempotencyTest.scala b/test/files/presentation/t8941b/IdempotencyTest.scala
new file mode 100644
index 0000000000..af01b36898
--- /dev/null
+++ b/test/files/presentation/t8941b/IdempotencyTest.scala
@@ -0,0 +1,73 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import reporters.{Reporter => CompilerReporter}
+import scala.tools.nsc.interactive.InteractiveReporter
+import scala.reflect.internal.util.SourceFile
+
+/** Deterministically interrupts typechecking of `code` when a definition named
+ * `MagicInterruptionMarker` is typechecked, and then performs a targeted
+ * typecheck of the tree at the special comment marker.
+ */
+abstract class IdempotencyTest { self =>
+ private val settings = new Settings
+ settings.usejavacp.value = true
+
+ private object Break extends scala.util.control.ControlThrowable
+
+ private val compilerReporter: CompilerReporter = new InteractiveReporter {
+ override def compiler = self.compiler
+ }
+
+ object compiler extends Global(settings, compilerReporter) {
+ override def checkForMoreWork(pos: Position) {
+ }
+ override def signalDone(context: Context, old: Tree, result: Tree) {
+ // println("signalDone: " + old.toString.take(50).replaceAll("\n", "\\n"))
+ if (!interrupted && analyzer.lockedCount == 0 && interruptsEnabled && shouldInterrupt(result)) {
+ interrupted = true
+ val typed = typedTreeAt(markerPosition)
+ checkTypedTree(typed)
+ throw Break
+ }
+ super.signalDone(context, old, result)
+ }
+
+ // we're driving manually using our own thread, disable the check here.
+ override def assertCorrectThread() {}
+ }
+
+ import compiler._
+
+ private var interrupted = false
+
+ // Extension points
+ protected def code: String
+ protected def shouldInterrupt(tree: Tree): Boolean = {
+ tree.symbol != null && tree.symbol.name.toString == "MagicInterruptionMarker"
+ }
+ protected def checkTypedTree(tree: Tree): Unit = {}
+
+
+ private val source: SourceFile = newSourceFile(code)
+ private def markerPosition: Position = source.position(code.indexOf("/*?*/"))
+
+ def assertNoProblems() {
+ val problems = getUnit(source).get.problems
+ assert(problems.isEmpty, problems.mkString("\n"))
+ }
+
+ def show() {
+ reloadSource(source)
+ try {
+ typedTree(source, true)
+ assert(false, "Expected to break out of typechecking.")
+ } catch {
+ case Break => // expected
+ }
+ assertNoProblems()
+ }
+
+ def main(args: Array[String]) { show() }
+}
diff --git a/test/files/presentation/t8941b/Test.scala b/test/files/presentation/t8941b/Test.scala
new file mode 100644
index 0000000000..7269a14286
--- /dev/null
+++ b/test/files/presentation/t8941b/Test.scala
@@ -0,0 +1,53 @@
+import scala.tools.nsc.interactive.tests.core.IdempotencyTest
+
+// At the time of writing this test, removing any part of `enterExistingSym`
+// leads to a failure.
+object Test {
+ def main(args: Array[String]) {
+ test("""
+ object Foo {
+ def term {
+ def foo(c: String = "") = c
+ class MagicInterruptionMarker
+ foo()/*?*/
+ }
+ }
+ """)
+
+ test("""
+ object Foo {
+ def term {
+ def foo = 42
+ class MagicInterruptionMarker
+ foo/*?*/
+ }
+ }
+ """)
+
+ test("""
+ object Foo {
+ def term {
+ lazy val foo = 42
+ class MagicInterruptionMarker
+ foo/*?*/
+ }
+ }
+ """)
+
+ test("""
+ object Foo {
+ implicit class C(val a: String) extends AnyVal
+ class MagicInterruptionMarker
+ ""/*?*/
+ }
+ """)
+ }
+
+ def test(code0: String) {
+ val t = new IdempotencyTest {
+ def code = code0
+ }
+ t.show()
+ }
+}
+
diff --git a/test/files/presentation/visibility/src/Completions.scala b/test/files/presentation/visibility/src/Completions.scala
index 8c07934915..69ec3959ad 100644
--- a/test/files/presentation/visibility/src/Completions.scala
+++ b/test/files/presentation/visibility/src/Completions.scala
@@ -11,7 +11,7 @@ package accessibility {
def secretPublic(): Unit
def someTests(other: Foo) {
- other./*!*/secretPrivate // should be all but scretThis
+ other./*!*/secretPrivate // should be all but secretThis
this./*!*/secretProtected // should hit five completions
}
@@ -25,7 +25,7 @@ package accessibility {
class UnrelatedClass {
def someTests(foo: Foo) {
- foo./*!*/ // should list public and protected[accessiblity]
+ foo./*!*/ // should list public and protected[accessibility]
}
}
diff --git a/test/files/res/t6613.check b/test/files/res/t6613.check
new file mode 100644
index 0000000000..bbd9331b16
--- /dev/null
+++ b/test/files/res/t6613.check
@@ -0,0 +1,5 @@
+
+nsc>
+nsc>
+nsc>
+nsc>
diff --git a/test/files/res/t6613.res b/test/files/res/t6613.res
new file mode 100644
index 0000000000..e3fa000fdd
--- /dev/null
+++ b/test/files/res/t6613.res
@@ -0,0 +1,3 @@
+t6613/Enummy.java
+t6613/Broken.scala
+t6613/Broken.scala
diff --git a/test/files/res/t6613/Broken.scala b/test/files/res/t6613/Broken.scala
new file mode 100644
index 0000000000..9bcd12dbe1
--- /dev/null
+++ b/test/files/res/t6613/Broken.scala
@@ -0,0 +1 @@
+class Broken() { def broken() = Enummy.Broke.CHIP }
diff --git a/test/files/res/t6613/Enummy.java b/test/files/res/t6613/Enummy.java
new file mode 100644
index 0000000000..1863ef1297
--- /dev/null
+++ b/test/files/res/t6613/Enummy.java
@@ -0,0 +1 @@
+public class Enummy { public enum Broke { SHARD, CHIP } }
diff --git a/test/files/res/t8871.check b/test/files/res/t8871.check
new file mode 100644
index 0000000000..bbd9331b16
--- /dev/null
+++ b/test/files/res/t8871.check
@@ -0,0 +1,5 @@
+
+nsc>
+nsc>
+nsc>
+nsc>
diff --git a/test/files/res/t8871.res b/test/files/res/t8871.res
new file mode 100644
index 0000000000..9b1a5fb57f
--- /dev/null
+++ b/test/files/res/t8871.res
@@ -0,0 +1,4 @@
+t8871/tag.scala
+t8871/usetag.scala
+t8871/usetag.scala
+
diff --git a/test/files/res/t8871/tag.scala b/test/files/res/t8871/tag.scala
new file mode 100644
index 0000000000..1a1803b77d
--- /dev/null
+++ b/test/files/res/t8871/tag.scala
@@ -0,0 +1,3 @@
+class Tag {
+ @inline def apply[@specialized A, T](a: A): A = a
+}
diff --git a/test/files/res/t8871/usetag.scala b/test/files/res/t8871/usetag.scala
new file mode 100644
index 0000000000..139d768552
--- /dev/null
+++ b/test/files/res/t8871/usetag.scala
@@ -0,0 +1,6 @@
+trait Foo
+
+object Test {
+ val y = new Tag().apply[Double, Foo](3.3)
+ // under FSC, this gave t8871/usetag.scala:4: error: wrong number of type parameters for method apply$mDc$sp: [T](a: Double)Double
+}
diff --git a/test/files/res/t9089.check b/test/files/res/t9089.check
new file mode 100644
index 0000000000..6cf64f734b
--- /dev/null
+++ b/test/files/res/t9089.check
@@ -0,0 +1,4 @@
+
+nsc>
+nsc>
+nsc>
diff --git a/test/files/res/t9089.res b/test/files/res/t9089.res
new file mode 100644
index 0000000000..ab5cc8534d
--- /dev/null
+++ b/test/files/res/t9089.res
@@ -0,0 +1,2 @@
+t9089/A.scala
+t9089/A.scala
diff --git a/test/files/res/t9089/A.scala b/test/files/res/t9089/A.scala
new file mode 100644
index 0000000000..bccf269639
--- /dev/null
+++ b/test/files/res/t9089/A.scala
@@ -0,0 +1 @@
+object O { def f(x: => Int): Int = x }
diff --git a/test/files/res/t9170.check b/test/files/res/t9170.check
new file mode 100644
index 0000000000..6d40b6ba8d
--- /dev/null
+++ b/test/files/res/t9170.check
@@ -0,0 +1,7 @@
+
+nsc> t9170/A.scala:3: error: double definition:
+def f[A](a: => A): Int at line 2 and
+def f[A](a: => Either[Exception,A]): Int at line 3
+have same type after erasure: (a: Function0)Int
+ def f[A](a: => Either[Exception, A]) = 2
+ ^
diff --git a/test/files/res/t9170.res b/test/files/res/t9170.res
new file mode 100644
index 0000000000..c2aec2f8dd
--- /dev/null
+++ b/test/files/res/t9170.res
@@ -0,0 +1,2 @@
+t9170/A.scala
+t9170/A.scala
diff --git a/test/files/res/t9170/A.scala b/test/files/res/t9170/A.scala
new file mode 100644
index 0000000000..239df89679
--- /dev/null
+++ b/test/files/res/t9170/A.scala
@@ -0,0 +1,4 @@
+object Y {
+ def f[A](a: => A) = 1
+ def f[A](a: => Either[Exception, A]) = 2
+}
diff --git a/test/files/run/abstypetags_serialize.check b/test/files/run/abstypetags_serialize.check
index bddc4523e6..1b5e2ebddf 100644
--- a/test/files/run/abstypetags_serialize.check
+++ b/test/files/run/abstypetags_serialize.check
@@ -1,2 +1,2 @@
-java.io.NotSerializableException: Test$$typecreator1$1
-java.io.NotSerializableException: Test$$typecreator2$1
+WeakTypeTag[T]
+WeakTypeTag[U[String]]
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
index e3ab554d4c..9803465ddc 100644
--- a/test/files/run/analyzerPlugins.check
+++ b/test/files/run/analyzerPlugins.check
@@ -19,7 +19,7 @@ canAdaptAnnotations(Trees$Typed, Any) [1]
canAdaptAnnotations(Trees$Typed, Int) [1]
lub(List(Int @testAnn, Int)) [1]
pluginsPt(?, Trees$Annotated) [7]
-pluginsPt(?, Trees$Apply) [9]
+pluginsPt(?, Trees$Apply) [8]
pluginsPt(?, Trees$ApplyImplicitView) [2]
pluginsPt(?, Trees$Assign) [7]
pluginsPt(?, Trees$Block) [4]
@@ -31,13 +31,13 @@ pluginsPt(?, Trees$Literal) [16]
pluginsPt(?, Trees$New) [5]
pluginsPt(?, Trees$PackageDef) [1]
pluginsPt(?, Trees$Return) [1]
-pluginsPt(?, Trees$Select) [48]
+pluginsPt(?, Trees$Select) [47]
pluginsPt(?, Trees$Super) [2]
pluginsPt(?, Trees$This) [20]
-pluginsPt(?, Trees$TypeApply) [4]
+pluginsPt(?, Trees$TypeApply) [3]
pluginsPt(?, Trees$TypeBoundsTree) [2]
pluginsPt(?, Trees$TypeDef) [1]
-pluginsPt(?, Trees$TypeTree) [39]
+pluginsPt(?, Trees$TypeTree) [38]
pluginsPt(?, Trees$Typed) [1]
pluginsPt(?, Trees$ValDef) [21]
pluginsPt(Any, Trees$Literal) [2]
@@ -98,7 +98,6 @@ pluginsTyped(()String, Trees$Ident) [1]
pluginsTyped(()String, Trees$TypeApply) [1]
pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
pluginsTyped(()testAnn, Trees$Select) [10]
-pluginsTyped(()type, Trees$TypeApply) [1]
pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
@@ -173,7 +172,7 @@ pluginsTyped(Unit, Trees$Literal) [5]
pluginsTyped(Unit, Trees$TypeTree) [1]
pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
-pluginsTyped([T0]()T0, Trees$Select) [2]
+pluginsTyped([T0]()T0, Trees$Select) [1]
pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1]
pluginsTyped(annotation.type, Trees$Select) [4]
pluginsTyped(math.type, Trees$Select) [9]
@@ -190,7 +189,5 @@ pluginsTyped(testAnn, Trees$New) [5]
pluginsTyped(testAnn, Trees$This) [1]
pluginsTyped(testAnn, Trees$TypeTree) [2]
pluginsTyped(testAnn.super.type, Trees$Super) [1]
-pluginsTyped(type, Trees$Apply) [1]
pluginsTyped(type, Trees$Select) [1]
-pluginsTyped(type, Trees$TypeTree) [1]
pluginsTypedReturn(return f, String) [1]
diff --git a/test/files/run/applydynamic_sip.flags b/test/files/run/applydynamic_sip.flags
index 1141f97507..ba6d37305e 100644
--- a/test/files/run/applydynamic_sip.flags
+++ b/test/files/run/applydynamic_sip.flags
@@ -1 +1,2 @@
+-Yrangepos:false
-language:dynamics
diff --git a/test/files/run/applydynamic_sip.scala b/test/files/run/applydynamic_sip.scala
index cf918a82ed..47d0c6a303 100644
--- a/test/files/run/applydynamic_sip.scala
+++ b/test/files/run/applydynamic_sip.scala
@@ -40,7 +40,7 @@ object Test extends App {
// qual.sel(arg = a, a2: _*)
// qual.sel(arg, arg2 = "a2", a2: _*)
- // If qual.sel appears immediately on the left-hand side of an assigment
+ // If qual.sel appears immediately on the left-hand side of an assignment
// qual.updateDynamic(“sel”)(expr)
qual.sel = expr
diff --git a/test/files/run/bcodeInlinerMixed.flags b/test/files/run/bcodeInlinerMixed.flags
new file mode 100644
index 0000000000..63b5558cfd
--- /dev/null
+++ b/test/files/run/bcodeInlinerMixed.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode -Yopt:l:classpath \ No newline at end of file
diff --git a/test/files/run/bcodeInlinerMixed/A_1.java b/test/files/run/bcodeInlinerMixed/A_1.java
new file mode 100644
index 0000000000..44d7d88eeb
--- /dev/null
+++ b/test/files/run/bcodeInlinerMixed/A_1.java
@@ -0,0 +1,3 @@
+public class A_1 {
+ public static final int bar() { return 100; }
+}
diff --git a/test/files/run/bcodeInlinerMixed/B_1.scala b/test/files/run/bcodeInlinerMixed/B_1.scala
new file mode 100644
index 0000000000..2aadeccb82
--- /dev/null
+++ b/test/files/run/bcodeInlinerMixed/B_1.scala
@@ -0,0 +1,20 @@
+// Partest does proper mixed compilation:
+// 1. scalac *.scala *.java
+// 2. javac *.java
+// 3. scalac *.scala
+//
+// In the second scalac round, the classfile for A_1 is on the classpath.
+// Therefore the inliner has access to the bytecode of `bar`, which means
+// it can verify that the invocation of `bar` can be safely inlined.
+//
+// So both callsites of `flop` are inlined.
+//
+// In a single mixed compilation, `flop` cannot be inlined, see JUnit InlinerTest.scala, def mixedCompilationNoInline.
+
+class B {
+ @inline final def flop = A_1.bar
+ def g = flop
+}
+class C {
+ def h(b: B) = b.flop
+}
diff --git a/test/files/run/bcodeInlinerMixed/Test.scala b/test/files/run/bcodeInlinerMixed/Test.scala
new file mode 100644
index 0000000000..c8c7a9fe2a
--- /dev/null
+++ b/test/files/run/bcodeInlinerMixed/Test.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.{BytecodeTest, ASMConverters}
+import ASMConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val gIns = instructionsFromMethod(getMethod(loadClassNode("B"), "g"))
+ val hIns = instructionsFromMethod(getMethod(loadClassNode("C"), "h"))
+ // val invocation = Invoke(INVOKESTATIC, A_1, bar, ()I, false)
+ for (i <- List(gIns, hIns)) {
+ assert(i exists {
+ case Invoke(_, _, "bar", "()I", _) => true
+ case _ => false
+ }, i mkString "\n")
+ }
+ }
+}
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check
index 41c2ccdcb8..c24fd6238f 100644
--- a/test/files/run/bitsets.check
+++ b/test/files/run/bitsets.check
@@ -1,3 +1,4 @@
+warning: there were three deprecation warnings; re-run with -deprecation for details
ms0 = BitSet(2)
ms1 = BitSet(2)
ms2 = BitSet(2)
diff --git a/test/files/run/class-symbol-contravariant.check b/test/files/run/class-symbol-contravariant.check
index 987f215bca..cbb90b52c2 100644
--- a/test/files/run/class-symbol-contravariant.check
+++ b/test/files/run/class-symbol-contravariant.check
@@ -33,4 +33,4 @@ res2: Boolean = true
scala> sym.isContravariant // was true
res3: Boolean = false
-scala>
+scala> :quit
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
index f92382d89b..7523130afa 100644
--- a/test/files/run/classfile-format-51.scala
+++ b/test/files/run/classfile-format-51.scala
@@ -12,7 +12,7 @@ import Opcodes._
// it runs a normal compile on the source in the 'code' field that refers to
// DynamicInvoker. Any failure will be dumped to std out.
//
-// By it's nature the test can only work on JDK 7+ because under JDK 6 some of the
+// By its nature the test can only work on JDK 7+ because under JDK 6 some of the
// classes referred to by DynamicInvoker won't be available and DynamicInvoker won't
// verify. So the test includes a version check that short-circuites the whole test
// on JDK 6
@@ -32,7 +32,7 @@ object Test extends DirectTest {
val constructor = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null)
constructor.visitCode()
constructor.visitVarInsn(ALOAD, 0)
- constructor.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V")
+ constructor.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false)
constructor.visitInsn(RETURN)
constructor.visitMaxs(1, 1)
constructor.visitEnd()
@@ -47,19 +47,19 @@ object Test extends DirectTest {
val bootstrap = cw.visitMethod(ACC_PUBLIC + ACC_STATIC, bootstrapMethodName, bootStrapMethodType, null, null)
bootstrap.visitCode()
// val lookup = MethodHandles.lookup();
- bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;")
+ bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;", false)
bootstrap.visitVarInsn(ASTORE, 3) // lookup
// val clazz = lookup.lookupClass();
bootstrap.visitVarInsn(ALOAD, 3) // lookup
- bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;")
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;", false)
bootstrap.visitVarInsn(ASTORE, 4) // clazz
// val methodType = MethodType.fromMethodDescriptorString("()Ljava/lang/String, clazz.getClassLoader()")
bootstrap.visitLdcInsn("()Ljava/lang/String;")
bootstrap.visitVarInsn(ALOAD, 4) // CLAZZ
- bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Class", "getClassLoader", "()Ljava/lang/ClassLoader;")
- bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodType", "fromMethodDescriptorString", "(Ljava/lang/String;Ljava/lang/ClassLoader;)Ljava/lang/invoke/MethodType;")
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Class", "getClassLoader", "()Ljava/lang/ClassLoader;", false)
+ bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodType", "fromMethodDescriptorString", "(Ljava/lang/String;Ljava/lang/ClassLoader;)Ljava/lang/invoke/MethodType;", false)
bootstrap.visitVarInsn(ASTORE, 5) // methodType
// val methodHandle = lookup.findStatic(thisClass, "target", methodType)
@@ -67,14 +67,14 @@ object Test extends DirectTest {
bootstrap.visitVarInsn(ALOAD, 4) // clazz
bootstrap.visitLdcInsn("target")
bootstrap.visitVarInsn(ALOAD, 5) // methodType
- bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;")
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;", false)
bootstrap.visitVarInsn(ASTORE, 6) // methodHandle
// new ConstantCallSite(methodHandle)
bootstrap.visitTypeInsn(NEW, "java/lang/invoke/ConstantCallSite")
bootstrap.visitInsn(DUP)
bootstrap.visitVarInsn(ALOAD, 6) // methodHandle
- bootstrap.visitMethodInsn(INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V")
+ bootstrap.visitMethodInsn(INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false)
bootstrap.visitInsn(ARETURN)
bootstrap.visitMaxs(4,7)
bootstrap.visitEnd()
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
index e12c84124c..453f61ac84 100644
--- a/test/files/run/classfile-format-52.scala
+++ b/test/files/run/classfile-format-52.scala
@@ -11,7 +11,7 @@ import Opcodes._
// HasDefaultMethod. Then it runs a normal compile on Scala source that extends that
// interface. Any failure will be dumped to std out.
//
-// By it's nature the test can only work on JDK 8+ because under JDK 7- the
+// By its nature the test can only work on JDK 8+ because under JDK 7- the
// interface won't verify.
object Test extends DirectTest {
override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check
index 895bde374d..3a366bfcdf 100644
--- a/test/files/run/collection-stacks.check
+++ b/test/files/run/collection-stacks.check
@@ -1,4 +1,4 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
3-2-1: true
3-2-1: true
apply
diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check
index 1e850bb582..9579d781aa 100644
--- a/test/files/run/colltest.check
+++ b/test/files/run/colltest.check
@@ -1,4 +1,4 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
true
false
true
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 8dce69afc9..de8780a050 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warnings; re-run with
*/
import scala.collection._
import scala.language.postfixOps
diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala
index ea96c6fba7..a60c2e8925 100644
--- a/test/files/run/compiler-asSeenFrom.scala
+++ b/test/files/run/compiler-asSeenFrom.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warning; re-run with
*/
import scala.tools.nsc._
import scala.tools.partest.DirectTest
diff --git a/test/files/run/constant-type.check b/test/files/run/constant-type.check
index 77bdf618e6..a7ba5a46c2 100644
--- a/test/files/run/constant-type.check
+++ b/test/files/run/constant-type.check
@@ -23,4 +23,4 @@ Class[String](classOf[java.lang.String])
scala> { ConstantType(Constant(s)); exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
Class(classOf[java.lang.String])
-scala>
+scala> :quit
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 9a106785a1..89a08d5ccb 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -71,11 +71,11 @@ scala> var four = "four"
four: String = four
scala> val four2 = m(four) // should have an existential bound
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
four2: String @Annot(x) forSome { val x: String } = four
scala> val four3 = four2 // should have the same type as four2
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
four3: String @Annot(x) forSome { val x: String } = four
scala> val stuff = m("stuff") // should not crash
@@ -98,7 +98,7 @@ scala> def m = {
val y : String @Annot(x) = x
y
} // x should not escape the local scope with a narrow type
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
m: String @Annot(x) forSome { val x: String }
scala>
@@ -112,7 +112,7 @@ scala> def n(y: String) = {
}
m("stuff".stripMargin)
} // x should be existentially bound
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
n: (y: String)String @Annot(x) forSome { val x: String }
scala>
@@ -148,4 +148,4 @@ scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message
^
-scala>
+scala> :quit
diff --git a/test/files/run/delambdafy-specialized.check b/test/files/run/delambdafy-specialized.check
new file mode 100644
index 0000000000..c6903b9e29
--- /dev/null
+++ b/test/files/run/delambdafy-specialized.check
@@ -0,0 +1 @@
+scala.runtime.AbstractFunction1$mcII$sp
diff --git a/test/files/run/delambdafy-specialized.flags b/test/files/run/delambdafy-specialized.flags
new file mode 100644
index 0000000000..48b438ddf8
--- /dev/null
+++ b/test/files/run/delambdafy-specialized.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/run/delambdafy-specialized.scala b/test/files/run/delambdafy-specialized.scala
new file mode 100644
index 0000000000..634d4e490b
--- /dev/null
+++ b/test/files/run/delambdafy-specialized.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val f = (x: Int) => -x
+ println(f.getClass.getSuperclass.getName)
+ }
+}
diff --git a/test/files/run/delambdafyLambdaClassNames.check b/test/files/run/delambdafyLambdaClassNames.check
new file mode 100644
index 0000000000..d425d15dd0
--- /dev/null
+++ b/test/files/run/delambdafyLambdaClassNames.check
@@ -0,0 +1 @@
+A$$nestedInAnon$1$lambda$$run$1
diff --git a/test/files/run/delambdafyLambdaClassNames.flags b/test/files/run/delambdafyLambdaClassNames.flags
new file mode 100644
index 0000000000..b10233d322
--- /dev/null
+++ b/test/files/run/delambdafyLambdaClassNames.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode -Ydelambdafy:method \ No newline at end of file
diff --git a/test/files/run/delambdafyLambdaClassNames/A_1.scala b/test/files/run/delambdafyLambdaClassNames/A_1.scala
new file mode 100644
index 0000000000..10489414b7
--- /dev/null
+++ b/test/files/run/delambdafyLambdaClassNames/A_1.scala
@@ -0,0 +1,5 @@
+class A {
+ def f = new Runnable {
+ def run(): Unit = List(1,2).foreach(println)
+ }
+}
diff --git a/test/files/run/delambdafyLambdaClassNames/Test.scala b/test/files/run/delambdafyLambdaClassNames/Test.scala
new file mode 100644
index 0000000000..49a397d1d2
--- /dev/null
+++ b/test/files/run/delambdafyLambdaClassNames/Test.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val c = Class.forName("A$$nestedInAnon$1$lambda$$run$1")
+ println(c.getName)
+}
diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check
index 92cfbaefb6..419e7043a3 100644
--- a/test/files/run/delambdafy_t6028.check
+++ b/test/files/run/delambdafy_t6028.check
@@ -1,35 +1,35 @@
[[syntax trees at end of lambdalift]] // newSource1.scala
package <empty> {
class T extends Object {
- <paramaccessor> private[this] val classParam: Int = _;
- def <init>(classParam: Int): T = {
+ <paramaccessor> private[this] val classParam: String = _;
+ def <init>(classParam: String): T = {
T.super.<init>();
()
};
- private[this] val field: Int = 0;
- <stable> <accessor> def field(): Int = T.this.field;
- def foo(methodParam: Int): Function0 = {
- val methodLocal: Int = 0;
+ private[this] val field: String = "";
+ <stable> <accessor> def field(): String = T.this.field;
+ def foo(methodParam: String): Function0 = {
+ val methodLocal: String = "";
{
(() => T.this.$anonfun$1(methodParam, methodLocal)).$asInstanceOf[Function0]()
}
};
- def bar(barParam: Int): Object = {
+ def bar(barParam: String): Object = {
@volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
};
- def tryy(tryyParam: Int): Function0 = {
- var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0);
+ def tryy(tryyParam: String): Function0 = {
+ var tryyLocal: runtime.ObjectRef = scala.runtime.ObjectRef.create("");
{
- (() => T.this.$anonfun$2(tryyParam, tryyLocal)).$asInstanceOf[Function0]()
+ (new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0)
}
};
- final <artifact> private[this] def $anonfun$1(methodParam$1: Int, methodLocal$1: Int): Int = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1);
+ final <artifact> private[this] def $anonfun$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1);
abstract trait MethodLocalTrait$1 extends Object {
<synthetic> <stable> <artifact> def $outer(): T
};
object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
- def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
+ def <init>($outer: T, barParam$1: String): T#MethodLocalObject$2.type = {
MethodLocalObject$2.super.<init>();
MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
()
@@ -38,20 +38,35 @@ package <empty> {
<synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer;
<synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer
};
- final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
+ final <stable> private[this] def MethodLocalObject$1(barParam$1: String, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]()
};
abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 {
- def /*MethodLocalTrait$1$class*/$init$(barParam$1: Int): Unit = {
+ def /*MethodLocalTrait$1$class*/$init$(barParam$1: String): Unit = {
()
};
- scala.this.Predef.print(scala.Int.box(barParam$1))
+ scala.this.Predef.print(barParam$1)
};
- final <artifact> private[this] def $anonfun$2(tryyParam$1: Int, tryyLocal$1: runtime.IntRef): Unit = try {
- tryyLocal$1.elem = tryyParam$1
- } finally ()
+ @SerialVersionUID(value = 0) final <synthetic> class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
+ def <init>($outer: T, tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): <$anon: Function0> = {
+ $anonfun$tryy$1.super.<init>();
+ ()
+ };
+ final def apply(): Unit = $anonfun$tryy$1.this.apply$mcV$sp();
+ <specialized> def apply$mcV$sp(): Unit = try {
+ $anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1
+ } finally ();
+ <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+ <synthetic> <stable> <artifact> def $outer(): T = $anonfun$tryy$1.this.$outer;
+ final <bridge> <artifact> def apply(): Object = {
+ $anonfun$tryy$1.this.apply();
+ scala.runtime.BoxedUnit.UNIT
+ };
+ <synthetic> <paramaccessor> private[this] val tryyParam$1: String = _;
+ <synthetic> <paramaccessor> private[this] val tryyLocal$1: runtime.ObjectRef = _
+ }
}
}
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala
index 0b7ef48c3d..ca39195310 100644
--- a/test/files/run/delambdafy_t6028.scala
+++ b/test/files/run/delambdafy_t6028.scala
@@ -5,11 +5,11 @@ object Test extends DirectTest {
override def extraSettings: String = "-usejavacp -Ydelambdafy:method -Xprint:lambdalift -d " + testOutput.path
- override def code = """class T(classParam: Int) {
- | val field: Int = 0
- | def foo(methodParam: Int) = {val methodLocal = 0 ; () => classParam + field + methodParam + methodLocal }
- | def bar(barParam: Int) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject }
- | def tryy(tryyParam: Int) = { var tryyLocal = 0; () => try { tryyLocal = tryyParam } finally () }
+ override def code = """class T(classParam: String) {
+ | val field: String = ""
+ | def foo(methodParam: String) = {val methodLocal = "" ; () => classParam + field + methodParam + methodLocal }
+ | def bar(barParam: String) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject }
+ | def tryy(tryyParam: String) = { var tryyLocal = ""; () => try { tryyLocal = tryyParam } finally () }
|}
|""".stripMargin.trim
diff --git a/test/files/run/delambdafy_t6555.check b/test/files/run/delambdafy_t6555.check
index 6b174c0d2a..b6ccebde78 100644
--- a/test/files/run/delambdafy_t6555.check
+++ b/test/files/run/delambdafy_t6555.check
@@ -5,11 +5,11 @@ package <empty> {
Foo.super.<init>();
()
};
- private[this] val f: Int => Int = {
- final <artifact> def $anonfun(param: Int): Int = param;
- ((param: Int) => $anonfun(param))
+ private[this] val f: String => String = {
+ final <artifact> def $anonfun(param: String): String = param;
+ ((param: String) => $anonfun(param))
};
- <stable> <accessor> def f(): Int => Int = Foo.this.f
+ <stable> <accessor> def f(): String => String = Foo.this.f
}
}
diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala
index a1dcfe790c..8d4976e989 100644
--- a/test/files/run/delambdafy_t6555.scala
+++ b/test/files/run/delambdafy_t6555.scala
@@ -5,7 +5,7 @@ object Test extends DirectTest {
override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:method -d " + testOutput.path
- override def code = "class Foo { val f = (param: Int) => param } "
+ override def code = "class Foo { val f = (param: String) => param } "
override def show(): Unit = {
Console.withErr(System.out) {
diff --git a/test/files/run/delambdafy_uncurry_byname_inline.check b/test/files/run/delambdafy_uncurry_byname_inline.check
index 0dc69b379a..d96a995f44 100644
--- a/test/files/run/delambdafy_uncurry_byname_inline.check
+++ b/test/files/run/delambdafy_uncurry_byname_inline.check
@@ -7,7 +7,7 @@ package <empty> {
};
def bar(x: () => Int): Int = x.apply();
def foo(): Int = Foo.this.bar({
- @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction0[Int] with Serializable {
+ @SerialVersionUID(value = 0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction0[Int] with Serializable {
def <init>(): <$anon: () => Int> = {
$anonfun.super.<init>();
()
diff --git a/test/files/run/delambdafy_uncurry_byname_method.check b/test/files/run/delambdafy_uncurry_byname_method.check
index cd3edc7d6f..e0f281b1cd 100644
--- a/test/files/run/delambdafy_uncurry_byname_method.check
+++ b/test/files/run/delambdafy_uncurry_byname_method.check
@@ -5,9 +5,9 @@ package <empty> {
Foo.super.<init>();
()
};
- def bar(x: () => Int): Int = x.apply();
- def foo(): Int = Foo.this.bar({
- final <artifact> def $anonfun(): Int = 1;
+ def bar(x: () => String): String = x.apply();
+ def foo(): String = Foo.this.bar({
+ final <artifact> def $anonfun(): String = "";
(() => $anonfun())
})
}
diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala
index 1adeec8433..0ccc1f2e92 100644
--- a/test/files/run/delambdafy_uncurry_byname_method.scala
+++ b/test/files/run/delambdafy_uncurry_byname_method.scala
@@ -6,9 +6,9 @@ object Test extends DirectTest {
override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path
override def code = """class Foo {
- | def bar(x: => Int) = x
+ | def bar(x: => String) = x
|
- | def foo = bar(1)
+ | def foo = bar("")
|}
|""".stripMargin.trim
diff --git a/test/files/run/delambdafy_uncurry_inline.check b/test/files/run/delambdafy_uncurry_inline.check
index e2b024b462..5521cc4a2c 100644
--- a/test/files/run/delambdafy_uncurry_inline.check
+++ b/test/files/run/delambdafy_uncurry_inline.check
@@ -7,7 +7,7 @@ package <empty> {
};
def bar(): Unit = {
val f: Int => Int = {
- @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1[Int,Int] with Serializable {
+ @SerialVersionUID(value = 0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1[Int,Int] with Serializable {
def <init>(): <$anon: Int => Int> = {
$anonfun.super.<init>();
()
diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check
index 5d8c5fa1d4..cb6e329f7a 100644
--- a/test/files/run/delay-bad.check
+++ b/test/files/run/delay-bad.check
@@ -4,7 +4,7 @@ delay-bad.scala:53: warning: a pure expression does nothing in statement positio
delay-bad.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
f(new { val x = 5 } with E() { 5 })
^
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
// new C { }
diff --git a/test/files/run/eta-expand-star2.check b/test/files/run/eta-expand-star2.check
index cbf4781255..d6929e4969 100644
--- a/test/files/run/eta-expand-star2.check
+++ b/test/files/run/eta-expand-star2.check
@@ -1,2 +1,2 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
hello
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
index d019d56b42..e516eddf95 100644
--- a/test/files/run/existentials-in-compiler.scala
+++ b/test/files/run/existentials-in-compiler.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warnings; re-run with
*/
import scala.tools.nsc._
import scala.tools.partest.CompilerTest
diff --git a/test/files/run/exprs_serialize.check b/test/files/run/exprs_serialize.check
index 20ad6c110c..551823ccdc 100644
--- a/test/files/run/exprs_serialize.check
+++ b/test/files/run/exprs_serialize.check
@@ -1,2 +1,19 @@
-java.io.NotSerializableException: Test$$treecreator1$1
-java.io.NotSerializableException: Test$$treecreator2$1
+Expr[Int(2)](2)
+Expr[java.lang.String]({
+ def foo = "hello";
+ foo.$plus("world!")
+})
+Expr[Boolean]({
+ def foo(x: Int) = {
+ class Local extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ val f = 2
+ };
+ val obj = new Local();
+ x.$percent(obj.f).$eq$eq(0)
+ };
+ foo(5)
+})
diff --git a/test/files/run/exprs_serialize.scala b/test/files/run/exprs_serialize.scala
index c4310b0fe1..91027803b4 100644
--- a/test/files/run/exprs_serialize.scala
+++ b/test/files/run/exprs_serialize.scala
@@ -26,4 +26,14 @@ object Test extends App {
test(reify(2))
test(reify{def foo = "hello"; foo + "world!"})
-} \ No newline at end of file
+ test(reify {
+ def foo(x: Int) = {
+ class Local {
+ val f = 2
+ }
+ val obj = new Local
+ x % obj.f == 0
+ }
+ foo(5)
+ })
+}
diff --git a/test/files/run/global-showdef.check b/test/files/run/global-showdef.check
index 4c2fd41a1a..4ac96b4315 100644
--- a/test/files/run/global-showdef.check
+++ b/test/files/run/global-showdef.check
@@ -1,14 +1,14 @@
<<-- class foo.bar.Bippy after phase 'typer' -->>
def showdefTestMemberClass1: Int
+<<-- object foo.bar.Bippy after phase 'typer' -->>
+ def showdefTestMemberObject2: String
<<-- type foo.bar.Bippy.BippyType after phase 'typer' -->>
def showdefTestMemberType1: Unit
+<<-- object foo.bar.Bippy.Boppity.Boo after phase 'typer' -->>
+ def showdefTestMemberObject1: String
<<-- type foo.bar.Bippy.BippyType after phase 'typer' -->>
def showdefTestMemberType2: Unit
<<-- class foo.bar.Bippy.Boppity after phase 'typer' -->>
def showdefTestMemberClass2: Int
<<-- class foo.bar.Bippy.Boppity.Boo after phase 'typer' -->>
def showdefTestMemberClass3: Int
-<<-- object foo.bar.Bippy after phase 'typer' -->>
- def showdefTestMemberObject2: String
-<<-- object foo.bar.Bippy.Boppity.Boo after phase 'typer' -->>
- def showdefTestMemberObject1: String
diff --git a/test/files/run/global-showdef.scala b/test/files/run/global-showdef.scala
index 1d4891fd1f..276fcc1e7c 100644
--- a/test/files/run/global-showdef.scala
+++ b/test/files/run/global-showdef.scala
@@ -1,11 +1,10 @@
-import scala.tools.nsc._
-import scala.reflect.io.AbstractFile
+import scala.tools.partest.DirectTest
import scala.tools.nsc.util.stringFromStream
-import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-object Test {
- val src: SourceFile = new BatchSourceFile("src", """
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -Yshow:typer -Ystop-after:typer"
+
+ override def code = """
package foo.bar
class Bippy {
@@ -32,39 +31,28 @@ object Bippy {
def showdefTestMemberObject2 = "abc"
}
- """)
+ """
+
+ override def show(): Unit = {
+ val classes = List("Bippy", "Bippy#BippyType", "Bippy.BippyType", "Bippy#Boppity", "Bippy#Boppity#Boo")
+ val objects = List("Bippy", "Bippy#Boppity#Boo")
+
+ def interesting(line: String) = (line contains "def showdefTestMember") || (line startsWith "<<-- ")
- def mkCompiler(args: String*) = {
- val settings = new Settings()
- val command = new CompilerCommand("-usejavacp" :: args.toList, settings)
+ def run(args: String*) = slurp(args: _*).lines filter interesting foreach println
- new Global(settings)
+ classes.zipAll(objects, "", "") foreach {
+ case (c, "") => run("-Xshow-class", c)
+ case (c, o) => run("-Xshow-class", c, "-Xshow-object", o)
+ }
}
- def slurp(body: => Unit): String = stringFromStream { stream =>
+ // slurp the compilation result
+ def slurp(args: String*): String = stringFromStream { stream =>
Console.withOut(stream) {
Console.withErr(stream) {
- body
+ compile(args: _*)
}
}
}
- def lines(args: String*): List[String] = {
- val output = slurp {
- val compiler = mkCompiler(args: _*)
- val run = new compiler.Run()
- run.compileSources(List(src))
- }
- output.lines.toList
- }
- def showClass(name: String) = lines("-Yshow:typer", "-Xshow-class", name)
- def showObject(name: String) = lines("-Yshow:typer", "-Xshow-object", name)
-
- def show(xs: List[String]) = {
- xs filter (x => (x contains "def showdefTestMember") || (x startsWith "<<-- ")) foreach println
- }
-
- def main(args: Array[String]) {
- show(List("Bippy", "Bippy#BippyType", "Bippy.BippyType", "Bippy#Boppity", "Bippy#Boppity#Boo") flatMap showClass)
- show(List("Bippy", "Bippy#Boppity#Boo") flatMap showObject)
- }
}
diff --git a/test/files/run/icode-reader-dead-code.check b/test/files/run/icode-reader-dead-code.check
new file mode 100644
index 0000000000..c9de93283e
--- /dev/null
+++ b/test/files/run/icode-reader-dead-code.check
@@ -0,0 +1,27 @@
+Bytecode for method f
+
+ // access flags 0x11
+ public final f()I
+ L0
+ LINENUMBER 4 L0
+ ICONST_1
+ IRETURN
+ L1
+ LOCALVARIABLE this Lp/A; L0 L1 0
+ MAXSTACK = 1
+ MAXLOCALS = 1
+
+Bytecode for method f
+
+ // access flags 0x11
+ public final f()I
+ L0
+ LINENUMBER 4 L0
+ ICONST_1
+ ATHROW
+ IRETURN
+ L1
+ LOCALVARIABLE this Lp/A; L0 L1 0
+ MAXSTACK = 1
+ MAXLOCALS = 1
+
diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala
new file mode 100644
index 0000000000..00ba58829f
--- /dev/null
+++ b/test/files/run/icode-reader-dead-code.scala
@@ -0,0 +1,82 @@
+import java.io.{FileOutputStream, FileInputStream}
+
+import scala.tools.asm.{ClassWriter, Opcodes, ClassReader}
+import scala.tools.asm.tree.{InsnNode, ClassNode}
+import scala.tools.nsc.backend.jvm.AsmUtils
+import scala.tools.partest.DirectTest
+import scala.collection.JavaConverters._
+
+/**
+ * Test that the ICodeReader does not crash if the bytecode of a method has unreachable code.
+ */
+object Test extends DirectTest {
+ def code: String = ???
+
+ def show(): Unit = {
+ // The bytecode of f will be modified using ASM by `addDeadCode`
+ val aCode =
+ """
+ |package p
+ |class A {
+ | @inline final def f = 1
+ |}
+ """.stripMargin
+
+ val bCode =
+ """
+ |package p
+ |class B {
+ | def g = (new A()).f
+ |}
+ """.stripMargin
+
+ compileString(newCompiler("-usejavacp"))(aCode)
+
+ addDeadCode()
+
+ // If inlining fails, the compiler will issue an inliner warning that is not present in the
+ // check file
+ compileString(newCompiler("-usejavacp", "-optimise"))(bCode)
+ }
+
+ def readClass(file: String) = {
+ val cnode = new ClassNode()
+ val is = new FileInputStream(file)
+ val reader = new ClassReader(is)
+ reader.accept(cnode, 0)
+ is.close()
+ cnode
+ }
+
+ def writeClass(file: String, cnode: ClassNode): Unit = {
+ val writer = new ClassWriter(0)
+ cnode.accept(writer)
+
+ val os = new FileOutputStream(file)
+ os.write(writer.toByteArray)
+ os.close()
+ }
+
+ def addDeadCode() {
+ val file = (testOutput / "p" / "A.class").path
+ val cnode = readClass(file)
+ val method = cnode.methods.asScala.find(_.name == "f").head
+
+ AsmUtils.traceMethod(method)
+
+ val insns = method.instructions
+ val it = insns.iterator()
+ while (it.hasNext) {
+ val in = it.next()
+ if (in.getOpcode == Opcodes.IRETURN) {
+ // Insert an ATHROW before the IRETURN. The IRETURN will then be dead code.
+ // The ICodeReader should not crash if there's dead code.
+ insns.insert(in.getPrevious, new InsnNode(Opcodes.ATHROW))
+ }
+ }
+
+ AsmUtils.traceMethod(method)
+
+ writeClass(file, cnode)
+ }
+}
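
For reference, a sketch of the same bytecode surgery in isolation. It assumes the upstream org.objectweb.asm artifact (scala.tools.asm used above is the compiler's repackaged copy of the same API), and injectThrowBeforeReturns is an illustrative helper, not part of the test:

import java.io.FileInputStream
import scala.collection.JavaConverters._
import org.objectweb.asm.{ClassReader, Opcodes}
import org.objectweb.asm.tree.{ClassNode, InsnNode}

object DeadCodeSketch {
  // Parse a class file, then put an ATHROW in front of every IRETURN,
  // which leaves the IRETURN itself unreachable (dead code).
  def injectThrowBeforeReturns(classFile: String): ClassNode = {
    val node = new ClassNode()
    val in = new FileInputStream(classFile)
    try new ClassReader(in).accept(node, 0) finally in.close()
    for (method <- node.methods.asScala; insn <- method.instructions.toArray)
      if (insn.getOpcode == Opcodes.IRETURN)
        method.instructions.insertBefore(insn, new InsnNode(Opcodes.ATHROW))
    node
  }
}
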
diff --git a/test/files/run/inferred-type-constructors.check b/test/files/run/inferred-type-constructors.check
index 5992ef02ad..4a63853bd9 100644
--- a/test/files/run/inferred-type-constructors.check
+++ b/test/files/run/inferred-type-constructors.check
@@ -1,4 +1,4 @@
-warning: there were 2 feature warning(s); re-run with -feature for details
+warning: there were two feature warnings; re-run with -feature for details
p.Iterable[Int]
p.Set[Int]
p.Seq[Int]
diff --git a/test/files/run/interop_typetags_are_manifests.flags b/test/files/run/interop_typetags_are_manifests.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/interop_typetags_are_manifests.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/is-valid-num.scala b/test/files/run/is-valid-num.scala
index 65e8ceeca6..156121cab5 100644
--- a/test/files/run/is-valid-num.scala
+++ b/test/files/run/is-valid-num.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warnings; re-run with
*/
object Test {
def x = BigInt("10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
diff --git a/test/files/run/iterator-concat.check b/test/files/run/iterator-concat.check
deleted file mode 100644
index 23835b07ae..0000000000
--- a/test/files/run/iterator-concat.check
+++ /dev/null
@@ -1,4 +0,0 @@
-100
-1000
-10000
-100000
diff --git a/test/files/run/iterator-concat.scala b/test/files/run/iterator-concat.scala
deleted file mode 100644
index f11363410f..0000000000
--- a/test/files/run/iterator-concat.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test {
- // Create `size` Function0s, each of which evaluates to an Iterator
- // which produces 1. Then fold them over ++ to get a single iterator,
- // which should sum to "size".
- def mk(size: Int): Iterator[Int] = {
- val closures = (1 to size).toList.map(x => (() => Iterator(1)))
- closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
- }
- def main(args: Array[String]): Unit = {
- println(mk(100).sum)
- println(mk(1000).sum)
- println(mk(10000).sum)
- println(mk(100000).sum)
- }
-}
diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala
index 269e859657..e7ba1aeb28 100644
--- a/test/files/run/iterator-from.scala
+++ b/test/files/run/iterator-from.scala
@@ -1,5 +1,5 @@
/* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warnings; re-run with
*/
import scala.util.{Random => R}
diff --git a/test/files/run/iterator-iterate-lazy.scala b/test/files/run/iterator-iterate-lazy.scala
deleted file mode 100644
index 92b170062e..0000000000
--- a/test/files/run/iterator-iterate-lazy.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
- }
-}
diff --git a/test/files/run/iterators.check b/test/files/run/iterators.check
deleted file mode 100644
index bb139c1610..0000000000
--- a/test/files/run/iterators.check
+++ /dev/null
@@ -1,13 +0,0 @@
-test check_from was successful
-test check_range was successful
-test check_range2 was successful
-test check_range3 was successful
-test check_take was successful
-test check_drop was successful
-test check_foreach was successful
-test check_forall was successful
-test check_fromArray was successful
-test check_toSeq was successful
-test check_indexOf was successful
-test check_findIndexOf was successful
-
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
deleted file mode 100644
index 57e05d3472..0000000000
--- a/test/files/run/iterators.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-//############################################################################
-// Iterators
-//############################################################################
-
-//############################################################################
-
-import scala.language.postfixOps
-
-object Test {
-
- def check_from: Int = {
- val it1 = Iterator.from(-1)
- val it2 = Iterator.from(0, -1)
- it1.next + it2.next
- }
-
- def check_range: Int = {
- val xs1 = Iterator.range(0, 10, 2) toList;
- val xs2 = Iterator.range(0, 10, -2) toList;
- val xs3 = Iterator.range(10, 0, -2) toList;
- val xs4 = Iterator.range(10, 0, 2) toList;
- val xs5 = Iterator.range(0, 10, 11) toList;
- xs1.length + xs2.length + xs3.length + xs4.length + xs5.length
- }
-
- def check_range2: Int = {
- val r1start = 0
- val r1end = 10
- val r1step = 1
- val r1 = Iterator.range(r1start, r1end, r1step) toList;
- val r2 = Iterator.range(r1start, r1end, r1step + 1) toList;
- val r3 = Iterator.range(r1end, r1start, -r1step) toList;
- val r4 = Iterator.range(0, 10, 11) toList;
- // 10 + 5 + 10 + 1
- r1.length + r2.length + r3.length + r4.length
- }
-
- def check_range3: Int = {
- def trues(xs: List[Boolean]) = xs.foldLeft(0)((a, b) => if (b) a+1 else a)
- val r1 = Iterator.range(0, 10)
- val xs1 = List(r1 contains 5, r1 contains 6)
- val r2a = Iterator.range(0, 10, 2)
- val r2b = Iterator.range(0, 10, 2)
- val xs2 = List(r2a contains 5, r2b contains 6)
- val r3 = Iterator.range(0, 10, 11)
- val xs3 = List(r3 contains 5, r3 contains 6)
- // 2 + 1 + 0
- trues(xs1) + trues(xs2) + trues(xs3)
- }
-
- def check_take: Int = {
- val it1 = Iterator.from(0)
- val xs1 = it1 take 10 toList;
- xs1.length
- }
-
- def check_drop: Int = {
- val it1 = Iterator.from(0)
- val it2 = it1 map { 2 * _ }
- val n1 = it1 drop 2 next
- val n2 = it2 drop 2 next;
- n1 + n2
- }
-
- def check_foreach: Int = {
- val it1 = Iterator.from(0) take 20
- var n = 0
- it1 foreach { n += _ }
- n
- }
-
- def check_forall: Int = {
- val it1 = Iterator.from(0)
- val it2 = Iterator.from(1)
- 0
- }
-
- def check_fromArray: Int = { // ticket #429
- val a = List(1, 2, 3, 4).toArray
- var xs0 = a.iterator.toList;
- var xs1 = a.slice(0, 1).iterator.toList;
- var xs2 = a.slice(0, 2).iterator.toList;
- var xs3 = a.slice(0, 3).iterator.toList;
- var xs4 = a.slice(0, 4).iterator.toList;
- xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
- }
-
- def check_toSeq: String =
- List(1, 2, 3, 4, 5).iterator.toSeq.mkString("x")
-
- def check_indexOf: String = {
- val i = List(1, 2, 3, 4, 5).indexOf(4)
- val j = List(1, 2, 3, 4, 5).indexOf(16)
- "" + i + "x" + j
- }
-
- def check_findIndexOf: String = {
- val i = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 4 }
- val j = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 16 }
- "" + i + "x" + j
- }
-
- def check_success[A](name: String, closure: => A, expected: A) {
- print("test " + name)
- try {
- val actual: A = closure
- if (actual == expected)
- print(" was successful")
- else
- print(" failed: expected "+ expected +", found "+ actual)
- }
- catch {
- case exception: Throwable =>
- print(" raised exception " + exception)
- }
- println()
- }
-
- def main(args: Array[String]) {
- check_success("check_from", check_from, -1)
- check_success("check_range", check_range, 11)
- check_success("check_range2", check_range2, 26)
- check_success("check_range3", check_range3, 3)
- check_success("check_take", check_take, 10)
- check_success("check_drop", check_drop, 12)
- check_success("check_foreach", check_foreach, 190)
- check_success("check_forall", check_forall, 0)
- check_success("check_fromArray",check_fromArray, 14)
- check_success("check_toSeq", check_toSeq, "1x2x3x4x5")
- check_success("check_indexOf", check_indexOf, "3x-1")
- check_success("check_findIndexOf", check_findIndexOf, "3x-1")
- println()
- }
-}
-
-//############################################################################
diff --git a/test/files/run/kind-repl-command.check b/test/files/run/kind-repl-command.check
index 1c292572e6..586b2710e1 100644
--- a/test/files/run/kind-repl-command.check
+++ b/test/files/run/kind-repl-command.check
@@ -25,4 +25,4 @@ scala> :k Nonexisting
Nonexisting
^
-scala>
+scala> :quit
diff --git a/test/files/run/large_class.check b/test/files/run/large_class.check
new file mode 100644
index 0000000000..0585c267ac
--- /dev/null
+++ b/test/files/run/large_class.check
@@ -0,0 +1,3 @@
+newSource1.scala:1: error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Class file too large!
+class BigEnoughToFail {
+ ^
diff --git a/test/files/run/large_class.scala b/test/files/run/large_class.scala
new file mode 100644
index 0000000000..aa486ef8f7
--- /dev/null
+++ b/test/files/run/large_class.scala
@@ -0,0 +1,27 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+// a cold run of partest takes about 15s for this test on my laptop
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -d " + testOutput.path
+
+ def s(n: Int) = "\""+n+"\""
+
+ override def code
+ = s"""
+ |class BigEnoughToFail {
+ | def m(a: String, b: String, c: String, d: String, e: String, f: String) = null
+ | ${(1 to 5500) map (n => "def f"+n+" = m("+ s(n+10000)+","+
+ s(n+20000)+","+
+ s(n+30000)+","+
+ s(n+40000)+","+
+ s(n+50000)+","+
+ s(n+60000)+")") mkString ";"}
+ |}""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
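
A rough back-of-the-envelope for why this generated source trips the class file format's 65,535-entry constant pool limit (an estimate only; the exact pool contents depend on the backend):

object PoolEstimate extends App {
  val methods = 5500
  val stringsPerMethod = 6
  // every distinct literal needs a CONSTANT_Utf8 plus a CONSTANT_String entry
  val literalEntries = methods * stringsPerMethod * 2
  println(literalEntries) // 66000, already past 65535 before method names and descriptors
}
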
diff --git a/test/files/run/literals.check b/test/files/run/literals.check
index ed7c6ca5b3..092340eead 100644
--- a/test/files/run/literals.check
+++ b/test/files/run/literals.check
@@ -1,57 +1,12 @@
-warning: there were 5 deprecation warning(s); re-run with -deprecation for details
-test '\u0024' == '$' was successful
-test '\u005f' == '_' was successful
-test 65.asInstanceOf[Char] == 'A' was successful
-test "\141\142" == "ab" was successful
-test "\0x61\0x62".trim() == "x61\0x62" was successful
-
-test (65 : Byte) == 'A' was successful
-
-test 0X01 == 1 was successful
-test 0x01 == 1 was successful
-test 0x10 == 16 was successful
-test 0xa == 10 was successful
-test 0x0a == 10 was successful
-test +0x01 == 1 was successful
-test +0x10 == 16 was successful
-test +0xa == 10 was successful
-test +0x0a == 10 was successful
-test -0x01 == -1 was successful
-test -0x10 == -16 was successful
-test -0xa == -10 was successful
-test -0x0a == -10 was successful
-test 0x7fffffff == 2147483647 was successful
-test 0x80000000 == -2147483648 was successful
-test 0xffffffff == -1 was successful
-
-test 1l == 1L was successful
-test 1L == 1l was successful
-test 1.asInstanceOf[Long] == 1l was successful
-test 0x7fffffffffffffffL == 9223372036854775807L was successful
-test 0x8000000000000000L == -9223372036854775808L was successful
-test 0xffffffffffffffffL == -1L was successful
-
-test 1e1f == 10.0f was successful
-test .3f == 0.3f was successful
-test 0f == 0.0f was successful
-test 01.23f == 1.23f was successful
-test 3.14f == 3.14f was successful
-test 6.022e23f == 6.022e23f was successful
-test 09f == 9.0f was successful
-test 1.asInstanceOf[Float] == 1.0 was successful
-test 1l.asInstanceOf[Float] == 1.0 was successful
-
-test 1e1 == 10.0 was successful
-test .3 == 0.3 was successful
-test 0.0 == 0.0 was successful
-test 0d == 0.0 was successful
-test 01.23 == 1.23 was successful
-test 01.23d == 1.23d was successful
-test 3.14 == 3.14 was successful
-test 1e-9d == 1.0e-9 was successful
-test 1e137 == 1.0e137 was successful
-test 1.asInstanceOf[Double] == 1.0 was successful
-test 1l.asInstanceOf[Double] == 1.0 was successful
-
-test "".length() was successful
-test ggg == 3 was successful
+literals.scala:34: warning: Octal escape literals are deprecated, use \u0061 instead.
+ check_success("\"\\141\\142\" == \"ab\"", "\141\142", "ab")
+ ^
+literals.scala:34: warning: Octal escape literals are deprecated, use \u0062 instead.
+ check_success("\"\\141\\142\" == \"ab\"", "\141\142", "ab")
+ ^
+literals.scala:37: warning: Octal escape literals are deprecated, use \u0000 instead.
+ "\0x61\0x62".getBytes(io.Codec.UTF8.charSet) sameElements Array[Byte](0, 120, 54, 49, 0, 120, 54, 50),
+ ^
+literals.scala:37: warning: Octal escape literals are deprecated, use \u0000 instead.
+ "\0x61\0x62".getBytes(io.Codec.UTF8.charSet) sameElements Array[Byte](0, 120, 54, 49, 0, 120, 54, 50),
+ ^
diff --git a/test/files/run/literals.flags b/test/files/run/literals.flags
new file mode 100644
index 0000000000..dcc59ebe32
--- /dev/null
+++ b/test/files/run/literals.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/run/literals.scala b/test/files/run/literals.scala
index 5f23e6b492..13fda05876 100644
--- a/test/files/run/literals.scala
+++ b/test/files/run/literals.scala
@@ -14,21 +14,16 @@ object Test {
def \u03b1\u03b1(that: GGG) = i + that.i
}
- def check_success[a](name: String, closure: => a, expected: a) {
- print("test " + name)
- try {
- val actual: a = closure
- if (actual == expected) {
- print(" was successful");
- } else {
- print(" failed: expected "+ expected +", found "+ actual);
+ def check_success[A](name: String, closure: => A, expected: A) {
+ val res: Option[String] =
+ try {
+ val actual: A = closure
+ if (actual == expected) None //print(" was successful")
+ else Some(s" failed: expected $expected, found $actual")
+ } catch {
+ case exception: Throwable => Some(s" raised exception $exception")
}
- } catch {
- case exception: Throwable => {
- print(" raised exception " + exception);
- }
- }
- println
+ for (e <- res) println(s"test $name $e")
}
def main(args: Array[String]) {
@@ -37,15 +32,14 @@ object Test {
check_success("'\\u005f' == '_'", '\u005f', '_')
check_success("65.asInstanceOf[Char] == 'A'", 65.asInstanceOf[Char], 'A')
check_success("\"\\141\\142\" == \"ab\"", "\141\142", "ab")
- check_success("\"\\0x61\\0x62\".trim() == \"x61\\0x62\"", "\0x61\0x62".substring(1), "x61\0x62")
-
- println
+ //check_success("\"\\0x61\\0x62\".trim() == \"x61\\0x62\"", "\0x61\0x62".substring(1), "x61\0x62")
+ check_success(""""\0x61\0x62".getBytes == Array(0, 120, ...)""",
+ "\0x61\0x62".getBytes(io.Codec.UTF8.charSet) sameElements Array[Byte](0, 120, 54, 49, 0, 120, 54, 50),
+ true)
// boolean
check_success("(65 : Byte) == 'A'", (65: Byte) == 'A', true) // contrib #176
- println
-
// int
check_success("0X01 == 1", 0X01, 1)
check_success("0x01 == 1", 0x01, 1)
@@ -67,8 +61,6 @@ object Test {
check_success("0x80000000 == -2147483648", 0x80000000, -2147483648)
check_success("0xffffffff == -1", 0xffffffff, -1)
- println
-
// long
check_success("1l == 1L", 1l, 1L)
check_success("1L == 1l", 1L, 1l)
@@ -81,8 +73,6 @@ object Test {
check_success("0xffffffffffffffffL == -1L",
0xffffffffffffffffL, -1L)
- println
-
// see JLS at address:
// http://java.sun.com/docs/books/jls/second_edition/html/lexical.doc.html#230798
@@ -97,8 +87,6 @@ object Test {
check_success("1.asInstanceOf[Float] == 1.0", 1.asInstanceOf[Float], 1.0f)
check_success("1l.asInstanceOf[Float] == 1.0", 1l.asInstanceOf[Float], 1.0f)
- println
-
// double
check_success("1e1 == 10.0", 1e1, 10.0)
check_success(".3 == 0.3", .3, 0.3)
@@ -112,7 +100,6 @@ object Test {
check_success("1.asInstanceOf[Double] == 1.0", 1.asInstanceOf[Double], 1.0)
check_success("1l.asInstanceOf[Double] == 1.0", 1l.asInstanceOf[Double], 1.0)
- println
check_success("\"\".length()", "\u001a".length(), 1)
val ggg = GGG(1) \u03b1\u03b1 GGG(2)
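
The deprecation warnings in the new check file come from the octal escapes kept in this test. A small stand-alone illustration of the two literal forms involved (plain Scala, the object name is illustrative only):

object OctalEscapeSketch extends App {
  // Octal escape literals still compile but are deprecated under -deprecation.
  assert("\141\142" == "ab")
  // Unicode escapes are the replacement suggested by the warning text.
  assert("\u0061\u0062" == "ab")
}
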
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index f76579412e..70734966f0 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -8,4 +8,4 @@ scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing] with java.io.Serializable] = List(List(), Vector())
-scala>
+scala> :quit
diff --git a/test/files/run/macro-bundle-repl.check b/test/files/run/macro-bundle-repl.check
index 4a0b421606..75c5c2adda 100644
--- a/test/files/run/macro-bundle-repl.check
+++ b/test/files/run/macro-bundle-repl.check
@@ -21,4 +21,4 @@ defined term macro foo: Unit
scala> foo
-scala>
+scala> :quit
diff --git a/test/files/run/macro-openmacros.flags b/test/files/run/macro-openmacros.flags
index cd66464f2f..2433c055a4 100644
--- a/test/files/run/macro-openmacros.flags
+++ b/test/files/run/macro-openmacros.flags
@@ -1 +1,2 @@
--language:experimental.macros \ No newline at end of file
+-Yrangepos:false
+-language:experimental.macros
diff --git a/test/files/run/macro-parse-position.flags b/test/files/run/macro-parse-position.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/macro-parse-position.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/macro-rangepos-args.check b/test/files/run/macro-rangepos-args.check
new file mode 100644
index 0000000000..d779505c66
--- /dev/null
+++ b/test/files/run/macro-rangepos-args.check
@@ -0,0 +1 @@
+Line: 3. Width: 5.
diff --git a/test/files/run/macro-rangepos-args.flags b/test/files/run/macro-rangepos-args.flags
new file mode 100644
index 0000000000..fcf951d907
--- /dev/null
+++ b/test/files/run/macro-rangepos-args.flags
@@ -0,0 +1 @@
+-Yrangepos \ No newline at end of file
diff --git a/test/files/run/macro-rangepos-args/Macros_1.scala b/test/files/run/macro-rangepos-args/Macros_1.scala
new file mode 100644
index 0000000000..97b938613c
--- /dev/null
+++ b/test/files/run/macro-rangepos-args/Macros_1.scala
@@ -0,0 +1,10 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+ def impl(c: Context)(x: c.Tree): c.Tree = {
+ import c.universe._
+ Literal(Constant(s"Line: ${x.pos.line}. Width: ${x.pos.end - x.pos.start}."))
+ }
+ def pos(x: Any): String = macro impl
+}
diff --git a/test/files/run/macro-rangepos-args/Test_2.scala b/test/files/run/macro-rangepos-args/Test_2.scala
new file mode 100644
index 0000000000..8c770e9010
--- /dev/null
+++ b/test/files/run/macro-rangepos-args/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val x = 2
+ println(Macros.pos(x + 2))
+} \ No newline at end of file
diff --git a/test/files/run/macro-rangepos-subpatterns.check b/test/files/run/macro-rangepos-subpatterns.check
new file mode 100644
index 0000000000..760e15d019
--- /dev/null
+++ b/test/files/run/macro-rangepos-subpatterns.check
@@ -0,0 +1 @@
+The width of the subpattern is: 2
diff --git a/test/files/run/macro-rangepos-subpatterns.flags b/test/files/run/macro-rangepos-subpatterns.flags
new file mode 100644
index 0000000000..fcf951d907
--- /dev/null
+++ b/test/files/run/macro-rangepos-subpatterns.flags
@@ -0,0 +1 @@
+-Yrangepos \ No newline at end of file
diff --git a/test/files/run/macro-rangepos-subpatterns/Macros_1.scala b/test/files/run/macro-rangepos-subpatterns/Macros_1.scala
new file mode 100644
index 0000000000..0f30862347
--- /dev/null
+++ b/test/files/run/macro-rangepos-subpatterns/Macros_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.macros.whitebox.Context
+import language.experimental.macros
+
+object Extractor {
+ def unapply(x: Any): Any = macro unapplyImpl
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ import internal._
+ val pos = subpatterns(x).get.head.pos
+ q"""
+ new {
+ def isEmpty = false
+ def get = ${"The width of the subpattern is: " + (pos.end - pos.start + 1)}
+ def unapply(x: Any) = this
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/macro-rangepos-subpatterns/Test_2.scala b/test/files/run/macro-rangepos-subpatterns/Test_2.scala
new file mode 100644
index 0000000000..7b076e6632
--- /dev/null
+++ b/test/files/run/macro-rangepos-subpatterns/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ 42 match {
+ case Extractor(a) => println(a)
+ }
+}
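
The whitebox macro above expands into a name-based extractor: an anonymous object exposing isEmpty, get and unapply. The same protocol works without macros, as in this sketch (WidthOf, Result and NameBasedDemo are illustrative names only):

object WidthOf {
  // Any result type with isEmpty/get satisfies the name-based extractor protocol.
  class Result(s: String) {
    def isEmpty = false
    def get = "The width of the subpattern is: " + s.length
  }
  def unapply(x: Any): Result = new Result(x.toString)
}

object NameBasedDemo extends App {
  42 match {
    case WidthOf(msg) => println(msg) // prints: The width of the subpattern is: 2
  }
}
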
diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check
index 86b4d472ed..fab03d1558 100644
--- a/test/files/run/macro-repl-basic.check
+++ b/test/files/run/macro-repl-basic.check
@@ -49,4 +49,4 @@ import Macros.Shmacros._
scala> println(foo(2) + Macros.bar(2) * new Macros().quux(4))
31
-scala>
+scala> :quit
diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check
index 20d3b2d702..6ecc9245fa 100644
--- a/test/files/run/macro-repl-dontexpand.check
+++ b/test/files/run/macro-repl-dontexpand.check
@@ -13,4 +13,4 @@ bar2: (c: scala.reflect.macros.whitebox.Context)Nothing
scala> def foo2 = macro bar2
defined term macro foo2: Nothing
-scala>
+scala> :quit
diff --git a/test/files/run/macro-system-properties.check b/test/files/run/macro-system-properties.check
index ffbd5a8aa8..e2e2bd32b9 100644
--- a/test/files/run/macro-system-properties.check
+++ b/test/files/run/macro-system-properties.check
@@ -19,4 +19,4 @@ defined object Test
scala> object Test { class C(implicit a: Any) { GrabContext.grab } }
defined object Test
-scala>
+scala> :quit
diff --git a/test/files/run/macroPlugins-enterStats.check b/test/files/run/macroPlugins-enterStats.check
new file mode 100644
index 0000000000..133b1ae1af
--- /dev/null
+++ b/test/files/run/macroPlugins-enterStats.check
@@ -0,0 +1,30 @@
+[[syntax trees at end of typer]] // newSource1.scala
+package <empty> {
+ class C extends scala.AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ };
+ def x: Int = 2;
+ def xmacroPlugin1: Nothing = scala.this.Predef.???;
+ def xmacroPlugin2: Nothing = scala.this.Predef.???;
+ def xmacroPlugin2macroPlugin1: Nothing = scala.this.Predef.???;
+ def y: Int = 3;
+ def ymacroPlugin1: Nothing = scala.this.Predef.???;
+ def ymacroPlugin2: Nothing = scala.this.Predef.???;
+ def ymacroPlugin2macroPlugin1: Nothing = scala.this.Predef.???
+ }
+}
+
+macroPlugin2:enterStat(class C extends scala.AnyRef { def <init>() = { super.<init>(); () }; def x = 2; def y = 3 })
+macroPlugin1:enterStat(class C extends scala.AnyRef { def <init>() = { super.<init>(); () }; def x = 2; def y = 3 })
+macroPlugin2:enterStat(def <init>() = { super.<init>(); () })
+macroPlugin2:enterStat(def x = 2)
+macroPlugin2:enterStat(def y = 3)
+macroPlugin1:enterStat(def <init>() = { super.<init>(); () })
+macroPlugin1:enterStat(def x = 2)
+macroPlugin1:enterStat(def xmacroPlugin2 = $qmark$qmark$qmark)
+macroPlugin1:enterStat(def y = 3)
+macroPlugin1:enterStat(def ymacroPlugin2 = $qmark$qmark$qmark)
+macroPlugin2:enterStat(super.<init>())
+macroPlugin1:enterStat(super.<init>())
diff --git a/test/files/run/macroPlugins-enterStats.scala b/test/files/run/macroPlugins-enterStats.scala
new file mode 100644
index 0000000000..917233e990
--- /dev/null
+++ b/test/files/run/macroPlugins-enterStats.scala
@@ -0,0 +1,50 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -Xprint:typer"
+
+ def code = """
+ class C {
+ def x = 2
+ def y = 3
+ }
+ """.trim
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ import analyzer._
+
+ val output = collection.mutable.ListBuffer[String]()
+ def log(what: String) = output += what.replace(String.format("%n"), " ")
+
+ def logEnterStat(pluginName: String, stat: Tree): Unit = log(s"$pluginName:enterStat($stat)")
+ def deriveStat(pluginName: String, typer: Typer, stat: Tree): List[Tree] = stat match {
+ case DefDef(mods, name, Nil, Nil, TypeTree(), body) =>
+ val derived = DefDef(NoMods, TermName(name + pluginName), Nil, Nil, TypeTree(), Ident(TermName("$qmark$qmark$qmark")))
+ newNamer(typer.context).enterSym(derived)
+ List(derived)
+ case _ =>
+ Nil
+ }
+
+ object macroPlugin1 extends MacroPlugin {
+ override def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+ stats.foreach(stat => logEnterStat("macroPlugin1", stat))
+ stats.flatMap(stat => stat +: deriveStat("macroPlugin1", typer, stat))
+ }
+ }
+ object macroPlugin2 extends MacroPlugin {
+ override def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = {
+ stats.foreach(stat => logEnterStat("macroPlugin2", stat))
+ stats.flatMap(stat => stat +: deriveStat("macroPlugin2", typer, stat))
+ }
+ }
+
+ addMacroPlugin(macroPlugin1)
+ addMacroPlugin(macroPlugin2)
+ compileString(global)(code)
+ println(output.mkString("\n"))
+ }
+}
diff --git a/test/files/run/macroPlugins-isBlackbox/Macros_2.scala b/test/files/run/macroPlugins-isBlackbox/Macros_2.scala
new file mode 100644
index 0000000000..a90dd702df
--- /dev/null
+++ b/test/files/run/macroPlugins-isBlackbox/Macros_2.scala
@@ -0,0 +1,11 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.blackbox.Context
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ q"42"
+ }
+
+ def foo: Any = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala b/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala
new file mode 100644
index 0000000000..b78a18ea6a
--- /dev/null
+++ b/test/files/run/macroPlugins-isBlackbox/Plugin_1.scala
@@ -0,0 +1,21 @@
+package isblackbox
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.plugins.{Plugin => NscPlugin}
+
+class Plugin(val global: Global) extends NscPlugin {
+ import global._
+ import analyzer._
+ import scala.reflect.internal.Mode
+
+ val name = "isBlackbox"
+ val description = "A sample analyzer plugin that overrides isBlackbox."
+ val components = Nil
+ addMacroPlugin(MacroPlugin)
+
+ object MacroPlugin extends MacroPlugin {
+ override def pluginsIsBlackbox(macroDef: Symbol): Option[Boolean] = {
+ Some(false)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-isBlackbox/Test_3.flags b/test/files/run/macroPlugins-isBlackbox/Test_3.flags
new file mode 100644
index 0000000000..966df731d0
--- /dev/null
+++ b/test/files/run/macroPlugins-isBlackbox/Test_3.flags
@@ -0,0 +1 @@
+-Xplugin:. \ No newline at end of file
diff --git a/test/files/run/macroPlugins-isBlackbox/Test_3.scala b/test/files/run/macroPlugins-isBlackbox/Test_3.scala
new file mode 100644
index 0000000000..552e888143
--- /dev/null
+++ b/test/files/run/macroPlugins-isBlackbox/Test_3.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ val x: Int = Macros.foo
+} \ No newline at end of file
diff --git a/test/files/run/macroPlugins-isBlackbox/scalac-plugin.xml b/test/files/run/macroPlugins-isBlackbox/scalac-plugin.xml
new file mode 100644
index 0000000000..09b9c14648
--- /dev/null
+++ b/test/files/run/macroPlugins-isBlackbox/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+ <name>is-blackbox</name>
+ <classname>isblackbox.Plugin</classname>
+</plugin> \ No newline at end of file
diff --git a/test/files/run/macroPlugins-macroExpand.flags b/test/files/run/macroPlugins-macroExpand.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/macroPlugins-macroExpand.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/macroPlugins-typedMacroBody.flags b/test/files/run/macroPlugins-typedMacroBody.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/macroPlugins-typedMacroBody.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/mapConserve.scala b/test/files/run/mapConserve.scala
index d1d52f3107..c17754283a 100644
--- a/test/files/run/mapConserve.scala
+++ b/test/files/run/mapConserve.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warnings; re-run with
*/
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index 0037822f3b..c358dc5849 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -1,7 +1,7 @@
names-defaults.scala:269: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
spawn(b = { val ttt = 1; ttt }, a = 0)
^
-warning: there were 4 deprecation warning(s); re-run with -deprecation for details
+warning: there were four deprecation warnings; re-run with -deprecation for details
1: @
get: $
get: 2
@@ -124,3 +124,4 @@ List(1, 2)
3
3
(1,0), (1,2)
+1 1 0
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 05cd4a540c..b7ed490cbc 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -401,6 +401,10 @@ object Test extends App {
C4441a().copy()
C4441b()().copy()()
+ // SI-8117
+ def f8177(a: Int = 0, b: Int = 0, c: Int = 0) = s"$a $b $c"
+ println(f8177(a = 1, 1))
+
// DEFINITIONS
def test1(a: Int, b: String) = println(a +": "+ b)
def test2(u: Int, v: Int)(k: String, l: Int) = println(l +": "+ k +", "+ (u + v))
diff --git a/test/files/run/nothingTypeDce.flags b/test/files/run/nothingTypeDce.flags
new file mode 100644
index 0000000000..d85321ca0e
--- /dev/null
+++ b/test/files/run/nothingTypeDce.flags
@@ -0,0 +1 @@
+-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code
diff --git a/test/files/run/nothingTypeDce.scala b/test/files/run/nothingTypeDce.scala
new file mode 100644
index 0000000000..5f3692fd33
--- /dev/null
+++ b/test/files/run/nothingTypeDce.scala
@@ -0,0 +1,63 @@
+// See comment in BCodeBodyBuilder
+
+// -target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code
+// target enables stack map frame generation
+
+class C {
+ // can't just emit a call to ???, which returns a value of type Nothing$ (not Int).
+ def f1: Int = ???
+
+ def f2: Int = throw new Error("")
+
+ def f3(x: Boolean) = {
+ var y = 0
+ // cannot assign an object of type Nothing$ to Int
+ if (x) y = ???
+ else y = 1
+ y
+ }
+
+ def f4(x: Boolean) = {
+ var y = 0
+ // tests that whatever is emitted after the throw is valid (what is emitted depends on opts and the presence of stack map frames)
+ if (x) y = throw new Error("")
+ else y = 1
+ y
+ }
+
+ def f5(x: Boolean) = {
+ // stack heights need to be the same. ??? looks to the jvm like returning a value of
+ // type Nothing$, need to drop or throw it.
+ println(
+ if (x) { ???; 10 }
+ else 20
+ )
+ }
+
+ def f6(x: Boolean) = {
+ println(
+ if (x) { throw new Error(""); 10 }
+ else 20
+ )
+ }
+
+ def f7(x: Boolean) = {
+ println(
+ if (x) throw new Error("")
+ else 20
+ )
+ }
+
+ def f8(x: Boolean) = {
+ println(
+ if (x) throw new Error("")
+ else 20
+ )
+ }
+}
+
+object Test extends App {
+ // creating an instance is enough to trigger bytecode verification for all methods,
+ // no need to invoke the methods.
+ new C()
+}
diff --git a/test/files/run/nothingTypeNoFramesNoDce.check b/test/files/run/nothingTypeNoFramesNoDce.check
new file mode 100644
index 0000000000..b1d08b45ff
--- /dev/null
+++ b/test/files/run/nothingTypeNoFramesNoDce.check
@@ -0,0 +1 @@
+warning: -target:jvm-1.5 is deprecated: use target for Java 1.6 or above.
diff --git a/test/files/run/nothingTypeNoFramesNoDce.flags b/test/files/run/nothingTypeNoFramesNoDce.flags
new file mode 100644
index 0000000000..a035c86179
--- /dev/null
+++ b/test/files/run/nothingTypeNoFramesNoDce.flags
@@ -0,0 +1 @@
+-target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none -deprecation
diff --git a/test/files/run/nothingTypeNoFramesNoDce.scala b/test/files/run/nothingTypeNoFramesNoDce.scala
new file mode 100644
index 0000000000..3d1298303a
--- /dev/null
+++ b/test/files/run/nothingTypeNoFramesNoDce.scala
@@ -0,0 +1,61 @@
+// See comment in BCodeBodyBuilder
+
+// -target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none
+// target disables stack map frame generation. In this mode, the ClassWriter just emits dead code as is.
+
+class C {
+ // can't just emit a call to ???, which returns a value of type Nothing$ (not Int).
+ def f1: Int = ???
+
+ def f2: Int = throw new Error("")
+
+ def f3(x: Boolean) = {
+ var y = 0
+ // cannot assign an object of type Nothing$ to Int
+ if (x) y = ???
+ else y = 1
+ y
+ }
+
+ def f4(x: Boolean) = {
+ var y = 0
+ // tests that whatever is emitted after the throw is valid (what is emitted depends on opts and the presence of stack map frames)
+ if (x) y = throw new Error("")
+ else y = 1
+ y
+ }
+
+ def f5(x: Boolean) = {
+ // stack heights need to be the same. ??? looks to the jvm like returning a value of
+ // type Nothing$, need to drop or throw it.
+ println(
+ if (x) { ???; 10 }
+ else 20
+ )
+ }
+
+ def f6(x: Boolean) = {
+ println(
+ if (x) { throw new Error(""); 10 }
+ else 20
+ )
+ }
+
+ def f7(x: Boolean) = {
+ println(
+ if (x) throw new Error("")
+ else 20
+ )
+ }
+
+ def f8(x: Boolean) = {
+ println(
+ if (x) throw new Error("")
+ else 20
+ )
+ }
+}
+
+object Test extends App {
+ new C()
+}
diff --git a/test/files/run/nothingTypeNoOpt.flags b/test/files/run/nothingTypeNoOpt.flags
new file mode 100644
index 0000000000..b3b518051b
--- /dev/null
+++ b/test/files/run/nothingTypeNoOpt.flags
@@ -0,0 +1 @@
+-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none
diff --git a/test/files/run/nothingTypeNoOpt.scala b/test/files/run/nothingTypeNoOpt.scala
new file mode 100644
index 0000000000..5c5a20fa3b
--- /dev/null
+++ b/test/files/run/nothingTypeNoOpt.scala
@@ -0,0 +1,61 @@
+// See comment in BCodeBodyBuilder
+
+// -target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none
+// target enables stack map frame generation
+
+class C {
+ // can't just emit a call to ???, which returns a value of type Nothing$ (not Int).
+ def f1: Int = ???
+
+ def f2: Int = throw new Error("")
+
+ def f3(x: Boolean) = {
+ var y = 0
+ // cannot assign an object of type Nothing$ to Int
+ if (x) y = ???
+ else y = 1
+ y
+ }
+
+ def f4(x: Boolean) = {
+ var y = 0
+ // tests that whatever is emitted after the throw is valid (what is emitted depends on opts and the presence of stack map frames)
+ if (x) y = throw new Error("")
+ else y = 1
+ y
+ }
+
+ def f5(x: Boolean) = {
+ // stack heights need to be the same. ??? looks to the jvm like returning a value of
+ // type Nothing$, need to drop or throw it.
+ println(
+ if (x) { ???; 10 }
+ else 20
+ )
+ }
+
+ def f6(x: Boolean) = {
+ println(
+ if (x) { throw new Error(""); 10 }
+ else 20
+ )
+ }
+
+ def f7(x: Boolean) = {
+ println(
+ if (x) throw new Error("")
+ else 20
+ )
+ }
+
+ def f8(x: Boolean) = {
+ println(
+ if (x) throw new Error("")
+ else 20
+ )
+ }
+}
+
+object Test extends App {
+ new C()
+}
diff --git a/test/files/run/pc-conversions.scala b/test/files/run/pc-conversions.scala
index 19fef355c8..d4ae305aa7 100644
--- a/test/files/run/pc-conversions.scala
+++ b/test/files/run/pc-conversions.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warning; re-run with
*/
import collection._
diff --git a/test/files/run/priorityQueue.scala b/test/files/run/priorityQueue.scala
deleted file mode 100644
index 327d8bf137..0000000000
--- a/test/files/run/priorityQueue.scala
+++ /dev/null
@@ -1,373 +0,0 @@
-
-
-
-import scala.collection.mutable.PriorityQueue
-
-
-
-
-
-
-// populate a priority queue a few different ways and make sure they all seem equal
-object Test {
-
- def main(args: Array[String]) {
- // testInsertionsAndEqualities
- // testIntensiveEnqueueDequeue
- // testTails
- // testInits
- // testFilters
- // testDrops
- // testEquality
- // testMisc
- // testReverse
- // testToList
- // testForeach
- }
-
- // def testInsertionsAndEqualities {
- // import scala.util.Random.nextInt
- // val pq1 = new PriorityQueue[String]
- // val pq2 = new PriorityQueue[String]
- // val pq3 = new PriorityQueue[String]
- // val pq4 = new PriorityQueue[String]
-
- // val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
-
- // pq1 ++= strings
- // pq2 ++= strings.reverse
- // for (s <- strings) pq3 += s
- // for (s <- strings.reverse) pq4 += s
-
- // val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
-
- // for (queue1 <- pqs ; queue2 <- pqs) {
- // val l1: List[String] = queue1.dequeueAll[String, List[String]]
- // val l2: List[String] = queue2.dequeueAll[String, List[String]]
- // assert(l1 == l2)
- // assert(queue1.max == queue2.max)
- // }
-
- // assertPriorityDestructive(pq1)
- // }
-
- // not a sequence anymore, Mildred
- // def testIndexing {
- // val pq = new PriorityQueue[Char]
- // "The quick brown fox jumps over the lazy dog".foreach(pq += _)
-
- // // val iter = pq.iterator
- // // while (iter.hasNext) println("`" + iter.next + "`")
- // assert(pq(0) == 'z')
- // assert(pq(1) == 'y')
- // assert(pq(2) == 'x')
- // assert(pq(3) == 'w')
- // assert(pq(4) == 'v')
- // assert(pq(5) == 'u')
- // assert(pq(7) == 't')
- // assert(pq(8) == 's')
- // assert(pq(9) == 'r')
- // assert(pq(10) == 'r')
-
- // pq.clear
- // "abcdefghijklmnopqrstuvwxyz".foreach(pq += _)
- // for (i <- 0 until 26) assert(pq(i) == ('z' - i))
-
- // val intpq = new PriorityQueue[Int]
- // val intlst = new collection.mutable.ArrayBuffer ++ (0 until 100)
- // val random = new util.Random(101)
- // while (intlst.nonEmpty) {
- // val idx = random.nextInt(intlst.size)
- // intpq += intlst(idx)
- // intlst.remove(idx)
- // }
- // for (i <- 0 until 100) assert(intpq(i) == (99 - i))
- // }
-
- // def testTails {
- // val pq = new PriorityQueue[Int]
- // for (i <- 0 until 10) pq += i * 4321 % 200
-
- // assert(pq.size == 10)
- // assert(pq.nonEmpty)
-
- // val tailpq = pq.tail
- // // pq.printstate
- // // tailpq.printstate
- // assert(tailpq.size == 9)
- // assert(tailpq.nonEmpty)
- // assertPriorityDestructive(tailpq)
- // }
-
- // def assertPriorityDestructive[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]) {
- // import ord._
- // var prev: A = null.asInstanceOf[A]
- // while (pq.nonEmpty) {
- // val curr = pq.dequeue
- // if (prev != null) assert(curr <= prev)
- // prev = curr
- // }
- // }
-
- // def testInits {
- // val pq = new PriorityQueue[Long]
- // for (i <- 0 until 20) pq += (i + 313) * 111 % 300
-
- // assert(pq.size == 20)
-
- // val initpq = pq.init
- // assert(initpq.size == 19)
- // assertPriorityDestructive(initpq)
- // }
-
- // def testFilters {
- // val pq = new PriorityQueue[String]
- // for (i <- 0 until 100) pq += "Some " + (i * 312 % 200)
-
- // val filpq = pq.filter(_.indexOf('0') != -1)
- // assertPriorityDestructive(filpq)
- // }
-
- // def testIntensiveEnqueueDequeue {
- // val pq = new PriorityQueue[Int]
-
- // testIntensive(1000, pq)
- // pq.clear
- // testIntensive(200, pq)
- // }
-
- // def testIntensive(sz: Int, pq: PriorityQueue[Int]) {
- // val lst = new collection.mutable.ArrayBuffer[Int] ++ (0 until sz)
- // val rand = new util.Random(7)
- // while (lst.nonEmpty) {
- // val idx = rand.nextInt(lst.size)
- // pq.enqueue(lst(idx))
- // lst.remove(idx)
- // if (rand.nextDouble < 0.25 && pq.nonEmpty) pq.dequeue
- // assertPriority(pq)
- // }
- // }
-
- // def testDrops {
- // val pq = new PriorityQueue[Int]
- // pq ++= (0 until 100)
- // val droppq = pq.drop(50)
- // assertPriority(droppq)
-
- // pq.clear
- // pq ++= droppq
- // assertPriorityDestructive(droppq)
- // assertPriority(pq)
- // assertPriorityDestructive(pq)
- // }
-
- // // your sequence days have ended, foul priority queue
- // // def testUpdates {
- // // val pq = new PriorityQueue[Int]
- // // pq ++= (0 until 36)
- // // assertPriority(pq)
-
- // // pq(0) = 100
- // // assert(pq(0) == 100)
- // // assert(pq.dequeue == 100)
- // // assertPriority(pq)
-
- // // pq.clear
-
- // // pq ++= (1 to 100)
- // // pq(5) = 200
- // // assert(pq(0) == 200)
- // // assert(pq(1) == 100)
- // // assert(pq(2) == 99)
- // // assert(pq(3) == 98)
- // // assert(pq(4) == 97)
- // // assert(pq(5) == 96)
- // // assert(pq(6) == 94)
- // // assert(pq(7) == 93)
- // // assert(pq(98) == 2)
- // // assert(pq(99) == 1)
- // // assertPriority(pq)
-
- // // pq(99) = 450
- // // assert(pq(0) == 450)
- // // assert(pq(1) == 200)
- // // assert(pq(99) == 2)
- // // assertPriority(pq)
-
- // // pq(1) = 0
- // // assert(pq(1) == 100)
- // // assert(pq(99) == 0)
- // // assertPriority(pq)
- // // assertPriorityDestructive(pq)
- // // }
-
- // def testEquality {
- // val pq1 = new PriorityQueue[Int]
- // val pq2 = new PriorityQueue[Int]
-
- // pq1 ++= (0 until 50)
- // var i = 49
- // while (i >= 0) {
- // pq2 += i
- // i -= 1
- // }
- // assert(pq1 == pq2)
- // assertPriority(pq2)
-
- // pq1 += 100
- // assert(pq1 != pq2)
- // pq2 += 100
- // assert(pq1 == pq2)
- // pq2 += 200
- // assert(pq1 != pq2)
- // pq1 += 200
- // assert(pq1 == pq2)
- // assertPriorityDestructive(pq1)
- // assertPriorityDestructive(pq2)
- // }
-
- // def testMisc {
- // val pq = new PriorityQueue[Int]
- // pq ++= (0 until 100)
- // assert(pq.size == 100)
-
- // val (p1, p2) = pq.partition(_ < 50)
- // assertPriorityDestructive(p1)
- // assertPriorityDestructive(p2)
-
- // val spq = pq.slice(25, 75)
- // assertPriorityDestructive(spq)
-
- // pq.clear
- // pq ++= (0 until 10)
- // pq += 5
- // assert(pq.size == 11)
-
- // val ind = pq.lastIndexWhere(_ == 5)
- // assert(ind == 5)
- // assertPriorityDestructive(pq)
-
- // pq.clear
- // pq ++= (0 until 10)
- // assert(pq.lastIndexWhere(_ == 9) == 0)
- // assert(pq.lastIndexOf(8) == 1)
- // assert(pq.lastIndexOf(7) == 2)
-
- // pq += 5
- // pq += 9
- // assert(pq.lastIndexOf(9) == 1)
- // assert(pq.lastIndexWhere(_ % 2 == 1) == 10)
- // assert(pq.lastIndexOf(5) == 6)
-
- // val lst = pq.reverseIterator.toList
- // for (i <- 0 until 5) assert(lst(i) == i)
- // assert(lst(5) == 5)
- // assert(lst(6) == 5)
- // assert(lst(7) == 6)
- // assert(lst(8) == 7)
- // assert(lst(9) == 8)
- // assert(lst(10) == 9)
- // assert(lst(11) == 9)
-
- // pq.clear
- // assert(pq.reverseIterator.toList.isEmpty)
-
- // pq ++= (50 to 75)
- // assert(pq.lastIndexOf(70) == 5)
-
- // pq += 55
- // pq += 70
- // assert(pq.lastIndexOf(70) == 6)
- // assert(pq.lastIndexOf(55) == 22)
- // assert(pq.lastIndexOf(55, 21) == 21)
- // assert(pq.lastIndexWhere(_ > 54) == 22)
- // assert(pq.lastIndexWhere(_ > 54, 21) == 21)
- // assert(pq.lastIndexWhere(_ > 69, 5) == 5)
- // }
-
- // def testReverse {
- // val pq = new PriorityQueue[(Int, Int)]
- // pq ++= (for (i <- 0 until 10) yield (i, i * i % 10))
-
- // assert(pq.reverse.size == pq.reverseIterator.toList.size)
- // assert((pq.reverse zip pq.reverseIterator.toList).forall(p => p._1 == p._2))
- // assert(pq.reverse.sameElements(pq.reverseIterator.toSeq))
- // assert(pq.reverse(0)._1 == pq(9)._1)
- // assert(pq.reverse(1)._1 == pq(8)._1)
- // assert(pq.reverse(4)._1 == pq(5)._1)
- // assert(pq.reverse(9)._1 == pq(0)._1)
-
- // pq += ((7, 7))
- // pq += ((7, 9))
- // pq += ((7, 8))
- // assert(pq.reverse.reverse == pq)
- // assert(pq.reverse.lastIndexWhere(_._2 == 6) == 6)
- // assertPriorityDestructive(pq.reverse.reverse)
-
- // val iq = new PriorityQueue[Int]
- // iq ++= (0 until 50)
- // assert(iq.reverse == iq.reverseIterator.toSeq)
- // assert(iq.reverse.reverse == iq)
-
- // iq += 25
- // iq += 40
- // iq += 10
- // assert(iq.reverse == iq.reverseIterator.toList)
- // assert(iq.reverse.reverse == iq)
- // assert(iq.reverse.lastIndexWhere(_ == 10) == 11)
- // assertPriorityDestructive(iq.reverse.reverse)
- // }
-
- // def testToList {
- // val pq = new PriorityQueue[Int]
-
- // pq += 1
- // pq += 4
- // pq += 0
- // pq += 5
- // pq += 3
- // pq += 2
- // assert(pq.toList == pq)
- // assert(pq == List(5, 4, 3, 2, 1, 0))
- // assert(pq.reverse == List(0, 1, 2, 3, 4, 5))
-
- // pq.clear
- // for (i <- -50 until 50) pq += i
- // assert(pq.toList == pq)
- // assert(pq.toList == (-50 until 50).reverse)
- // }
-
- // def testForeach {
- // val pq = new PriorityQueue[Char]
-
- // pq += 't'
- // pq += 'o'
- // pq += 'b'
- // pq += 'y'
- // val sbf = new StringBuilder
- // val sbi = new StringBuilder
- // pq.foreach(sbf += _)
- // pq.iterator.foreach(sbi += _)
- // assert(sbf.toString == sbi.toString)
- // assert(sbf.toString == "ytob")
- // }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/test/files/run/private-override.check b/test/files/run/private-override.check
deleted file mode 100644
index 00750edc07..0000000000
--- a/test/files/run/private-override.check
+++ /dev/null
@@ -1 +0,0 @@
-3
diff --git a/test/files/run/reflection-attachments.check b/test/files/run/reflection-attachments.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/reflection-attachments.check
diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check
index 682326bc18..d60d861a90 100644
--- a/test/files/run/reflection-equality.check
+++ b/test/files/run/reflection-equality.check
@@ -48,4 +48,4 @@ res2: Boolean = true
scala> t2 <:< t1
res3: Boolean = true
-scala>
+scala> :quit
diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check
index 72d40989fe..842037254e 100644
--- a/test/files/run/reflection-java-annotations.check
+++ b/test/files/run/reflection-java-annotations.check
@@ -1,4 +1,4 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false))
=======
new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)), v12 = FOO, v13 = new JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)
diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check
index a1bee76652..ca8857ada4 100644
--- a/test/files/run/reflection-magicsymbols-repl.check
+++ b/test/files/run/reflection-magicsymbols-repl.check
@@ -21,7 +21,7 @@ scala> def test(n: Int): Unit = {
val x = sig.asInstanceOf[MethodType].params.head
println(x.info)
}
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
test: (n: Int)Unit
scala> for (i <- 1 to 8) test(i)
@@ -34,4 +34,4 @@ scala.Null
scala.Nothing
scala.Singleton
-scala>
+scala> :quit
diff --git a/test/files/run/reflection-repl-classes.check b/test/files/run/reflection-repl-classes.check
index 03a6aef2b5..5ebf993a87 100644
--- a/test/files/run/reflection-repl-classes.check
+++ b/test/files/run/reflection-repl-classes.check
@@ -30,4 +30,4 @@ scala>
scala> mm(new A)
res0: Any = 1
-scala>
+scala> :quit
diff --git a/test/files/run/reflection-repl-elementary.check b/test/files/run/reflection-repl-elementary.check
index 4a223e8a24..e948c9fd61 100644
--- a/test/files/run/reflection-repl-elementary.check
+++ b/test/files/run/reflection-repl-elementary.check
@@ -4,4 +4,4 @@ Type :help for more information.
scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
res0: reflect.runtime.universe.Type = scala.List[Nothing]
-scala>
+scala> :quit
diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check
index 29ccee3cc6..c9e69744d6 100644
--- a/test/files/run/reify-repl-fail-gracefully.check
+++ b/test/files/run/reify-repl-fail-gracefully.check
@@ -14,4 +14,4 @@ scala> reify
reify
^
-scala>
+scala> :quit
diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check
index 1432d10127..952f384a1c 100644
--- a/test/files/run/reify_newimpl_22.check
+++ b/test/files/run/reify_newimpl_22.check
@@ -22,4 +22,4 @@ scala> {
^
2
-scala>
+scala> :quit
diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check
index 217f0a98c7..b7e9bfdfbc 100644
--- a/test/files/run/reify_newimpl_23.check
+++ b/test/files/run/reify_newimpl_23.check
@@ -21,4 +21,4 @@ scala> def foo[T]{
^
foo: [T]=> Unit
-scala>
+scala> :quit
diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check
index 93ad69defa..4f36ba10ee 100644
--- a/test/files/run/reify_newimpl_25.check
+++ b/test/files/run/reify_newimpl_25.check
@@ -12,4 +12,4 @@ scala> {
^
TypeTag[x.type]
-scala>
+scala> :quit
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
index 8e0ad87bf2..681b862795 100644
--- a/test/files/run/reify_newimpl_26.check
+++ b/test/files/run/reify_newimpl_26.check
@@ -14,4 +14,4 @@ foo: [T]=> Unit
scala> foo[Int]
WeakTypeTag[scala.List[T]]
-scala>
+scala> :quit
diff --git a/test/files/run/reify_newimpl_35.check b/test/files/run/reify_newimpl_35.check
index f884d2c0d0..bd9b3a2fb1 100644
--- a/test/files/run/reify_newimpl_35.check
+++ b/test/files/run/reify_newimpl_35.check
@@ -10,4 +10,4 @@ foo: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtim
scala> println(foo)
Expr[List[Nothing]](Nil)
-scala>
+scala> :quit
diff --git a/test/files/run/repl-assign.check b/test/files/run/repl-assign.check
index bdc7793c37..faa8a93244 100644
--- a/test/files/run/repl-assign.check
+++ b/test/files/run/repl-assign.check
@@ -13,4 +13,4 @@ x: Int = 12
scala> y = 13
y: Int = 13
-scala>
+scala> :quit
diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check
index 97ae208ff4..07cf23412f 100644
--- a/test/files/run/repl-bare-expr.check
+++ b/test/files/run/repl-bare-expr.check
@@ -47,4 +47,4 @@ Bovine.x: List[Any] = List(Ruminant(5), Cow, Moooooo)
scala> Bovine.x
res4: List[Any] = List(Ruminant(5), Cow, Moooooo)
-scala>
+scala> :quit
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 1f6d3e2b39..9898027c1d 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -218,4 +218,4 @@ Unit
scala> :type println("side effect!")
Unit
-scala>
+scala> :quit
diff --git a/test/files/run/repl-empty-package.check b/test/files/run/repl-empty-package.check
index ecf79c2c6d..d3b75f685e 100644
--- a/test/files/run/repl-empty-package.check
+++ b/test/files/run/repl-empty-package.check
@@ -4,4 +4,4 @@ Type :help for more information.
scala> println(Bippy.bippy)
bippy!
-scala>
+scala> :quit
diff --git a/test/files/run/repl-javap-app.check b/test/files/run/repl-javap-app.check
index 490860585c..eb3718f44b 100644
--- a/test/files/run/repl-javap-app.check
+++ b/test/files/run/repl-javap-app.check
@@ -1,4 +1,5 @@
#partest java6
+Welcome to Scala
Type in expressions to have them evaluated.
Type :help for more information.
@@ -6,16 +7,17 @@ scala> :javap -app MyApp$
public final void delayedEndpoint$MyApp$1();
Code:
Stack=2, Locals=1, Args_size=1
- 0: getstatic #61; //Field scala/Console$.MODULE$:Lscala/Console$;
- 3: ldc #63; //String Hello, delayed world.
- 5: invokevirtual #67; //Method scala/Console$.println:(Ljava/lang/Object;)V
+ 0: getstatic #XX; //Field scala/Console$.MODULE$:Lscala/Console$;
+ 3: ldc #XX; //String Hello, delayed world.
+ 5: invokevirtual #XX; //Method scala/Console$.println:(Ljava/lang/Object;)V
8: return
LocalVariableTable:
Start Length Slot Name Signature
0 9 0 this LMyApp$;
-scala>
-#partest !java6
+scala> :quit
+#partest java7
+Welcome to Scala
Type in expressions to have them evaluated.
Type :help for more information.
@@ -24,9 +26,9 @@ scala> :javap -app MyApp$
flags: ACC_PUBLIC, ACC_FINAL
Code:
stack=2, locals=1, args_size=1
- 0: getstatic #61 // Field scala/Console$.MODULE$:Lscala/Console$;
- 3: ldc #63 // String Hello, delayed world.
- 5: invokevirtual #67 // Method scala/Console$.println:(Ljava/lang/Object;)V
+ 0: getstatic #XX // Field scala/Console$.MODULE$:Lscala/Console$;
+ 3: ldc #XX // String Hello, delayed world.
+ 5: invokevirtual #XX // Method scala/Console$.println:(Ljava/lang/Object;)V
8: return
LocalVariableTable:
Start Length Slot Name Signature
@@ -35,4 +37,27 @@ scala> :javap -app MyApp$
line 5: 0
}
-scala>
+scala> :quit
+#partest java8
+Welcome to Scala
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :javap -app MyApp$
+ public final void delayedEndpoint$MyApp$1();
+ descriptor: ()V
+ flags: ACC_PUBLIC, ACC_FINAL
+ Code:
+ stack=2, locals=1, args_size=1
+ 0: getstatic #XX // Field scala/Console$.MODULE$:Lscala/Console$;
+ 3: ldc #XX // String Hello, delayed world.
+ 5: invokevirtual #XX // Method scala/Console$.println:(Ljava/lang/Object;)V
+ 8: return
+ LocalVariableTable:
+ Start Length Slot Name Signature
+ 0 9 0 this LMyApp$;
+ LineNumberTable:
+ line 5: 0
+}
+
+scala> :quit
diff --git a/test/files/run/repl-javap-app.scala b/test/files/run/repl-javap-app.scala
index be04920be1..ad6076c2d5 100644
--- a/test/files/run/repl-javap-app.scala
+++ b/test/files/run/repl-javap-app.scala
@@ -7,4 +7,15 @@ object MyApp extends App {
object Test extends ReplTest {
def code = ":javap -app MyApp$"
+
+ override def welcoming = true
+
+ // The constant pool indices are not the same for GenASM / GenBCode, so we
+ // replace the exact numbers with XX.
+ lazy val hasConstantPoolRef = """(.*)(#\d\d)(.*)""".r
+
+ override def normalize(s: String) = s match {
+ case hasConstantPoolRef(start, ref, end) => start + "#XX" + end
+ case _ => super.normalize(s)
+ }
}
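
A stand-alone sketch of the normalization above, assuming nothing beyond the standard library (NormalizeSketch is an illustrative name):

object NormalizeSketch extends App {
  // Mask two-digit constant-pool references so the expected output does not
  // depend on the exact indices a particular backend assigns.
  private val hasConstantPoolRef = """(.*)(#\d\d)(.*)""".r
  def normalize(s: String): String = s match {
    case hasConstantPoolRef(start, _, end) => start + "#XX" + end
    case _ => s
  }
  println(normalize("  5: invokevirtual #67; //Method scala/Console$.println:(Ljava/lang/Object;)V"))
  // prints:   5: invokevirtual #XX; //Method scala/Console$.println:(Ljava/lang/Object;)V
}
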
diff --git a/test/files/run/repl-javap-lambdas.scala b/test/files/run/repl-javap-lambdas.scala
new file mode 100644
index 0000000000..76a6ec8450
--- /dev/null
+++ b/test/files/run/repl-javap-lambdas.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest.JavapTest
+import scala.tools.nsc.Settings
+
+// see repl-javap-memfun.java for the complementary version
+object Test extends JavapTest {
+ override def transformSettings(s: Settings) = { s.Ydelambdafy.value = "method" ; s }
+ def code = """
+ |object Betty {
+ | List(1,2,3) count (_ % 2 != 0)
+ | def f = List(1,2,3) filter ((x: Any) => true) map (x => "m1")
+ | def g = List(1,2,3) filter ((x: Any) => true) map (x => "m1") map (x => "m2")
+ |}
+ |:javap -fun Betty#g
+ """.stripMargin
+
+ // three anonfuns of Betty#g
+ override def yah(res: Seq[String]) = {
+ import PartialFunction.{ cond => when }
+ val r = """.*final .* .*\$anonfun\$\d+\(.*""".r
+ def filtered = res filter (when(_) { case r(_*) => true })
+ 3 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-memfun.scala b/test/files/run/repl-javap-memfun.scala
index d2b4243c8b..d10ebcb399 100644
--- a/test/files/run/repl-javap-memfun.scala
+++ b/test/files/run/repl-javap-memfun.scala
@@ -1,6 +1,10 @@
import scala.tools.partest.JavapTest
+import scala.tools.nsc.Settings
+// see repl-javap-lambdas.scala for the complementary version
object Test extends JavapTest {
+ // asserting the default
+ override def transformSettings(s: Settings) = { s.Ydelambdafy.value = "inline" ; s }
def code = """
|object Betty {
| List(1,2,3) count (_ % 2 != 0)
diff --git a/test/files/run/repl-javap-outdir-funs.flags b/test/files/run/repl-javap-outdir-funs.flags
new file mode 100644
index 0000000000..ac96850b69
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline \ No newline at end of file
diff --git a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
index 6c6fe2d515..af9651a8a3 100644
--- a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
+++ b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
@@ -1,6 +1,8 @@
import scala.tools.partest.JavapTest
object Test extends JavapTest {
+ // note the '-fun': it makes :javap search for some anonfun.
+ // for that reason, this test has a flags file that forces delambdafy:inline (doesn't allow :method)
def code = """
|:javap -fun disktest/Foo.class
""".stripMargin
@@ -11,7 +13,8 @@ object Test extends JavapTest {
if (scala.tools.partest.utils.Properties.isAvian)
true
else {
- def filtered = res filter (_ contains "public final class disktest.Foo")
+ val r = "public final class disktest.Foo.*extends scala.runtime.AbstractFunction1".r
+ def filtered = res filter (r.findFirstIn(_).nonEmpty)
1 == filtered.size
}
}
diff --git a/test/files/run/repl-out-dir.check b/test/files/run/repl-out-dir.check
index 3e51c63155..c354492898 100644
--- a/test/files/run/repl-out-dir.check
+++ b/test/files/run/repl-out-dir.check
@@ -46,4 +46,4 @@ repl-out-dir-run.obj
Test$.class
Test.class
-scala>
+scala> :quit
diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check
index 15f4b4524a..74d15ff93c 100644
--- a/test/files/run/repl-parens.check
+++ b/test/files/run/repl-parens.check
@@ -81,4 +81,4 @@ scala>
scala> List(1) ++ List('a')
res16: List[AnyVal] = List(1, a)
-scala>
+scala> :quit
diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check
index ab3809a2e0..6ea8e2f419 100644
--- a/test/files/run/repl-paste-2.check
+++ b/test/files/run/repl-paste-2.check
@@ -58,4 +58,4 @@ scala> x.length + res5
res3: Int = 129
-scala>
+scala> :quit
diff --git a/test/files/run/repl-paste-3.check b/test/files/run/repl-paste-3.check
index 8fae61792e..23e402852f 100644
--- a/test/files/run/repl-paste-3.check
+++ b/test/files/run/repl-paste-3.check
@@ -7,4 +7,4 @@ scala> println(3)
scala> List(1,2)
res1: List[Int] = List(1, 2)
-scala>
+scala> :quit
diff --git a/test/files/run/repl-paste-4.scala b/test/files/run/repl-paste-4.scala
index 0060dc1ff6..cb0a6aa768 100644
--- a/test/files/run/repl-paste-4.scala
+++ b/test/files/run/repl-paste-4.scala
@@ -14,7 +14,7 @@ s"""|Type in expressions to have them evaluated.
|scala> Foo(new Foo)
|res0: Int = 7
|
- |scala> """
+ |scala> :quit"""
def pastie = testPath changeExtension "pastie"
}
diff --git a/test/files/run/repl-paste-raw.scala b/test/files/run/repl-paste-raw.scala
index 2953796f99..3b41254e96 100644
--- a/test/files/run/repl-paste-raw.scala
+++ b/test/files/run/repl-paste-raw.scala
@@ -15,6 +15,6 @@ s"""|Type in expressions to have them evaluated.
|scala> favoriteThing.hasString
|res0: Boolean = true
|
- |scala> """
+ |scala> :quit"""
def pastie = testPath changeExtension "pastie"
}
diff --git a/test/files/run/repl-paste.check b/test/files/run/repl-paste.check
index 97f177ddc4..171447214f 100644
--- a/test/files/run/repl-paste.check
+++ b/test/files/run/repl-paste.check
@@ -23,4 +23,4 @@ defined class Dingus
defined object Dingus
x: Int = 110
-scala>
+scala> :quit
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index e56901e0f2..e2318c93f2 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -11,11 +11,11 @@ scala> :power
scala> // guarding against "error: reference to global is ambiguous"
scala> global.emptyValDef // "it is imported twice in the same scope by ..."
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
res0: $r.global.noSelfType.type = private val _ = _
scala> val tp = ArrayClass[scala.util.Random] // magic with tags
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
tp: $r.global.Type = Array[scala.util.Random]
scala> tp.memberType(Array_apply) // evidence
@@ -27,4 +27,4 @@ m: $r.treedsl.global.Literal = 10
scala> typed(m).tpe // typed is in scope
res2: $r.treedsl.global.Type = Int(10)
-scala>
+scala> :quit
diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check
index ed95c7b8ff..cd7893bbc3 100644
--- a/test/files/run/repl-reset.check
+++ b/test/files/run/repl-reset.check
@@ -54,4 +54,4 @@ defined class BippyBungus
scala> { new BippyBungus ; x1 }
res2: Int = 4
-scala>
+scala> :quit
diff --git a/test/files/run/repl-save.scala b/test/files/run/repl-save.scala
index 4539790b1a..c98e6aebc3 100644
--- a/test/files/run/repl-save.scala
+++ b/test/files/run/repl-save.scala
@@ -16,7 +16,7 @@ s"""|Type in expressions to have them evaluated.
|
|scala> :save $saveto
|
- |scala> """
+ |scala> :quit"""
def saveto = testOutput / "session.repl"
override def show() = {
super.show()
diff --git a/test/files/run/repl-term-macros.check b/test/files/run/repl-term-macros.check
index 3580bfe1f1..2cd0b93cd0 100644
--- a/test/files/run/repl-term-macros.check
+++ b/test/files/run/repl-term-macros.check
@@ -37,4 +37,4 @@ defined term macro foo3: (x: Int)(y: Int)Unit
scala> foo3(2)(3)
-scala>
+scala> :quit
diff --git a/test/files/run/repl-transcript.check b/test/files/run/repl-transcript.check
index 49891af900..b0f106387b 100644
--- a/test/files/run/repl-transcript.check
+++ b/test/files/run/repl-transcript.check
@@ -35,4 +35,4 @@ scala> res6.sum + res5
res0: Int = 5273
-scala>
+scala> :quit
diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala
index 483659146a..a53ce3b3e4 100644
--- a/test/files/run/repl-trim-stack-trace.scala
+++ b/test/files/run/repl-trim-stack-trace.scala
@@ -32,7 +32,7 @@ java.lang.Exception
at .f(<console>:7)
... 69 elided
-scala> """
+scala> :quit"""
// normalize the "elided" lines because the frame count depends on test context
lazy val elided = """(\s+\.{3} )\d+( elided)""".r
diff --git a/test/files/run/repl-type-verbose.check b/test/files/run/repl-type-verbose.check
index e37754a060..6f6b47b86d 100644
--- a/test/files/run/repl-type-verbose.check
+++ b/test/files/run/repl-type-verbose.check
@@ -187,4 +187,4 @@ PolyType(
)
)
-scala>
+scala> :quit
diff --git a/test/files/run/richs.check b/test/files/run/richs.check
index 02a98b376d..cf265ae007 100644
--- a/test/files/run/richs.check
+++ b/test/files/run/richs.check
@@ -1,4 +1,4 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
RichCharTest1:
true
diff --git a/test/files/run/sammy_repeated.check b/test/files/run/sammy_repeated.check
new file mode 100644
index 0000000000..1cff0f067c
--- /dev/null
+++ b/test/files/run/sammy_repeated.check
@@ -0,0 +1 @@
+WrappedArray(1)
diff --git a/test/files/run/t5532.flags b/test/files/run/sammy_repeated.flags
index e1b37447c9..e1b37447c9 100644
--- a/test/files/run/t5532.flags
+++ b/test/files/run/sammy_repeated.flags
diff --git a/test/files/run/sammy_repeated.scala b/test/files/run/sammy_repeated.scala
new file mode 100644
index 0000000000..c24dc41909
--- /dev/null
+++ b/test/files/run/sammy_repeated.scala
@@ -0,0 +1,8 @@
+trait RepeatedSink { def accept(a: Any*): Unit }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val f: RepeatedSink = (a) => println(a)
+ f.accept(1)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/search.check b/test/files/run/search.check
index a885696509..e0c55043e3 100644
--- a/test/files/run/search.check
+++ b/test/files/run/search.check
@@ -1,6 +1,6 @@
Found(2)
Found(4)
-InsertionPoint(9)
+InsertionPoint(10)
Found(2)
Found(4)
-InsertionPoint(9)
+InsertionPoint(10)
diff --git a/test/files/run/settings-parse.scala b/test/files/run/settings-parse.scala
index 2754feb972..8d83caf68f 100644
--- a/test/files/run/settings-parse.scala
+++ b/test/files/run/settings-parse.scala
@@ -3,9 +3,8 @@ import scala.language.postfixOps
import scala.tools.nsc._
object Test {
- val tokens = List("", "-deprecation", "foo.scala")
- val subsets = tokens.toSet.subsets.toList
- val permutations0 = subsets.flatMap(_.toList.permutations).distinct
+ val tokens = "" :: "-deprecation" :: "foo.scala" :: Nil
+ val permutations0 = tokens.toSet.subsets.flatMap(_.toList.permutations).toList.distinct
def runWithCp(cp: String) = {
val permutations = permutations0 flatMap ("-cp CPTOKEN" :: _ permutations)
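
A standalone sketch (hypothetical PermDemo) of the combinator chain the patch switches to — every ordering of every subset of the tokens, duplicates removed:

    object PermDemo {
      def main(args: Array[String]): Unit = {
        val tokens = "" :: "-deprecation" :: "foo.scala" :: Nil
        // subsets of the token set, each expanded to all of its orderings
        val permutations0 = tokens.toSet.subsets.flatMap(_.toList.permutations).toList.distinct
        println(permutations0.size)
        permutations0 foreach println
      }
    }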
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
index ead61e76ac..c7f46bac87 100644
--- a/test/files/run/stringinterpolation_macro-run.check
+++ b/test/files/run/stringinterpolation_macro-run.check
@@ -63,5 +63,9 @@ She is 4 feet tall.
05/26/12
05/26/12
%
+ mind
+------
+matter
+
7 7 9
7 9 9
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
index ff779dd1d3..ae7c0e5d7a 100644
--- a/test/files/run/stringinterpolation_macro-run.scala
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warnings; re-run with
*/
object Test extends App {
@@ -115,6 +115,7 @@ println(f"""${"1234"}%TD""")
// literals and arg indexes
println(f"%%")
+println(f" mind%n------%nmatter%n")
println(f"${7}%d %<d ${9}%d")
println(f"${7}%d %2$$d ${9}%d")
diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check
index 6e99739633..9add05ea0c 100644
--- a/test/files/run/synchronized.check
+++ b/test/files/run/synchronized.check
@@ -1,4 +1,8 @@
-warning: there were 14 inliner warning(s); re-run with -Yinline-warnings for details
+#partest !-Ybackend:GenBCode
+warning: there were 14 inliner warnings; re-run with -Yinline-warnings for details
+#partest -Ybackend:GenBCode
+warning: there were 14 inliner warnings; re-run with -Yopt-warnings for details
+#partest
.|. c1.f1: OK
.|. c1.fi: OK
.|... c1.fv: OK
diff --git a/test/files/run/t1994.scala b/test/files/run/t1994.scala
new file mode 100644
index 0000000000..0b463e3444
--- /dev/null
+++ b/test/files/run/t1994.scala
@@ -0,0 +1,20 @@
+class A {
+ protected def x = 0
+ protected[A] def y = 0
+}
+
+class B extends A {
+ override def x = 1
+ def superY = super[A].y
+ override def y = 1
+}
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val b = new B
+ assert(b.x == 1)
+ assert(b.y == 1)
+ assert(b.superY == 0)
+ }
+}
diff --git a/test/files/run/t2212.check b/test/files/run/t2212.check
index 8ab4d60ab3..1465f1341a 100644
--- a/test/files/run/t2212.check
+++ b/test/files/run/t2212.check
@@ -1,4 +1,4 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
LinkedList(1)
LinkedList(1)
true
diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala
index b638c433f5..4239c017b8 100644
--- a/test/files/run/t2318.scala
+++ b/test/files/run/t2318.scala
@@ -11,6 +11,7 @@ object Test {
case _: java.io.FilePermission => ()
case x: java.security.SecurityPermission if x.getName contains ".networkaddress." => () // generality ftw
case x: java.util.PropertyPermission if x.getName == "sun.net.inetaddr.ttl" => ()
+ case _: java.lang.reflect.ReflectPermission => () // needed for LambdaMetaFactory
case _ => super.checkPermission(perm)
}
}
diff --git a/test/files/run/t2866.check b/test/files/run/t2866.check
new file mode 100644
index 0000000000..7f52da85fb
--- /dev/null
+++ b/test/files/run/t2866.check
@@ -0,0 +1,3 @@
+t2866.scala:30: warning: imported `one' is permanently hidden by definition of value one
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ ^
diff --git a/test/files/run/t2866.scala b/test/files/run/t2866.scala
new file mode 100644
index 0000000000..8059107583
--- /dev/null
+++ b/test/files/run/t2866.scala
@@ -0,0 +1,44 @@
+// for 2.7.x compatibility
+
+object A {
+ implicit val one = 1
+}
+
+object Test extends App {
+
+ locally {
+ import A._
+ locally {
+ // assert(implicitly[Int] == 1) // error: could not find implicit value for parameter e: Int.
+ // !!! Why one A.one?
+ // (I assume you mean: why _not_ A.one? A.one is shadowed by local one.
+ // but the local one cannot be used yet because it does not have an explicit type.
+ implicit val one = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+ }
+
+ locally {
+ import A._
+ implicit val one: Int = 2
+ assert(implicitly[Int] == 2)
+ assert(one == 2)
+ }
+
+ locally {
+ import A.one // warning: imported `one' is permanently hidden by definition of value one.
+ // !!! Really?
+ //assert(implicitly[Int] == 1)
+ implicit val one = 2
+ assert(implicitly[Int] == 2) // !!! why not 2?
+ assert(one == 2)
+ }
+
+ locally {
+ import A.{one => _, _}
+ implicit val two = 2
+    assert(implicitly[Int] == 2) // not ambiguous in 2.8.0 nor ambiguous in 2.7.6
+ }
+
+}
diff --git a/test/files/run/t3361.check b/test/files/run/t3361.check
index c18bdc9aff..5e0a763501 100644
--- a/test/files/run/t3361.check
+++ b/test/files/run/t3361.check
@@ -1 +1 @@
-warning: there were 16 deprecation warning(s); re-run with -deprecation for details
+warning: there were 16 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t3368-b.check b/test/files/run/t3368-b.check
new file mode 100644
index 0000000000..4cbe98c577
--- /dev/null
+++ b/test/files/run/t3368-b.check
@@ -0,0 +1,89 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("start"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuff"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.PCData("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("x"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368-b.scala b/test/files/run/t3368-b.scala
new file mode 100644
index 0000000000..108cb9a5ee
--- /dev/null
+++ b/test/files/run/t3368-b.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // not coalescing
+ override def extraSettings = s"${super.extraSettings} -Xxml:-coalescing"
+}
diff --git a/test/files/run/t3368-c.check b/test/files/run/t3368-c.check
new file mode 100644
index 0000000000..e0c10cc0dd
--- /dev/null
+++ b/test/files/run/t3368-c.check
@@ -0,0 +1,85 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("starthi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuffred & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.Text("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368-c.scala b/test/files/run/t3368-c.scala
new file mode 100644
index 0000000000..5121794463
--- /dev/null
+++ b/test/files/run/t3368-c.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // default coalescing behavior, whatever that is today.
+ //override def extraSettings = s"${super.extraSettings} -Xxml:coalescing"
+}
diff --git a/test/files/run/t3368-d.check b/test/files/run/t3368-d.check
new file mode 100644
index 0000000000..4cbe98c577
--- /dev/null
+++ b/test/files/run/t3368-d.check
@@ -0,0 +1,89 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("start"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuff"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("red & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.PCData("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("x"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.PCData("hello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368-d.scala b/test/files/run/t3368-d.scala
new file mode 100644
index 0000000000..5777c1a81e
--- /dev/null
+++ b/test/files/run/t3368-d.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // default under 2.12 is not coalescing
+ override def extraSettings = s"${super.extraSettings} -Xsource:212"
+}
diff --git a/test/files/run/t3368.check b/test/files/run/t3368.check
new file mode 100644
index 0000000000..e0c10cc0dd
--- /dev/null
+++ b/test/files/run/t3368.check
@@ -0,0 +1,85 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ abstract trait X extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def x = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hi & bye"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("red & black"));
+ $buf
+ }
+ };
+ abstract trait Y extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def y = {
+ {
+ new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("starthi & bye"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("world"));
+ $buf.$amp$plus({
+ {
+ new _root_.scala.xml.Elem(null, "d", _root_.scala.xml.Null, $scope, true)
+ }
+ });
+ $buf.$amp$plus(new _root_.scala.xml.Text("stuffred & black"));
+ $buf
+ }: _*))
+ }
+ }
+ };
+ abstract trait Z extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ def d = new _root_.scala.xml.Text("hello, world");
+ def e = {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ };
+ def f = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("xhello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def g = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, world"));
+ $buf
+ }: _*))
+ }
+ };
+ def h = {
+ {
+ new _root_.scala.xml.Elem(null, "foo", _root_.scala.xml.Null, $scope, false, ({
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Text("hello, worldhello, world"));
+ $buf
+ }: _*))
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t3368.scala b/test/files/run/t3368.scala
new file mode 100644
index 0000000000..284fed0784
--- /dev/null
+++ b/test/files/run/t3368.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.ParserTest
+
+
+object Test extends ParserTest {
+
+ override def code = """
+ trait X {
+ // error: in XML literal: name expected, but char '!' cannot start a name
+ def x = <![CDATA[hi & bye]]> <![CDATA[red & black]]>
+ }
+ trait Y {
+ def y = <a><b/>start<![CDATA[hi & bye]]><c/>world<d/>stuff<![CDATA[red & black]]></a>
+ }
+ trait Z {
+ def d = <![CDATA[hello, world]]>
+ def e = <![CDATA[hello, world]]><![CDATA[hello, world]]> // top level not coalesced
+ def f = <foo>x<![CDATA[hello, world]]></foo> // adjoining text
+ def g = <foo><![CDATA[hello, world]]></foo> // text node when coalescing
+ def h = <foo><![CDATA[hello, world]]><![CDATA[hello, world]]></foo>
+ }
+ """
+
+ // coalescing
+ override def extraSettings = s"${super.extraSettings} -Xxml:coalescing"
+}
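
What the coalescing flag changes, in one expression — a sketch (hypothetical CoalesceDemo; assumes XML literals compile, i.e. the scala-xml module is on the classpath):

    object CoalesceDemo {
      def main(args: Array[String]): Unit = {
        val foo = <foo>x<![CDATA[hello, world]]></foo>
        // under -Xxml:coalescing the parser merges these into a single Text("xhello, world") child (t3368.check);
        // under -Xxml:-coalescing it keeps two children, a Text and a PCData (t3368-b.check)
        foo.child foreach (c => println(c.getClass.getSimpleName + ": " + c.text))
      }
    }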
diff --git a/test/files/run/t3376.check b/test/files/run/t3376.check
index cc6949d326..b8fd2843f6 100644
--- a/test/files/run/t3376.check
+++ b/test/files/run/t3376.check
@@ -13,4 +13,4 @@ m2: M[Float] = mmm
scala> val m3 = new M[String]()
m3: M[String] = mmm
-scala>
+scala> :quit
diff --git a/test/files/run/t3516.check b/test/files/run/t3516.check
deleted file mode 100644
index d0d10d82fa..0000000000
--- a/test/files/run/t3516.check
+++ /dev/null
@@ -1,3 +0,0 @@
-1
-1
-21
diff --git a/test/files/run/t3516.scala b/test/files/run/t3516.scala
deleted file mode 100644
index aa302ce85a..0000000000
--- a/test/files/run/t3516.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test {
- def mkIterator = (1 to 5).iterator map (x => { println(x) ; x })
- def mkInfinite = Iterator continually { println(1) ; 1 }
-
- def main(args: Array[String]): Unit = {
- // Stream is strict in its head so we should see 1 from each of them.
- val s1 = mkIterator.toStream
- val s2 = mkInfinite.toStream
- // back and forth without slipping into nontermination.
- println((Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next)
- ()
- }
-}
diff --git a/test/files/run/t3569.scala b/test/files/run/t3569.scala
index 91d437e0e3..eb3b424439 100644
--- a/test/files/run/t3569.scala
+++ b/test/files/run/t3569.scala
@@ -26,7 +26,8 @@ object Test {
s.x += 1
println(s.x)
- (classOf[X].getDeclaredFields map ("" + _)).sorted foreach println
+ // under -Xcheckinit there's an additional $init$ field
+ (classOf[X].getDeclaredFields map ("" + _)).sorted.filter(_ != "private volatile byte Test$X.bitmap$init$0") foreach println
(classOf[Y].getDeclaredFields map ("" + _)).sorted foreach println
}
}
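
The reflect-stringify-filter pattern above in isolation (hypothetical FieldsDemo): list a class's declared fields, sort them, and drop synthetic entries you don't want to assert on:

    object FieldsDemo {
      class X { val x = 1; var y = 2 }

      def main(args: Array[String]): Unit = {
        // same shape as the test: stringify, sort, filter, print
        (classOf[X].getDeclaredFields map ("" + _)).sorted
          .filterNot(_ contains "bitmap$")   // e.g. the extra field added under -Xcheckinit
          .foreach(println)
      }
    }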
diff --git a/test/files/run/t3888.check b/test/files/run/t3888.check
index 844ca54682..df1629dd7e 100644
--- a/test/files/run/t3888.check
+++ b/test/files/run/t3888.check
@@ -1 +1 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
diff --git a/test/files/run/t3970.check b/test/files/run/t3970.check
index bd89fff9d9..0683a6c1a6 100644
--- a/test/files/run/t3970.check
+++ b/test/files/run/t3970.check
@@ -1 +1 @@
-warning: there were 5 deprecation warning(s); re-run with -deprecation for details
+warning: there were 5 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t3996.check b/test/files/run/t3996.check
index a92ddc0e51..a9ecc29fea 100644
--- a/test/files/run/t3996.check
+++ b/test/files/run/t3996.check
@@ -1 +1 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t4025.check b/test/files/run/t4025.check
index 2d4f644c5a..e8c6851236 100644
--- a/test/files/run/t4025.check
+++ b/test/files/run/t4025.check
@@ -14,4 +14,4 @@ scala>
scala> def f(c: Any) = c match { case Red(_) => () }
f: (c: Any)Unit
-scala>
+scala> :quit
diff --git a/test/files/run/t4080.check b/test/files/run/t4080.check
index 1953a68ad3..462e925b76 100644
--- a/test/files/run/t4080.check
+++ b/test/files/run/t4080.check
@@ -1,2 +1,2 @@
-warning: there were 3 deprecation warning(s); re-run with -deprecation for details
+warning: there were three deprecation warnings; re-run with -deprecation for details
LinkedList(1, 0, 2, 3)
diff --git a/test/files/run/t4172.check b/test/files/run/t4172.check
index d94638d27e..315c1c9dbd 100644
--- a/test/files/run/t4172.check
+++ b/test/files/run/t4172.check
@@ -2,7 +2,7 @@ Type in expressions to have them evaluated.
Type :help for more information.
scala> val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
c: (C, C{def f: Int}) forSome { type C <: AnyRef } = (C,C)
-scala>
+scala> :quit
diff --git a/test/files/run/t4216.check b/test/files/run/t4216.check
index 091e55a0c7..e4610e87d3 100644
--- a/test/files/run/t4216.check
+++ b/test/files/run/t4216.check
@@ -34,4 +34,4 @@ res4: java.util.List[V] = [V@0]
scala> o(new V(0))
res5: java.util.List[Any] = [V@0]
-scala>
+scala> :quit
diff --git a/test/files/run/t4285.check b/test/files/run/t4285.check
index 314c8e5a35..b952cb8e1b 100644
--- a/test/files/run/t4285.check
+++ b/test/files/run/t4285.check
@@ -10,4 +10,4 @@ y: scala.collection.mutable.WrappedArray[Int] = WrappedArray(2, 4, 6, 8, 10, 12,
scala> println(y.sum)
56
-scala>
+scala> :quit
diff --git a/test/files/run/t4396.check b/test/files/run/t4396.check
index a75e1f257f..d38fb7fae7 100644
--- a/test/files/run/t4396.check
+++ b/test/files/run/t4396.check
@@ -1,4 +1,4 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
hallo
constructor
out:22
diff --git a/test/files/run/t4461.check b/test/files/run/t4461.check
index 9488669324..346993af6f 100644
--- a/test/files/run/t4461.check
+++ b/test/files/run/t4461.check
@@ -1,4 +1,4 @@
-warning: there were 4 deprecation warning(s); re-run with -deprecation for details
+warning: there were four deprecation warnings; re-run with -deprecation for details
Include(End,1)
Include(End,2)
Include(End,3)
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
index a53f31a3c7..f7716dc2f0 100644
--- a/test/files/run/t4542.check
+++ b/test/files/run/t4542.check
@@ -12,4 +12,4 @@ scala> val f = new Foo
^
f: Foo = Bippy
-scala>
+scala> :quit
diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala
index d2335460e5..db5dc19866 100644
--- a/test/files/run/t4594-repl-settings.scala
+++ b/test/files/run/t4594-repl-settings.scala
@@ -11,10 +11,10 @@ object Test extends SessionTest {
|depp: String
|
|scala> def a = depp
- |warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+ |warning: there was one deprecation warning; re-run with -deprecation for details
|a: String
|
- |scala> :settings +deprecation
+ |scala> :settings -deprecation
|
|scala> def b = depp
|<console>:8: warning: method depp is deprecated: Please don't do that.
@@ -22,5 +22,5 @@ object Test extends SessionTest {
| ^
|b: String
|
- |scala> """
+ |scala> :quit"""
}
diff --git a/test/files/run/t4671.check b/test/files/run/t4671.check
index 0c36083759..1640dac8e4 100644
--- a/test/files/run/t4671.check
+++ b/test/files/run/t4671.check
@@ -43,4 +43,4 @@ println(s.mkString(""))
}
-scala>
+scala> :quit
diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check
index 512bfd4b54..21a1e0cd15 100644
--- a/test/files/run/t4680.check
+++ b/test/files/run/t4680.check
@@ -4,7 +4,7 @@ t4680.scala:51: warning: a pure expression does nothing in statement position; y
t4680.scala:69: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
new { val x = 5 } with E() { 5 }
^
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
// new C { }
diff --git a/test/files/run/t4710.check b/test/files/run/t4710.check
index f2335d1bdd..0dd49dfbd3 100644
--- a/test/files/run/t4710.check
+++ b/test/files/run/t4710.check
@@ -2,7 +2,7 @@ Type in expressions to have them evaluated.
Type :help for more information.
scala> def method : String = { implicit def f(s: Symbol) = "" ; 'symbol }
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
method: String
-scala>
+scala> :quit
diff --git a/test/files/run/t4788-separate-compilation.check b/test/files/run/t4788-separate-compilation.check
new file mode 100644
index 0000000000..172ad90102
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation.check
@@ -0,0 +1,5 @@
+Some(@Ljava/lang/Deprecated;())
+None
+None
+Some(@LCAnnotation;() // invisible)
+Some(@LRAnnotation;())
diff --git a/test/files/run/t4788-separate-compilation/CAnnotation_1.java b/test/files/run/t4788-separate-compilation/CAnnotation_1.java
new file mode 100644
index 0000000000..7120218d62
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/CAnnotation_1.java
@@ -0,0 +1,5 @@
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.CLASS;
+
+@Retention(value=CLASS)
+@interface CAnnotation {}
diff --git a/test/files/run/t4788-separate-compilation/C_1.scala b/test/files/run/t4788-separate-compilation/C_1.scala
new file mode 100644
index 0000000000..aba9b595e4
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/C_1.scala
@@ -0,0 +1,2 @@
+@CAnnotation
+class C
diff --git a/test/files/run/t4788-separate-compilation/D_1.scala b/test/files/run/t4788-separate-compilation/D_1.scala
new file mode 100644
index 0000000000..c2479fba86
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/D_1.scala
@@ -0,0 +1,5 @@
+@Deprecated
+class DJava
+
+@deprecated("", "")
+class DScala
diff --git a/test/files/run/t4788-separate-compilation/RAnnotation_1.java b/test/files/run/t4788-separate-compilation/RAnnotation_1.java
new file mode 100644
index 0000000000..f24cf66f7b
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/RAnnotation_1.java
@@ -0,0 +1,5 @@
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+@Retention(value=RUNTIME)
+@interface RAnnotation {}
diff --git a/test/files/run/t4788-separate-compilation/R_1.scala b/test/files/run/t4788-separate-compilation/R_1.scala
new file mode 100644
index 0000000000..ab0cd065d9
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/R_1.scala
@@ -0,0 +1,2 @@
+@RAnnotation
+class R
diff --git a/test/files/run/t4788-separate-compilation/SAnnotation_1.java b/test/files/run/t4788-separate-compilation/SAnnotation_1.java
new file mode 100644
index 0000000000..471f27d82a
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/SAnnotation_1.java
@@ -0,0 +1,5 @@
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.SOURCE;
+
+@Retention(value=SOURCE)
+@interface SAnnotation {}
diff --git a/test/files/run/t4788-separate-compilation/S_1.scala b/test/files/run/t4788-separate-compilation/S_1.scala
new file mode 100644
index 0000000000..f8756d9bc8
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/S_1.scala
@@ -0,0 +1,2 @@
+@SAnnotation
+class S
diff --git a/test/files/run/t4788-separate-compilation/Test_2.scala b/test/files/run/t4788-separate-compilation/Test_2.scala
new file mode 100644
index 0000000000..cbbb5ff386
--- /dev/null
+++ b/test/files/run/t4788-separate-compilation/Test_2.scala
@@ -0,0 +1,35 @@
+import java.io.PrintWriter;
+
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm.util._
+import scala.tools.nsc.util.stringFromWriter
+
+object Test extends BytecodeTest {
+ def annotationsForClass(className: String): Option[String] = {
+ val classNode = loadClassNode(className, skipDebugInfo = false)
+ val textifier = new Textifier
+ classNode.accept(new TraceClassVisitor(null, textifier, null))
+
+ val classString = stringFromWriter(w => textifier.print(w))
+ classString
+ .split('\n')
+ .filterNot(_.contains("@Lscala/reflect/ScalaSignature"))
+ .find(_.contains("@L"))
+ .map(_.trim)
+ }
+
+ def show {
+ // It seems like @java.lang.Deprecated shows up in both the
+ // Deprecated attribute and RuntimeVisibleAnnotation attribute,
+ // while @scala.deprecated only shows up in the Deprecated attribute.
+    // The check file just documents the status quo, not sure if Scala
+    // should be brought in line with Java or not...
+ // See the commit message and SI-8883 for more info.
+ println(annotationsForClass("DJava"))
+ println(annotationsForClass("DScala"))
+
+ println(annotationsForClass("S"))
+ println(annotationsForClass("C"))
+ println(annotationsForClass("R"))
+ }
+}
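
Why only some of those annotations end up RuntimeVisible: retention decides it. A quick standalone check (hypothetical RetentionDemo) using java.lang.Deprecated, which is declared with RUNTIME retention:

    import java.lang.annotation.{Retention, RetentionPolicy}

    object RetentionDemo {
      def main(args: Array[String]): Unit = {
        val r = classOf[java.lang.Deprecated].getAnnotation(classOf[Retention])
        println(r.value)                              // RUNTIME
        println(r.value == RetentionPolicy.RUNTIME)   // true: written as a RuntimeVisibleAnnotation and visible to reflection
      }
    }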
diff --git a/test/files/run/t4788.check b/test/files/run/t4788.check
new file mode 100644
index 0000000000..172ad90102
--- /dev/null
+++ b/test/files/run/t4788.check
@@ -0,0 +1,5 @@
+Some(@Ljava/lang/Deprecated;())
+None
+None
+Some(@LCAnnotation;() // invisible)
+Some(@LRAnnotation;())
diff --git a/test/files/run/t4788/C.scala b/test/files/run/t4788/C.scala
new file mode 100644
index 0000000000..aba9b595e4
--- /dev/null
+++ b/test/files/run/t4788/C.scala
@@ -0,0 +1,2 @@
+@CAnnotation
+class C
diff --git a/test/files/run/t4788/CAnnotation.java b/test/files/run/t4788/CAnnotation.java
new file mode 100644
index 0000000000..7120218d62
--- /dev/null
+++ b/test/files/run/t4788/CAnnotation.java
@@ -0,0 +1,5 @@
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.CLASS;
+
+@Retention(value=CLASS)
+@interface CAnnotation {}
diff --git a/test/files/run/t4788/D.scala b/test/files/run/t4788/D.scala
new file mode 100644
index 0000000000..c2479fba86
--- /dev/null
+++ b/test/files/run/t4788/D.scala
@@ -0,0 +1,5 @@
+@Deprecated
+class DJava
+
+@deprecated("", "")
+class DScala
diff --git a/test/files/run/t4788/R.scala b/test/files/run/t4788/R.scala
new file mode 100644
index 0000000000..ab0cd065d9
--- /dev/null
+++ b/test/files/run/t4788/R.scala
@@ -0,0 +1,2 @@
+@RAnnotation
+class R
diff --git a/test/files/run/t4788/RAnnotation.java b/test/files/run/t4788/RAnnotation.java
new file mode 100644
index 0000000000..f24cf66f7b
--- /dev/null
+++ b/test/files/run/t4788/RAnnotation.java
@@ -0,0 +1,5 @@
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+@Retention(value=RUNTIME)
+@interface RAnnotation {}
diff --git a/test/files/run/t4788/S.scala b/test/files/run/t4788/S.scala
new file mode 100644
index 0000000000..f8756d9bc8
--- /dev/null
+++ b/test/files/run/t4788/S.scala
@@ -0,0 +1,2 @@
+@SAnnotation
+class S
diff --git a/test/files/run/t4788/SAnnotation.java b/test/files/run/t4788/SAnnotation.java
new file mode 100644
index 0000000000..471f27d82a
--- /dev/null
+++ b/test/files/run/t4788/SAnnotation.java
@@ -0,0 +1,5 @@
+import java.lang.annotation.Retention;
+import static java.lang.annotation.RetentionPolicy.SOURCE;
+
+@Retention(value=SOURCE)
+@interface SAnnotation {}
diff --git a/test/files/run/t4788/Test.scala b/test/files/run/t4788/Test.scala
new file mode 100644
index 0000000000..cbbb5ff386
--- /dev/null
+++ b/test/files/run/t4788/Test.scala
@@ -0,0 +1,35 @@
+import java.io.PrintWriter;
+
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm.util._
+import scala.tools.nsc.util.stringFromWriter
+
+object Test extends BytecodeTest {
+ def annotationsForClass(className: String): Option[String] = {
+ val classNode = loadClassNode(className, skipDebugInfo = false)
+ val textifier = new Textifier
+ classNode.accept(new TraceClassVisitor(null, textifier, null))
+
+ val classString = stringFromWriter(w => textifier.print(w))
+ classString
+ .split('\n')
+ .filterNot(_.contains("@Lscala/reflect/ScalaSignature"))
+ .find(_.contains("@L"))
+ .map(_.trim)
+ }
+
+ def show {
+ // It seems like @java.lang.Deprecated shows up in both the
+ // Deprecated attribute and RuntimeVisibleAnnotation attribute,
+ // while @scala.deprecated only shows up in the Deprecated attribute.
+    // The check file just documents the status quo, not sure if Scala
+    // should be brought in line with Java or not...
+ // See the commit message and SI-8883 for more info.
+ println(annotationsForClass("DJava"))
+ println(annotationsForClass("DScala"))
+
+ println(annotationsForClass("S"))
+ println(annotationsForClass("C"))
+ println(annotationsForClass("R"))
+ }
+}
diff --git a/test/files/run/t4813.check b/test/files/run/t4813.check
index a92ddc0e51..a9ecc29fea 100644
--- a/test/files/run/t4813.check
+++ b/test/files/run/t4813.check
@@ -1 +1 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t4950.check b/test/files/run/t4950.check
new file mode 100644
index 0000000000..3f3a302b62
--- /dev/null
+++ b/test/files/run/t4950.check
@@ -0,0 +1,9 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> val 1 = 2
+scala.MatchError: 2 (of class java.lang.Integer)
+
+scala> val List(1) = List(1)
+
+scala> :quit
diff --git a/test/files/run/t4950.scala b/test/files/run/t4950.scala
new file mode 100644
index 0000000000..cef06027bf
--- /dev/null
+++ b/test/files/run/t4950.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // Filter out the abbreviated stacktrace "... X elided"
+ // because the number seems to differ between versions/platforms/...
+ override def show = eval() filterNot (_ contains "elided") foreach println
+ def code =
+"""
+val 1 = 2
+val List(1) = List(1)
+"""
+}
diff --git a/test/files/run/t5072.check b/test/files/run/t5072.check
index ddd49c71cb..ab34e49869 100644
--- a/test/files/run/t5072.check
+++ b/test/files/run/t5072.check
@@ -7,4 +7,4 @@ defined class C
scala> Thread.currentThread.getContextClassLoader.loadClass(classOf[C].getName)
res0: Class[_] = class C
-scala>
+scala> :quit
diff --git a/test/files/run/t5256c.check b/test/files/run/t5256c.check
index 7fcd0eb722..3eb7b13a97 100644
--- a/test/files/run/t5256c.check
+++ b/test/files/run/t5256c.check
@@ -2,5 +2,5 @@ class A$1
Test.A$1
java.lang.Object {
def foo(): Nothing
- def <init>(): A$1
+ def <init>(): Test.A$1
}
diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check
index d42d234386..c2b49989ab 100644
--- a/test/files/run/t5256d.check
+++ b/test/files/run/t5256d.check
@@ -25,4 +25,4 @@ scala.AnyRef {
def foo: scala.Nothing
}
-scala>
+scala> :quit
diff --git a/test/files/run/t5256h.scala b/test/files/run/t5256h.scala
index f58aa6dbe7..435124a469 100644
--- a/test/files/run/t5256h.scala
+++ b/test/files/run/t5256h.scala
@@ -6,5 +6,6 @@ object Test extends App {
val c = cm.classSymbol(mutant.getClass)
println(c)
println(c.fullName)
- println(c.info)
+ // under -Xcheckinit there's an additional $init$ field
+ c.info.toString.lines.filter(_ != " private var bitmap$init$0: Boolean") foreach println
}
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
index 0d7168fa89..7f5af74c3f 100644
--- a/test/files/run/t5313.scala
+++ b/test/files/run/t5313.scala
@@ -11,7 +11,7 @@ object Test extends IcodeComparison {
def bar = {
var kept1 = new Object
val result = new java.lang.ref.WeakReference(kept1)
- kept1 = null // we can't eliminate this assigment because result can observe
+ kept1 = null // we can't eliminate this assignment because result can observe
// when the object has no more references. See SI-5313
kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered
var erased2 = null // we can eliminate this store because it's never used
diff --git a/test/files/run/t5428.check b/test/files/run/t5428.check
index a46514ae7c..52fce09399 100644
--- a/test/files/run/t5428.check
+++ b/test/files/run/t5428.check
@@ -1,2 +1,2 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
Stack(8, 7, 6, 5, 4, 3)
diff --git a/test/files/run/t5535.check b/test/files/run/t5535.check
index a0c87a47f4..84097ccea9 100644
--- a/test/files/run/t5535.check
+++ b/test/files/run/t5535.check
@@ -13,4 +13,4 @@ f: Int => Int = <function1>
scala> println(f(10))
11
-scala>
+scala> :quit
diff --git a/test/files/run/t5537.check b/test/files/run/t5537.check
index b9d521f301..98265ccc92 100644
--- a/test/files/run/t5537.check
+++ b/test/files/run/t5537.check
@@ -13,4 +13,4 @@ res2: List[scala.collection.immutable.List.type] = List()
scala> List[Set.type]()
res3: List[Set.type] = List()
-scala>
+scala> :quit
diff --git a/test/files/run/t5583.check b/test/files/run/t5583.check
index af96405bdd..32d285cbb3 100644
--- a/test/files/run/t5583.check
+++ b/test/files/run/t5583.check
@@ -13,4 +13,4 @@ scala> for (i <- 1 to 10) {s += i}
scala> println(s)
165
-scala>
+scala> :quit
diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check
index 06c6b32599..4bbc54b641 100644
--- a/test/files/run/t5655.check
+++ b/test/files/run/t5655.check
@@ -23,4 +23,4 @@ and import x
x
^
-scala>
+scala> :quit
diff --git a/test/files/run/t5665.scala b/test/files/run/t5665.scala
new file mode 100644
index 0000000000..3ac498b5c0
--- /dev/null
+++ b/test/files/run/t5665.scala
@@ -0,0 +1,13 @@
+object O {
+ trait T {
+ private[this] val c: Int = 42
+ def f =
+ { x: Int => c }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(new O.T{}.f(0) == 42)
+ }
+}
diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala
index ec3b1d26b4..409bcd250c 100755
--- a/test/files/run/t5699.scala
+++ b/test/files/run/t5699.scala
@@ -1,21 +1,13 @@
-import scala.tools.partest.DirectTest
+import scala.tools.partest.ParserTest
import scala.reflect.internal.util.BatchSourceFile
-object Test extends DirectTest {
+object Test extends ParserTest {
// Java code
override def code = """
|public @interface MyAnnotation { String value(); }
""".stripMargin
- override def extraSettings: String = "-usejavacp -Ystop-after:typer -Xprint:parser"
-
- override def show(): Unit = {
- // redirect err to out, for logging
- val prevErr = System.err
- System.setErr(System.out)
- compile()
- System.setErr(prevErr)
- }
+ override def extraSettings: String = "-usejavacp -Ystop-after:namer -Xprint:parser"
override def newSources(sourceCodes: String*) = {
assert(sourceCodes.size == 1)
diff --git a/test/files/run/t576.check b/test/files/run/t576.check
index 6458d5d743..22f3843abf 100644
--- a/test/files/run/t576.check
+++ b/test/files/run/t576.check
@@ -1,4 +1,4 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
1
2
3
diff --git a/test/files/run/t5789.check b/test/files/run/t5789.check
index bcb2382559..193abfaff0 100644
--- a/test/files/run/t5789.check
+++ b/test/files/run/t5789.check
@@ -7,4 +7,4 @@ n: Int = 2
scala> () => n
res0: () => Int = <function0>
-scala>
+scala> :quit
diff --git a/test/files/run/t5830.check b/test/files/run/t5830.check
index 675387eb8e..9260854676 100644
--- a/test/files/run/t5830.check
+++ b/test/files/run/t5830.check
@@ -1,5 +1,6 @@
a with oef
a with oef
+a with oef
a
def with oef
def
diff --git a/test/files/run/t5830.scala b/test/files/run/t5830.scala
index 5d808bfa28..03b9c540e0 100644
--- a/test/files/run/t5830.scala
+++ b/test/files/run/t5830.scala
@@ -1,12 +1,11 @@
import scala.annotation.switch
object Test extends App {
- // TODO: should not emit a switch
- // def noSwitch(ch: Char, eof: Boolean) = (ch: @switch) match {
- // case 'a' if eof => println("a with oef") // then branch
- // }
+ def noSwitch(ch: Char, eof: Boolean) = ch match {
+ case 'a' if eof => println("a with oef") // then branch
+ }
- def onlyThen(ch: Char, eof: Boolean) = (ch: @switch) match {
+ def onlyThen(ch: Char, eof: Boolean) = ch match {
case 'a' if eof => println("a with oef") // then branch
case 'c' =>
}
@@ -18,7 +17,7 @@ object Test extends App {
case 'c' =>
}
- def defaultUnguarded(ch: Char, eof: Boolean) = (ch: @switch) match {
+ def defaultUnguarded(ch: Char, eof: Boolean) = ch match {
case ' ' if eof => println("spacey oef")
case _ => println("default")
}
@@ -44,7 +43,7 @@ object Test extends App {
// case 'c' =>
// }
- // noSwitch('a', true)
+ noSwitch('a', true)
onlyThen('a', true) // 'a with oef'
ifThenElse('a', true) // 'a with oef'
ifThenElse('a', false) // 'a'
diff --git a/test/files/run/t5905-features.flags b/test/files/run/t5905-features.flags
new file mode 100644
index 0000000000..ad51758c39
--- /dev/null
+++ b/test/files/run/t5905-features.flags
@@ -0,0 +1 @@
+-nowarn
diff --git a/test/files/run/t5905-features.scala b/test/files/run/t5905-features.scala
new file mode 100644
index 0000000000..b518d61145
--- /dev/null
+++ b/test/files/run/t5905-features.scala
@@ -0,0 +1,31 @@
+
+import tools.partest.DirectTest
+
+// verify that all languageFeature names are accepted by -language
+object Test extends DirectTest {
+ override def code = "class Code { def f = (1 to 10) size }" // exercise a feature to sanity-check coverage of -language options
+
+ override def extraSettings = s"-usejavacp -d ${testOutput.path}"
+
+ override def show() = {
+ val global = newCompiler("-Ystop-after:typer")
+ compileString(global)("") // warm me up, scotty
+ import global._
+ exitingTyper {
+ //def isFeature(s: Symbol) = s.annotations.exists((a: AnnotationInfo) => a.tpe <:< typeOf[scala.annotation.meta.languageFeature])
+ def isFeature(s: Symbol) = s hasAnnotation definitions.LanguageFeatureAnnot
+ val langf = definitions.languageFeatureModule.typeSignature
+ val feats = langf.declarations filter (s => isFeature(s)) map (_.name.decoded)
+ val xmen = langf.member(TermName("experimental")).typeSignature.declarations filter (s => isFeature(s)) map (s => s"experimental.${s.name.decoded}")
+ val all = (feats ++ xmen) mkString ","
+
+ assert(feats.nonEmpty, "Test must find feature flags.")
+
+ //compile("junk") // tragically, does not fail the test, i.e., arg must not be totally borked
+
+ //dynamics,postfixOps,reflectiveCalls,implicitConversions,higherKinds,existentials,experimental.macros
+ compile(s"-language:$all")
+ }
+ }
+}
+
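The flags the test enumerates can equally be enabled from source; a minimal sketch (hypothetical FeatureDemo) for one of them:

    import scala.language.postfixOps

    object FeatureDemo extends App {
      // no feature warning here thanks to the import; -language:postfixOps has the same effect
      println((1 to 10) size)
    }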
diff --git a/test/files/run/t5905b-features.check b/test/files/run/t5905b-features.check
new file mode 100644
index 0000000000..08c76d74aa
--- /dev/null
+++ b/test/files/run/t5905b-features.check
@@ -0,0 +1 @@
+'noob' is not a valid choice for '-language'
diff --git a/test/files/run/t5905b-features.scala b/test/files/run/t5905b-features.scala
new file mode 100644
index 0000000000..627df8334b
--- /dev/null
+++ b/test/files/run/t5905b-features.scala
@@ -0,0 +1,15 @@
+
+import tools.partest.DirectTest
+
+// verify that only languageFeature names are accepted by -language
+object Test extends DirectTest {
+ override def code = "class Code"
+
+ override def extraSettings = s"-usejavacp -d ${testOutput.path}"
+
+ override def show() = {
+ //compile("-language", "--") // no error
+ compile(s"-language:noob")
+ }
+}
+
diff --git a/test/files/run/t5938.scala b/test/files/run/t5938.scala
new file mode 100644
index 0000000000..59a95ac37f
--- /dev/null
+++ b/test/files/run/t5938.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -d ${testOutput.path}"
+
+ override def code = """
+object O extends C {
+ def main(args: Array[String]): Unit = {
+ }
+  // Static forwarders for foo and the setter foo_= were added more than once in a multi-run compile.
+}
+ """.trim
+
+ override def show(): Unit = {
+ val global = newCompiler()
+ Console.withErr(System.out) {
+ compileString(global)(code)
+ compileString(global)(code)
+ loadClass // was "duplicate name and signature in class X"
+ }
+ }
+
+ def loadClass: Class[_] = {
+ val cl = new java.net.URLClassLoader(Array(testOutput.toFile.toURL));
+ cl.loadClass("O")
+ }
+}
+
+trait T {
+ val foo: String = ""
+}
+class C extends T
+
diff --git a/test/files/run/t6011c.scala b/test/files/run/t6011c.scala
index 0647e3f81a..96a685b9cf 100644
--- a/test/files/run/t6011c.scala
+++ b/test/files/run/t6011c.scala
@@ -6,7 +6,7 @@ object Test extends App {
// at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
// at scala.tools.nsc.Global.abort(Global.scala:249)
// at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
- ((1: Byte): @unchecked @annotation.switch) match {
+ ((1: Byte): @unchecked) match {
case 1 => 2
case 1 => 3 // crash
}
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index a6c4db8f11..edc8b22d6d 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -24,7 +24,7 @@ package <empty> {
(new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0)
}
};
- @SerialVersionUID(0) final <synthetic> class $anonfun$foo$1 extends scala.runtime.AbstractFunction0$mcI$sp with Serializable {
+ @SerialVersionUID(value = 0) final <synthetic> class $anonfun$foo$1 extends scala.runtime.AbstractFunction0$mcI$sp with Serializable {
def <init>($outer: T, methodParam$1: Int, methodLocal$1: Int): <$anon: Function0> = {
$anonfun$foo$1.super.<init>();
()
@@ -60,7 +60,7 @@ package <empty> {
};
scala.this.Predef.print(scala.Int.box(barParam$1))
};
- @SerialVersionUID(0) final <synthetic> class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
+ @SerialVersionUID(value = 0) final <synthetic> class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable {
def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): <$anon: Function0> = {
$anonfun$tryy$1.super.<init>();
()
@@ -81,4 +81,4 @@ package <empty> {
}
}
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
diff --git a/test/files/run/t6086-repl.check b/test/files/run/t6086-repl.check
index 115eff5f85..b904f118e8 100644
--- a/test/files/run/t6086-repl.check
+++ b/test/files/run/t6086-repl.check
@@ -7,4 +7,4 @@ defined class X
scala> scala.reflect.runtime.universe.typeOf[X]
res0: reflect.runtime.universe.Type = X
-scala>
+scala> :quit
diff --git a/test/files/run/t6111.check b/test/files/run/t6111.check
index 1f23a87f73..5880658001 100644
--- a/test/files/run/t6111.check
+++ b/test/files/run/t6111.check
@@ -1,3 +1,3 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
(8,8)
(x,x)
diff --git a/test/files/run/t6114.scala b/test/files/run/t6114.scala
index cb880ece00..8ad02d5bb2 100644
--- a/test/files/run/t6114.scala
+++ b/test/files/run/t6114.scala
@@ -51,7 +51,7 @@ object Test extends App {
val next = list.asScala ++ List(4,5,6)
assert(next != list.asScala)
- // Note: Clone is hidden at this level, so no overriden cloning.
+ // Note: Clone is hidden at this level, so no overridden cloning.
}
testList
diff --git a/test/files/run/t6146b.check b/test/files/run/t6146b.check
index a3b09efcd9..6998873fb7 100644
--- a/test/files/run/t6146b.check
+++ b/test/files/run/t6146b.check
@@ -60,4 +60,4 @@ res2: u.Type = O.S3
scala> memType(S4, fTpe)
res3: u.Type = S4
-scala>
+scala> :quit
diff --git a/test/files/run/t6187.check b/test/files/run/t6187.check
index 0180125809..9a9e266ec6 100644
--- a/test/files/run/t6187.check
+++ b/test/files/run/t6187.check
@@ -29,4 +29,4 @@ res1: List[Int] = List(1)
scala> List("") collect { case x => x }
res2: List[String] = List("")
-scala>
+scala> :quit
diff --git a/test/files/run/t6260c.check b/test/files/run/t6260c.check
index 1a57f2d741..78e9b27371 100644
--- a/test/files/run/t6260c.check
+++ b/test/files/run/t6260c.check
@@ -1,5 +1,9 @@
f(C@2e)
+#partest !-Ydelambdafy:method
Test$$anonfun$$apply
+#partest -Ydelambdafy:method
+Test$lambda$1$$apply
+#partest
apply
g(C@2e)
diff --git a/test/files/run/t6273.check b/test/files/run/t6273.check
index bef0b227d2..3b682800df 100644
--- a/test/files/run/t6273.check
+++ b/test/files/run/t6273.check
@@ -12,4 +12,4 @@ x: String =
y = 55
"
-scala>
+scala> :quit
diff --git a/test/files/run/t6292.check b/test/files/run/t6292.check
index 6232ba7519..6f7430d5b8 100644
--- a/test/files/run/t6292.check
+++ b/test/files/run/t6292.check
@@ -1 +1 @@
-warning: there were 7 deprecation warning(s); re-run with -deprecation for details
+warning: there were 7 deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t6318_primitives.check b/test/files/run/t6318_primitives.check
index b330f91276..4bc5e598eb 100644
--- a/test/files/run/t6318_primitives.check
+++ b/test/files/run/t6318_primitives.check
@@ -1,36 +1,54 @@
-true
+Checking if byte matches byte
Some(1)
-false
+Checking if byte matches short
None
-true
+Checking if class java.lang.Byte matches byte
Some(1)
-false
+Checking if short matches short
+Some(1)
+Checking if short matches char
None
-true
+Checking if class java.lang.Short matches short
+Some(1)
+Checking if char matches char
Some()
-false
+Checking if char matches int
None
-true
+Checking if class java.lang.Character matches char
+Some()
+Checking if int matches int
Some(1)
-false
+Checking if int matches long
None
-true
+Checking if class java.lang.Integer matches int
Some(1)
-false
+Checking if long matches long
+Some(1)
+Checking if long matches float
None
-true
+Checking if class java.lang.Long matches long
+Some(1)
+Checking if float matches float
Some(1.0)
-false
+Checking if float matches double
None
-true
+Checking if class java.lang.Float matches float
Some(1.0)
-false
+Checking if double matches double
+Some(1.0)
+Checking if double matches boolean
None
-true
+Checking if class java.lang.Double matches double
+Some(1.0)
+Checking if boolean matches boolean
Some(true)
-false
+Checking if boolean matches void
None
-true
+Checking if class java.lang.Boolean matches boolean
+Some(true)
+Checking if void matches void
Some(())
-false
+Checking if void matches byte
None
+Checking if class scala.runtime.BoxedUnit matches void
+Some(())
diff --git a/test/files/run/t6318_primitives.scala b/test/files/run/t6318_primitives.scala
index 30f27120b3..bc8ec88359 100644
--- a/test/files/run/t6318_primitives.scala
+++ b/test/files/run/t6318_primitives.scala
@@ -2,70 +2,88 @@ import scala.reflect.{ClassTag, classTag}
object Test extends App {
def test[T: ClassTag](x: T) {
- println(classTag[T].runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[T].runtimeClass}")
println(classTag[T].unapply(x))
}
{
val x = 1.toByte
- println(ClassTag.Byte.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Byte].runtimeClass}")
println(ClassTag.Byte.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Short].runtimeClass}")
+ println(ClassTag.Short.unapply(x))
test(x)
}
{
val x = 1.toShort
- println(ClassTag.Short.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Short].runtimeClass}")
println(ClassTag.Short.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Char].runtimeClass}")
+ println(ClassTag.Char.unapply(x))
test(x)
}
{
val x = 1.toChar
- println(ClassTag.Char.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Char].runtimeClass}")
println(ClassTag.Char.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Int].runtimeClass}")
+ println(ClassTag.Int.unapply(x))
test(x)
}
{
val x = 1.toInt
- println(ClassTag.Int.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Int].runtimeClass}")
println(ClassTag.Int.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Long].runtimeClass}")
+ println(ClassTag.Long.unapply(x))
test(x)
}
{
val x = 1.toLong
- println(ClassTag.Long.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Long].runtimeClass}")
println(ClassTag.Long.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Float].runtimeClass}")
+ println(ClassTag.Float.unapply(x))
test(x)
}
{
val x = 1.toFloat
- println(ClassTag.Float.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Float].runtimeClass}")
println(ClassTag.Float.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Double].runtimeClass}")
+ println(ClassTag.Double.unapply(x))
test(x)
}
{
val x = 1.toDouble
- println(ClassTag.Double.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Double].runtimeClass}")
println(ClassTag.Double.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Boolean].runtimeClass}")
+ println(ClassTag.Boolean.unapply(x))
test(x)
}
{
val x = true
- println(ClassTag.Boolean.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Boolean].runtimeClass}")
println(ClassTag.Boolean.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Unit].runtimeClass}")
+ println(ClassTag.Unit.unapply(x))
test(x)
}
{
val x = ()
- println(ClassTag.Unit.runtimeClass.isAssignableFrom(x.getClass))
+ println(s"Checking if ${x.getClass} matches ${classTag[Unit].runtimeClass}")
println(ClassTag.Unit.unapply(x))
+ println(s"Checking if ${x.getClass} matches ${classTag[Byte].runtimeClass}")
+ println(ClassTag.Byte.unapply(x))
test(x)
}
-} \ No newline at end of file
+}
diff --git a/test/files/run/t6320.check b/test/files/run/t6320.check
index 013acc1c54..af7c865690 100644
--- a/test/files/run/t6320.check
+++ b/test/files/run/t6320.check
@@ -10,4 +10,4 @@ defined class Dyn
scala> new Dyn(Map("foo" -> 10)).foo[Int]
res0: Int = 10
-scala>
+scala> :quit
diff --git a/test/files/run/t6327.flags b/test/files/run/t6327.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/t6327.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check
index 5049426ab4..ebb1aace7c 100644
--- a/test/files/run/t6329_repl.check
+++ b/test/files/run/t6329_repl.check
@@ -5,31 +5,31 @@ scala> import scala.reflect.classTag
import scala.reflect.classTag
scala> classManifest[scala.List[_]]
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
scala> classTag[scala.List[_]]
res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
scala> classManifest[scala.collection.immutable.List[_]]
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
scala> classTag[scala.collection.immutable.List[_]]
res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
scala> classManifest[Predef.Set[_]]
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
scala> classTag[Predef.Set[_]]
res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
scala> classManifest[scala.collection.immutable.Set[_]]
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
scala> classTag[scala.collection.immutable.Set[_]]
res7: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
-scala>
+scala> :quit
diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check
index 44c41cfd03..84297a629f 100644
--- a/test/files/run/t6329_repl_bug.check
+++ b/test/files/run/t6329_repl_bug.check
@@ -8,10 +8,10 @@ scala> import scala.reflect.runtime._
import scala.reflect.runtime._
scala> classManifest[List[_]]
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
scala> scala.reflect.classTag[List[_]]
res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
-scala>
+scala> :quit
diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check
index 640d168a8a..01bf0636ea 100644
--- a/test/files/run/t6329_vanilla_bug.check
+++ b/test/files/run/t6329_vanilla_bug.check
@@ -1,3 +1,3 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
scala.collection.immutable.List[<?>]
scala.collection.immutable.List
diff --git a/test/files/run/t6381.check b/test/files/run/t6381.check
index 4ed11d59ff..49c6a784ad 100644
--- a/test/files/run/t6381.check
+++ b/test/files/run/t6381.check
@@ -16,4 +16,4 @@ defined term macro pos: String
scala> pos
res0: String = class scala.reflect.internal.util.RangePosition
-scala>
+scala> :quit
diff --git a/test/files/run/t6434.check b/test/files/run/t6434.check
index f898b6b781..0a75ae2bd5 100644
--- a/test/files/run/t6434.check
+++ b/test/files/run/t6434.check
@@ -7,4 +7,4 @@ f: (x: => Int)Int
scala> f _
res0: (=> Int) => Int = <function1>
-scala>
+scala> :quit
diff --git a/test/files/run/t6439.check b/test/files/run/t6439.check
index f8d5b3a8cd..c4b7591069 100644
--- a/test/files/run/t6439.check
+++ b/test/files/run/t6439.check
@@ -70,4 +70,4 @@ defined object lookup
scala> lookup("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
res0: $r.intp.global.Symbol = type F
-scala>
+scala> :quit
diff --git a/test/files/run/t6440.check b/test/files/run/t6440.check
index 27d5d1380e..4d8618182b 100644
--- a/test/files/run/t6440.check
+++ b/test/files/run/t6440.check
@@ -1,4 +1,4 @@
-pos: source-newSource1.scala,line-9,offset=109 bad symbolic reference to <root>.pack1 encountered in class file 'U.class'.
-Cannot access term pack1 in package <root>. The current classpath may be
-missing a definition for <root>.pack1, or U.class may have been compiled against a version that's
-incompatible with the one found on the current classpath. ERROR
+pos: source-newSource1.scala,line-9,offset=109 reference to U is ambiguous;
+it is imported twice in the same scope by
+import pack2._
+and import X._ ERROR
diff --git a/test/files/run/t6440.scala b/test/files/run/t6440.scala
index 5a3a4150d9..94eda3642e 100644
--- a/test/files/run/t6440.scala
+++ b/test/files/run/t6440.scala
@@ -41,7 +41,7 @@ object Test extends StoreReporterDirectTest {
assert(tClass.delete())
assert(pack1.delete())
- // bad symbolic reference error expected (but no stack trace!)
+ // should report ambiguous import, despite the fact that a parent of pack2.U is absent
compileCode(app)
println(filteredInfos.mkString("\n"))
}
diff --git a/test/files/run/t6440b.check b/test/files/run/t6440b.check
index 0b642c2c35..a6100d6d1e 100644
--- a/test/files/run/t6440b.check
+++ b/test/files/run/t6440b.check
@@ -1,4 +1,5 @@
-pos: NoPosition bad symbolic reference to pack1.T encountered in class file 'U.class'.
-Cannot access type T in package pack1. The current classpath may be
-missing a definition for pack1.T, or U.class may have been compiled against a version that's
-incompatible with the one found on the current classpath. ERROR
+pos: NoPosition missing or invalid dependency detected while loading class file 'U.class'.
+Could not access type T in package pack1,
+because it (or its dependencies) are missing. Check your build definition for
+missing or conflicting dependencies. (Re-run with `-Ylog-classpath` to see the problematic classpath.)
+A full rebuild may help if 'U.class' was compiled against an incompatible version of pack1. ERROR
diff --git a/test/files/run/t6481.check b/test/files/run/t6481.check
index df40722242..4a3f6f7ee9 100644
--- a/test/files/run/t6481.check
+++ b/test/files/run/t6481.check
@@ -1,4 +1,4 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
delayed init
new foo(1, 2)
delayed init
diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala
new file mode 100644
index 0000000000..52fabef6b8
--- /dev/null
+++ b/test/files/run/t6502.scala
@@ -0,0 +1,146 @@
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter.ILoop
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.partest._
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String, jarFileName: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code)
+ }
+
+ // TODO the flat classpath doesn't support classpath invalidation yet, so we force the recursive one;
+ // this is the only test that needs such a workaround
+ override def settings = {
+ val settings = new Settings
+ settings.YclasspathImpl.value = ClassPathRepresentationType.Recursive
+ settings
+ }
+
+ def app1 = """
+ package test
+
+ object Test extends App {
+ def test(): Unit = {
+ println("testing...")
+ }
+ }"""
+
+ def app2 = """
+ package test
+
+ object Test extends App {
+ def test(): Unit = {
+ println("testing differently...")
+ }
+ }"""
+
+ def app3 = """
+ package test
+
+ object Test3 extends App {
+ def test(): Unit = {
+ println("new object in existing package")
+ }
+ }"""
+
+ def app6 = """
+ package test6
+ class A extends Test { println("created test6.A") }
+ class Z extends Test { println("created test6.Z") }
+ trait Test"""
+
+ def test1(): Unit = {
+ val jar = "test1.jar"
+ compileCode(app1, jar)
+
+ val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar", "test.Test.test()")
+ val output = ILoop.run(codeToRun, settings)
+ val lines = output.split("\n")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("testing...")
+ }
+ }
+
+ def test2(): Unit = {
+ // should reject jars with conflicting entries
+ val jar1 = "test1.jar"
+ val jar2 = "test2.jar"
+ compileCode(app2, jar2)
+
+ val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar2")
+ val output = ILoop.run(codeToRun, settings)
+ val lines = output.split("\n")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("test2.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+ }
+ }
+
+ def test3(): Unit = {
+ // should accept jars with overlapping packages, but no conflicts
+ val jar1 = "test1.jar"
+ val jar3 = "test3.jar"
+ compileCode(app3, jar3)
+
+ val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar3", "test.Test3.test()")
+ val output = ILoop.run(codeToRun, settings)
+ val lines = output.split("\n")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("new object in existing package")
+ }
+ }
+
+ def test4(): Unit = {
+ // twice the same jar should be rejected
+ val jar1 = "test1.jar"
+ val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar1", s":require ${testOutput.path}/$jar1")
+ val output = ILoop.run(codeToRun, settings)
+ val lines = output.split("\n")
+ assert {
+ lines(4).contains("Added") && lines(4).contains("test1.jar")
+ }
+ assert {
+ lines(lines.length-3).contains("test1.jar") && lines(lines.length-3).contains("existing classpath entries conflict")
+ }
+ }
+
+ def test5(): Unit = {
+ val codeToRun = ":require /does/not/exist.jar"
+ val output = ILoop.run(codeToRun, settings)
+ assert(!output.contains("NullPointerException"), output)
+ assert(output.contains("Cannot load '/does/not/exist.jar'"), output)
+ }
+
+ def test6(): Unit = {
+ // Avoid java.lang.NoClassDefFoundError triggered by the old approach of using a Java
+ // classloader to parse .class files in order to read their names.
+ val jar = "test6.jar"
+ compileCode(app6, jar)
+ val codeToRun = toCodeInSeparateLines(s":require ${testOutput.path}/$jar", "import test6._; new A; new Z")
+ val output = ILoop.run(codeToRun, settings)
+ assert(output.contains("created test6.A"), output)
+ assert(output.contains("created test6.Z"), output)
+ }
+
+ def show(): Unit = {
+ test1()
+ test2()
+ test3()
+ test4()
+ test5()
+ test6()
+ }
+
+ def toCodeInSeparateLines(lines: String*): String = lines mkString "\n"
+}
diff --git a/test/files/run/t6507.check b/test/files/run/t6507.check
index 3536c42381..5da4aa3a24 100644
--- a/test/files/run/t6507.check
+++ b/test/files/run/t6507.check
@@ -21,4 +21,4 @@ scala> res0
!
res1: A = A
-scala>
+scala> :quit
diff --git a/test/files/run/t6541-option.scala b/test/files/run/t6541-option.scala
new file mode 100644
index 0000000000..2c10c9e09d
--- /dev/null
+++ b/test/files/run/t6541-option.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+:setting -Xsource:2.12
+case class C12(clazz: Class[_])
+val o: Option[Class[T] forSome { type T}] = C12.unapply(C12(classOf[String]))
+
+:setting -Xsource:2.11
+import scala.language.existentials
+case class C11(clazz: Class[_])
+val o: Option[Class[T]] forSome { type T } = C11.unapply(C11(classOf[String]))
+ """
+
+ override def show() = {
+ val r = eval().mkString("\n")
+ assert(!(r.contains("warning") || r.contains("error")), r)
+ }
+}
diff --git a/test/files/run/t6541.flags b/test/files/run/t6541.flags
new file mode 100644
index 0000000000..68d0ddfec2
--- /dev/null
+++ b/test/files/run/t6541.flags
@@ -0,0 +1 @@
+-feature -Xfatal-warnings -Xsource:2.12 \ No newline at end of file
diff --git a/test/files/run/t6541.scala b/test/files/run/t6541.scala
new file mode 100644
index 0000000000..f127143691
--- /dev/null
+++ b/test/files/run/t6541.scala
@@ -0,0 +1,25 @@
+class A
+class B[T](x: T)
+case class C(a: A, b: B[_])
+
+case class D(a: A, b: B[_]*)
+
+case class E(c: Class[_])
+
+object Test extends App {
+ def f1(c: C) = c match {
+ case C(a, b) => ()
+ }
+
+ def f2(d: D) = d match {
+ case D(a, b1, b2) => ()
+ }
+
+ def f3(e: E) = e match {
+ case E(c) => ()
+ }
+
+ f1(C(new A, new B(1)))
+ f2(D(new A, new B(1), new B(2)))
+ f3(E(classOf[E]))
+}
diff --git a/test/files/run/t6549.check b/test/files/run/t6549.check
index d5dfc5ebe8..be3445927e 100644
--- a/test/files/run/t6549.check
+++ b/test/files/run/t6549.check
@@ -25,4 +25,4 @@ m(scala.Symbol("s")).xxx: Any = 's
scala> val `"` = 0
": Int = 0
-scala>
+scala> :quit
diff --git a/test/files/run/t6555.check b/test/files/run/t6555.check
index 9ac115a13f..e3b467ce7c 100644
--- a/test/files/run/t6555.check
+++ b/test/files/run/t6555.check
@@ -6,7 +6,7 @@ package <empty> {
()
};
private[this] val f: Int => Int = {
- @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1$mcII$sp with Serializable {
+ @SerialVersionUID(value = 0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1$mcII$sp with Serializable {
def <init>(): <$anon: Int => Int> = {
$anonfun.super.<init>();
()
diff --git a/test/files/run/t6622.check b/test/files/run/t6622.check
new file mode 100644
index 0000000000..5d006d88e6
--- /dev/null
+++ b/test/files/run/t6622.check
@@ -0,0 +1,10 @@
+ O1.resultVal isMemberClass = false, null
+class A$1
+ O1.resultDef isMemberClass = false, public void O1$.resultDef()
+class A$2
+ C2.resultVal isMemberClass = false, null
+class $B$1
+ O3.resultDef isMemberClass = false, public void O3$.resultDef()
+class C$1
+ O4.resultDefDefault isMemberClass = false, public java.lang.Object O4$.resultDefDefault$default$1()
+class C$2
diff --git a/test/files/run/t6622.scala b/test/files/run/t6622.scala
new file mode 100644
index 0000000000..de8ffa01bf
--- /dev/null
+++ b/test/files/run/t6622.scala
@@ -0,0 +1,50 @@
+import Test.check
+
+object O1 {
+ lazy val resultVal = {
+ class A
+ check("O1.resultVal", classOf[A])
+ }
+
+ def resultDef = {
+ class A
+ check("O1.resultDef", classOf[A])
+ }
+}
+
+class C2 {
+ val resultVal = {
+ val tmp = {
+ class B
+ check("C2.resultVal", classOf[B])
+ }
+ }
+}
+
+object O3 {
+ def resultDef = {
+ class C
+ check("O3.resultDef", classOf[C])
+ }
+}
+
+object O4 {
+ def resultDefDefault(a: Any = {
+ class C
+ check("O4.resultDefDefault", classOf[C])
+ }) = ();
+}
+
+
+object Test extends App {
+ def check(desc: String, clazz: Class[_]) {
+ println(s" $desc isMemberClass = ${clazz.isMemberClass}, ${clazz.getEnclosingMethod}")
+ println(reflect.runtime.currentMirror.classSymbol(clazz))
+ }
+
+ O1.resultVal
+ O1.resultDef
+ new C2().resultVal
+ O3.resultDef
+ O4.resultDefDefault()
+}
diff --git a/test/files/run/t6631.scala b/test/files/run/t6631.scala
deleted file mode 100644
index e472b83d50..0000000000
--- a/test/files/run/t6631.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import reflect.ClassTag
-
-object Test extends App {
- def intercept[T <: Throwable : ClassTag](act: => Any) = try {
- act
- } catch {
- case x: Throwable =>
- val cls = implicitly[ClassTag[T]].runtimeClass
- assert(cls.isInstance(x), (x.getClass, x, cls).toString)
- }
- assert(s"""\f\r\n\t""" == "\f\r\n\t")
-
- import StringContext.InvalidEscapeException
- intercept[InvalidEscapeException](s"""\""")
- intercept[InvalidEscapeException](s"""\x""")
- intercept[InvalidEscapeException](s"\")
-
-}
diff --git a/test/files/run/t6663.flags b/test/files/run/t6663.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/t6663.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/t6669.scala b/test/files/run/t6669.scala
index e18f2514a9..27c4970d60 100644
--- a/test/files/run/t6669.scala
+++ b/test/files/run/t6669.scala
@@ -1,4 +1,5 @@
import java.io.{ByteArrayOutputStream, PrintStream}
+import scala.reflect.io.File
object Test extends App {
val baos = new ByteArrayOutputStream()
@@ -9,9 +10,11 @@ object Test extends App {
scala.tools.scalap.Main.main(Array("-verbose", "java.lang.Object"))
}
+ val currentLocationCpFragment = File.pathSeparator + "."
+
// now make sure we saw the '.' in the classpath
val msg1 = baos.toString()
- assert(msg1 contains "directory classpath: .", s"Did not see '.' in the default class path. Full results were:\n$msg1")
+ assert(msg1 contains currentLocationCpFragment, s"Did not see '.' in the default class path. Full results were:\n$msg1")
// then test again with a user specified classpath
baos.reset
@@ -22,5 +25,5 @@ object Test extends App {
// now make sure we did not see the '.' in the classpath
val msg2 = baos.toString()
- assert(!(msg2 contains "directory classpath: ."), s"Did saw '.' in the user specified class path. Full results were:\n$msg2")
+ assert(!(msg2 contains currentLocationCpFragment), s"Saw '.' in the user specified class path. Full results were:\n$msg2")
}
diff --git a/test/files/run/t6690.check b/test/files/run/t6690.check
index a92ddc0e51..a9ecc29fea 100644
--- a/test/files/run/t6690.check
+++ b/test/files/run/t6690.check
@@ -1 +1 @@
-warning: there were 2 deprecation warning(s); re-run with -deprecation for details
+warning: there were two deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t6731.flags b/test/files/run/t6731.flags
new file mode 100644
index 0000000000..ea7fc37e1a
--- /dev/null
+++ b/test/files/run/t6731.flags
@@ -0,0 +1 @@
+-Yrangepos:false
diff --git a/test/files/run/t6863.check b/test/files/run/t6863.check
index fea22b582f..d4df5f7a74 100644
--- a/test/files/run/t6863.check
+++ b/test/files/run/t6863.check
@@ -10,4 +10,4 @@ t6863.scala:46: warning: comparing values of types Unit and Unit using `==' will
t6863.scala:59: warning: comparing values of types Unit and Unit using `==' will always yield true
assert({ () => x }.apply == ())
^
-warning: there were 4 deprecation warning(s); re-run with -deprecation for details
+warning: there were four deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t6935.check b/test/files/run/t6935.check
index 844ca54682..df1629dd7e 100644
--- a/test/files/run/t6935.check
+++ b/test/files/run/t6935.check
@@ -1 +1 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
diff --git a/test/files/run/t6937.check b/test/files/run/t6937.check
index 4729dc7006..5c5d4485b6 100644
--- a/test/files/run/t6937.check
+++ b/test/files/run/t6937.check
@@ -19,4 +19,4 @@ apiru: scala.reflect.api.Universe = <lazy>
scala> apiru.typeTag[A].in(cm)
res0: reflect.runtime.universe.TypeTag[A] = TypeTag[A]
-scala>
+scala> :quit
diff --git a/test/files/run/t6988.check b/test/files/run/t6988.check
new file mode 100644
index 0000000000..5db04832d6
--- /dev/null
+++ b/test/files/run/t6988.check
@@ -0,0 +1,2 @@
+#1 13
+#2 13
diff --git a/test/files/run/t6988.scala b/test/files/run/t6988.scala
new file mode 100644
index 0000000000..45dfe33461
--- /dev/null
+++ b/test/files/run/t6988.scala
@@ -0,0 +1,9 @@
+case class User()
+
+@SerialVersionUID(13l) case class IdentifyMessage1(userName: String, user: User, code: Int)
+@SerialVersionUID(10l + 3l) case class IdentifyMessage2(userName: String, user: User, code: Int)
+
+object Test extends App {
+ println("#1 " + java.io.ObjectStreamClass.lookup(IdentifyMessage1("hei", User(), 8).getClass).getSerialVersionUID)
+ println("#2 " + java.io.ObjectStreamClass.lookup(IdentifyMessage2("hei", User(), 8).getClass).getSerialVersionUID)
+}
diff --git a/test/files/run/t7019.scala b/test/files/run/t7019.scala
new file mode 100644
index 0000000000..5dcc09d2b6
--- /dev/null
+++ b/test/files/run/t7019.scala
@@ -0,0 +1,10 @@
+final class Foo(val i: Int) extends AnyVal {
+ def foo() = go(i)
+ private[this] def go(i: Int) = i * 2
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(new Foo(1).foo() == 2)
+ }
+}
diff --git a/test/files/run/t7096.scala b/test/files/run/t7096.scala
index e7a894fc23..f723d70abe 100644
--- a/test/files/run/t7096.scala
+++ b/test/files/run/t7096.scala
@@ -1,5 +1,5 @@
/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
+ * filter: inliner warning; re-run with
*/
import scala.tools.partest._
import scala.tools.nsc._
diff --git a/test/files/run/t7185.check b/test/files/run/t7185.check
index ebf85b731f..e4f80a8ff9 100644
--- a/test/files/run/t7185.check
+++ b/test/files/run/t7185.check
@@ -29,4 +29,4 @@ res0: Any =
}
}
-scala>
+scala> :quit
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
index b7443aa0c4..e35cfc90c0 100644
--- a/test/files/run/t7319.check
+++ b/test/files/run/t7319.check
@@ -5,15 +5,15 @@ scala> class M[A]
defined class M
scala> implicit def ma0[A](a: A): M[A] = null
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
ma0: [A](a: A)M[A]
scala> implicit def ma1[A](a: A): M[A] = null
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
ma1: [A](a: A)M[A]
scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
scala> convert(Some[Int](0))
@@ -40,4 +40,4 @@ scala> Range(1,2).toArray: Seq[_]
scala> 0
res2: Int = 0
-scala>
+scala> :quit
diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags
index c8547a27dc..ffc65f4b81 100644
--- a/test/files/run/t7407.flags
+++ b/test/files/run/t7407.flags
@@ -1 +1 @@
--Ynooptimise -Ybackend:GenBCode
+-Yopt:l:none -Ybackend:GenBCode
diff --git a/test/files/run/t7407b.flags b/test/files/run/t7407b.flags
index c8547a27dc..c30091d3de 100644
--- a/test/files/run/t7407b.flags
+++ b/test/files/run/t7407b.flags
@@ -1 +1 @@
--Ynooptimise -Ybackend:GenBCode
+-Ybackend:GenBCode
diff --git a/test/files/run/t7459a.scala b/test/files/run/t7459a.scala
new file mode 100644
index 0000000000..e9653c6e79
--- /dev/null
+++ b/test/files/run/t7459a.scala
@@ -0,0 +1,14 @@
+class LM {
+ class Node[B1]
+ case class CC(n: LM)
+
+ // crash
+ val f: (LM => Any) = {
+ case tttt =>
+ new tttt.Node[Any]()
+ }
+}
+
+object Test extends App {
+ new LM().f(new LM())
+}
diff --git a/test/files/run/t7459b-optimize.flags b/test/files/run/t7459b-optimize.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/run/t7459b-optimize.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t7459b-optimize.scala b/test/files/run/t7459b-optimize.scala
new file mode 100644
index 0000000000..605890962c
--- /dev/null
+++ b/test/files/run/t7459b-optimize.scala
@@ -0,0 +1,21 @@
+class LM {
+ class Node[B1]
+
+ // crash
+ val g: (CC => Any) = {
+ case CC(tttt) =>
+ new tttt.Node[Any]()
+ }
+
+ val h: (Some[CC] => Any) = {
+ case Some(CC(tttt)) =>
+ new tttt.Node[Any]()
+ }
+}
+
+object Test extends App {
+ new LM().g(new CC(new LM()))
+ new LM().h(Some(new CC(new LM())))
+}
+case class CC(n: LM)
+
diff --git a/test/files/run/t7459b.scala b/test/files/run/t7459b.scala
new file mode 100644
index 0000000000..605890962c
--- /dev/null
+++ b/test/files/run/t7459b.scala
@@ -0,0 +1,21 @@
+class LM {
+ class Node[B1]
+
+ // crash
+ val g: (CC => Any) = {
+ case CC(tttt) =>
+ new tttt.Node[Any]()
+ }
+
+ val h: (Some[CC] => Any) = {
+ case Some(CC(tttt)) =>
+ new tttt.Node[Any]()
+ }
+}
+
+object Test extends App {
+ new LM().g(new CC(new LM()))
+ new LM().h(Some(new CC(new LM())))
+}
+case class CC(n: LM)
+
diff --git a/test/files/run/t7459c.scala b/test/files/run/t7459c.scala
new file mode 100644
index 0000000000..144c5d793b
--- /dev/null
+++ b/test/files/run/t7459c.scala
@@ -0,0 +1,16 @@
+class LM {
+ class Node[B1]
+
+ // crash
+ val g: (CC => Any) = {
+ case CC(tttt) =>
+ tttt.## // no crash
+ new tttt.Node[Any]()
+ }
+}
+
+object Test extends App {
+ new LM().g(new CC(new LM()))
+}
+case class CC(n: LM)
+
diff --git a/test/files/run/t7459d.scala b/test/files/run/t7459d.scala
new file mode 100644
index 0000000000..3263701f9d
--- /dev/null
+++ b/test/files/run/t7459d.scala
@@ -0,0 +1,15 @@
+class LM {
+ class Node[B1]
+ case class CC(n: LM)
+
+ // crash
+ val f: (LM => Any) = {
+ case tttt =>
+ val uuuu: (tttt.type, Any) = (tttt, 0)
+ new uuuu._1.Node[Any]()
+ }
+}
+
+object Test extends App {
+ new LM().f(new LM())
+}
diff --git a/test/files/run/t7459f.scala b/test/files/run/t7459f.scala
new file mode 100644
index 0000000000..63e2109560
--- /dev/null
+++ b/test/files/run/t7459f.scala
@@ -0,0 +1,12 @@
+object Test extends App {
+ class C
+
+ case class FooSeq(x: Int, y: String, z: C*)
+
+ FooSeq(1, "a", new C()) match {
+ case FooSeq(1, "a", x@_* ) =>
+ //println(x.toList)
+ x.asInstanceOf[x.type]
+ assert(x.isInstanceOf[x.type])
+ }
+}
diff --git a/test/files/run/t7482a.check b/test/files/run/t7482a.check
index 943538f352..a21ef7b68f 100644
--- a/test/files/run/t7482a.check
+++ b/test/files/run/t7482a.check
@@ -7,4 +7,4 @@ v: java.util.ArrayList[String] = []
scala> val v: java.util.ArrayList[String] = new java.util.ArrayList[String](5)
v: java.util.ArrayList[String] = []
-scala>
+scala> :quit
diff --git a/test/files/run/t7582.check b/test/files/run/t7582.check
index 225fb1ace8..2a11210000 100644
--- a/test/files/run/t7582.check
+++ b/test/files/run/t7582.check
@@ -1,2 +1,6 @@
-warning: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+#partest !-Ybackend:GenBCode
+warning: there was one inliner warning; re-run with -Yinline-warnings for details
+#partest -Ybackend:GenBCode
+warning: there was one inliner warning; re-run with -Yopt-warnings for details
+#partest
2
diff --git a/test/files/run/t7582b.check b/test/files/run/t7582b.check
index 225fb1ace8..2a11210000 100644
--- a/test/files/run/t7582b.check
+++ b/test/files/run/t7582b.check
@@ -1,2 +1,6 @@
-warning: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+#partest !-Ybackend:GenBCode
+warning: there was one inliner warning; re-run with -Yinline-warnings for details
+#partest -Ybackend:GenBCode
+warning: there was one inliner warning; re-run with -Yopt-warnings for details
+#partest
2
diff --git a/test/files/run/t7634.check b/test/files/run/t7634.check
index aea3b94da5..9c6b8b47dd 100644
--- a/test/files/run/t7634.check
+++ b/test/files/run/t7634.check
@@ -5,4 +5,4 @@ Type :help for more information.
scala> .lines
res1: List[String] = List(shello, world.)
-scala>
+scala> :quit
diff --git a/test/files/run/t7741a/GroovyInterface$1Dump.java b/test/files/run/t7741a/GroovyInterface$1Dump.java
new file mode 100644
index 0000000000..0c0eab3f1b
--- /dev/null
+++ b/test/files/run/t7741a/GroovyInterface$1Dump.java
@@ -0,0 +1,222 @@
+import java.util.*;
+import scala.tools.asm.*;
+
+// generated with
+// git clone alewando/scala_groovy_interop
+// SCALA_HOME=... GROOVY_HOME=... ant
+// cd /code/scala2
+// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1'
+// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1'
+public class GroovyInterface$1Dump implements Opcodes {
+
+ public static byte[] dump () throws Exception {
+
+ ClassWriter cw = new ClassWriter(0);
+ FieldVisitor fv;
+ MethodVisitor mv;
+ AnnotationVisitor av0;
+
+ cw.visit(V1_5, ACC_SUPER + ACC_SYNTHETIC, "GroovyInterface$1", null, "java/lang/Object", new String[] {});
+
+ cw.visitInnerClass("GroovyInterface$1", "GroovyInterface", "1", ACC_SYNTHETIC);
+
+ {
+ fv = cw.visitField(ACC_STATIC + ACC_SYNTHETIC, "$class$GroovyInterface", "Ljava/lang/Class;", null, null);
+ fv.visitEnd();
+ }
+ {
+ fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$staticClassInfo", "Lorg/codehaus/groovy/reflection/ClassInfo;", null, null);
+ fv.visitEnd();
+ }
+ {
+ fv = cw.visitField(ACC_PUBLIC + ACC_STATIC + ACC_TRANSIENT + ACC_SYNTHETIC, "__$stMC", "Z", null, null);
+ fv.visitEnd();
+ }
+ {
+ fv = cw.visitField(ACC_PRIVATE + ACC_TRANSIENT + ACC_SYNTHETIC, "metaClass", "Lgroovy/lang/MetaClass;", null, null);
+ fv.visitEnd();
+ }
+ {
+ fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$callSiteArray", "Ljava/lang/ref/SoftReference;", null, null);
+ fv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
+ mv.visitMethodInsn(INVOKESTATIC, "GroovyInterface$1", "$getCallSiteArray", "()[Lorg/codehaus/groovy/runtime/callsite/CallSite;", false);
+ mv.visitVarInsn(ASTORE, 1);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "$getStaticMetaClass", "()Lgroovy/lang/MetaClass;", false);
+ mv.visitVarInsn(ASTORE, 2);
+ mv.visitVarInsn(ALOAD, 2);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitInsn(SWAP);
+ mv.visitFieldInsn(PUTFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;");
+ mv.visitVarInsn(ALOAD, 2);
+ mv.visitInsn(POP);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(2, 3);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PROTECTED + ACC_SYNTHETIC, "$getStaticMetaClass", "()Lgroovy/lang/MetaClass;", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "getClass", "()Ljava/lang/Class;", false);
+ mv.visitLdcInsn(Type.getType("LGroovyInterface$1;"));
+ Label l0 = new Label();
+ mv.visitJumpInsn(IF_ACMPEQ, l0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/runtime/ScriptBytecodeAdapter", "initMetaClass", "(Ljava/lang/Object;)Lgroovy/lang/MetaClass;", false);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l0);
+ mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$staticClassInfo", "Lorg/codehaus/groovy/reflection/ClassInfo;");
+ mv.visitVarInsn(ASTORE, 1);
+ mv.visitVarInsn(ALOAD, 1);
+ Label l1 = new Label();
+ mv.visitJumpInsn(IFNONNULL, l1);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Object", "getClass", "()Ljava/lang/Class;", false);
+ mv.visitMethodInsn(INVOKESTATIC, "org/codehaus/groovy/reflection/ClassInfo", "getClassInfo", "(Ljava/lang/Class;)Lorg/codehaus/groovy/reflection/ClassInfo;", false);
+ mv.visitInsn(DUP);
+ mv.visitVarInsn(ASTORE, 1);
+ mv.visitFieldInsn(PUTSTATIC, "GroovyInterface$1", "$staticClassInfo", "Lorg/codehaus/groovy/reflection/ClassInfo;");
+ mv.visitLabel(l1);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "org/codehaus/groovy/reflection/ClassInfo", "getMetaClass", "()Lgroovy/lang/MetaClass;", false);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(2, 2);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "getMetaClass", "()Lgroovy/lang/MetaClass;", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitFieldInsn(GETFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;");
+ mv.visitInsn(DUP);
+ Label l0 = new Label();
+ mv.visitJumpInsn(IFNULL, l0);
+ mv.visitInsn(ARETURN);
+ mv.visitLabel(l0);
+ mv.visitInsn(POP);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitInsn(DUP);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "$getStaticMetaClass", "()Lgroovy/lang/MetaClass;", false);
+ mv.visitFieldInsn(PUTFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;");
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitFieldInsn(GETFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;");
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(2, 1);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "setMetaClass", "(Lgroovy/lang/MetaClass;)V", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitFieldInsn(PUTFIELD, "GroovyInterface$1", "metaClass", "Lgroovy/lang/MetaClass;");
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(2, 2);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "invokeMethod", "(Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "getMetaClass", "()Lgroovy/lang/MetaClass;", false);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitVarInsn(ALOAD, 2);
+ mv.visitMethodInsn(INVOKEINTERFACE, "groovy/lang/MetaClass", "invokeMethod", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)Ljava/lang/Object;", true);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(4, 3);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "getProperty", "(Ljava/lang/String;)Ljava/lang/Object;", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "getMetaClass", "()Lgroovy/lang/MetaClass;", false);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitMethodInsn(INVOKEINTERFACE, "groovy/lang/MetaClass", "getProperty", "(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;", true);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(3, 2);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC + ACC_SYNTHETIC, "setProperty", "(Ljava/lang/String;Ljava/lang/Object;)V", null, null);
+ mv.visitCode();
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKEVIRTUAL, "GroovyInterface$1", "getMetaClass", "()Lgroovy/lang/MetaClass;", false);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitVarInsn(ALOAD, 1);
+ mv.visitVarInsn(ALOAD, 2);
+ mv.visitMethodInsn(INVOKEINTERFACE, "groovy/lang/MetaClass", "setProperty", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/Object;)V", true);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(4, 3);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_STATIC, "<clinit>", "()V", null, null);
+ mv.visitCode();
+ mv.visitLdcInsn(Type.getType("LGroovyInterface;"));
+ mv.visitVarInsn(ASTORE, 0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitFieldInsn(PUTSTATIC, "GroovyInterface$1", "$class$GroovyInterface", "Ljava/lang/Class;");
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitInsn(POP);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(1, 1);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", null, null);
+ mv.visitCode();
+ mv.visitLdcInsn(new Integer(0));
+ mv.visitTypeInsn(ANEWARRAY, "java/lang/String");
+ mv.visitVarInsn(ASTORE, 0);
+ mv.visitTypeInsn(NEW, "org/codehaus/groovy/runtime/callsite/CallSiteArray");
+ mv.visitInsn(DUP);
+ mv.visitLdcInsn(Type.getType("LGroovyInterface$1;"));
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESPECIAL, "org/codehaus/groovy/runtime/callsite/CallSiteArray", "<init>", "(Ljava/lang/Class;[Ljava/lang/String;)V", false);
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(4, 1);
+ mv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_PUBLIC + ACC_STATIC + ACC_SYNTHETIC, "$getCallSiteArray", "()[Lorg/codehaus/groovy/runtime/callsite/CallSite;", null, null);
+ mv.visitCode();
+ mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$callSiteArray", "Ljava/lang/ref/SoftReference;");
+ Label l0 = new Label();
+ mv.visitJumpInsn(IFNULL, l0);
+ mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$callSiteArray", "Ljava/lang/ref/SoftReference;");
+ mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/ref/SoftReference", "get", "()Ljava/lang/Object;", false);
+ mv.visitTypeInsn(CHECKCAST, "org/codehaus/groovy/runtime/callsite/CallSiteArray");
+ mv.visitInsn(DUP);
+ mv.visitVarInsn(ASTORE, 0);
+ Label l1 = new Label();
+ mv.visitJumpInsn(IFNONNULL, l1);
+ mv.visitLabel(l0);
+ mv.visitMethodInsn(INVOKESTATIC, "GroovyInterface$1", "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", false);
+ mv.visitVarInsn(ASTORE, 0);
+ mv.visitTypeInsn(NEW, "java/lang/ref/SoftReference");
+ mv.visitInsn(DUP);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitMethodInsn(INVOKESPECIAL, "java/lang/ref/SoftReference", "<init>", "(Ljava/lang/Object;)V", false);
+ mv.visitFieldInsn(PUTSTATIC, "GroovyInterface$1", "$callSiteArray", "Ljava/lang/ref/SoftReference;");
+ mv.visitLabel(l1);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitFieldInsn(GETFIELD, "org/codehaus/groovy/runtime/callsite/CallSiteArray", "array", "[Lorg/codehaus/groovy/runtime/callsite/CallSite;");
+ mv.visitInsn(ARETURN);
+ mv.visitMaxs(3, 1);
+ mv.visitEnd();
+ }
+ cw.visitEnd();
+
+ return cw.toByteArray();
+ }
+}
diff --git a/test/files/run/t7741a/GroovyInterfaceDump.java b/test/files/run/t7741a/GroovyInterfaceDump.java
new file mode 100644
index 0000000000..87c09e272f
--- /dev/null
+++ b/test/files/run/t7741a/GroovyInterfaceDump.java
@@ -0,0 +1,51 @@
+import java.util.*;
+import scala.tools.asm.*;
+
+// generated with
+// git clone alewando/scala_groovy_interop
+// SCALA_HOME=... GROOVY_HOME=... ant
+// cd /code/scala2
+// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1'
+// java -classpath build/asm/classes:/Users/jason/code/scala_groovy_interop/classes:/code/scala2/build/pack/lib/scala-library.jar:/usr/local/Cellar/groovy/2.4.1/libexec/embeddable/groovy-all-2.4.1.jar scala.tools.asm.util.ASMifier 'GroovyInterface$1'
+public class GroovyInterfaceDump implements Opcodes {
+
+ public static byte[] dump () throws Exception {
+
+ ClassWriter cw = new ClassWriter(0);
+ FieldVisitor fv;
+ MethodVisitor mv;
+ AnnotationVisitor av0;
+
+ cw.visit(V1_5, ACC_PUBLIC + ACC_ABSTRACT + ACC_INTERFACE, "GroovyInterface", null, "java/lang/Object", null);
+
+ cw.visitInnerClass("GroovyInterface$1", "GroovyInterface", "1", ACC_SYNTHETIC);
+
+ cw.visitInnerClass("GroovyInterface$__clinit__closure1", null, null, 0);
+
+ {
+ fv = cw.visitField(ACC_PUBLIC + ACC_FINAL + ACC_STATIC, "closure", "Ljava/lang/Object;", null, null);
+ fv.visitEnd();
+ }
+ {
+ mv = cw.visitMethod(ACC_STATIC, "<clinit>", "()V", null, null);
+ mv.visitCode();
+ mv.visitTypeInsn(NEW, "GroovyInterface$__clinit__closure1");
+ mv.visitInsn(DUP);
+ mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$class$GroovyInterface", "Ljava/lang/Class;");
+ mv.visitFieldInsn(GETSTATIC, "GroovyInterface$1", "$class$GroovyInterface", "Ljava/lang/Class;");
+ mv.visitMethodInsn(INVOKESPECIAL, "GroovyInterface$__clinit__closure1", "<init>", "(Ljava/lang/Object;Ljava/lang/Object;)V", false);
+ mv.visitVarInsn(ASTORE, 0);
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitFieldInsn(PUTSTATIC, "GroovyInterface", "closure", "Ljava/lang/Object;");
+ mv.visitVarInsn(ALOAD, 0);
+ mv.visitInsn(POP);
+ mv.visitInsn(RETURN);
+ mv.visitMaxs(4, 1);
+ mv.visitEnd();
+ }
+ cw.visitEnd();
+
+ return cw.toByteArray();
+ }
+}
+
diff --git a/test/files/run/t7741a/Test.scala b/test/files/run/t7741a/Test.scala
new file mode 100644
index 0000000000..cdba1cccf8
--- /dev/null
+++ b/test/files/run/t7741a/Test.scala
@@ -0,0 +1,47 @@
+import java.io.{ByteArrayInputStream, FileOutputStream, BufferedOutputStream}
+import java.util
+
+import java.io.File
+
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ def code = ""
+
+ override def show(): Unit = {
+
+ val class1: Array[Byte] = GroovyInterfaceDump.dump()
+ val class2: Array[Byte] = GroovyInterface$1Dump.dump()
+ def writeFile(contents: Array[Byte], f: java.io.File): Unit = {
+ val out = new BufferedOutputStream(new FileOutputStream(f))
+ try {
+ out.write(contents)
+ } finally out.close()
+ }
+
+ val outdir = testOutput.jfile
+
+ // interface GroovyInterface {
+ //
+ // // This is the line that causes scalac to choke.
+ // // It results in a GroovyInterface$1 class, which is a non-static inner class but its constructor does not
+ // // include the implicit parameter that is the immediate enclosing instance.
+ // // See http://jira.codehaus.org/browse/GROOVY-7312
+ // //
+ // // Scalac error:
+ // // [scalac] error: error while loading 1, class file '..../scala_groovy_interop/classes/com/example/groovy/GroovyInterface$1.class' is broken
+ // // [scalac] (class java.util.NoSuchElementException/head of empty list)
+ // final static def closure = { x -> "banana" }
+ //
+ // }
+ writeFile(GroovyInterfaceDump.dump(), new File(outdir, "GroovyInterface.class"))
+ writeFile(GroovyInterface$1Dump.dump(), new File(outdir, "GroovyInterface$1.class"))
+ compileCode("object Test { def foo(g: GroovyInterface) = g.toString }")
+ }
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+}
diff --git a/test/files/run/t7741b.check b/test/files/run/t7741b.check
new file mode 100644
index 0000000000..a19e54aa3a
--- /dev/null
+++ b/test/files/run/t7741b.check
@@ -0,0 +1,3 @@
+1. Don't refer to Inner
+2. Referring to Inner
+pos: NoPosition Class file for HasInner$Inner not found ERROR
diff --git a/test/files/run/t7741b/HasInner.java b/test/files/run/t7741b/HasInner.java
new file mode 100644
index 0000000000..a1d0d0d81a
--- /dev/null
+++ b/test/files/run/t7741b/HasInner.java
@@ -0,0 +1,3 @@
+class HasInner {
+ class Inner {}
+}
diff --git a/test/files/run/t7741b/Test.scala b/test/files/run/t7741b/Test.scala
new file mode 100644
index 0000000000..569ae6b679
--- /dev/null
+++ b/test/files/run/t7741b/Test.scala
@@ -0,0 +1,29 @@
+import java.io.File
+
+import scala.tools.partest.StoreReporterDirectTest
+
+object Test extends StoreReporterDirectTest {
+
+ def code = ""
+
+ override def show(): Unit = {
+ deleteClass("HasInner$Inner")
+ println("1. Don't refer to Inner")
+ compileCode("class Test { def test(x: HasInner) = x }")
+ assert(filteredInfos.isEmpty, filteredInfos)
+ println("2. Refering to Inner")
+ compileCode("class Test { def test(x: HasInner#Inner) = x }")
+ println(filteredInfos.mkString("\n"))
+ }
+
+ def deleteClass(name: String) {
+ val classFile = new File(testOutput.path, name + ".class")
+ assert(classFile.exists)
+ assert(classFile.delete())
+ }
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+}
diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check
index ad924f482c..105b238d01 100644
--- a/test/files/run/t7747-repl.check
+++ b/test/files/run/t7747-repl.check
@@ -283,4 +283,4 @@ object $read extends $read {
}
res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo())
-scala>
+scala> :quit
diff --git a/test/files/run/t7801.check b/test/files/run/t7801.check
index d72060c684..e0b656b784 100644
--- a/test/files/run/t7801.check
+++ b/test/files/run/t7801.check
@@ -8,4 +8,4 @@ import g.abort
scala> class C(val a: Any) extends AnyVal
defined class C
-scala>
+scala> :quit
diff --git a/test/files/run/t7805-repl-i.check b/test/files/run/t7805-repl-i.check
index eecfff079a..7f66c06a11 100644
--- a/test/files/run/t7805-repl-i.check
+++ b/test/files/run/t7805-repl-i.check
@@ -8,4 +8,4 @@ Type :help for more information.
scala> Console println Try(8)
Success(8)
-scala>
+scala> :quit
diff --git a/test/files/run/t7852.scala b/test/files/run/t7852.scala
index c93db718fd..1679067510 100644
--- a/test/files/run/t7852.scala
+++ b/test/files/run/t7852.scala
@@ -12,7 +12,7 @@ object Test extends BytecodeTest {
val classNode = loadClassNode("Lean")
val methodNode = getMethod(classNode, methodName)
val got = countNullChecks(methodNode.instructions)
- assert(got == expected, s"expected $expected but got $got comparisons")
+ assert(got == expected, s"$methodName: expected $expected but got $got comparisons")
}
test("string", expected = 0)
test("module", expected = 0)
diff --git a/test/files/run/t7932.check b/test/files/run/t7932.check
index 13d64f1d3c..3f0a0c4f62 100644
--- a/test/files/run/t7932.check
+++ b/test/files/run/t7932.check
@@ -1,3 +1,3 @@
-warning: there were 1 feature warning(s); re-run with -feature for details
+warning: there was one feature warning; re-run with -feature for details
public Category<?> C.category()
public Category<scala.Tuple2> C.category1()
diff --git a/test/files/run/t7965.scala b/test/files/run/t7965.scala
new file mode 100644
index 0000000000..df80d4b5bb
--- /dev/null
+++ b/test/files/run/t7965.scala
@@ -0,0 +1,54 @@
+// Test that scala doesn't apply boxing or varargs conversions to the
+// @PolymorphicSignature magical methods, MethodHandle#{invoke, invokeExact}
+object Test {
+ val code = """
+
+object O {
+ private def foo = "foo"
+ private def bar(x: Int): Int = -x
+ private def baz(x: Box): Unit = x.a = "present"
+ val lookup = java.lang.invoke.MethodHandles.lookup
+}
+
+import java.lang.invoke._
+class Box(var a: Any)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ def lookup(name: String, params: Array[Class[_]], ret: Class[_]) = {
+ val mt = MethodType.methodType(ret, params)
+ O.lookup.findVirtual(O.getClass, name, mt)
+ }
+ val fooResult = (lookup("foo", Array(), classOf[String]).invokeExact(O): String)
+ assert(fooResult == "foo")
+
+ val barResult = (lookup("bar", Array(classOf[Int]), classOf[Int]).invokeExact(O, 42): Int)
+ assert(barResult == -42)
+
+ val box = new Box(null)
+ (lookup("baz", Array(classOf[Box]), Void.TYPE).invokeExact(O, box) : Unit)
+ assert(box.a == "present")
+
+ // Note: Application in statement position in a block in Java also infers return type of Unit,
+ // but we don't support that; ascribe the type to Unit as above.
+ // as done in Java.
+ // lookup("baz", Array(classOf[Box]), Void.TYPE).invokeExact(O, box)
+ ()
+ }
+}
+
+"""
+ def main(args: Array[String]): Unit = {
+ if (util.Properties.isJavaAtLeast("1.7")) test()
+ }
+
+ def test() {
+ import scala.reflect.runtime._
+ import scala.tools.reflect.ToolBox
+
+ val m = currentMirror
+ val tb = m.mkToolBox()
+ import tb._
+ eval(parse(code))
+ }
+}
diff --git a/test/files/run/t7974.check b/test/files/run/t7974.check
index 0be496d8d0..4eae5eb152 100644
--- a/test/files/run/t7974.check
+++ b/test/files/run/t7974.check
@@ -1,37 +1,14 @@
-public class Symbols {
-
- // compiled from: Symbols.scala
-
-
-
- // access flags 0x12
- private final Lscala/Symbol; someSymbol3
-
- // access flags 0xA
- private static Lscala/Symbol; symbol$1
-
- // access flags 0xA
- private static Lscala/Symbol; symbol$2
-
- // access flags 0xA
- private static Lscala/Symbol; symbol$3
// access flags 0x9
public static <clinit>()V
- L0
- LINENUMBER 2 L0
GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
LDC "Symbolic1"
INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
PUTSTATIC Symbols.symbol$1 : Lscala/Symbol;
- L1
- LINENUMBER 3 L1
GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
LDC "Symbolic2"
INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
PUTSTATIC Symbols.symbol$2 : Lscala/Symbol;
- L2
- LINENUMBER 5 L2
GETSTATIC scala/Symbol$.MODULE$ : Lscala/Symbol$;
LDC "Symbolic3"
INVOKEVIRTUAL scala/Symbol$.apply (Ljava/lang/String;)Lscala/Symbol;
@@ -40,65 +17,48 @@ public class Symbols {
MAXSTACK = 2
MAXLOCALS = 0
+
// access flags 0x1
public someSymbol1()Lscala/Symbol;
- L0
- LINENUMBER 2 L0
GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
ARETURN
- L1
- LOCALVARIABLE this LSymbols; L0 L1 0
MAXSTACK = 1
MAXLOCALS = 1
+
// access flags 0x1
public someSymbol2()Lscala/Symbol;
- L0
- LINENUMBER 3 L0
GETSTATIC Symbols.symbol$2 : Lscala/Symbol;
ARETURN
- L1
- LOCALVARIABLE this LSymbols; L0 L1 0
MAXSTACK = 1
MAXLOCALS = 1
+
// access flags 0x1
public sameSymbol1()Lscala/Symbol;
- L0
- LINENUMBER 4 L0
GETSTATIC Symbols.symbol$1 : Lscala/Symbol;
ARETURN
- L1
- LOCALVARIABLE this LSymbols; L0 L1 0
MAXSTACK = 1
MAXLOCALS = 1
+
// access flags 0x1
public someSymbol3()Lscala/Symbol;
- L0
- LINENUMBER 5 L0
ALOAD 0
GETFIELD Symbols.someSymbol3 : Lscala/Symbol;
ARETURN
- L1
- LOCALVARIABLE this LSymbols; L0 L1 0
MAXSTACK = 1
MAXLOCALS = 1
+
// access flags 0x1
public <init>()V
- L0
- LINENUMBER 6 L0
ALOAD 0
INVOKESPECIAL java/lang/Object.<init> ()V
- L1
- LINENUMBER 5 L1
ALOAD 0
GETSTATIC Symbols.symbol$3 : Lscala/Symbol;
PUTFIELD Symbols.someSymbol3 : Lscala/Symbol;
RETURN
- L2
- LOCALVARIABLE this LSymbols; L0 L2 0
MAXSTACK = 2
MAXLOCALS = 1
-}
+
diff --git a/test/files/run/t7974.flags b/test/files/run/t7974.flags
new file mode 100644
index 0000000000..5fc2a03894
--- /dev/null
+++ b/test/files/run/t7974.flags
@@ -0,0 +1 @@
+-Xcheckinit:false
diff --git a/test/files/run/t7974/Test.scala b/test/files/run/t7974/Test.scala
index 9403ea332b..296ec32ee2 100644
--- a/test/files/run/t7974/Test.scala
+++ b/test/files/run/t7974/Test.scala
@@ -1,20 +1,14 @@
-import java.io.PrintWriter;
+import java.io.PrintWriter
import scala.tools.partest.BytecodeTest
+import scala.tools.nsc.backend.jvm.AsmUtils
import scala.tools.asm.util._
import scala.tools.nsc.util.stringFromWriter
+import scala.collection.convert.decorateAsScala._
object Test extends BytecodeTest {
def show {
- val classNode = loadClassNode("Symbols", skipDebugInfo = false)
- val textifier = new Textifier
- classNode.accept(new TraceClassVisitor(null, textifier, null))
-
- val classString = stringFromWriter(w => textifier.print(w))
- val result =
- classString.split('\n')
- .dropWhile(elem => elem != "public class Symbols {")
- .filterNot(elem => elem.startsWith(" @Lscala/reflect/ScalaSignature") || elem.startsWith(" ATTRIBUTE ScalaSig"))
- result foreach println
+ val classNode = loadClassNode("Symbols", skipDebugInfo = true)
+ classNode.methods.asScala.foreach(m => println(AsmUtils.textify(m)))
}
}
diff --git a/test/files/run/t7992.scala b/test/files/run/t7992.scala
new file mode 100644
index 0000000000..fde231b961
--- /dev/null
+++ b/test/files/run/t7992.scala
@@ -0,0 +1,20 @@
+class C {
+ def foo: Int = 0
+}
+
+class D extends C {
+ override def foo: Int = {
+ val f = () => {
+ class C // comment this line to fix.
+ D.super.foo // no super accessor generated here!
+ // java.lang.VerifyError: (class: D$$anonfun$1, method: apply$mcI$sp signature: ()I) Illegal use of nonvirtual function call
+ }
+ f()
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new D().foo
+ }
+}
diff --git a/test/files/run/t7992b.scala b/test/files/run/t7992b.scala
new file mode 100644
index 0000000000..6fe1f990d5
--- /dev/null
+++ b/test/files/run/t7992b.scala
@@ -0,0 +1,18 @@
+class C {
+ def foo: Int = 0
+}
+
+class E extends C {
+ override def foo: Int = {
+ (None: Option[Int]).getOrElse {
+ class C
+ E.super.foo
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new E().foo
+ }
+}
diff --git a/test/files/run/t8087.scala b/test/files/run/t8087.scala
new file mode 100644
index 0000000000..6047211756
--- /dev/null
+++ b/test/files/run/t8087.scala
@@ -0,0 +1,12 @@
+trait Foo {
+ @volatile private[this] var x: String = ""
+ @volatile private var y: String = ""
+}
+
+class Bar extends Foo
+
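+// The vars declared in Foo are materialized as fields of Bar when the trait is mixed in;
+// the assertion below checks that the volatile modifier survives that translation.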
+object Test extends App {
+ classOf[Bar].getDeclaredFields.foreach(f => {
+ assert(java.lang.reflect.Modifier.isVolatile(f.getModifiers), f.getName)
+ })
+}
diff --git a/test/files/run/t8196.check b/test/files/run/t8196.check
new file mode 100644
index 0000000000..d11dc27e68
--- /dev/null
+++ b/test/files/run/t8196.check
@@ -0,0 +1,7 @@
+t8196.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ form2.g1 // comment this line in order to make the test pass
+ ^
+warning: there were two feature warnings; re-run with -feature for details
+Scope{
+ final private val f1: Int
+}
diff --git a/test/files/run/t8196.scala b/test/files/run/t8196.scala
new file mode 100644
index 0000000000..e219ac166b
--- /dev/null
+++ b/test/files/run/t8196.scala
@@ -0,0 +1,51 @@
+import scala.reflect.runtime.{ universe => ru }
+
+object Test extends App {
+
+ trait FormTrait {
+
+ val runtimeMirror = ru.runtimeMirror(this.getClass.getClassLoader)
+ val instanceMirror = runtimeMirror.reflect(this)
+ val members = instanceMirror.symbol.typeSignature.members
+ def fields = members.filter(_.typeSignature <:< ru.typeOf[Int])
+ }
+
+ val f = () => {
+
+ class Form1 extends FormTrait {
+ val f1 = 5
+ }
+ val form1 = new Form1
+
+ println(form1.fields)
+
+ val form2 = new FormTrait {
+ val g1 = new Form1
+ }
+
+ form2.g1 // comment this line in order to make the test pass
+ ()
+ }
+
+ val g = () => {
+ // Reported as SI-8195, same root cause
+ trait Form {
+
+ private val runtimeMirror = ru.runtimeMirror(this.getClass.getClassLoader)
+ private val instanceMirror = runtimeMirror.reflect(this)
+ private val members = instanceMirror.symbol.typeSignature.members
+
+ }
+
+ val f1 = new Form {
+ val a = 1
+ }
+
+ val f2 = new Form {
+ val b = f1.a
+ }
+ }
+
+ f()
+ g()
+}
diff --git a/test/files/run/t8253.check b/test/files/run/t8253.check
new file mode 100644
index 0000000000..0b4cb2d1f7
--- /dev/null
+++ b/test/files/run/t8253.check
@@ -0,0 +1,40 @@
+
+<sample xmlns='ns1'/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding(null, "ns1", $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns={identity(ns1)}/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding(null, ns1, $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns:foo='ns1'/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding("foo", "ns1", $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
+
+<sample xmlns:foo={identity(ns1)}/>
+{
+ var $tmpscope: _root_.scala.xml.NamespaceBinding = $scope;
+ $tmpscope = new _root_.scala.xml.NamespaceBinding("foo", ns1, $tmpscope);
+ {
+ val $scope: _root_.scala.xml.NamespaceBinding = $tmpscope;
+ new _root_.scala.xml.Elem(null, "sample", _root_.scala.xml.Null, $scope, true)
+ }
+}
diff --git a/test/files/run/t8253.scala b/test/files/run/t8253.scala
new file mode 100644
index 0000000000..a00d8b91a4
--- /dev/null
+++ b/test/files/run/t8253.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+ import reflect.runtime.universe._ // not using the XML library in compiler tests
+
+ def show(code: String, t: Tree) = println(s"\n$code\n$t")
+
+ val ns1 = "ns1"
+ show("<sample xmlns='ns1'/>", q"<sample xmlns='ns1'/>")
+ show("<sample xmlns={identity(ns1)}/>", q"<sample xmlns={ns1}/>")
+ show("<sample xmlns:foo='ns1'/>", q"<sample xmlns:foo='ns1'/>")
+ show("<sample xmlns:foo={identity(ns1)}/>", q"<sample xmlns:foo={ns1}/>")
+
+ // `identity(foo)` used to match the overly permissive match in SymbolXMLBuilder
+ // which was intended to more specifically match `_root_.scala.xml.Text(...)`
+}
diff --git a/test/files/run/t8346.check b/test/files/run/t8346.check
new file mode 100644
index 0000000000..1ba5c31abe
--- /dev/null
+++ b/test/files/run/t8346.check
@@ -0,0 +1,6 @@
+BitSet: List(invariant, invariant, invariant, invariant)
+HashSet: List(covariant (true), covariant (true), covariant (true), covariant (true))
+ListSet: List(covariant (true), covariant (true), covariant (true), covariant (true))
+SortedSet: List(invariant, invariant, invariant, invariant)
+TreeSet: List(invariant, invariant, invariant, invariant)
+ValueSet: invariant
diff --git a/test/files/run/t8346.scala b/test/files/run/t8346.scala
new file mode 100644
index 0000000000..5f3df84174
--- /dev/null
+++ b/test/files/run/t8346.scala
@@ -0,0 +1,34 @@
+object Test extends App {
+ import reflect.ClassTag
+
+ object SomeEnum extends Enumeration {
+ val one, two, three, four = Value
+ }
+
+ def sctor[A <: Set[Int]](f: Int => A)(implicit A: ClassTag[A])
+ : (String, Int => Set[Int]) =
+ (A.runtimeClass.getSimpleName, f)
+
+ val inits: Seq[(String, Int => Set[Int])] = {
+ import collection.immutable.{Seq => _, _}
+ Seq(sctor(BitSet(_)),
+ sctor(HashSet(_)),
+ sctor(ListSet(_)),
+ sctor(SortedSet(_)),
+ sctor(TreeSet(_)))
+ }
+
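+ // sVarInfo reports "covariant" when widening to Set[Any] via toSet returns the same instance
+ // (and then exercises it by adding a String); it reports "invariant" when a copy is made.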
+ def sVarInfo[A](sa: Set[A]): String = {
+ val saa = sa.toSet[Any]
+ if (sa eq saa) s"""covariant (${(saa + "hi") contains "hi"})"""
+ else "invariant"
+ }
+
+ inits foreach {case (name, singleton) =>
+ print(s"${name}: ")
+ val one = singleton(1)
+ println(Seq(2,3,4).scanLeft(one)(_ + _) map sVarInfo toList)
+ }
+
+ println(s"ValueSet: ${sVarInfo(SomeEnum.values)}")
+}
diff --git a/test/files/run/t8442.check b/test/files/run/t8442.check
new file mode 100644
index 0000000000..ce9e8b52ff
--- /dev/null
+++ b/test/files/run/t8442.check
@@ -0,0 +1 @@
+pos: NoPosition Class A_1 not found - continuing with a stub. WARNING
diff --git a/test/files/run/t8442/A_1.java b/test/files/run/t8442/A_1.java
new file mode 100644
index 0000000000..227451eecd
--- /dev/null
+++ b/test/files/run/t8442/A_1.java
@@ -0,0 +1,4 @@
+@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+public @interface A_1 {
+
+} \ No newline at end of file
diff --git a/test/files/run/t8442/B_1.java b/test/files/run/t8442/B_1.java
new file mode 100644
index 0000000000..1680684495
--- /dev/null
+++ b/test/files/run/t8442/B_1.java
@@ -0,0 +1,3 @@
+public class B_1 {
+ @A_1 public String get() { return ""; }
+}
diff --git a/test/files/run/t8442/C_2.scala b/test/files/run/t8442/C_2.scala
new file mode 100644
index 0000000000..d75d4bd910
--- /dev/null
+++ b/test/files/run/t8442/C_2.scala
@@ -0,0 +1,5 @@
+class C_2 {
+ def foo(b: B_1) {
+ b.get()
+ }
+}
diff --git a/test/files/run/t8442/Test.scala b/test/files/run/t8442/Test.scala
new file mode 100644
index 0000000000..ff6da4e206
--- /dev/null
+++ b/test/files/run/t8442/Test.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def app = """
+ class C_2 {
+ def foo(b: B_1) {
+ b.get()
+ }
+ }
+ """
+
+ def show(): Unit = {
+ val tClass = new File(testOutput.path, "A_1.class")
+ assert(tClass.exists)
+ assert(tClass.delete())
+
+ // Expecting stub symbol warning, but no stack trace!
+ compileCode(app)
+ println(filteredInfos.mkString("\n"))
+ }
+}
diff --git a/test/files/run/t8445.check b/test/files/run/t8445.check
new file mode 100644
index 0000000000..41fd6d3ed1
--- /dev/null
+++ b/test/files/run/t8445.check
@@ -0,0 +1 @@
+warning: there was one feature warning; re-run with -feature for details
diff --git a/test/files/run/t8445.scala b/test/files/run/t8445.scala
new file mode 100644
index 0000000000..ed196b62a2
--- /dev/null
+++ b/test/files/run/t8445.scala
@@ -0,0 +1,11 @@
+object X {
+ class Y
+ def y = new Y {
+ class Z
+ def z = classOf[Z]
+ }
+}
+
+object Test extends App {
+ assert(X.y.z.getEnclosingClass.getName == "X$$anon$1")
+}
diff --git a/test/files/run/t8502.scala b/test/files/run/t8502.scala
new file mode 100644
index 0000000000..903e573711
--- /dev/null
+++ b/test/files/run/t8502.scala
@@ -0,0 +1,41 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def show(): Unit = {
+ compileCode("""
+ object U {
+ def foo(log: vanishing.Vanishing) = ()
+ }
+
+ package vanishing {
+ class Vanishing
+ }
+ """)
+ assert(filteredInfos.isEmpty, filteredInfos)
+ deletePackage("vanishing")
+ compileCode("""
+ class Test {
+ U
+ }
+ """)
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) // Included a MissingRequirementError before.
+ }
+
+ def deletePackage(name: String) {
+ val directory = new File(testOutput.path, name)
+ for (f <- directory.listFiles()) {
+ assert(f.getName.endsWith(".class"))
+ assert(f.delete())
+ }
+ assert(directory.listFiles().isEmpty)
+ assert(directory.delete())
+ }
+}
diff --git a/test/files/run/t8549.check b/test/files/run/t8549.check
new file mode 100644
index 0000000000..a9ecc29fea
--- /dev/null
+++ b/test/files/run/t8549.check
@@ -0,0 +1 @@
+warning: there were two deprecation warnings; re-run with -deprecation for details
diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala
new file mode 100644
index 0000000000..cb254e3810
--- /dev/null
+++ b/test/files/run/t8549.scala
@@ -0,0 +1,189 @@
+import javax.xml.bind.DatatypeConverter._
+import scala.reflect.io.File
+
+// This test is self-modifying when run as follows:
+//
+// (export V=v2.10.4
+// scalac-hash $V test/files/run/t8549.scala
+// scala-hash $V -Doverwrite.source=test/files/run/t8549.scala Test
+// )
+//
+// Use this to re-establish a baseline for serialization compatibility.
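+//
+// Each `check` call below serializes its instance, Base64-encodes the bytes, and compares the
+// result with the string literal recorded on that line; when -Doverwrite.source is set, the
+// literal is patched in place instead of being compared.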
+object Test extends App {
+ val overwrite: Option[File] = sys.props.get("overwrite.source").map(s => new File(new java.io.File(s)))
+
+ def serialize(o: AnyRef): String = {
+ val bos = new java.io.ByteArrayOutputStream()
+ val out = new java.io.ObjectOutputStream(bos)
+ out.writeObject(o)
+ out.flush()
+ printBase64Binary(bos.toByteArray())
+ }
+
+ def amend(file: File)(f: String => String) {
+ file.writeAll(f(file.slurp))
+ }
+ def quote(s: String) = List("\"", s, "\"").mkString
+
+ def patch(file: File, line: Int, prevResult: String, result: String) {
+ amend(file) {
+ content =>
+ content.lines.toList.zipWithIndex.map {
+ case (content, i) if i == line - 1 =>
+ val newContent = content.replaceAllLiterally(quote(prevResult), quote(result))
+ if (newContent != content)
+ println(s"- $content\n+ $newContent\n")
+ newContent
+ case (content, _) => content
+ }.mkString("\n")
+ }
+ }
+
+ def updateComment(file: File) {
+ val timestamp = {
+ import java.text.SimpleDateFormat
+ val sdf = new SimpleDateFormat("yyyyMMdd-HH:mm:ss")
+ sdf.format(new java.util.Date)
+ }
+ val newComment = s" // Generated on $timestamp with Scala ${scala.util.Properties.versionString})"
+ amend(file) {
+ content =>
+ content.lines.toList.map {
+ f => f.replaceAll("""^ +// Generated on.*""", newComment)
+ }.mkString("\n")
+ }
+ }
+
+ def deserialize(string: String): AnyRef = {
+ val bis = new java.io.ByteArrayInputStream(parseBase64Binary(string))
+ val in = new java.io.ObjectInputStream(bis)
+ in.readObject()
+ }
+
+ def checkRoundTrip[T <: AnyRef](instance: T)(f: T => AnyRef) {
+ val result = serialize(instance)
+ val reconstituted = deserialize(result).asInstanceOf[T]
+ assert(f(instance) == f(reconstituted), (f(instance), f(reconstituted)))
+ }
+
+ def check[T <: AnyRef](instance: => T)(prevResult: String, f: T => AnyRef = (x: T) => x) {
+ val result = serialize(instance)
+ overwrite match {
+ case Some(f) =>
+ val lineNumberOfLiteralString = Thread.currentThread.getStackTrace.apply(2).getLineNumber
+ patch(f, lineNumberOfLiteralString, prevResult, result)
+ case None =>
+ checkRoundTrip(instance)(f)
+ assert(f(deserialize(prevResult).asInstanceOf[T]) == f(instance), s"$instance != f(deserialize(prevResult))")
+ assert(prevResult == result, s"instance = $instance : ${instance.getClass}\n serialization unstable: ${prevResult}\n found: ${result}")
+ }
+ }
+
+ // Generated on 20141010-14:01:28 with Scala version 2.11.2)
+ overwrite.foreach(updateComment)
+
+ check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAF4dAASTGphdmEvbGFuZy9PYmplY3Q7eHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==")
+ check(None)("rO0ABXNyAAtzY2FsYS5Ob25lJEZQJPZTypSsAgAAeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHA=")
+
+ check(List(1, 2, 3))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHg=")
+ check(Nil)( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgAsc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNlcmlhbGl6ZUVuZCSKXGNb91MLbQIAAHhweA==")
+
+ // TODO SI-8576 unstable under -Xcheckinit
+ // check(Vector(1))( "rO0ABXNyACFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5WZWN0b3Lkd3dcHq6PXAIAC0kABWRlcHRoWgAFZGlydHlJAAhlbmRJbmRleEkABWZvY3VzSQAKc3RhcnRJbmRleFsACGRpc3BsYXkwdAATW0xqYXZhL2xhbmcvT2JqZWN0O1sACGRpc3BsYXkxcQB+AAFbAAhkaXNwbGF5MnEAfgABWwAIZGlzcGxheTNxAH4AAVsACGRpc3BsYXk0cQB+AAFbAAhkaXNwbGF5NXEAfgABeHAAAAABAAAAAAEAAAAAAAAAAHVyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAACBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXBwcHBwcHBwcHBwcHBwcHBwcHBwcHBwcHBwcHBwcHBwcHBwcA==")
+ // check(Vector())( "rO0ABXNyACFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5WZWN0b3Lkd3dcHq6PXAIAC0kABWRlcHRoWgAFZGlydHlJAAhlbmRJbmRleEkABWZvY3VzSQAKc3RhcnRJbmRleFsACGRpc3BsYXkwdAATW0xqYXZhL2xhbmcvT2JqZWN0O1sACGRpc3BsYXkxcQB+AAFbAAhkaXNwbGF5MnEAfgABWwAIZGlzcGxheTNxAH4AAVsACGRpc3BsYXk0cQB+AAFbAAhkaXNwbGF5NXEAfgABeHAAAAAAAAAAAAAAAAAAAAAAAHBwcHBwcA==")
+
+ import collection.{ mutable, immutable }
+
+ class C
+ check(reflect.classTag[C])("rO0ABXNyAB5zY2FsYS5yZWZsZWN0LkNsYXNzVGFnJCRhbm9uJDG7ePPrmQBkhgIAAUwAD3J1bnRpbWVDbGFzczEkMXQAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAGVGVzdCRDAAAAAAAAAAAAAAB4cA==")
+ check(reflect.classTag[Int])("rO0ABXNyACVzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSQkYW5vbiQ5zfmiSVNjtVICAAB4cgAcc2NhbGEucmVmbGVjdC5BbnlWYWxNYW5pZmVzdAAAAAAAAAABAgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cHQAA0ludA==")
+ check(reflect.classTag[String])("rO0ABXNyAB5zY2FsYS5yZWZsZWN0LkNsYXNzVGFnJCRhbm9uJDG7ePPrmQBkhgIAAUwAD3J1bnRpbWVDbGFzczEkMXQAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAQamF2YS5sYW5nLlN0cmluZ6DwpDh6O7NCAgAAeHA=")
+ check(reflect.classTag[Object])("rO0ABXNyACVzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSQkYW5vbiQymPrtq/Ci1gsCAAB4cgAtc2NhbGEucmVmbGVjdC5NYW5pZmVzdEZhY3RvcnkkUGhhbnRvbU1hbmlmZXN0rzigP7KRh/kCAAFMAAh0b1N0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO3hyAC9zY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRDbGFzc1R5cGVNYW5pZmVzdFq6NWvfTgYFAgADTAAGcHJlZml4dAAOTHNjYWxhL09wdGlvbjtMAAxydW50aW1lQ2xhc3N0ABFMamF2YS9sYW5nL0NsYXNzO0wADXR5cGVBcmd1bWVudHN0ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDt4cHNyAAtzY2FsYS5Ob25lJEZQJPZTypSsAgAAeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHB2cgAQamF2YS5sYW5nLk9iamVjdAAAAAAAAAAAAAAAeHBzcgAyc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAQMAAHhwc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHh0AAZPYmplY3Q=")
+
+ // TODO SI-8576 unstable under -Xcheckinit
+ // check(Enum)( "rO0ABXNyAApUZXN0JEVudW0ketCIyQ8C23MCAAJMAAJWMXQAGUxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZTtMAAJWMnQAF0xzY2FsYS9FbnVtZXJhdGlvbiRWYWw7eHIAEXNjYWxhLkVudW1lcmF0aW9udaDN3ZgOWY4CAAhJAAZuZXh0SWRJABtzY2FsYSRFbnVtZXJhdGlvbiQkYm90dG9tSWRJABhzY2FsYSRFbnVtZXJhdGlvbiQkdG9wSWRMABRWYWx1ZU9yZGVyaW5nJG1vZHVsZXQAIkxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZU9yZGVyaW5nJDtMAA9WYWx1ZVNldCRtb2R1bGV0AB1Mc2NhbGEvRW51bWVyYXRpb24kVmFsdWVTZXQkO0wACG5leHROYW1ldAAbTHNjYWxhL2NvbGxlY3Rpb24vSXRlcmF0b3I7TAAXc2NhbGEkRW51bWVyYXRpb24kJG5tYXB0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL01hcDtMABdzY2FsYSRFbnVtZXJhdGlvbiQkdm1hcHEAfgAHeHAAAAArAAAAAAAAACtwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hNYXAAAAAAAAAAAQMAAHhwdw0AAALuAAAAAAAAAAQAeHNxAH4ACXcNAAAC7gAAAAEAAAAEAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAqc3IAFXNjYWxhLkVudW1lcmF0aW9uJFZhbM9pZ6/J/O1PAgACSQAYc2NhbGEkRW51bWVyYXRpb24kVmFsJCRpTAAEbmFtZXQAEkxqYXZhL2xhbmcvU3RyaW5nO3hyABdzY2FsYS5FbnVtZXJhdGlvbiRWYWx1ZWJpfC/tIR1RAgACTAAGJG91dGVydAATTHNjYWxhL0VudW1lcmF0aW9uO0wAHHNjYWxhJEVudW1lcmF0aW9uJCRvdXRlckVudW1xAH4AEnhwcQB+AAhxAH4ACAAAACpweHNyABFUZXN0JEVudW0kJGFub24kMVlIjlmE1sXaAgAAeHEAfgARcQB+AAhxAH4ACHEAfgAT")
+ // check(Enum.V1)( "rO0ABXNyABFUZXN0JEVudW0kJGFub24kMVlIjlmE1sXaAgAAeHIAF3NjYWxhLkVudW1lcmF0aW9uJFZhbHVlYml8L+0hHVECAAJMAAYkb3V0ZXJ0ABNMc2NhbGEvRW51bWVyYXRpb247TAAcc2NhbGEkRW51bWVyYXRpb24kJG91dGVyRW51bXEAfgACeHBzcgAKVGVzdCRFbnVtJHrQiMkPAttzAgACTAACVjF0ABlMc2NhbGEvRW51bWVyYXRpb24kVmFsdWU7TAACVjJ0ABdMc2NhbGEvRW51bWVyYXRpb24kVmFsO3hyABFzY2FsYS5FbnVtZXJhdGlvbnWgzd2YDlmOAgAISQAGbmV4dElkSQAbc2NhbGEkRW51bWVyYXRpb24kJGJvdHRvbUlkSQAYc2NhbGEkRW51bWVyYXRpb24kJHRvcElkTAAUVmFsdWVPcmRlcmluZyRtb2R1bGV0ACJMc2NhbGEvRW51bWVyYXRpb24kVmFsdWVPcmRlcmluZyQ7TAAPVmFsdWVTZXQkbW9kdWxldAAdTHNjYWxhL0VudW1lcmF0aW9uJFZhbHVlU2V0JDtMAAhuZXh0TmFtZXQAG0xzY2FsYS9jb2xsZWN0aW9uL0l0ZXJhdG9yO0wAF3NjYWxhJEVudW1lcmF0aW9uJCRubWFwdAAeTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9NYXA7TAAXc2NhbGEkRW51bWVyYXRpb24kJHZtYXBxAH4AC3hwAAAAKwAAAAAAAAArcHBwc3IAIHNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5IYXNoTWFwAAAAAAAAAAEDAAB4cHcNAAAC7gAAAAAAAAAEAHhzcQB+AA13DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAKnNyABVzY2FsYS5FbnVtZXJhdGlvbiRWYWzPaWevyfztTwIAAkkAGHNjYWxhJEVudW1lcmF0aW9uJFZhbCQkaUwABG5hbWV0ABJMamF2YS9sYW5nL1N0cmluZzt4cQB+AAFxAH4ADHEAfgAMAAAAKnB4cQB+AANxAH4AFXEAfgAM")
+ // check(Enum.V2)( "rO0ABXNyABVzY2FsYS5FbnVtZXJhdGlvbiRWYWzPaWevyfztTwIAAkkAGHNjYWxhJEVudW1lcmF0aW9uJFZhbCQkaUwABG5hbWV0ABJMamF2YS9sYW5nL1N0cmluZzt4cgAXc2NhbGEuRW51bWVyYXRpb24kVmFsdWViaXwv7SEdUQIAAkwABiRvdXRlcnQAE0xzY2FsYS9FbnVtZXJhdGlvbjtMABxzY2FsYSRFbnVtZXJhdGlvbiQkb3V0ZXJFbnVtcQB+AAN4cHNyAApUZXN0JEVudW0ketCIyQ8C23MCAAJMAAJWMXQAGUxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZTtMAAJWMnQAF0xzY2FsYS9FbnVtZXJhdGlvbiRWYWw7eHIAEXNjYWxhLkVudW1lcmF0aW9udaDN3ZgOWY4CAAhJAAZuZXh0SWRJABtzY2FsYSRFbnVtZXJhdGlvbiQkYm90dG9tSWRJABhzY2FsYSRFbnVtZXJhdGlvbiQkdG9wSWRMABRWYWx1ZU9yZGVyaW5nJG1vZHVsZXQAIkxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZU9yZGVyaW5nJDtMAA9WYWx1ZVNldCRtb2R1bGV0AB1Mc2NhbGEvRW51bWVyYXRpb24kVmFsdWVTZXQkO0wACG5leHROYW1ldAAbTHNjYWxhL2NvbGxlY3Rpb24vSXRlcmF0b3I7TAAXc2NhbGEkRW51bWVyYXRpb24kJG5tYXB0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL01hcDtMABdzY2FsYSRFbnVtZXJhdGlvbiQkdm1hcHEAfgAMeHAAAAArAAAAAAAAACtwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hNYXAAAAAAAAAAAQMAAHhwdw0AAALuAAAAAAAAAAQAeHNxAH4ADncNAAAC7gAAAAEAAAAEAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAqcQB+AAR4c3IAEVRlc3QkRW51bSQkYW5vbiQxWUiOWYTWxdoCAAB4cQB+AAJxAH4ADXEAfgANcQB+AARxAH4ADQAAACpw")
+
+ // IndexedSeqLike#Elements
+ // TODO SI-8576 throws scala.UninitializedFieldError under -Xcheckinit
+ // check(new immutable.Range(0, 1, 1).iterator)("rO0ABXNyAChzY2FsYS5jb2xsZWN0aW9uLkluZGV4ZWRTZXFMaWtlJEVsZW1lbnRzGF+1cBwmcx0CAANJAANlbmRJAAVpbmRleEwABiRvdXRlcnQAIUxzY2FsYS9jb2xsZWN0aW9uL0luZGV4ZWRTZXFMaWtlO3hwAAAAAQAAAABzcgAgc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuUmFuZ2Vpu6NUqxUyDQIAB0kAA2VuZFoAB2lzRW1wdHlJAAtsYXN0RWxlbWVudEkAEG51bVJhbmdlRWxlbWVudHNJAAVzdGFydEkABHN0ZXBJAA90ZXJtaW5hbEVsZW1lbnR4cAAAAAEAAAAAAAAAAAEAAAAAAAAAAQAAAAE="
+ // , _.toList)
+
+ // check(new collection.concurrent.TrieMap[Any, Any]())( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmNvbmN1cnJlbnQuVHJpZU1hcKckxpgOIYHPAwAETAALZXF1YWxpdHlvYmp0ABJMc2NhbGEvbWF0aC9FcXVpdjtMAApoYXNoaW5nb2JqdAAcTHNjYWxhL3V0aWwvaGFzaGluZy9IYXNoaW5nO0wABHJvb3R0ABJMamF2YS9sYW5nL09iamVjdDtMAAtyb290dXBkYXRlcnQAOUxqYXZhL3V0aWwvY29uY3VycmVudC9hdG9taWMvQXRvbWljUmVmZXJlbmNlRmllbGRVcGRhdGVyO3hwc3IAMnNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwJE1hbmdsZWRIYXNoaW5nhTBoJQ/mgb0CAAB4cHNyABhzY2FsYS5tYXRoLkVxdWl2JCRhbm9uJDLBbyx4dy/qGwIAAHhwc3IANHNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwU2VyaWFsaXphdGlvbkVuZCSbjdgbbGCt2gIAAHhweA==")
+ // not sure why this one needs stable serialization.
+
+ // TODO SI-8576 unstable under -Xcheckinit
+ check(collection.convert.Wrappers)( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcA==")
+
+ check(immutable.BitSet(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5CaXRTZXQkQml0U2V0MR9dg8JGRI8UAgABSgAFZWxlbXN4cgAhc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuQml0U2V0Flz5Ms3qxsoCAAB4cAAAAAAAAAAO")
+ check(immutable.HashMap())( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoTWFwJFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAB4")
+ check(immutable.HashMap(1 -> 2))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoTWFwJFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4")
+ check(immutable.HashMap(1 -> 2, 3 -> 4))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoTWFwJFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAJzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADc3EAfgACAAAABHg=")
+ // TODO provoke HashMapCollision1
+
+ check(immutable.HashSet())( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAB4")
+ check(immutable.HashSet(1))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXg=")
+ check(immutable.HashSet(1, 2))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAJzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4")
+ check(immutable.HashSet(1, 2, 3))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
+ // TODO provoke HashSetCollision1
+
+ check(immutable.ListMap())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJEVtcHR5TGlzdE1hcCSNalsvpBZeDgIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwBC1gfIkUSKsCAAB4cA==")
+ check(immutable.ListMap(1 -> 2))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJE5vZGWmciM1Yav+8gIAA0wABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0TWFwO0wAA2tleXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABXZhbHVlcQB+AAJ4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcAQtYHyJFEirAgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcCRFbXB0eUxpc3RNYXAkjWpbL6QWXg4CAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABwAAAAI=")
+ check(immutable.Queue())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5RdWV1ZZY146W3qSuhAgACTAACaW50ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDtMAANvdXRxAH4AAXhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4cQB+AAQ=")
+ check(immutable.Queue(1, 2, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5RdWV1ZZY146W3qSuhAgACTAACaW50ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDtMAANvdXRxAH4AAXhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4c3EAfgADc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAgAAAACc3EAfgAIAAAAA3EAfgAGeA==")
+
+ // TODO SI-8576 throws scala.UninitializedFieldError under -Xcheckinit
+ // check(new immutable.Range(0, 1, 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5SYW5nZWm7o1SrFTINAgAHSQADZW5kWgAHaXNFbXB0eUkAC2xhc3RFbGVtZW50SQAQbnVtUmFuZ2VFbGVtZW50c0kABXN0YXJ0SQAEc3RlcEkAD3Rlcm1pbmFsRWxlbWVudHhwAAAAAQAAAAAAAAAAAQAAAAAAAAABAAAAAQ==")
+
+ check(immutable.Set())( "rO0ABXNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQk8Hk3TFN0uDYCAAB4cA==")
+ check(immutable.Set(1))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0MREd3c4yqtWTAgABTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDt4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAB")
+ check(immutable.Set(1, 2))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0MqaV02sZQzV0AgACTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAwAAAAI=")
+ check(immutable.Set(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0M84syT0560SgAgADTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABTAAFZWxlbTNxAH4AAXhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAMAAAACc3EAfgADAAAAAw==")
+ check(immutable.Set(1, 2, 3, 4))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0NM26psRRbei1AgAETAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABTAAFZWxlbTNxAH4AAUwABWVsZW00cQB+AAF4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgADAAAAAnNxAH4AAwAAAANzcQB+AAMAAAAE")
+ check(immutable.Set(1, 2, 3, 4, 5))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAAVzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAABXNxAH4AAgAAAAFzcQB+AAIAAAACc3EAfgACAAAAA3NxAH4AAgAAAAR4")
+
+ check(immutable.Stack(1, 2, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdGFjaxtt3qEbMvq+AgABTAAFZWxlbXN0ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDt4cHNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJzcQB+AAUAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHg=")
+
+ // TODO SI-8576 Uninitialized field: IndexedSeqLike.scala: 56
+ // check(immutable.Stream(1, 2, 3))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdHJlYW0kQ29uc/ekjBXM3TlFAgADTAACaGR0ABJMamF2YS9sYW5nL09iamVjdDtMAAV0bEdlbnQAEUxzY2FsYS9GdW5jdGlvbjA7TAAFdGxWYWx0ACNMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvU3RyZWFtO3hyACFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdHJlYW0552RDntM42gIAAHhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcgAtc2NhbGEuY29sbGVjdGlvbi5JdGVyYXRvciQkYW5vbmZ1biR0b1N0cmVhbSQxRWR4We0SX0UCAAFMAAYkb3V0ZXJ0ABtMc2NhbGEvY29sbGVjdGlvbi9JdGVyYXRvcjt4cHNyAChzY2FsYS5jb2xsZWN0aW9uLkluZGV4ZWRTZXFMaWtlJEVsZW1lbnRzGF+1cBwmcx0CAANJAANlbmRJAAVpbmRleEwABiRvdXRlcnQAIUxzY2FsYS9jb2xsZWN0aW9uL0luZGV4ZWRTZXFMaWtlO3hwAAAAAwAAAAFzcgArc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLldyYXBwZWRBcnJheSRvZkludMmRLBcI15VjAgABWwAFYXJyYXl0AAJbSXhwdXIAAltJTbpgJnbqsqUCAAB4cAAAAAMAAAABAAAAAgAAAANw")
+
+ check(immutable.TreeSet[Int]())( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHA=")
+
+ // TODO SI-8576 unstable under -Xcheckinit
+ // check(immutable.TreeSet(1, 2, 3))( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyADFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5SZWRCbGFja1RyZWUkQmxhY2tUcmVlzRxnCKenVAECAAB4cgAsc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWVrqCSyHJbsMgIABUkABWNvdW50TAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgACTAAFcmlnaHRxAH4AAkwABXZhbHVlcQB+AAh4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAnNxAH4ABgAAAAFzcQB+AAoAAAABcHBzcgAXc2NhbGEucnVudGltZS5Cb3hlZFVuaXR0pn1HHezLmgIAAHhwc3EAfgAGAAAAAXNxAH4ACgAAAANwcHEAfgAQcQB+ABA=")
+
+ // TODO SI-8576 Uninitialized field under -Xcheckinit
+ // check(mutable.ArrayBuffer(1, 2, 3))( "rO0ABXNyACRzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlCdWZmZXIVOLBTg4KOcwIAA0kAC2luaXRpYWxTaXplSQAFc2l6ZTBbAAVhcnJheXQAE1tMamF2YS9sYW5nL09iamVjdDt4cAAAABAAAAADdXIAE1tMamF2YS5sYW5nLk9iamVjdDuQzlifEHMpbAIAAHhwAAAAEHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAFAAAAAnNxAH4ABQAAAANwcHBwcHBwcHBwcHBw")
+ // TODO SI-8576 Uninitialized field under -Xcheckinit
+ // check(mutable.ArraySeq(1, 2, 3))( "rO0ABXNyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwAAAAA3VyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJzcQB+AAUAAAAD")
+ check(mutable.ArrayStack(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTdGFja3bdxXbcnLBeAgACSQAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJEFycmF5U3RhY2skJGluZGV4WwAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJEFycmF5U3RhY2skJHRhYmxldAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwAAAAA3VyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAA3NxAH4ABQAAAAJzcQB+AAUAAAAB")
+ check(mutable.DoubleLinkedList(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuRG91YmxlTGlua2VkTGlzdI73LKsKRr1RAgADTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NlcTtMAARwcmV2cQB+AAJ4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAAc3EAfgAEAAAAAnNxAH4AAHNxAH4ABAAAAANzcQB+AABwcQB+AAtxAH4ACXEAfgAHcQB+AANw")
+
+ check(mutable.HashMap())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAAAAAAABAB4")
+ check(mutable.HashMap(1 -> 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXEAfgAEeA==")
+ check(mutable.HashSet(1, 2, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAADAAAABQBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
+ // TODO SI-8576 Uninitialized field under -Xcheckinit
+ // check(new mutable.History())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGlzdG9yeUhuXxDIFJrsAgACSQAKbWF4SGlzdG9yeUwAA2xvZ3QAIExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUXVldWU7eHAAAAPoc3IAHnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5RdWV1ZbjMURVfOuHHAgAAeHIAJHNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5NdXRhYmxlTGlzdFJpnjJ+gFbAAgADSQADbGVuTAAGZmlyc3QwdAAlTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9MaW5rZWRMaXN0O0wABWxhc3QwcQB+AAV4cAAAAABzcgAjc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZExpc3Sak+nGCZHaUQIAAkwABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDtMAARuZXh0dAAeTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9TZXE7eHBwcQB+AApxAH4ACg==")
+ check(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4")
+ check(mutable.LinkedHashSet(1, 2, 3))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAu4AAAADAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==")
+ check(mutable.LinkedList(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkTGlzdJqT6cYJkdpRAgACTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NlcTt4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAAc3EAfgAEAAAAAnNxAH4AAHNxAH4ABAAAAANzcQB+AABwcQB+AAs=")
+
+ // TODO SI-8576 unstable under -Xcheckinit
+ // check(mutable.ListBuffer(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlzdEJ1ZmZlci9y9I7QyWzGAwAEWgAIZXhwb3J0ZWRJAANsZW5MAAVsYXN0MHQAKUxzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS8kY29sb24kY29sb247TAAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJExpc3RCdWZmZXIkJHN0YXJ0dAAhTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL0xpc3Q7eHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJzcQB+AAQAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHcFAAAAAAN4")
+ check(new mutable.StringBuilder(new java.lang.StringBuilder("123")))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuU3RyaW5nQnVpbGRlcomvqgGv1tTxAgABTAAKdW5kZXJseWluZ3QAGUxqYXZhL2xhbmcvU3RyaW5nQnVpbGRlcjt4cHNyABdqYXZhLmxhbmcuU3RyaW5nQnVpbGRlcjzV+xRaTGrLAwAAeHB3BAAAAAN1cgACW0OwJmaw4l2ErAIAAHhwAAAAEwAxADIAMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAeA==")
+ check(mutable.UnrolledBuffer[Int]())( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVW5yb2xsZWRCdWZmZXIAAAAAAAAAAQMAAUwAA3RhZ3QAGExzY2FsYS9yZWZsZWN0L0NsYXNzVGFnO3hwc3IAJXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JCRhbm9uJDnN+aJJU2O1UgIAAHhyABxzY2FsYS5yZWZsZWN0LkFueVZhbE1hbmlmZXN0AAAAAAAAAAECAAFMAAh0b1N0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO3hwdAADSW50dwQAAAAAeA==")
+
+ import collection.parallel
+ check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9IYXNoTWFwO3hwcHBzcgA1c2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuSGFzaE1hcCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAgMAAHhwdwQAAAABc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAcAAAACeA==")
+ check(parallel.immutable.ParHashSet(1, 2, 3))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoU2V0AAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9IYXNoU2V0O3hwcHBzcgA1c2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuSGFzaFNldCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAgMAAHhwdwQAAAADc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAcAAAACc3EAfgAHAAAAA3g=")
+ // TODO SI-8576 Uninitialized field under -Xcheckinit
+ // check(new parallel.immutable.ParRange(new Range(0, 1, 2)))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJSYW5nZQAAAAAAAAABAgAETAAXUGFyUmFuZ2VJdGVyYXRvciRtb2R1bGV0AEBMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9pbW11dGFibGUvUGFyUmFuZ2UkUGFyUmFuZ2VJdGVyYXRvciQ7TAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO0wABXJhbmdldAAiTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL1JhbmdlO3hwcHBwc3IAIHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLlJhbmdlabujVKsVMg0CAAdJAANlbmRaAAdpc0VtcHR5SQALbGFzdEVsZW1lbnRJABBudW1SYW5nZUVsZW1lbnRzSQAFc3RhcnRJAARzdGVwSQAPdGVybWluYWxFbGVtZW50eHAAAAABAAAAAAAAAAABAAAAAAAAAAIAAAAC")
+ // TODO SI-8576 unstable under -Xcheckinit
+ // check(parallel.mutable.ParArray(1, 2, 3))( "rO0ABXNyACpzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFyQXJyYXkAAAAAAAAAAQMABEwAF1BhckFycmF5SXRlcmF0b3IkbW9kdWxldAA+THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvbXV0YWJsZS9QYXJBcnJheSRQYXJBcnJheUl0ZXJhdG9yJDtMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAIYXJyYXlzZXF0ACNMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0FycmF5U2VxO3hwcHBwc3IAMXNjYWxhLmNvbGxlY3Rpb24ucGFyYWxsZWwubXV0YWJsZS5FeHBvc2VkQXJyYXlTZXGx2OTefAodSQIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5cQB+AAd4cAAAAAN1cgATW0xqYXZhLmxhbmcuT2JqZWN0O5DOWJ8QcylsAgAAeHAAAAADcHBwAAAAA3VxAH4ACgAAABBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ADQAAAAJzcQB+AA0AAAADcHBwcHBwcHBwcHBwcHg=")
+ check(parallel.mutable.ParHashMap(1 -> 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaE1hcAAAAAAAAAABAwACTAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO3hwcHB3DQAAAu4AAAABAAAABAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJ4")
+ check(parallel.mutable.ParHashSet(1, 2, 3))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaFNldAAAAAAAAAABAwACTAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO3hwcHB3DQAAAcIAAAADAAAAGwFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJzcQB+AAQAAAADeA==")
+
+ check("...".r)("rO0ABXNyABlzY2FsYS51dGlsLm1hdGNoaW5nLlJlZ2V44u3Vap7wIb8CAAJMAAdwYXR0ZXJudAAZTGphdmEvdXRpbC9yZWdleC9QYXR0ZXJuO0wAJXNjYWxhJHV0aWwkbWF0Y2hpbmckUmVnZXgkJGdyb3VwTmFtZXN0ABZMc2NhbGEvY29sbGVjdGlvbi9TZXE7eHBzcgAXamF2YS51dGlsLnJlZ2V4LlBhdHRlcm5GZ9VrbkkCDQIAAkkABWZsYWdzTAAHcGF0dGVybnQAEkxqYXZhL2xhbmcvU3RyaW5nO3hwAAAAAHQAAy4uLnNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgAsc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNlcmlhbGl6ZUVuZCSKXGNb91MLbQIAAHhweA==",
+ r => (r.toString))
+}
diff --git a/test/files/run/t8549b.scala b/test/files/run/t8549b.scala
new file mode 100644
index 0000000000..1e1bf2c0bc
--- /dev/null
+++ b/test/files/run/t8549b.scala
@@ -0,0 +1,16 @@
+
+@SerialVersionUID(42)
+class C
+
+@SerialVersionUID(43 - 1)
+class D
+
+
+object Test extends App {
+ def checkId(cls: Class[_]) {
+ val id = cls.getDeclaredField("serialVersionUID").get(null)
+ assert(id == 42, (cls, id))
+ }
+ checkId(classOf[C])
+ checkId(classOf[D])
+}
diff --git a/test/files/run/t8570.flags b/test/files/run/t8570.flags
new file mode 100644
index 0000000000..3d1ee4760a
--- /dev/null
+++ b/test/files/run/t8570.flags
@@ -0,0 +1 @@
+-Xcheckinit
diff --git a/test/files/run/t8570.scala b/test/files/run/t8570.scala
new file mode 100644
index 0000000000..bbe83e9080
--- /dev/null
+++ b/test/files/run/t8570.scala
@@ -0,0 +1,10 @@
+trait Trait40_1 {
+ val value37_2 = ()
+ def run = { value37_2 }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ (new Trait40_1 {}).run
+ }
+}
diff --git a/test/files/run/t8570a.check b/test/files/run/t8570a.check
new file mode 100644
index 0000000000..6a452c185a
--- /dev/null
+++ b/test/files/run/t8570a.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t8570a.flags b/test/files/run/t8570a.flags
new file mode 100644
index 0000000000..3d1ee4760a
--- /dev/null
+++ b/test/files/run/t8570a.flags
@@ -0,0 +1 @@
+-Xcheckinit
diff --git a/test/files/run/t8570a.scala b/test/files/run/t8570a.scala
new file mode 100644
index 0000000000..ef116e2a8a
--- /dev/null
+++ b/test/files/run/t8570a.scala
@@ -0,0 +1,14 @@
+trait Trait40_1 {
+ val value37_2 = ()
+ def run = { value37_2 }
+}
+
+trait T1 extends Trait40_1 {
+ override val value37_2 = ()
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println((new T1 {}).run)
+ }
+}
diff --git a/test/files/run/t8574.scala b/test/files/run/t8574.scala
new file mode 100644
index 0000000000..8c23ada482
--- /dev/null
+++ b/test/files/run/t8574.scala
@@ -0,0 +1,27 @@
+import annotation._
+
+@SerialVersionUID(42) @strictfp class Foo[@specialized(Int) T] extends Serializable {
+ def foo(t: T) = t
+}
+
+object Test extends App {
+ def checkUID(cls: Class[_], expected: Long) = {
+ val actual = java.io.ObjectStreamClass.lookup(cls).getSerialVersionUID
+ assert(actual == expected, s"$actual != expected for ${cls}")
+ }
+ def checkStrictFp(cls: Class[_]) = {
+ import java.lang.reflect._
+ for (m <- cls.getDeclaredMethods) {
+ val isStrict = Modifier.isStrict(m.getModifiers)
+ assert(isStrict, cls)
+ }
+ }
+ def check(x: AnyRef) {
+ checkUID(x.getClass, 42)
+ checkStrictFp(x.getClass)
+ }
+
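+ // `new Foo[Int]` instantiates the Int-specialized subclass generated for Foo, so the second
+ // check also verifies that @SerialVersionUID and @strictfp are carried over to it.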
+ check(new Foo[String])
+ check(new Foo[Int])
+}
+
diff --git a/test/files/run/t8601-closure-elim.flags b/test/files/run/t8601-closure-elim.flags
new file mode 100644
index 0000000000..2b5fd8a7b2
--- /dev/null
+++ b/test/files/run/t8601-closure-elim.flags
@@ -0,0 +1 @@
+-optimize -Ydelambdafy:inline
diff --git a/test/files/run/t8601-closure-elim.scala b/test/files/run/t8601-closure-elim.scala
new file mode 100644
index 0000000000..2c5b03af77
--- /dev/null
+++ b/test/files/run/t8601-closure-elim.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import scala.tools.asm.util._
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ val nullChecks = Set(asm.Opcodes.NEW)
+
+ def show: Unit = {
+ def test(methodName: String) {
+ val classNode = loadClassNode("Foo")
+ val methodNode = getMethod(classNode, "b")
+ val ops = methodNode.instructions.iterator.asScala.map(_.getOpcode).toList
+ assert(!ops.contains(asm.Opcodes.NEW), ops) // should be allocation free if the closure is eliminated
+ }
+ test("b")
+ }
+}
+
+class Foo {
+ @inline final def a(x: Int => Int) = x(1)
+ final def b {
+ val delta = 0
+ a(x => delta + 1)
+ }
+}
diff --git a/test/files/run/t8601.flags b/test/files/run/t8601.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/t8601.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/t8601.scala b/test/files/run/t8601.scala
new file mode 100644
index 0000000000..e1afc23cc4
--- /dev/null
+++ b/test/files/run/t8601.scala
@@ -0,0 +1,15 @@
+object Test {
+ def idiv(x: Int): Unit = x / 0
+ def ldiv(x: Long): Unit = x / 0
+ def irem(x: Int): Unit = x % 0
+ def lrem(x: Long): Unit = x % 0
+
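+ // The results above are discarded (the methods return Unit); under -optimize the throwing
+ // division instructions must nevertheless be kept, so the checks below still observe the
+ // ArithmeticException.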
+ def check(x: => Any) = try { x; sys.error("failed to throw divide by zero!") } catch { case _: ArithmeticException => }
+
+ def main(args: Array[String]) {
+ check(idiv(1))
+ check(ldiv(1L))
+ check(irem(1))
+ check(lrem(1L))
+ }
+}
diff --git a/test/files/run/t8601b.flags b/test/files/run/t8601b.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/t8601b.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/t8601b.scala b/test/files/run/t8601b.scala
new file mode 100644
index 0000000000..9c37ce33d6
--- /dev/null
+++ b/test/files/run/t8601b.scala
@@ -0,0 +1,14 @@
+object Test {
+ def len(x: Array[String]): Unit = x.length
+ def load(x: Array[String]): Unit = x(0)
+ def newarray(i: Int): Unit = new Array[Int](i)
+
+ def check(x: => Any) = try { x; sys.error("failed to throw NPE!") } catch { case _: NullPointerException => }
+ def checkNegSize(x: => Any) = try { x; sys.error("failed to throw NegativeArraySizeException!") } catch { case _: NegativeArraySizeException => }
+
+ def main(args: Array[String]) {
+ check(len(null)) // bug: did not NPE
+ check(load(null))
+ checkNegSize(newarray(-1))
+ }
+}
diff --git a/test/files/run/t8601c.flags b/test/files/run/t8601c.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/t8601c.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/t8601c.scala b/test/files/run/t8601c.scala
new file mode 100644
index 0000000000..c487d6825e
--- /dev/null
+++ b/test/files/run/t8601c.scala
@@ -0,0 +1,12 @@
+object Test {
+ def loadField(x: scala.runtime.IntRef): Unit = x.elem
+ def storeField(x: scala.runtime.IntRef): Unit = x.elem = 42
+
+ def check(x: => Any) = try { x; sys.error("failed to throw NPE!") } catch { case _: NullPointerException => }
+
+ def main(args: Array[String]) {
+ check(loadField(null)) // bug: did not NPE under -Ydead-code
+ check(storeField(null))
+
+ }
+}
diff --git a/test/files/run/t8601d.flags b/test/files/run/t8601d.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/t8601d.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/t8601d.scala b/test/files/run/t8601d.scala
new file mode 100644
index 0000000000..ac89963d67
--- /dev/null
+++ b/test/files/run/t8601d.scala
@@ -0,0 +1,8 @@
+object Test {
+ def monitor(x: AnyRef): Unit = {x.synchronized(()); ()}
+ def check(x: => Any) = try { x; sys.error("failed to throw NPE") } catch { case _: NullPointerException => }
+
+ def main(args: Array[String]) {
+ check(monitor(null))
+ }
+}
diff --git a/test/files/run/t8601e.flags b/test/files/run/t8601e.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/run/t8601e.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t8601e/StaticInit.class b/test/files/run/t8601e/StaticInit.class
new file mode 100644
index 0000000000..99a0e2a643
--- /dev/null
+++ b/test/files/run/t8601e/StaticInit.class
Binary files differ
diff --git a/test/files/run/t8601e/StaticInit.java b/test/files/run/t8601e/StaticInit.java
new file mode 100644
index 0000000000..7543ed98b8
--- /dev/null
+++ b/test/files/run/t8601e/StaticInit.java
@@ -0,0 +1,8 @@
+public class StaticInit {
+ static {
+ if ("".isEmpty()) {
+ throw new RuntimeException();
+ }
+ }
+ public static int fld = 42;
+}
diff --git a/test/files/run/t8601e/Test.scala b/test/files/run/t8601e/Test.scala
new file mode 100644
index 0000000000..838114f6a7
--- /dev/null
+++ b/test/files/run/t8601e/Test.scala
@@ -0,0 +1,12 @@
+class C {
+ def foo: Unit = {StaticInit.fld}
+}
+
+object Test extends App {
+ try {
+ new C().foo
+ sys.error("StaticInit.<clinit> was not run!")
+ } catch {
+ case t: ExceptionInInitializerError =>
+ }
+}
diff --git a/test/files/run/t8607.scala b/test/files/run/t8607.scala
new file mode 100644
index 0000000000..1b8ef9bbd0
--- /dev/null
+++ b/test/files/run/t8607.scala
@@ -0,0 +1,36 @@
+package p1 {
+ private[p1] trait B extends Any {
+ def a: Any = ""
+ }
+
+ class C(val value: Int) extends AnyVal with B {
+ // def b = ""
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val c = new p1.C(42)
+ c.a
+ /*
+ new p1.C.<init>(
+ c.$asInstanceOf[scala.this.Int]()
+ ).a();
+
+
+ new p1.C.<init>(
+ new p1.C.<init>(
+ c.$asInstanceOf[scala.this.Int]()
+ ).$asInstanceOf[ErasedValueType(class C, scala.this.Int)]()
+ .$asInstanceOf[scala.this.Int]()
+ ).a();
+
+ new p1.C.<init>(
+ new p1.C.<init>(c)
+ .$asInstanceOf[scala.this.Int]()
+ .$asInstanceOf[scala.this.Int]()
+ ).a();
+
+ */
+ }
+}
diff --git a/test/files/run/t8608-no-format.scala b/test/files/run/t8608-no-format.scala
new file mode 100644
index 0000000000..71c369a7ea
--- /dev/null
+++ b/test/files/run/t8608-no-format.scala
@@ -0,0 +1,15 @@
+
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |f"hello, world"
+ |:javap -prv -
+ """.stripMargin
+
+ // no format
+ override def yah(res: Seq[String]) = {
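+ // An f-interpolation with no format specifiers should not be compiled into a call to
+ // StringOps.format; the check below asserts that no line of the javap output mentions it.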
+ // note: avoid the word "information"
+ res forall (!_.contains("StringOps.format"))
+ }
+}
diff --git a/test/files/run/t8610.check b/test/files/run/t8610.check
new file mode 100644
index 0000000000..b3ab7a9cef
--- /dev/null
+++ b/test/files/run/t8610.check
@@ -0,0 +1,7 @@
+t8610.scala:6: warning: Adapting argument list by creating a 2-tuple: this may not be what you want.
+ signature: X.f(p: (Int, Int)): Int
+ given arguments: 3, 4
+ after adaptation: X.f((3, 4): (Int, Int))
+ def g = f(3, 4) // adapted
+ ^
+Hi, $name
diff --git a/test/files/run/t8610.flags b/test/files/run/t8610.flags
new file mode 100644
index 0000000000..4195dec383
--- /dev/null
+++ b/test/files/run/t8610.flags
@@ -0,0 +1 @@
+-Xlint:adapted-args
diff --git a/test/files/run/t8610.scala b/test/files/run/t8610.scala
new file mode 100644
index 0000000000..dd9e8e861e
--- /dev/null
+++ b/test/files/run/t8610.scala
@@ -0,0 +1,13 @@
+
+// flags don't warn on u
+case class X(name: String) {
+ def x = "Hi, $name" // missing interp
+ def f(p: (Int, Int)): Int = p._1 * p._2
+ def g = f(3, 4) // adapted
+ def u: Unit = () // unitarian universalist
+}
+
+object Test extends App {
+ // poignant demonstration
+ Console println X("Bob").x
+}
diff --git a/test/files/run/t8611a.flags b/test/files/run/t8611a.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/run/t8611a.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/run/t8611a.scala b/test/files/run/t8611a.scala
new file mode 100644
index 0000000000..99304df762
--- /dev/null
+++ b/test/files/run/t8611a.scala
@@ -0,0 +1,16 @@
+trait K
+trait L
+
+object O {
+ type LK = K with L
+ val A: LK = new K with L
+ val B: LK = new K with L
+}
+
+object Test extends App {
+ val scrut: O.LK = O.B
+ scrut match {
+ case O.A => ???
+ case O.B => // spurious unreachable
+ }
+}
diff --git a/test/files/run/t8611b.flags b/test/files/run/t8611b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/run/t8611b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/run/t8611b.scala b/test/files/run/t8611b.scala
new file mode 100644
index 0000000000..2df17c9ca0
--- /dev/null
+++ b/test/files/run/t8611b.scala
@@ -0,0 +1,54 @@
+sealed trait KrafsDescription
+
+abstract class NotWorkingEnum extends Enumeration {
+
+ type ExtendedValue = Value with KrafsDescription
+
+ def Enum(inDescription: String): ExtendedValue = {
+ new Val(nextId) with KrafsDescription {
+ }
+ }
+}
+
+abstract class WorkingEnum extends Enumeration {
+
+ type ExtendedValue = Value
+
+ def Enum(inDescription: String): ExtendedValue = {
+ new Val(nextId) {
+ }
+ }
+}
+
+object NotWorkingTab extends NotWorkingEnum {
+ val a = Enum("A")
+ val b = Enum("B")
+}
+
+object WorkingTab extends WorkingEnum {
+ val a = Enum("A")
+ val b = Enum("B")
+}
+
+object Test extends App {
+ testGris()
+ testWorking()
+
+ def testGris() {
+ val pipp = NotWorkingTab.b
+ pipp match {
+ case NotWorkingTab.a => ???
+ case NotWorkingTab.b =>
+ case _ => ???
+ }
+ }
+
+ def testWorking() {
+ val stuff = WorkingTab.a
+ stuff match {
+ case WorkingTab.a =>
+ case WorkingTab.b => ???
+ case _ => ???
+ }
+ }
+}
diff --git a/test/files/run/t8611c.flags b/test/files/run/t8611c.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/run/t8611c.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/run/t8611c.scala b/test/files/run/t8611c.scala
new file mode 100644
index 0000000000..2bd17f29a5
--- /dev/null
+++ b/test/files/run/t8611c.scala
@@ -0,0 +1,21 @@
+trait K
+trait L
+
+object O {
+ type LK = K with L
+}
+
+object Test extends App {
+ local
+
+ def local = {
+ val A: O.LK = new K with L
+ val B: O.LK = new K with L
+ val scrut: O.LK = A
+ scrut match {
+ case B if "".isEmpty => ???
+ case A =>
+ case B => ???
+ }
+ }
+}
diff --git a/test/files/run/t8637.check b/test/files/run/t8637.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t8637.check
diff --git a/test/files/run/t8637.scala b/test/files/run/t8637.scala
new file mode 100644
index 0000000000..99c8d4c413
--- /dev/null
+++ b/test/files/run/t8637.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.currentMirror
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = currentMirror.mkToolBox()
+ tb.compile(q"true > true")
+ tb.typecheck(q"true > true")
+} \ No newline at end of file
diff --git a/test/files/run/t8680.scala b/test/files/run/t8680.scala
new file mode 100644
index 0000000000..2bce09c507
--- /dev/null
+++ b/test/files/run/t8680.scala
@@ -0,0 +1,53 @@
+object Test extends App {
+ def pre(n: Int) = (-n to -1).toStream
+
+ def cyc(m: Int) = {
+ lazy val s: Stream[Int] = (0 until m).toStream #::: s
+ s
+ }
+
+ def precyc(n: Int, m: Int) = pre(n) #::: cyc(m)
+
+ def str(s: Stream[Int]) = {
+ val b = new StringBuilder
+ s.addString(b, "", "", "")
+ b.toString
+ }
+
+ def goal(n: Int, m: Int) = (-n until m).mkString + "..."
+
+ // Check un-forced cyclic and non-cyclic streams
+ assert(str(pre(2)) == pre(2).take(1).toList.mkString + "?")
+ assert(str(cyc(2)) == cyc(2).take(1).toList.mkString + "?")
+ assert(str(precyc(2,2)) == precyc(2,2).take(1).toList.mkString + "?")
+ assert(!pre(2).hasDefiniteSize)
+ assert(!cyc(2).hasDefiniteSize)
+ assert(!precyc(2,2).hasDefiniteSize)
+
+ // Check forced cyclic and non-cyclic streams
+ assert(str(pre(2).force) == (-2 to -1).mkString)
+ assert(str(cyc(2).force) == (0 until 2).mkString + "...")
+ assert(str(precyc(2,2).force) == (-2 until 2).mkString + "...")
+ assert(pre(2).force.hasDefiniteSize)
+ assert(!cyc(2).force.hasDefiniteSize)
+ assert(!precyc(2,2).force.hasDefiniteSize)
+
+ // Special cases
+ assert(str(cyc(1).force) == goal(0,1))
+ assert(str(precyc(1,6).force) == goal(1,6))
+ assert(str(precyc(6,1).force) == goal(6,1))
+
+ // Make sure there are no odd/even problems
+ for (n <- 3 to 4; m <- 3 to 4) {
+ assert(precyc(n,m).mkString == goal(n,m), s"mkString $n $m")
+ assert(!precyc(n,m).force.hasDefiniteSize, s"hasDef $n$m")
+ }
+
+ // Make sure there are no cycle/prefix modulus problems
+ for (i <- 6 to 8) {
+ assert(precyc(i,3).mkString == goal(i,3), s"mkString $i 3")
+ assert(precyc(3,i).mkString == goal(3,i), s"mkString 3 $i")
+ assert(!precyc(i,3).force.hasDefiniteSize, s"hasDef $i 3")
+ assert(!precyc(3,i).force.hasDefiniteSize, s"hasDef 3 $i")
+ }
+}
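For context, cyc above builds a genuinely cyclic Stream through a self-referential lazy val, and the assertions rely on addString/mkString detecting that cycle instead of diverging. A minimal sketch of the same shape (values are illustrative):

object CyclicStreamSketch extends App {
  lazy val cycle: Stream[Int] = Stream(1, 2, 3) #::: cycle // tail loops back to the head
  println(cycle.take(7).toList) // List(1, 2, 3, 1, 2, 3, 1): laziness keeps this finite
  println(cycle.force.mkString) // "123..." once the cycle-aware addString exercised above kicks in
}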
diff --git a/test/files/run/t8690.check b/test/files/run/t8690.check
new file mode 100644
index 0000000000..72f076c4d8
--- /dev/null
+++ b/test/files/run/t8690.check
@@ -0,0 +1,2 @@
+non-empty iterator
+abcdef
diff --git a/test/files/run/t8690.scala b/test/files/run/t8690.scala
new file mode 100644
index 0000000000..ab8b45b2a7
--- /dev/null
+++ b/test/files/run/t8690.scala
@@ -0,0 +1,12 @@
+import scala.io.Source
+import java.io.ByteArrayInputStream
+
+object Test extends App {
+ val txt = "abcdef"
+
+ val in = new ByteArrayInputStream(txt.getBytes());
+ val source = Source.fromInputStream(in);
+ println(source.toString) // forces the BufferedSource to look at the head of the input
+
+ println(source.mkString) // used to return "bcdef" ...
+}
diff --git a/test/files/run/t8708_b.check b/test/files/run/t8708_b.check
new file mode 100644
index 0000000000..30be62a307
--- /dev/null
+++ b/test/files/run/t8708_b.check
@@ -0,0 +1,8 @@
+Scope{
+ def <init>: <?>;
+ sealed abstract trait T extends ;
+ def foo: <?>
+}
+Scope{
+ def f: <?>
+}
diff --git a/test/files/run/t8708_b/A_1.scala b/test/files/run/t8708_b/A_1.scala
new file mode 100644
index 0000000000..e767420f9e
--- /dev/null
+++ b/test/files/run/t8708_b/A_1.scala
@@ -0,0 +1,8 @@
+package p
+
+class C {
+
+ sealed trait T { def f: Int }
+
+ def foo: T = new T { def f = 1 }
+}
diff --git a/test/files/run/t8708_b/Test_2.scala b/test/files/run/t8708_b/Test_2.scala
new file mode 100644
index 0000000000..c978490609
--- /dev/null
+++ b/test/files/run/t8708_b/Test_2.scala
@@ -0,0 +1,21 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -cp " + testOutput.path
+
+ override def code = ""
+
+ override def show(): Unit = {
+ val g = newCompiler()
+ withRun(g)(r => {
+ val c = g.rootMirror.getRequiredClass("p.C")
+ println(c.info.decls)
+ val t = c.info.member(g.newTypeName("T"))
+ // this test ensures that the <local child> dummy class symbol is not entered in the
+ // scope of trait T during unpickling.
+ println(t.info.decls)
+ })
+ }
+}
diff --git a/test/files/run/t8738.scala b/test/files/run/t8738.scala
new file mode 100644
index 0000000000..6898301db7
--- /dev/null
+++ b/test/files/run/t8738.scala
@@ -0,0 +1,16 @@
+object Test {
+ def check(a: Range, b: Range) = (a == b) == (a.toList == b.toList)
+ def main(args: Array[String]) {
+ val lo = -2 to 2
+ val hi = lo
+ val step = List(-6,-2,-1,1,2,6)
+ for (i <- lo; j <- hi; n <- step; k <- lo; l <- hi; m <- step) {
+ assert(
+ check(i until j by n, k until l by m) &&
+ check(i until j by n, k to l by m) &&
+ check(i to j by n, k until l by m) &&
+ check(i to j by n, k to l by m)
+ )
+ }
+ }
+}
diff --git a/test/files/run/t8764.check b/test/files/run/t8764.check
new file mode 100644
index 0000000000..6260069602
--- /dev/null
+++ b/test/files/run/t8764.check
@@ -0,0 +1,5 @@
+IntOnly: should return an unboxed int
+Int: int
+IntAndDouble: should just box and return AnyVal
+Double: class java.lang.Double
+Int: class java.lang.Integer
diff --git a/test/files/run/t8764.flags b/test/files/run/t8764.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/run/t8764.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/run/t8764.scala b/test/files/run/t8764.scala
new file mode 100644
index 0000000000..decc658f6e
--- /dev/null
+++ b/test/files/run/t8764.scala
@@ -0,0 +1,16 @@
+object Test extends App {
+case class IntOnly(i: Int, j: Int)
+
+println("IntOnly: should return an unboxed int")
+val a = IntOnly(1, 2)
+val i: Int = a.productElement(0)
+println(s"Int: ${a.productElement(0).getClass}")
+
+case class IntAndDouble(i: Int, d: Double)
+
+println("IntAndDouble: should just box and return Anyval")
+val b = IntAndDouble(1, 2.0)
+val j: AnyVal = b.productElement(0)
+println(s"Double: ${b.productElement(1).getClass}")
+println(s"Int: ${b.productElement(0).getClass}")
+}
diff --git a/test/files/run/t8803.check b/test/files/run/t8803.check
new file mode 100644
index 0000000000..bd26a0fb14
--- /dev/null
+++ b/test/files/run/t8803.check
@@ -0,0 +1,16 @@
+a
+b
+b
+c
+a
+b
+b
+c
+a
+b
+b
+c
+a
+b
+b
+c
diff --git a/test/files/run/t8803.scala b/test/files/run/t8803.scala
new file mode 100644
index 0000000000..2e56180502
--- /dev/null
+++ b/test/files/run/t8803.scala
@@ -0,0 +1,57 @@
+class A {
+ def m = "a"
+ protected def n = "a"
+}
+
+trait B {
+ def m = "b"
+ protected def n = "b"
+}
+
+class C extends A with B {
+ override def m = "c"
+ override protected def n = "c"
+
+ val f1 = () => super[A].m
+ val f2 = () => super[B].m
+ val f3 = () => super.m
+ val f4 = () => this.m
+
+ val g1 = new runtime.AbstractFunction0[String] { def apply() = C.super[A].m }
+ val g2 = new runtime.AbstractFunction0[String] { def apply() = C.super[B].m }
+ val g3 = new runtime.AbstractFunction0[String] { def apply() = C.super.m }
+ val g4 = new runtime.AbstractFunction0[String] { def apply() = C.this.m }
+
+ val h1 = () => super[A].n
+ val h2 = () => super[B].n
+ val h3 = () => super.n
+ val h4 = () => this.n
+
+ val i1 = new runtime.AbstractFunction0[String] { def apply() = C.super[A].n }
+ val i2 = new runtime.AbstractFunction0[String] { def apply() = C.super[B].n }
+ val i3 = new runtime.AbstractFunction0[String] { def apply() = C.super.n }
+ val i4 = new runtime.AbstractFunction0[String] { def apply() = C.this.n }
+}
+
+object Test extends App {
+ val c = new C
+ println(c.f1())
+ println(c.f2())
+ println(c.f3())
+ println(c.f4())
+
+ println(c.g1())
+ println(c.g2())
+ println(c.g3())
+ println(c.g4())
+
+ println(c.h1())
+ println(c.h2())
+ println(c.h3())
+ println(c.h4())
+
+ println(c.i1())
+ println(c.i2())
+ println(c.i3())
+ println(c.i4())
+}
diff --git a/test/files/run/t8823.scala b/test/files/run/t8823.scala
new file mode 100644
index 0000000000..0ac653566a
--- /dev/null
+++ b/test/files/run/t8823.scala
@@ -0,0 +1,10 @@
+class Tuple2Int(val encoding: Long) extends AnyVal with Product2[Int, Int] {
+ def canEqual(that: Any) = false
+ def _1: Int = 1
+ def _2: Int = 2
+}
+
+object Test extends App {
+ assert(new Tuple2Int(0)._1 == 1)
+ assert(new Tuple2Int(0)._2 == 2)
+}
diff --git a/test/files/run/t8843-repl-xlat.scala b/test/files/run/t8843-repl-xlat.scala
new file mode 100644
index 0000000000..6426dbe7d4
--- /dev/null
+++ b/test/files/run/t8843-repl-xlat.scala
@@ -0,0 +1,33 @@
+
+import scala.tools.partest.SessionTest
+
+// Handy hamburger helper for repl resources
+object Test extends SessionTest {
+ def session =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> $intp.isettings.unwrapStrings = false
+$intp.isettings.unwrapStrings: Boolean = false
+
+scala> class Bippy
+defined class Bippy
+
+scala> $intp.classLoader getResource "Bippy.class"
+res0: java.net.URL = memory:(memory)/$line4/$read$$iw$$iw$Bippy.class
+
+scala> ($intp.classLoader getResources "Bippy.class").nextElement
+res1: java.net.URL = memory:(memory)/$line4/$read$$iw$$iw$Bippy.class
+
+scala> ($intp.classLoader classBytes "Bippy").nonEmpty
+res2: Boolean = true
+
+scala> ($intp.classLoader classAsStream "Bippy") != null
+res3: Boolean = true
+
+scala> $intp.classLoader getResource "Bippy"
+res4: java.net.URL = null
+
+scala> :quit"""
+}
+
diff --git a/test/files/run/t8845.flags b/test/files/run/t8845.flags
new file mode 100644
index 0000000000..c30091d3de
--- /dev/null
+++ b/test/files/run/t8845.flags
@@ -0,0 +1 @@
+-Ybackend:GenBCode
diff --git a/test/files/run/t8845.scala b/test/files/run/t8845.scala
new file mode 100644
index 0000000000..8ccdbdadc7
--- /dev/null
+++ b/test/files/run/t8845.scala
@@ -0,0 +1,17 @@
+// crashes compiler under GenASM, works under GenBCode.
+object Interpreter {
+ def mkDataProp(i: Int) = i
+ def break(n: Int): Unit =
+ try {
+ n match {
+ case _ =>
+ val newDesc = mkDataProp(n)
+ n match { case _ => return }
+ }
+ } catch { case e: Throwable => }
+ finally { }
+}
+
+object Test extends App {
+ Interpreter.break(0)
+}
diff --git a/test/files/run/t8852a.scala b/test/files/run/t8852a.scala
new file mode 100644
index 0000000000..cbff8ab75b
--- /dev/null
+++ b/test/files/run/t8852a.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest._
+
+// Test that static methods in Java interfaces (new in Java 8)
+// are callable from jointly compiled Scala code.
+object Test extends CompilerTest {
+ import global._
+
+ override lazy val units: List[CompilationUnit] = {
+ // This test itself does not depend on JDK8.
+ javaCompilationUnits(global)(staticMethodInInterface) ++
+ compilationUnits(global)(scalaClient)
+ }
+
+ private def staticMethodInInterface = """
+public interface Interface {
+ public static int staticMethod() {
+ return 42;
+ }
+}
+
+ """
+
+ private def scalaClient = """
+object Test {
+ val x: Int = Interface.staticMethod()
+}
+
+class C extends Interface // expect no errors about unimplemented members.
+
+ """
+
+ // We're only checking we can compile it.
+ def check(source: String, unit: global.CompilationUnit): Unit = ()
+}
diff --git a/test/files/run/t8888.flags b/test/files/run/t8888.flags
new file mode 100644
index 0000000000..48b438ddf8
--- /dev/null
+++ b/test/files/run/t8888.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/run/t8888.scala b/test/files/run/t8888.scala
new file mode 100644
index 0000000000..36cc1ddf3e
--- /dev/null
+++ b/test/files/run/t8888.scala
@@ -0,0 +1,12 @@
+class C {
+ final def resume: Unit = (this: Any) match {
+ case x : C => (x: Any) match {
+ case y : C =>
+ () => (x, y) // used to trigger a ClassFormatError under -Ydelambdafy:method
+ }
+ }
+}
+
+object Test extends App {
+ new C().resume
+}
diff --git a/test/files/run/t8893.scala b/test/files/run/t8893.scala
new file mode 100644
index 0000000000..6fef8ae912
--- /dev/null
+++ b/test/files/run/t8893.scala
@@ -0,0 +1,40 @@
+import annotation.tailrec
+
+object Test {
+ def a(): Option[String] = Some("a")
+
+ def test1: Any = {
+ a() match {
+ case Some(b1) =>
+ a() match {
+ case Some(b2) =>
+ @tailrec
+ def tick(i: Int): Unit = if (i < 0) () else tick(i - 1)
+ tick(10000000) // testing that this doesn't SOE
+ case None => None
+ }
+ case None => None
+ }
+ }
+
+ def test2: Any = {
+ a() match {
+ case Some(b1) =>
+ a() match {
+ case Some(b2) =>
+ @tailrec
+ def tick(i: Int): Unit = if (i < 0) () else tick(i - 1)
+ tick(10000000) // testing that this doesn't SOE
+ case None => test1
+ }
+ case None =>
+ test1 // not a tail call
+ test1
+ }
+ }
+
+ def main(args: Array[String]) {
+ test1
+ test2
+ }
+}
diff --git a/test/files/run/t8893b.scala b/test/files/run/t8893b.scala
new file mode 100644
index 0000000000..19120871aa
--- /dev/null
+++ b/test/files/run/t8893b.scala
@@ -0,0 +1,15 @@
+// Testing that recursive calls in tail positions are replaced with
+// jumps, even though the method contains recursive calls outside
+// of the tail position.
+object Test {
+ def tick(i : Int): Unit =
+ if (i == 0) ()
+ else if (i == 42) {
+ tick(0) /*not in tail position*/
+ tick(i - 1)
+ } else tick(i - 1)
+
+ def main(args: Array[String]): Unit = {
+ tick(1000000)
+ }
+}
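The comment above is the crux of the fix: only calls in tail position are turned into jumps, and @tailrec lets the compiler verify that a given method qualifies. A small sketch of the distinction (method names are illustrative):

import scala.annotation.tailrec

object TailPositionSketch {
  @tailrec
  def countdown(i: Int): Unit =
    if (i > 0) countdown(i - 1) // sole recursive call is in tail position: compiled as a jump

  def notTail(i: Int): Int =
    if (i == 0) 0
    else 1 + notTail(i - 1) // result feeds into +, so this call cannot become a jump
}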
diff --git a/test/files/run/t8907.scala b/test/files/run/t8907.scala
new file mode 100644
index 0000000000..7952ac82d9
--- /dev/null
+++ b/test/files/run/t8907.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def show(): Unit = {
+ compileCode("""
+ class C { class Inner }
+
+ class D {
+ object O {
+ def foo(c: C)(i: c.Inner): c.Inner = ???
+ }
+ }
+ """)
+ assert(filteredInfos.isEmpty, filteredInfos)
+ deleteClass("C")
+ compileCode("""
+ class E {
+ def foo = {
+ (null: D).toString
+ }
+ }
+ """)
+ assert(storeReporter.infos.isEmpty, storeReporter.infos.mkString("\n")) // Included a MissingRequirementError before.
+ }
+
+ def deleteClass(name: String) {
+ val classFile = new File(testOutput.path, name + ".class")
+ assert(classFile.exists)
+ assert(classFile.delete())
+ }
+}
diff --git a/test/files/run/t8925.check b/test/files/run/t8925.check
new file mode 100644
index 0000000000..112e7005df
--- /dev/null
+++ b/test/files/run/t8925.check
@@ -0,0 +1,2 @@
+bar
+abcd
diff --git a/test/files/run/t8925.flags b/test/files/run/t8925.flags
new file mode 100644
index 0000000000..ffc65f4b81
--- /dev/null
+++ b/test/files/run/t8925.flags
@@ -0,0 +1 @@
+-Yopt:l:none -Ybackend:GenBCode
diff --git a/test/files/run/t8925.scala b/test/files/run/t8925.scala
new file mode 100644
index 0000000000..33f4505f03
--- /dev/null
+++ b/test/files/run/t8925.scala
@@ -0,0 +1,31 @@
+object Ex {
+ def unapply(t: Throwable): Option[Throwable] = Some(t)
+}
+
+class A {
+ var x = ""
+
+ def bar =
+ try {
+ "bar"
+ } finally {
+ try {
+ x += "a"
+ } finally {
+ x += "b"
+ try {
+ x += "c"
+ throw null
+ } catch {
+ case Ex(_) =>
+ x += "d"
+ }
+ }
+ }
+}
+
+object Test extends App {
+ val a = new A
+ println(a.bar)
+ println(a.x)
+}
diff --git a/test/files/run/t8931.check b/test/files/run/t8931.check
new file mode 100644
index 0000000000..d08546b5a9
--- /dev/null
+++ b/test/files/run/t8931.check
@@ -0,0 +1 @@
+List(interface B)
diff --git a/test/files/run/t8931.scala b/test/files/run/t8931.scala
new file mode 100644
index 0000000000..11718471bc
--- /dev/null
+++ b/test/files/run/t8931.scala
@@ -0,0 +1,15 @@
+
+trait A
+
+trait B extends A
+
+class C extends A with B
+
+object Test extends App {
+ val c = classOf[C]
+
+ println(c.getGenericInterfaces.toList)
+
+ assert(c.getGenericInterfaces.length == c.getInterfaces.length,
+ s"mismatch between ${c.getGenericInterfaces} and ${c.getInterfaces}")
+}
diff --git a/test/files/run/t8933.check b/test/files/run/t8933.check
new file mode 100644
index 0000000000..d5ef468b98
--- /dev/null
+++ b/test/files/run/t8933.check
@@ -0,0 +1 @@
+'traitSymbol
diff --git a/test/files/run/t8933/A_1.scala b/test/files/run/t8933/A_1.scala
new file mode 100644
index 0000000000..996e3b4a2c
--- /dev/null
+++ b/test/files/run/t8933/A_1.scala
@@ -0,0 +1,6 @@
+class MotherClass
+
+trait MixinWithSymbol {
+ self: MotherClass =>
+ def symbolFromTrait: Symbol = 'traitSymbol
+}
diff --git a/test/files/run/t8933/Test_2.scala b/test/files/run/t8933/Test_2.scala
new file mode 100644
index 0000000000..c506a7c51f
--- /dev/null
+++ b/test/files/run/t8933/Test_2.scala
@@ -0,0 +1,10 @@
+class MotherClass extends MixinWithSymbol {
+ val classSymbol = 'classSymbol
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val symbol = (new MotherClass).symbolFromTrait
+ println(symbol)
+ }
+}
diff --git a/test/files/run/t8933b/A.scala b/test/files/run/t8933b/A.scala
new file mode 100644
index 0000000000..d25d893c6f
--- /dev/null
+++ b/test/files/run/t8933b/A.scala
@@ -0,0 +1,4 @@
+trait MixinWithSymbol {
+ self: MotherClass =>
+ def symbolFromTrait: Any = 'traitSymbol
+}
diff --git a/test/files/run/t8933b/Test.scala b/test/files/run/t8933b/Test.scala
new file mode 100644
index 0000000000..46eedd660f
--- /dev/null
+++ b/test/files/run/t8933b/Test.scala
@@ -0,0 +1,9 @@
+class MotherClass extends MixinWithSymbol {
+ def foo = 'sym1
+}
+
+object Test {
+ def main(args: Array[String]) {
+ (new MotherClass).symbolFromTrait
+ }
+}
diff --git a/test/files/run/t8933c.scala b/test/files/run/t8933c.scala
new file mode 100644
index 0000000000..22011bc323
--- /dev/null
+++ b/test/files/run/t8933c.scala
@@ -0,0 +1,14 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ try {
+ {throw T; Symbol}.apply("a")
+ assert(false, "exception not thrown")
+ } catch {
+ case T => // ok
+ case t: Throwable =>
+ assert(false, "wrong not thrown: " + t)
+ }
+ }
+}
+
+object T extends Throwable
diff --git a/test/files/run/t8960.scala b/test/files/run/t8960.scala
new file mode 100644
index 0000000000..a58ac53d33
--- /dev/null
+++ b/test/files/run/t8960.scala
@@ -0,0 +1,72 @@
+object Test extends App {
+ def test(o: AnyRef, sp: Boolean = false) = {
+ val isSpecialized = o.getClass.getSuperclass.getName contains "$sp"
+ val isDelambdafyMethod = o.getClass.getName contains "$lambda$"
+ assert(
+ // delambdafy:method doesn't currently emit specialized anonymous function classes
+ if (sp) (isSpecialized || isDelambdafyMethod) else !isSpecialized,
+ o.getClass.getName)
+
+ val Some(f) = o.getClass.getDeclaredFields.find(_.getName == "serialVersionUID")
+ assert(f.getLong(null) == 0l)
+ }
+
+ test(() => (), sp = true)
+ test(() => 1, sp = true)
+ test(() => "")
+
+ test((x: Int) => x, sp = true)
+ test((x: Boolean) => x)
+ test((x: Int) => "")
+
+ test((x1: Int, x2: Int) => 0d, sp = true)
+ test((x1: Int, x2: AnyRef) => 0d)
+ test((x1: Any, x2: Any) => x1)
+
+ // scala> println((for (i <- 3 to 22) yield (for (j <- 1 to i) yield s"x$j: Int").mkString(" test((", ", ", ") => x1)")).mkString("\n"))
+
+ test((x1: Int, x2: Int, x3: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int) => x1)
+ test((x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) => x1)
+
+ test({
+ case x: Int => x
+ }: PartialFunction[Int, Int], sp = true)
+
+ test({
+ case x: Int => x
+ }: PartialFunction[Any, Any])
+
+ test({
+ case x: Int => ()
+ }: PartialFunction[Int, Unit], sp = true)
+
+ test({
+ case x: String => 1
+ }: PartialFunction[String, Int])
+
+ test({
+ case x: String => ()
+ }: PartialFunction[String, Unit])
+
+ test({
+ case x: String => x
+ }: PartialFunction[String, String])
+}
diff --git a/test/files/run/t9003.flags b/test/files/run/t9003.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/run/t9003.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t9003.scala b/test/files/run/t9003.scala
new file mode 100644
index 0000000000..4f24712201
--- /dev/null
+++ b/test/files/run/t9003.scala
@@ -0,0 +1,71 @@
+object Single {
+ var i = 0
+ def isEmpty = false
+ def get = i
+ def unapply(a: Single.type) = this
+}
+
+object Product {
+ var i = 0
+ def _1: Int = i
+ def _2: String = ???
+ def productArity = 2
+ def unapply(a: Product.type) = this
+ def isEmpty = false
+ def get: this.type = this
+}
+
+object Sequence {
+ var i = 0
+ def apply(n: Int): Int = i
+ def length = 2
+ def unapplySeq(a: Sequence.type) = this
+ def isEmpty = false
+ def get = this
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ def assertZero(i: Int) = assert(i == 0)
+
+ Single match {
+ case Single(i) =>
+ Single.i = 1
+ assertZero(i) // fails under -optimize
+ }
+
+ Product match {
+ case Product(i, _) =>
+ Product.i = 1
+ assertZero(i) // fails under -optimize
+ }
+
+ Sequence match {
+ case Sequence(i, _ @ _*) =>
+ Sequence.i = 1
+ assertZero(i) // okay
+ }
+
+ Sequence.i = 0
+ Sequence match {
+ case Sequence(_, i) =>
+ Sequence.i = 1
+ assertZero(i) // okay
+ }
+
+ val buffer = collection.mutable.Buffer(0, 0)
+ buffer match {
+ case Seq(_, i) =>
+ buffer(1) = 1
+ assertZero(i) // failed
+ }
+
+ case class CaseSequence(as: Int*)
+ val buffer1 = collection.mutable.Buffer(0, 0)
+ CaseSequence(buffer1: _*) match {
+ case CaseSequence(_, i) =>
+ buffer1(1) = 1
+ assertZero(i) // failed
+ }
+ }
+}
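Single, Product and Sequence above rely on the name-based extractor protocol introduced in 2.11: an unapply result only needs isEmpty/get (plus _1/_2 or apply/length for product and sequence shapes) rather than being an Option. A self-contained sketch of that protocol with illustrative names:

class IntResult(val get: Int) { def isEmpty = false }

object AlwaysOne {
  def unapply(x: Any): IntResult = new IntResult(1) // not an Option, yet usable in patterns
}

object NameBasedSketch extends App {
  "anything" match { case AlwaysOne(n) => assert(n == 1) } // n is bound to result.get
}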
diff --git a/test/files/run/t9027.check b/test/files/run/t9027.check
new file mode 100644
index 0000000000..3429254286
--- /dev/null
+++ b/test/files/run/t9027.check
@@ -0,0 +1,19 @@
+{
+ {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, true));
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true));
+ $buf
+ };
+ println("hello, world.")
+}
+{
+ {
+ val $buf = new _root_.scala.xml.NodeBuffer();
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "a", _root_.scala.xml.Null, $scope, true));
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "b", _root_.scala.xml.Null, $scope, true));
+ $buf.$amp$plus(new _root_.scala.xml.Elem(null, "c", _root_.scala.xml.Null, $scope, true));
+ $buf
+ };
+ println("hello, world.")
+}
diff --git a/test/files/run/t9027.scala b/test/files/run/t9027.scala
new file mode 100644
index 0000000000..26238147da
--- /dev/null
+++ b/test/files/run/t9027.scala
@@ -0,0 +1,15 @@
+
+// used to be parsed as .println
+object Test extends App {
+ import reflect.runtime._, universe._
+
+ val trees = List(
+ q"""<a/><b/>
+ println("hello, world.")""",
+ q"""<a/>
+ <b/>
+ <c/>
+ println("hello, world.")"""
+ )
+ trees foreach println
+}
diff --git a/test/files/run/t9030.scala b/test/files/run/t9030.scala
new file mode 100644
index 0000000000..48d24e5b54
--- /dev/null
+++ b/test/files/run/t9030.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+
+ // For these methods, the compiler emits calls to BoxesRuntime.equalsNumNum/equalsNumChar/equalsNumObject directly
+
+ def numNum(a: java.lang.Number, b: java.lang.Number) = assert(a == b)
+ def numChar(a: java.lang.Number, b: java.lang.Character) = assert(a == b)
+ def numObject(a: java.lang.Number, b: java.lang.Object) = assert(a == b)
+
+ // The compiler doesn't use equalsCharObject directly, but we still include an example for completeness
+
+ def charObject(a: java.lang.Character, b: java.lang.Object) = assert(a == b)
+
+ numNum(new Integer(1), new Integer(1))
+ numChar(new Integer(97), new Character('a'))
+ numObject(new Integer(1), new Integer(1))
+ numObject(new Integer(97), new Character('a'))
+
+ charObject(new Character('a'), new Integer(97))
+}
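For contrast with plain Java equals, a small sketch of the cooperative equality that these BoxesRuntime helpers implement (mirroring the numChar case asserted above):

object BoxedEqualitySketch extends App {
  val num: java.lang.Number    = new Integer(97)
  val chr: java.lang.Character = new Character('a')
  println(num == chr)      // true: the compiler routes this through BoxesRuntime.equalsNumChar
  println(num.equals(chr)) // false: java.lang.Integer.equals only accepts another Integer
}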
diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala
new file mode 100644
index 0000000000..aa2b23bbac
--- /dev/null
+++ b/test/files/run/t9097.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends StoreReporterDirectTest {
+
+ override def extraSettings: String = List(
+ "-usejavacp",
+ "-Xfatal-warnings",
+ "-Ybackend:GenBCode",
+ "-Ydelambdafy:method",
+ "-Xprint:delambdafy",
+ s"-d ${testOutput.path}"
+ ) mkString " "
+
+ override def code = """package o
+ |package a {
+ | class C {
+ | def hihi = List(1,2).map(_ => "")
+ | }
+ |}
+ |package object a {
+ | def f = 1
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ val baos = new java.io.ByteArrayOutputStream()
+ Console.withOut(baos)(Console.withErr(baos)(compile()))
+ assert(!storeReporter.hasErrors, message = filteredInfos map (_.msg) mkString "; ")
+ val out = baos.toString("UTF-8")
+ // was 2 before the fix: the two PackageDefs for package a would each contain the ClassDef for the closure
+ assert(out.lines.count(_ contains "class hihi$1") == 1, out)
+ }
+}
diff --git a/test/files/run/t9102.scala b/test/files/run/t9102.scala
new file mode 100644
index 0000000000..c46cf0e4b4
--- /dev/null
+++ b/test/files/run/t9102.scala
@@ -0,0 +1,81 @@
+
+object Test extends App {
+ import reflect.runtime._, universe._
+
+ class C { def f(i: Int, j: => Int) = i + j }
+
+ class V(val v: Int) extends AnyVal { def doubled = 2 * v }
+ class D { def f(i: Int, j: V) = i + j.doubled }
+
+ class E(i: Int, j: V)
+
+ locally {
+ val ms = typeOf[C].member(TermName("f")).asMethod
+ val im = currentMirror reflect (new C)
+ val mm = im reflectMethod ms
+ assert(mm(2,3) == 5)
+ }
+ locally {
+ val ms = typeOf[D].member(TermName("f")).asMethod
+ val im = currentMirror reflect (new D)
+ val mm = im reflectMethod ms
+ assert(mm(2, new V(3)) == 8)
+ }
+ locally {
+ val ms = typeOf[E].typeSymbol.asClass.primaryConstructor
+ val cm = currentMirror reflectClass typeOf[E].typeSymbol.asClass
+ val mm = cm reflectConstructor ms.asMethod
+ assert(mm(42, new V(7)).isInstanceOf[E])
+ }
+}
+
+/* Session tests without special init code should reside in simple script files.
+ * Also, provide filters such as for `(bound to C@74f7d1d2)`.
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+//Welcome to Scala version 2.11.6 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_40).
+ def session =
+ s"""|Type in expressions to have them evaluated.
+ |Type :help for more information.
+ |
+ |scala> import reflect.runtime._, universe._
+ |import reflect.runtime._
+ |import universe._
+ |
+ |scala> class C { def f(i: Int, j: => Int) = i + j }
+ |defined class C
+ |
+ |scala> typeOf[C].member(TermName("f"))
+ |res0: reflect.runtime.universe.Symbol = method f
+ |
+ |scala> .asMethod
+ |res1: reflect.runtime.universe.MethodSymbol = method f
+ |
+ |scala> currentMirror reflect (new C)
+ |res2: reflect.runtime.universe.InstanceMirror = instance mirror for C@74f7d1d2
+ |
+ |scala> res2 reflectMethod res1
+ |res3: reflect.runtime.universe.MethodMirror = method mirror for def f(i: scala.Int,j: => scala.Int): scala.Int (bound to C@74f7d1d2)
+ |
+ |scala> res3(2,3)
+ |res4: Any = 5
+ |
+ |scala> :quit"""
+}
+*/
+
+/* was:
+scala> res3(2,3)
+java.lang.IllegalArgumentException
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:497)
+ at scala.reflect.runtime.JavaMirrors$JavaMirror$JavaMethodMirror.jinvokeraw(JavaMirrors.scala:335)
+ at scala.reflect.runtime.JavaMirrors$JavaMirror$JavaMethodMirror.jinvoke(JavaMirrors.scala:339)
+ at scala.reflect.runtime.JavaMirrors$JavaMirror$JavaTransformingMethodMirror.apply(JavaMirrors.scala:436)
+ ... 33 elided
+*/
+
diff --git a/test/files/run/t9170.scala b/test/files/run/t9170.scala
new file mode 100644
index 0000000000..25a0e84581
--- /dev/null
+++ b/test/files/run/t9170.scala
@@ -0,0 +1,58 @@
+
+import scala.tools.partest.SessionTest
+
+object Test extends SessionTest {
+
+ override def stripMargins = false
+
+ def session =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
+<console>:7: error: double definition:
+def f[A](a: => A): Int at line 7 and
+def f[A](a: => Either[Exception,A]): Int at line 7
+have same type after erasure: (a: Function0)Int
+ object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
+ ^
+
+scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
+<console>:7: error: double definition:
+def f[A](a: => A): Int at line 7 and
+def f[A](a: => Either[Exception,A]): Int at line 7
+have same type after erasure: (a: Function0)Int
+ object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 }
+ ^
+
+scala> object Y {
+ | def f[A](a: => A) = 1
+ | def f[A](a: => Either[Exception, A]) = 2
+ | }
+<console>:9: error: double definition:
+def f[A](a: => A): Int at line 8 and
+def f[A](a: => Either[Exception,A]): Int at line 9
+have same type after erasure: (a: Function0)Int
+ def f[A](a: => Either[Exception, A]) = 2
+ ^
+
+scala> :pa
+// Entering paste mode (ctrl-D to finish)
+
+object Y {
+ def f[A](a: => A) = 1
+ def f[A](a: => Either[Exception, A]) = 2
+}
+
+// Exiting paste mode, now interpreting.
+
+<console>:9: error: double definition:
+def f[A](a: => A): Int at line 8 and
+def f[A](a: => Either[Exception,A]): Int at line 9
+have same type after erasure: (a: Function0)Int
+ def f[A](a: => Either[Exception, A]) = 2
+ ^
+
+scala> :quit"""
+}
+
diff --git a/test/files/run/t9182.check b/test/files/run/t9182.check
new file mode 100644
index 0000000000..80e8b6c558
--- /dev/null
+++ b/test/files/run/t9182.check
@@ -0,0 +1,3 @@
+constructor package
+method A
+object A
diff --git a/test/files/run/t9182.scala b/test/files/run/t9182.scala
new file mode 100644
index 0000000000..1768aa688e
--- /dev/null
+++ b/test/files/run/t9182.scala
@@ -0,0 +1,12 @@
+// Main.scala
+package object ops {
+ object A
+ def A(a: Any) = ()
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val pack = scala.reflect.runtime.currentMirror.staticModule("ops.package")
+ println(pack.info.decls.toList.map(_.toString).sorted.mkString("\n"))
+ }
+}
diff --git a/test/files/run/t9219.check b/test/files/run/t9219.check
new file mode 100644
index 0000000000..3509ece003
--- /dev/null
+++ b/test/files/run/t9219.check
@@ -0,0 +1,3 @@
+Stream(1, 2, ?)
+Stream(1, 2, 3, 4, ?)
+Stream(1, 2, 3, 4, 5, 6, ?)
diff --git a/test/files/run/t9219.scala b/test/files/run/t9219.scala
new file mode 100644
index 0000000000..c15f55faac
--- /dev/null
+++ b/test/files/run/t9219.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ def check[U](f: Stream[Int] => U) = {
+ val s = Stream.from(1)
+ f(s)
+ println(s)
+ }
+
+ check(_.tail)
+ check(_.take(4).force)
+ check(_(5))
+}
diff --git a/test/files/run/t9223.scala b/test/files/run/t9223.scala
new file mode 100644
index 0000000000..78767b158d
--- /dev/null
+++ b/test/files/run/t9223.scala
@@ -0,0 +1,8 @@
+class X(val x: String)
+class Y(y: => String) extends X(y) { def f = y }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(new Y("hi").f == "hi")
+ }
+}
diff --git a/test/files/run/t9223b.scala b/test/files/run/t9223b.scala
new file mode 100644
index 0000000000..2afc7ddfe0
--- /dev/null
+++ b/test/files/run/t9223b.scala
@@ -0,0 +1,8 @@
+class X(x: => String) { def xx = x }
+class Y(y: String) extends X(y) { def f = y }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(new Y("hi").f == "hi")
+ }
+}
diff --git a/test/files/run/t9252.check b/test/files/run/t9252.check
new file mode 100644
index 0000000000..b00d748f7f
--- /dev/null
+++ b/test/files/run/t9252.check
@@ -0,0 +1 @@
+class [Lscala.runtime.BoxedUnit;
diff --git a/test/files/run/t9252.scala b/test/files/run/t9252.scala
new file mode 100644
index 0000000000..da698948e1
--- /dev/null
+++ b/test/files/run/t9252.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(rootMirror.runtimeClass(typeOf[Array[Unit]]))
+} \ No newline at end of file
diff --git a/test/files/run/t9268.check b/test/files/run/t9268.check
new file mode 100644
index 0000000000..90ef940eb3
--- /dev/null
+++ b/test/files/run/t9268.check
@@ -0,0 +1,5 @@
+Compiling Client1
+pos: NoPosition Class Waiter not found - continuing with a stub. WARNING
+Compiling Client2
+pos: NoPosition Class Waiter not found - continuing with a stub. WARNING
+pos: NoPosition Unable to locate class corresponding to inner class entry for Predicate in owner Waiter ERROR
diff --git a/test/files/run/t9268/Java.java b/test/files/run/t9268/Java.java
new file mode 100644
index 0000000000..c9a0bec3ff
--- /dev/null
+++ b/test/files/run/t9268/Java.java
@@ -0,0 +1,12 @@
+public class Java {
+}
+
+class Partial {
+ public <E extends java.lang.Exception> long waitFor(long l, Waiter.Predicate<E> pred) throws E {
+ return 0L;
+ }
+}
+
+class Waiter {
+ interface Predicate<E> {}
+}
diff --git a/test/files/run/t9268/Test.scala b/test/files/run/t9268/Test.scala
new file mode 100644
index 0000000000..813cbe7b60
--- /dev/null
+++ b/test/files/run/t9268/Test.scala
@@ -0,0 +1,40 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def client1 = """
+ class Client1 { def p(p: Partial) = p.toString }
+ """
+
+ def client2 = """
+ class Client2 { def p(p: Partial) = p.waitFor() }
+ """
+
+ def deleteClass(s: String) = {
+ val f = new File(testOutput.path, s + ".class")
+ assert(f.exists)
+ f.delete()
+ }
+
+ def show(): Unit = {
+ deleteClass("Waiter")
+ deleteClass("Waiter$Predicate")
+
+ // Used to crash in Java Generic Signature parsing
+ println("Compiling Client1")
+ compileCode(client1)
+ println(storeReporter.infos.mkString("\n"))
+ storeReporter.reset()
+ println("Compiling Client2")
+ compileCode(client2)
+ println(storeReporter.infos.mkString("\n"))
+ }
+}
+
diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check
index 7607921856..92d4f8a3c8 100644
--- a/test/files/run/tailcalls.check
+++ b/test/files/run/tailcalls.check
@@ -50,6 +50,10 @@ test NonTailCall.f2
test TailCall.b1 was successful
test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
+test FancyTailCalls.tcInBooleanExprFirstOp was successful
+test FancyTailCalls.tcInBooleanExprSecondOp was successful
+test FancyTailCalls.tcInIfCond was successful
+test FancyTailCalls.tcInPatternGuard was successful
test FancyTailCalls.differentInstance was successful
test PolyObject.tramp was successful
#partest avian
@@ -104,5 +108,9 @@ test NonTailCall.f2
test TailCall.b1 was successful
test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
+test FancyTailCalls.tcInBooleanExprFirstOp was successful
+test FancyTailCalls.tcInBooleanExprSecondOp was successful
+test FancyTailCalls.tcInIfCond was successful
+test FancyTailCalls.tcInPatternGuard was successful
test FancyTailCalls.differentInstance was successful
test PolyObject.tramp was successful
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 1653b14de9..8df2dcfcb6 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -213,6 +213,33 @@ class FancyTailCalls {
} finally {}
}
+ def tcInBooleanExprFirstOp(x: Int, v: Int): Boolean = {
+ {
+ def loop(n: Int): Int = if (n == 0) v else loop(n - 1)
+ loop(x)
+ } == v && true
+ }
+ def tcInBooleanExprSecondOp(x: Int, v: Int): Boolean = {
+ true && {
+ def loop(n: Int): Int = if (n == 0) v else loop(n - 1)
+ loop(x)
+ } == v
+ }
+ def tcInIfCond(x: Int, v: Int): Boolean = {
+ if ({
+ def loop(n: Int): Int = if (n == 0) v else loop(n - 1)
+ loop(x)
+ } == v) true else false
+ }
+ def tcInPatternGuard(x: Int, v: Int): Boolean =
+ v match {
+ case _ if
+ {
+ def loop(n: Int): Int = if (n == 0) v else loop(n - 1)
+ loop(x) == v
+ } => true
+ }
+
import FancyTailCalls._
final def differentInstance(n: Int, v: Int): Int = {
if (n == 0) v
@@ -376,8 +403,12 @@ object Test {
check_success_b("TailCall.b2", TailCall.b2(max), true)
val FancyTailCalls = new FancyTailCalls;
- check_success("FancyTailCalls.tcTryLocal", FancyTailCalls.tcTryLocal(max, max), max)
- check_success("FancyTailCalls.differentInstance", FancyTailCalls.differentInstance(max, 42), 42)
+ check_success("FancyTailCalls.tcTryLocal", FancyTailCalls.tcTryLocal(max, max), max)
+ check_success_b("FancyTailCalls.tcInBooleanExprFirstOp", FancyTailCalls.tcInBooleanExprFirstOp(max, max), true)
+ check_success_b("FancyTailCalls.tcInBooleanExprSecondOp", FancyTailCalls.tcInBooleanExprSecondOp(max, max), true)
+ check_success_b("FancyTailCalls.tcInIfCond", FancyTailCalls.tcInIfCond(max, max), true)
+ check_success_b("FancyTailCalls.tcInPatternGuard", FancyTailCalls.tcInPatternGuard(max, max), true)
+ check_success("FancyTailCalls.differentInstance", FancyTailCalls.differentInstance(max, 42), 42)
check_success("PolyObject.tramp", PolyObject.tramp[Int](max), 0)
}
diff --git a/test/files/run/tpeCache-tyconCache.check b/test/files/run/tpeCache-tyconCache.check
index a892f5477a..ff604819e0 100644
--- a/test/files/run/tpeCache-tyconCache.check
+++ b/test/files/run/tpeCache-tyconCache.check
@@ -16,4 +16,4 @@ res0: Boolean = true
scala> AnyRefClass.tpe eq AnyRefClass.typeConstructor
res1: Boolean = true
-scala>
+scala> :quit
diff --git a/test/files/run/typetags_serialize.check b/test/files/run/typetags_serialize.check
index f79436ea5d..22928a2e94 100644
--- a/test/files/run/typetags_serialize.check
+++ b/test/files/run/typetags_serialize.check
@@ -1,2 +1,3 @@
-java.io.NotSerializableException: scala.reflect.api.TypeTags$PredefTypeCreator
-java.io.NotSerializableException: Test$$typecreator1$1
+TypeTag[Int]
+TypeTag[String]
+TypeTag[Test.C[Double]]
diff --git a/test/files/run/typetags_serialize.scala b/test/files/run/typetags_serialize.scala
index 3c842e6cc9..a7a7845232 100644
--- a/test/files/run/typetags_serialize.scala
+++ b/test/files/run/typetags_serialize.scala
@@ -4,6 +4,10 @@ import scala.reflect.runtime.{universe => ru}
import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
+ class C[A] {
+ def m(a: A): Int = 5
+ }
+
def test(tag: TypeTag[_]) =
try {
val fout = new ByteArrayOutputStream()
@@ -26,4 +30,5 @@ object Test extends App {
test(implicitly[TypeTag[Int]])
test(implicitly[TypeTag[String]])
+ test(implicitly[TypeTag[C[Double]]])
} \ No newline at end of file
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
index 1fbdc62a1e..3d2b9f77be 100644
--- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
@@ -36,8 +36,8 @@ object Test extends StoreReporterDirectTest {
println(filteredInfos.mkString("\n"))
storeReporter.infos.clear()
compileApp();
- // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled
+ // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled
// but we don't know the number of these errors and their order, so I just ignore them all
- println(filteredInfos.filterNot(_.msg.contains("bad symbolic reference")).mkString("\n"))
+ println(filteredInfos.filterNot(_.msg.contains("missing or invalid dependency detected")).mkString("\n"))
}
}
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
index 6804baa0c3..a865f4d137 100644
--- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
@@ -40,8 +40,8 @@ object Test extends StoreReporterDirectTest {
println(filteredInfos.mkString("\n"))
storeReporter.infos.clear()
compileApp();
- // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled
+ // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled
// but we don't know the number of these errors and their order, so I just ignore them all
- println(filteredInfos.filterNot (_.msg.contains("bad symbolic reference")).mkString("\n"))
+ println(filteredInfos.filterNot (_.msg.contains("missing or invalid dependency detected")).mkString("\n"))
}
}
diff --git a/test/files/run/unittest_collection.check b/test/files/run/unittest_collection.check
index 844ca54682..df1629dd7e 100644
--- a/test/files/run/unittest_collection.check
+++ b/test/files/run/unittest_collection.check
@@ -1 +1 @@
-warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+warning: there was one deprecation warning; re-run with -deprecation for details
diff --git a/test/files/run/valueClassSelfType.scala b/test/files/run/valueClassSelfType.scala
new file mode 100644
index 0000000000..47a3764b0a
--- /dev/null
+++ b/test/files/run/valueClassSelfType.scala
@@ -0,0 +1,52 @@
+trait T
+
+class V1(val l: Long) extends AnyVal { self: T =>
+ def foo: V1 = self
+ def bar: T = self
+}
+
+class V2(val l: Long) extends AnyVal { self =>
+ def foo: V2 = self
+}
+
+class V3(val l: Long) extends AnyVal { self: Long =>
+ def foo: V3 = self
+ def bar: Long = self
+}
+
+// non-value classes
+
+class C1(val l: Long) { self: T =>
+ def foo: C1 = self
+ def bar: T = self
+}
+
+class C2(val l: Long) { self =>
+ def foo: C2 = self
+}
+
+class C3(val l: Long) { self: Long =>
+ def foo: C3 = self
+ def bar: Long = self
+}
+
+object Test extends App {
+ // Rejected: superclass V1 is not a subclass of the superclass Object of the mixin trait T
+ // new V1(1l) with T
+
+ assert(new V2(1l).foo.l == 1l)
+
+ // Rejected: V3 does not conform to its self-type V3 with Long
+ // new V3(1l)
+
+ val c2 = new C1(2l) with T
+ assert(c2.foo.l + c2.bar.asInstanceOf[C1].l == 4l)
+
+ assert(new C2(3l).foo.l == 3l)
+
+ // Rejected: C3 does not conform to its self-type C3 with Long
+ // new C3(4l)
+
+ // Rejected: class Long needs to be a trait to be mixed in
+ // new C3(4l) with Long
+}
diff --git a/test/files/run/various-flat-classpath-types.check b/test/files/run/various-flat-classpath-types.check
new file mode 100644
index 0000000000..401f707d0e
--- /dev/null
+++ b/test/files/run/various-flat-classpath-types.check
@@ -0,0 +1,12 @@
+ZipBin()
+JarBin()
+DirBin()
+ZipSrc()
+JarSrc()
+DirSrc()
+NestedZipBin()
+NestedJarBin()
+NestedDirBin()
+NestedZipSrc()
+NestedJarSrc()
+NestedDirSrc() \ No newline at end of file
diff --git a/test/files/run/various-flat-classpath-types.scala b/test/files/run/various-flat-classpath-types.scala
new file mode 100644
index 0000000000..d39019e885
--- /dev/null
+++ b/test/files/run/various-flat-classpath-types.scala
@@ -0,0 +1,214 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+
+import java.io.{File => JFile, FileInputStream, FileOutputStream}
+import java.util.zip.{ZipEntry, ZipOutputStream}
+import scala.reflect.io.{Directory, File}
+import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+import scala.tools.nsc.classpath.PackageNameUtils
+import scala.tools.nsc.io.Jar
+
+/**
+ * Generates directories, jars and zip files containing sources and classes
+ * (the result of a compilation which is executed here)
+ * and uses them as the class- and sourcepath when compiling and running
+ * the created application. At the end everything is cleaned up.
+ *
+ * It can also test the current, recursive classpath. For now we force the
+ * flat classpath so it is also exercised when the recursive one is the default.
+ */
+object Test {
+
+ private implicit class JFileOps(file: JFile) {
+
+ def createDir(newDirName: String) = {
+ val newDir = new JFile(file, newDirName)
+ newDir.mkdir()
+ newDir
+ }
+
+ def createSrcFile(newFileName: String) = createFile(newFileName + ".scala")
+
+ def createFile(fullFileName: String) = {
+ val newFile = new JFile(file, fullFileName)
+ newFile.createNewFile()
+ newFile
+ }
+
+ def writeAll(text: String): Unit = File(file) writeAll text
+
+ def moveContentToZip(zipName: String): Unit = {
+ val newZip = zipsDir createFile s"$zipName.zip"
+ val outputStream = new ZipOutputStream(new FileOutputStream(newZip))
+
+ def addFileToZip(dirPrefix: String = "")(fileToAdd: JFile): Unit =
+ if (fileToAdd.isDirectory) {
+ val dirEntryName = fileToAdd.getName + "/"
+ outputStream.putNextEntry(new ZipEntry(dirEntryName))
+ fileToAdd.listFiles() foreach addFileToZip(dirEntryName)
+ } else {
+ val inputStream = new FileInputStream(fileToAdd)
+ outputStream.putNextEntry(new ZipEntry(dirPrefix + fileToAdd.getName))
+
+ val buffer = new Array[Byte](1024)
+ var count = inputStream.read(buffer)
+ while (count > 0) {
+ outputStream.write(buffer, 0, count)
+ count = inputStream.read(buffer)
+ }
+
+ inputStream.close()
+ }
+
+ file.listFiles() foreach addFileToZip()
+ outputStream.close()
+
+ cleanDir(file)
+ }
+
+ def moveContentToJar(jarName: String): Unit = {
+ val newJar = jarsDir createFile s"$jarName.jar"
+ Jar.create(file = File(newJar), sourceDir = Directory(file), mainClass = "won't be used")
+ cleanDir(file)
+ }
+
+ def path: String = file.getAbsolutePath
+ }
+
+ private case class DirRep(name: String, nestedDirs: Seq[DirRep] = Nil, sourceFiles: Seq[String] = Nil)
+
+ private val compiler = new scala.tools.nsc.MainClass
+ private val appRunner = new scala.tools.nsc.MainGenericRunner
+ private val classPathImplFlag = "-YclasspathImpl:flat"
+ private val javaClassPath = sys.props("java.class.path")
+
+ // creates a test dir in a temporary dir containing compiled files of this test
+ // root dir will be automatically deleted after the end of test
+ private val rootDir = new JFile(sys.props("partest.output"))
+ private val testDir = rootDir createDir s"cp-tests-${System.currentTimeMillis()}"
+
+ private val jarsDir = testDir createDir "jars"
+ private val zipsDir = testDir createDir "zips"
+ private val srcDir = testDir createDir "src"
+ private val binDir = testDir createDir "bin"
+ private val outDir = testDir createDir "out"
+
+ def main(args: Array[String]): Unit = {
+ createClassesZipInZipsDir()
+ createClassesJarInJarsDir()
+ createClassesInBinDir()
+ createSourcesZipInZipsDir()
+ createSourcesJarInJarsDir()
+ createSourcesInSrcDir()
+ compileFinalApp()
+ runApp()
+ // at the end all created files will be deleted automatically
+ }
+
+ private def createClassesZipInZipsDir(): Unit = {
+ val baseFileName = "ZipBin"
+ createStandardSrcHierarchy(baseFileName)
+ compileSrc(baseFileName)
+ outDir moveContentToZip "Bin"
+ cleanDir(srcDir)
+ }
+
+ private def createClassesJarInJarsDir(): Unit = {
+ val baseFileName = "JarBin"
+ createStandardSrcHierarchy(baseFileName)
+ compileSrc(baseFileName)
+ outDir moveContentToJar "Bin"
+ cleanDir(srcDir)
+ }
+
+ private def createClassesInBinDir(): Unit = {
+ val baseFileName = "DirBin"
+ createStandardSrcHierarchy(baseFileName)
+ compileSrc(baseFileName, destination = binDir)
+ cleanDir(srcDir)
+ }
+
+ private def createSourcesZipInZipsDir(): Unit = {
+ createStandardSrcHierarchy(baseFileName = "ZipSrc")
+ srcDir moveContentToZip "Src"
+ }
+
+ private def createSourcesJarInJarsDir(): Unit = {
+ createStandardSrcHierarchy(baseFileName = "JarSrc")
+ srcDir moveContentToJar "Src"
+ }
+
+ private def createSourcesInSrcDir(): Unit = {
+ createStandardSrcHierarchy(baseFileName = "DirSrc")
+
+ val appFile = srcDir createSrcFile "Main"
+ appFile writeAll s"""import nested._
+ | object Main extends App {
+ | println(new ZipBin)
+ | println(new JarBin)
+ | println(new DirBin)
+ | println(new ZipSrc)
+ | println(new JarSrc)
+ | println(new DirSrc)
+ |
+ | println(new NestedZipBin)
+ | println(new NestedJarBin)
+ | println(new NestedDirBin)
+ | println(new NestedZipSrc)
+ | println(new NestedJarSrc)
+ | println(new NestedDirSrc)
+ | }
+ """.stripMargin
+ }
+
+ private def compileFinalApp(): Unit = {
+ val classPath = mkPath(javaClassPath, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar")
+ val sourcePath = mkPath(srcDir.path, zipsDir.path + "/Src.zip", jarsDir.path + "/Src.jar")
+
+ compiler.process(Array(classPathImplFlag, "-cp", classPath, "-sourcepath", sourcePath,
+ "-d", outDir.path, s"${srcDir.path}/Main.scala"))
+ }
+
+ private def runApp(): Unit = {
+ val classPath = mkPath(javaClassPath, outDir.path, binDir.path, zipsDir.path + "/Bin.zip", jarsDir.path + "/Bin.jar")
+ appRunner.process(Array(classPathImplFlag, "-cp", classPath, "Main"))
+ }
+
+ private def createStandardSrcHierarchy(baseFileName: String): Unit =
+ createSources(RootPackage, srcDir,
+ DirRep("",
+ nestedDirs = Seq(DirRep("nested", sourceFiles = Seq("Nested" + baseFileName))),
+ sourceFiles = Seq(baseFileName)
+ )
+ )
+
+ private def createSources(pkg: String, dirFile: JFile, dirRep: DirRep): Unit = {
+ dirRep.nestedDirs foreach { rep =>
+ val nestedDir = dirFile createDir rep.name
+ val nestedPkg = PackageNameUtils.packagePrefix(pkg) + rep.name
+ createSources(nestedPkg, nestedDir, rep)
+ }
+
+ val pkgHeader = if (pkg == RootPackage) "" else s"package $pkg\n\n"
+ dirRep.sourceFiles foreach { srcName =>
+ val text = s"""${pkgHeader}case class $srcName(x: String = "")"""
+ val srcFile = dirFile createSrcFile srcName
+ srcFile writeAll text
+ }
+ }
+
+ private def compileSrc(baseFileName: String, destination: JFile = outDir): Unit = {
+ val srcDirPath = srcDir.path
+ compiler.process(Array(classPathImplFlag, "-cp", javaClassPath, "-d", destination.path,
+ s"$srcDirPath/$baseFileName.scala", s"$srcDirPath/nested/Nested$baseFileName.scala"))
+ }
+
+ private def cleanDir(dir: JFile): Unit =
+ dir.listFiles().foreach { file =>
+ if (file.isDirectory) cleanDir(file)
+ file.delete()
+ }
+
+ private def mkPath(pathEntries: String*) = pathEntries.mkString(File.pathSeparator)
+}
diff --git a/test/files/run/virtpatmat_nested_lists.flags b/test/files/run/virtpatmat_nested_lists.flags
new file mode 100644
index 0000000000..ca9a4c0697
--- /dev/null
+++ b/test/files/run/virtpatmat_nested_lists.flags
@@ -0,0 +1 @@
+-Ypatmat-exhaust-depth off \ No newline at end of file
diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags
new file mode 100644
index 0000000000..ca9a4c0697
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.flags
@@ -0,0 +1 @@
+-Ypatmat-exhaust-depth off \ No newline at end of file
diff --git a/test/files/run/virtpatmat_staging.flags b/test/files/run/virtpatmat_staging.flags
index 48fd867160..0a22f7c729 100644
--- a/test/files/run/virtpatmat_staging.flags
+++ b/test/files/run/virtpatmat_staging.flags
@@ -1 +1,2 @@
+-Yrangepos:false
-Xexperimental
diff --git a/test/files/run/virtpatmat_typetag.check b/test/files/run/virtpatmat_typetag.check
index cac9d9a4d6..00df8b5e81 100644
--- a/test/files/run/virtpatmat_typetag.check
+++ b/test/files/run/virtpatmat_typetag.check
@@ -1,9 +1,9 @@
-1 is not a Int; it's a class java.lang.Integer
+1 is a Int
1 is a java.lang.Integer
1 is not a java.lang.String; it's a class java.lang.Integer
true is a Any
woele is a java.lang.String
-1 is not a Int; it's a class java.lang.Integer
+1 is a Int
1 is a java.lang.Integer
1 is not a java.lang.String; it's a class java.lang.Integer
true is a Any
diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check
new file mode 100644
index 0000000000..378f7bb6c3
--- /dev/null
+++ b/test/files/run/xMigration.check
@@ -0,0 +1,49 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> Map(1 -> "eis").values // no warn
+res0: Iterable[String] = MapLike(eis)
+
+scala> :setting -Xmigration:none
+
+scala> Map(1 -> "eis").values // no warn
+res1: Iterable[String] = MapLike(eis)
+
+scala> :setting -Xmigration:any
+
+scala> Map(1 -> "eis").values // warn
+<console>:8: warning: method values in trait MapLike has changed semantics in version 2.8.0:
+`values` returns `Iterable[B]` rather than `Iterator[B]`.
+ Map(1 -> "eis").values // warn
+ ^
+res2: Iterable[String] = MapLike(eis)
+
+scala> :setting -Xmigration:2.8
+
+scala> Map(1 -> "eis").values // no warn
+res3: Iterable[String] = MapLike(eis)
+
+scala> :setting -Xmigration:2.7
+
+scala> Map(1 -> "eis").values // warn
+<console>:8: warning: method values in trait MapLike has changed semantics in version 2.8.0:
+`values` returns `Iterable[B]` rather than `Iterator[B]`.
+ Map(1 -> "eis").values // warn
+ ^
+res4: Iterable[String] = MapLike(eis)
+
+scala> :setting -Xmigration:2.11
+
+scala> Map(1 -> "eis").values // no warn
+res5: Iterable[String] = MapLike(eis)
+
+scala> :setting -Xmigration // same as :any
+
+scala> Map(1 -> "eis").values // warn
+<console>:8: warning: method values in trait MapLike has changed semantics in version 2.8.0:
+`values` returns `Iterable[B]` rather than `Iterator[B]`.
+ Map(1 -> "eis").values // warn
+ ^
+res6: Iterable[String] = MapLike(eis)
+
+scala> :quit
diff --git a/test/files/run/xMigration.scala b/test/files/run/xMigration.scala
new file mode 100644
index 0000000000..688e878397
--- /dev/null
+++ b/test/files/run/xMigration.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+Map(1 -> "eis").values // no warn
+:setting -Xmigration:none
+Map(1 -> "eis").values // no warn
+:setting -Xmigration:any
+Map(1 -> "eis").values // warn
+:setting -Xmigration:2.8
+Map(1 -> "eis").values // no warn
+:setting -Xmigration:2.7
+Map(1 -> "eis").values // warn
+:setting -Xmigration:2.11
+Map(1 -> "eis").values // no warn
+:setting -Xmigration // same as :any
+Map(1 -> "eis").values // warn
+ """
+}
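A minimal standalone sketch, not part of this patch, of what the REPL transcript above exercises: Map#values has returned Iterable[B] rather than Iterator[B] since 2.8.0, so -Xmigration versions below 2.8 warn on the call while 2.8 and later, or :none, stay silent.

    // Hypothetical file Values.scala; compile with scalac -Xmigration:2.7 Values.scala
    // to reproduce the warning, or with -Xmigration:2.8 (or later) to suppress it.
    object Values {
      def main(args: Array[String]): Unit = {
        val vs: Iterable[String] = Map(1 -> "eis").values // semantics changed in 2.8.0
        println(vs.mkString(", "))
      }
    }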
diff --git a/test/files/scalacheck/Ctrie.scala b/test/files/scalacheck/Ctrie.scala
index 714f1c3b09..eef9d06f37 100644
--- a/test/files/scalacheck/Ctrie.scala
+++ b/test/files/scalacheck/Ctrie.scala
@@ -186,6 +186,25 @@ object Test extends Properties("concurrent.TrieMap") {
})
}
+ property("concurrent getOrElseUpdate") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val totalInserts = new java.util.concurrent.atomic.AtomicInteger
+ val ct = new TrieMap[Wrap, String]
+
+ val results = inParallel(p) {
+ idx =>
+ (0 until sz) foreach {
+ i =>
+ val v = ct.getOrElseUpdate(Wrap(i), idx + ":" + i)
+ if (v == idx + ":" + i) totalInserts.incrementAndGet()
+ }
+ }
+
+ (totalInserts.get == sz) && ((0 until sz) forall {
+ case i => ct(Wrap(i)).split(":")(1).toInt == i
+ })
+ }
+
}
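The new property assumes that getOrElseUpdate on scala.collection.concurrent.TrieMap is atomic per key, so exactly one competing caller installs its value and every other caller observes it. A hedged sketch of that contract outside the scalacheck harness, with an arbitrary thread count and key range:

    import scala.collection.concurrent.TrieMap

    object GetOrElseUpdateSketch {
      def main(args: Array[String]): Unit = {
        val ct      = new TrieMap[Int, String]
        val winners = new java.util.concurrent.atomic.AtomicInteger
        val threads = (1 to 4).map { id =>
          new Thread(new Runnable {
            def run(): Unit = (0 until 100).foreach { i =>
              val v = ct.getOrElseUpdate(i, id + ":" + i)
              if (v == id + ":" + i) winners.incrementAndGet() // this thread's value won
            }
          })
        }
        threads.foreach(_.start()); threads.foreach(_.join())
        assert(ct.size == 100 && winners.get == 100) // exactly one winner per key
      }
    }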
diff --git a/test/files/scalacheck/nan-ordering.scala b/test/files/scalacheck/nan-ordering.scala
index 2094a46e37..05e97a13c9 100644
--- a/test/files/scalacheck/nan-ordering.scala
+++ b/test/files/scalacheck/nan-ordering.scala
@@ -42,16 +42,16 @@ object Test extends Properties("NaN-Ordering") {
property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) }
property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => {
- val mathmin = math.min(d1, d2)
+ val mathmax = math.max(d1, d2)
val numericmin = numFloat.reverse.min(d1, d2)
- mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ mathmax == numericmin || mathmax.isNaN && numericmin.isNaN
}
}
property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => {
- val mathmax = math.max(d1, d2)
+ val mathmin = math.min(d1, d2)
val numericmax = numFloat.reverse.max(d1, d2)
- mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ mathmin == numericmax || mathmin.isNaN && numericmax.isNaN
}
}
@@ -105,16 +105,16 @@ object Test extends Properties("NaN-Ordering") {
property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) }
property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
- val mathmin = math.min(d1, d2)
+ val mathmax = math.max(d1, d2)
val numericmin = numDouble.reverse.min(d1, d2)
- mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ mathmax == numericmin || mathmax.isNaN && numericmin.isNaN
}
}
property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
- val mathmax = math.max(d1, d2)
+ val mathmin = math.min(d1, d2)
val numericmax = numDouble.reverse.max(d1, d2)
- mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ mathmin == numericmax || mathmin.isNaN && numericmax.isNaN
}
}
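The flipped expectations above follow from Ordering#reverse, under which min agrees with math.max and max with math.min on ordinary non-NaN inputs. A tiny illustration, not from the patch, using the standard Ordering[Float] (the test's numFloat instance is defined earlier in the file):

    object ReverseOrderingSketch {
      def main(args: Array[String]): Unit = {
        val ord = implicitly[Ordering[Float]]
        assert(ord.reverse.min(1.0f, 2.0f) == math.max(1.0f, 2.0f)) // 2.0f
        assert(ord.reverse.max(1.0f, 2.0f) == math.min(1.0f, 2.0f)) // 1.0f
      }
    }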
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 774d6f428b..468bcb6dd1 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -36,7 +36,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
// used to check if constructed collection is valid
def checkDataStructureInvariants(orig: Traversable[T], cf: AnyRef) = {
- // can be overriden in subclasses
+ // can be overridden in subclasses
true
}
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
index 45392de582..409f07037e 100644
--- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -310,4 +310,16 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
val cases = List(cq"a => b", cq"c => d")
assertEqAst(q"{ case ..$cases }", "{ case a => b case c => d }")
}
+
+ property("SI-8609 a") = test {
+ val q1 = q"val x = 1"
+ val q2 = q"..$q1; val y = 2"
+ assert(q2 ≈ q"{ val x = 1; val y = 2 }")
+ }
+
+ property("SI-8609 b") = test {
+ val q1 = q"import foo.bar"
+ val q2 = q"..$q1; val y = 2"
+ assert(q2 ≈ q"{ import foo.bar; val y = 2 }")
+ }
}
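A hedged, standalone rendering of the SI-8609 behaviour pinned down above, using the runtime universe rather than the test harness; it assumes the fix in this commit, since dot-dot splicing a single definition into a block is exactly what used to misbehave:

    import scala.reflect.runtime.universe._

    object SpliceSketch {
      def main(args: Array[String]): Unit = {
        val defn  = q"val x = 1"
        val block = q"..$defn; val y = 2"
        // expected to be structurally equal to a block holding both statements
        assert(block equalsStructure q"{ val x = 1; val y = 2 }")
        println(showCode(block))
      }
    }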
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
index 49ffaff630..07e8f3faac 100644
--- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -246,4 +246,11 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
assert(f ≈ `new`)
assert(argss.isEmpty)
}
+
+ property("SI-8703 extract block with single expression") = test {
+ val q"{ $a }" = Block(Nil, q"1")
+ val Literal(Constant(1)) = a
+ val q"{ $b }" = q"2"
+ val Literal(Constant(2)) = b
+ }
}
diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
index a5d526191f..f84df269ca 100644
--- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala
@@ -1,9 +1,8 @@
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._
-object TypecheckedProps extends QuasiquoteProperties("typechecked") {
-
-
+object TypecheckedProps extends QuasiquoteProperties("typechecked")
+ with TypecheckedTypes {
property("tuple term") = test {
val q"(..$elements)" = typecheck(q"(1, 2)")
assert(elements ≈ List(q"1", q"2"))
@@ -166,7 +165,7 @@ trait TypecheckedTypes { self: QuasiquoteProperties =>
}
property("applied type") = test {
- val tt = typecheckTyp(q"Map[Int, Int]")
+ val tt = typecheckTyp(tq"Map[Int, Int]")
val tq"$tpt[..$tpts]" = tt
val tq"scala.this.Predef.Map" = tpt
val List(tq"scala.Int", tq"scala.Int") = tpts
@@ -205,7 +204,7 @@ trait TypecheckedTypes { self: QuasiquoteProperties =>
property("annotated type") = test {
val tq"$tpt @$annot" = typecheckTyp(tq"Int @unchecked")
val tq"scala.Int" = tpt
- val q"new unchecked" = annot
+ val tq"unchecked" = annot
}
property("existential type") = test {
diff --git a/test/files/scalap/t8679.check b/test/files/scalap/t8679.check
new file mode 100644
index 0000000000..938b76783f
--- /dev/null
+++ b/test/files/scalap/t8679.check
@@ -0,0 +1,3503 @@
+class T8679 extends scala.AnyRef {
+ def this() = { /* compiled code */ }
+ def foo1(): scala.Int = { /* compiled code */ }
+ def foo2(): scala.Int = { /* compiled code */ }
+ def foo3(): scala.Int = { /* compiled code */ }
+ def foo4(): scala.Int = { /* compiled code */ }
+ def foo5(): scala.Int = { /* compiled code */ }
+ def foo6(): scala.Int = { /* compiled code */ }
+ def foo7(): scala.Int = { /* compiled code */ }
+ def foo8(): scala.Int = { /* compiled code */ }
+ def foo9(): scala.Int = { /* compiled code */ }
+ def foo10(): scala.Int = { /* compiled code */ }
+ def foo11(): scala.Int = { /* compiled code */ }
+ def foo12(): scala.Int = { /* compiled code */ }
+ def foo13(): scala.Int = { /* compiled code */ }
+ def foo14(): scala.Int = { /* compiled code */ }
+ def foo15(): scala.Int = { /* compiled code */ }
+ def foo16(): scala.Int = { /* compiled code */ }
+ def foo17(): scala.Int = { /* compiled code */ }
+ def foo18(): scala.Int = { /* compiled code */ }
+ def foo19(): scala.Int = { /* compiled code */ }
+ def foo20(): scala.Int = { /* compiled code */ }
+ def foo21(): scala.Int = { /* compiled code */ }
+ def foo22(): scala.Int = { /* compiled code */ }
+ def foo23(): scala.Int = { /* compiled code */ }
+ def foo24(): scala.Int = { /* compiled code */ }
+ def foo25(): scala.Int = { /* compiled code */ }
+ def foo26(): scala.Int = { /* compiled code */ }
+ def foo27(): scala.Int = { /* compiled code */ }
+ def foo28(): scala.Int = { /* compiled code */ }
+ def foo29(): scala.Int = { /* compiled code */ }
+ def foo30(): scala.Int = { /* compiled code */ }
+ def foo31(): scala.Int = { /* compiled code */ }
+ def foo32(): scala.Int = { /* compiled code */ }
+ def foo33(): scala.Int = { /* compiled code */ }
+ def foo34(): scala.Int = { /* compiled code */ }
+ def foo35(): scala.Int = { /* compiled code */ }
+ def foo36(): scala.Int = { /* compiled code */ }
+ def foo37(): scala.Int = { /* compiled code */ }
+ def foo38(): scala.Int = { /* compiled code */ }
+ def foo39(): scala.Int = { /* compiled code */ }
+ def foo40(): scala.Int = { /* compiled code */ }
+ def foo41(): scala.Int = { /* compiled code */ }
+ def foo42(): scala.Int = { /* compiled code */ }
+ def foo43(): scala.Int = { /* compiled code */ }
+ def foo44(): scala.Int = { /* compiled code */ }
+ def foo45(): scala.Int = { /* compiled code */ }
+ def foo46(): scala.Int = { /* compiled code */ }
+ def foo47(): scala.Int = { /* compiled code */ }
+ def foo48(): scala.Int = { /* compiled code */ }
+ def foo49(): scala.Int = { /* compiled code */ }
+ def foo50(): scala.Int = { /* compiled code */ }
+ def foo51(): scala.Int = { /* compiled code */ }
+ def foo52(): scala.Int = { /* compiled code */ }
+ def foo53(): scala.Int = { /* compiled code */ }
+ def foo54(): scala.Int = { /* compiled code */ }
+ def foo55(): scala.Int = { /* compiled code */ }
+ def foo56(): scala.Int = { /* compiled code */ }
+ def foo57(): scala.Int = { /* compiled code */ }
+ def foo58(): scala.Int = { /* compiled code */ }
+ def foo59(): scala.Int = { /* compiled code */ }
+ def foo60(): scala.Int = { /* compiled code */ }
+ def foo61(): scala.Int = { /* compiled code */ }
+ def foo62(): scala.Int = { /* compiled code */ }
+ def foo63(): scala.Int = { /* compiled code */ }
+ def foo64(): scala.Int = { /* compiled code */ }
+ def foo65(): scala.Int = { /* compiled code */ }
+ def foo66(): scala.Int = { /* compiled code */ }
+ def foo67(): scala.Int = { /* compiled code */ }
+ def foo68(): scala.Int = { /* compiled code */ }
+ def foo69(): scala.Int = { /* compiled code */ }
+ def foo70(): scala.Int = { /* compiled code */ }
+ def foo71(): scala.Int = { /* compiled code */ }
+ def foo72(): scala.Int = { /* compiled code */ }
+ def foo73(): scala.Int = { /* compiled code */ }
+ def foo74(): scala.Int = { /* compiled code */ }
+ def foo75(): scala.Int = { /* compiled code */ }
+ def foo76(): scala.Int = { /* compiled code */ }
+ def foo77(): scala.Int = { /* compiled code */ }
+ def foo78(): scala.Int = { /* compiled code */ }
+ def foo79(): scala.Int = { /* compiled code */ }
+ def foo80(): scala.Int = { /* compiled code */ }
+ def foo81(): scala.Int = { /* compiled code */ }
+ def foo82(): scala.Int = { /* compiled code */ }
+ def foo83(): scala.Int = { /* compiled code */ }
+ def foo84(): scala.Int = { /* compiled code */ }
+ def foo85(): scala.Int = { /* compiled code */ }
+ def foo86(): scala.Int = { /* compiled code */ }
+ def foo87(): scala.Int = { /* compiled code */ }
+ def foo88(): scala.Int = { /* compiled code */ }
+ def foo89(): scala.Int = { /* compiled code */ }
+ def foo90(): scala.Int = { /* compiled code */ }
+ def foo91(): scala.Int = { /* compiled code */ }
+ def foo92(): scala.Int = { /* compiled code */ }
+ def foo93(): scala.Int = { /* compiled code */ }
+ def foo94(): scala.Int = { /* compiled code */ }
+ def foo95(): scala.Int = { /* compiled code */ }
+ def foo96(): scala.Int = { /* compiled code */ }
+ def foo97(): scala.Int = { /* compiled code */ }
+ def foo98(): scala.Int = { /* compiled code */ }
+ def foo99(): scala.Int = { /* compiled code */ }
+ def foo100(): scala.Int = { /* compiled code */ }
+ def foo101(): scala.Int = { /* compiled code */ }
+ def foo102(): scala.Int = { /* compiled code */ }
+ def foo103(): scala.Int = { /* compiled code */ }
+ def foo104(): scala.Int = { /* compiled code */ }
+ def foo105(): scala.Int = { /* compiled code */ }
+ def foo106(): scala.Int = { /* compiled code */ }
+ def foo107(): scala.Int = { /* compiled code */ }
+ def foo108(): scala.Int = { /* compiled code */ }
+ def foo109(): scala.Int = { /* compiled code */ }
+ def foo110(): scala.Int = { /* compiled code */ }
+ def foo111(): scala.Int = { /* compiled code */ }
+ def foo112(): scala.Int = { /* compiled code */ }
+ def foo113(): scala.Int = { /* compiled code */ }
+ def foo114(): scala.Int = { /* compiled code */ }
+ def foo115(): scala.Int = { /* compiled code */ }
+ def foo116(): scala.Int = { /* compiled code */ }
+ def foo117(): scala.Int = { /* compiled code */ }
+ def foo118(): scala.Int = { /* compiled code */ }
+ def foo119(): scala.Int = { /* compiled code */ }
+ def foo120(): scala.Int = { /* compiled code */ }
+ def foo121(): scala.Int = { /* compiled code */ }
+ def foo122(): scala.Int = { /* compiled code */ }
+ def foo123(): scala.Int = { /* compiled code */ }
+ def foo124(): scala.Int = { /* compiled code */ }
+ def foo125(): scala.Int = { /* compiled code */ }
+ def foo126(): scala.Int = { /* compiled code */ }
+ def foo127(): scala.Int = { /* compiled code */ }
+ def foo128(): scala.Int = { /* compiled code */ }
+ def foo129(): scala.Int = { /* compiled code */ }
+ def foo130(): scala.Int = { /* compiled code */ }
+ def foo131(): scala.Int = { /* compiled code */ }
+ def foo132(): scala.Int = { /* compiled code */ }
+ def foo133(): scala.Int = { /* compiled code */ }
+ def foo134(): scala.Int = { /* compiled code */ }
+ def foo135(): scala.Int = { /* compiled code */ }
+ def foo136(): scala.Int = { /* compiled code */ }
+ def foo137(): scala.Int = { /* compiled code */ }
+ def foo138(): scala.Int = { /* compiled code */ }
+ def foo139(): scala.Int = { /* compiled code */ }
+ def foo140(): scala.Int = { /* compiled code */ }
+ def foo141(): scala.Int = { /* compiled code */ }
+ def foo142(): scala.Int = { /* compiled code */ }
+ def foo143(): scala.Int = { /* compiled code */ }
+ def foo144(): scala.Int = { /* compiled code */ }
+ def foo145(): scala.Int = { /* compiled code */ }
+ def foo146(): scala.Int = { /* compiled code */ }
+ def foo147(): scala.Int = { /* compiled code */ }
+ def foo148(): scala.Int = { /* compiled code */ }
+ def foo149(): scala.Int = { /* compiled code */ }
+ def foo150(): scala.Int = { /* compiled code */ }
+ def foo151(): scala.Int = { /* compiled code */ }
+ def foo152(): scala.Int = { /* compiled code */ }
+ def foo153(): scala.Int = { /* compiled code */ }
+ def foo154(): scala.Int = { /* compiled code */ }
+ def foo155(): scala.Int = { /* compiled code */ }
+ def foo156(): scala.Int = { /* compiled code */ }
+ def foo157(): scala.Int = { /* compiled code */ }
+ def foo158(): scala.Int = { /* compiled code */ }
+ def foo159(): scala.Int = { /* compiled code */ }
+ def foo160(): scala.Int = { /* compiled code */ }
+ def foo161(): scala.Int = { /* compiled code */ }
+ def foo162(): scala.Int = { /* compiled code */ }
+ def foo163(): scala.Int = { /* compiled code */ }
+ def foo164(): scala.Int = { /* compiled code */ }
+ def foo165(): scala.Int = { /* compiled code */ }
+ def foo166(): scala.Int = { /* compiled code */ }
+ def foo167(): scala.Int = { /* compiled code */ }
+ def foo168(): scala.Int = { /* compiled code */ }
+ def foo169(): scala.Int = { /* compiled code */ }
+ def foo170(): scala.Int = { /* compiled code */ }
+ def foo171(): scala.Int = { /* compiled code */ }
+ def foo172(): scala.Int = { /* compiled code */ }
+ def foo173(): scala.Int = { /* compiled code */ }
+ def foo174(): scala.Int = { /* compiled code */ }
+ def foo175(): scala.Int = { /* compiled code */ }
+ def foo176(): scala.Int = { /* compiled code */ }
+ def foo177(): scala.Int = { /* compiled code */ }
+ def foo178(): scala.Int = { /* compiled code */ }
+ def foo179(): scala.Int = { /* compiled code */ }
+ def foo180(): scala.Int = { /* compiled code */ }
+ def foo181(): scala.Int = { /* compiled code */ }
+ def foo182(): scala.Int = { /* compiled code */ }
+ def foo183(): scala.Int = { /* compiled code */ }
+ def foo184(): scala.Int = { /* compiled code */ }
+ def foo185(): scala.Int = { /* compiled code */ }
+ def foo186(): scala.Int = { /* compiled code */ }
+ def foo187(): scala.Int = { /* compiled code */ }
+ def foo188(): scala.Int = { /* compiled code */ }
+ def foo189(): scala.Int = { /* compiled code */ }
+ def foo190(): scala.Int = { /* compiled code */ }
+ def foo191(): scala.Int = { /* compiled code */ }
+ def foo192(): scala.Int = { /* compiled code */ }
+ def foo193(): scala.Int = { /* compiled code */ }
+ def foo194(): scala.Int = { /* compiled code */ }
+ def foo195(): scala.Int = { /* compiled code */ }
+ def foo196(): scala.Int = { /* compiled code */ }
+ def foo197(): scala.Int = { /* compiled code */ }
+ def foo198(): scala.Int = { /* compiled code */ }
+ def foo199(): scala.Int = { /* compiled code */ }
+ def foo200(): scala.Int = { /* compiled code */ }
+ def foo201(): scala.Int = { /* compiled code */ }
+ def foo202(): scala.Int = { /* compiled code */ }
+ def foo203(): scala.Int = { /* compiled code */ }
+ def foo204(): scala.Int = { /* compiled code */ }
+ def foo205(): scala.Int = { /* compiled code */ }
+ def foo206(): scala.Int = { /* compiled code */ }
+ def foo207(): scala.Int = { /* compiled code */ }
+ def foo208(): scala.Int = { /* compiled code */ }
+ def foo209(): scala.Int = { /* compiled code */ }
+ def foo210(): scala.Int = { /* compiled code */ }
+ def foo211(): scala.Int = { /* compiled code */ }
+ def foo212(): scala.Int = { /* compiled code */ }
+ def foo213(): scala.Int = { /* compiled code */ }
+ def foo214(): scala.Int = { /* compiled code */ }
+ def foo215(): scala.Int = { /* compiled code */ }
+ def foo216(): scala.Int = { /* compiled code */ }
+ def foo217(): scala.Int = { /* compiled code */ }
+ def foo218(): scala.Int = { /* compiled code */ }
+ def foo219(): scala.Int = { /* compiled code */ }
+ def foo220(): scala.Int = { /* compiled code */ }
+ def foo221(): scala.Int = { /* compiled code */ }
+ def foo222(): scala.Int = { /* compiled code */ }
+ def foo223(): scala.Int = { /* compiled code */ }
+ def foo224(): scala.Int = { /* compiled code */ }
+ def foo225(): scala.Int = { /* compiled code */ }
+ def foo226(): scala.Int = { /* compiled code */ }
+ def foo227(): scala.Int = { /* compiled code */ }
+ def foo228(): scala.Int = { /* compiled code */ }
+ def foo229(): scala.Int = { /* compiled code */ }
+ def foo230(): scala.Int = { /* compiled code */ }
+ def foo231(): scala.Int = { /* compiled code */ }
+ def foo232(): scala.Int = { /* compiled code */ }
+ def foo233(): scala.Int = { /* compiled code */ }
+ def foo234(): scala.Int = { /* compiled code */ }
+ def foo235(): scala.Int = { /* compiled code */ }
+ def foo236(): scala.Int = { /* compiled code */ }
+ def foo237(): scala.Int = { /* compiled code */ }
+ def foo238(): scala.Int = { /* compiled code */ }
+ def foo239(): scala.Int = { /* compiled code */ }
+ def foo240(): scala.Int = { /* compiled code */ }
+ def foo241(): scala.Int = { /* compiled code */ }
+ def foo242(): scala.Int = { /* compiled code */ }
+ def foo243(): scala.Int = { /* compiled code */ }
+ def foo244(): scala.Int = { /* compiled code */ }
+ def foo245(): scala.Int = { /* compiled code */ }
+ def foo246(): scala.Int = { /* compiled code */ }
+ def foo247(): scala.Int = { /* compiled code */ }
+ def foo248(): scala.Int = { /* compiled code */ }
+ def foo249(): scala.Int = { /* compiled code */ }
+ def foo250(): scala.Int = { /* compiled code */ }
+ def foo251(): scala.Int = { /* compiled code */ }
+ def foo252(): scala.Int = { /* compiled code */ }
+ def foo253(): scala.Int = { /* compiled code */ }
+ def foo254(): scala.Int = { /* compiled code */ }
+ def foo255(): scala.Int = { /* compiled code */ }
+ def foo256(): scala.Int = { /* compiled code */ }
+ def foo257(): scala.Int = { /* compiled code */ }
+ def foo258(): scala.Int = { /* compiled code */ }
+ def foo259(): scala.Int = { /* compiled code */ }
+ def foo260(): scala.Int = { /* compiled code */ }
+ def foo261(): scala.Int = { /* compiled code */ }
+ def foo262(): scala.Int = { /* compiled code */ }
+ def foo263(): scala.Int = { /* compiled code */ }
+ def foo264(): scala.Int = { /* compiled code */ }
+ def foo265(): scala.Int = { /* compiled code */ }
+ def foo266(): scala.Int = { /* compiled code */ }
+ def foo267(): scala.Int = { /* compiled code */ }
+ def foo268(): scala.Int = { /* compiled code */ }
+ def foo269(): scala.Int = { /* compiled code */ }
+ def foo270(): scala.Int = { /* compiled code */ }
+ def foo271(): scala.Int = { /* compiled code */ }
+ def foo272(): scala.Int = { /* compiled code */ }
+ def foo273(): scala.Int = { /* compiled code */ }
+ def foo274(): scala.Int = { /* compiled code */ }
+ def foo275(): scala.Int = { /* compiled code */ }
+ def foo276(): scala.Int = { /* compiled code */ }
+ def foo277(): scala.Int = { /* compiled code */ }
+ def foo278(): scala.Int = { /* compiled code */ }
+ def foo279(): scala.Int = { /* compiled code */ }
+ def foo280(): scala.Int = { /* compiled code */ }
+ def foo281(): scala.Int = { /* compiled code */ }
+ def foo282(): scala.Int = { /* compiled code */ }
+ def foo283(): scala.Int = { /* compiled code */ }
+ def foo284(): scala.Int = { /* compiled code */ }
+ def foo285(): scala.Int = { /* compiled code */ }
+ def foo286(): scala.Int = { /* compiled code */ }
+ def foo287(): scala.Int = { /* compiled code */ }
+ def foo288(): scala.Int = { /* compiled code */ }
+ def foo289(): scala.Int = { /* compiled code */ }
+ def foo290(): scala.Int = { /* compiled code */ }
+ def foo291(): scala.Int = { /* compiled code */ }
+ def foo292(): scala.Int = { /* compiled code */ }
+ def foo293(): scala.Int = { /* compiled code */ }
+ def foo294(): scala.Int = { /* compiled code */ }
+ def foo295(): scala.Int = { /* compiled code */ }
+ def foo296(): scala.Int = { /* compiled code */ }
+ def foo297(): scala.Int = { /* compiled code */ }
+ def foo298(): scala.Int = { /* compiled code */ }
+ def foo299(): scala.Int = { /* compiled code */ }
+ def foo300(): scala.Int = { /* compiled code */ }
+ def foo301(): scala.Int = { /* compiled code */ }
+ def foo302(): scala.Int = { /* compiled code */ }
+ def foo303(): scala.Int = { /* compiled code */ }
+ def foo304(): scala.Int = { /* compiled code */ }
+ def foo305(): scala.Int = { /* compiled code */ }
+ def foo306(): scala.Int = { /* compiled code */ }
+ def foo307(): scala.Int = { /* compiled code */ }
+ def foo308(): scala.Int = { /* compiled code */ }
+ def foo309(): scala.Int = { /* compiled code */ }
+ def foo310(): scala.Int = { /* compiled code */ }
+ def foo311(): scala.Int = { /* compiled code */ }
+ def foo312(): scala.Int = { /* compiled code */ }
+ def foo313(): scala.Int = { /* compiled code */ }
+ def foo314(): scala.Int = { /* compiled code */ }
+ def foo315(): scala.Int = { /* compiled code */ }
+ def foo316(): scala.Int = { /* compiled code */ }
+ def foo317(): scala.Int = { /* compiled code */ }
+ def foo318(): scala.Int = { /* compiled code */ }
+ def foo319(): scala.Int = { /* compiled code */ }
+ def foo320(): scala.Int = { /* compiled code */ }
+ def foo321(): scala.Int = { /* compiled code */ }
+ def foo322(): scala.Int = { /* compiled code */ }
+ def foo323(): scala.Int = { /* compiled code */ }
+ def foo324(): scala.Int = { /* compiled code */ }
+ def foo325(): scala.Int = { /* compiled code */ }
+ def foo326(): scala.Int = { /* compiled code */ }
+ def foo327(): scala.Int = { /* compiled code */ }
+ def foo328(): scala.Int = { /* compiled code */ }
+ def foo329(): scala.Int = { /* compiled code */ }
+ def foo330(): scala.Int = { /* compiled code */ }
+ def foo331(): scala.Int = { /* compiled code */ }
+ def foo332(): scala.Int = { /* compiled code */ }
+ def foo333(): scala.Int = { /* compiled code */ }
+ def foo334(): scala.Int = { /* compiled code */ }
+ def foo335(): scala.Int = { /* compiled code */ }
+ def foo336(): scala.Int = { /* compiled code */ }
+ def foo337(): scala.Int = { /* compiled code */ }
+ def foo338(): scala.Int = { /* compiled code */ }
+ def foo339(): scala.Int = { /* compiled code */ }
+ def foo340(): scala.Int = { /* compiled code */ }
+ def foo341(): scala.Int = { /* compiled code */ }
+ def foo342(): scala.Int = { /* compiled code */ }
+ def foo343(): scala.Int = { /* compiled code */ }
+ def foo344(): scala.Int = { /* compiled code */ }
+ def foo345(): scala.Int = { /* compiled code */ }
+ def foo346(): scala.Int = { /* compiled code */ }
+ def foo347(): scala.Int = { /* compiled code */ }
+ def foo348(): scala.Int = { /* compiled code */ }
+ def foo349(): scala.Int = { /* compiled code */ }
+ def foo350(): scala.Int = { /* compiled code */ }
+ def foo351(): scala.Int = { /* compiled code */ }
+ def foo352(): scala.Int = { /* compiled code */ }
+ def foo353(): scala.Int = { /* compiled code */ }
+ def foo354(): scala.Int = { /* compiled code */ }
+ def foo355(): scala.Int = { /* compiled code */ }
+ def foo356(): scala.Int = { /* compiled code */ }
+ def foo357(): scala.Int = { /* compiled code */ }
+ def foo358(): scala.Int = { /* compiled code */ }
+ def foo359(): scala.Int = { /* compiled code */ }
+ def foo360(): scala.Int = { /* compiled code */ }
+ def foo361(): scala.Int = { /* compiled code */ }
+ def foo362(): scala.Int = { /* compiled code */ }
+ def foo363(): scala.Int = { /* compiled code */ }
+ def foo364(): scala.Int = { /* compiled code */ }
+ def foo365(): scala.Int = { /* compiled code */ }
+ def foo366(): scala.Int = { /* compiled code */ }
+ def foo367(): scala.Int = { /* compiled code */ }
+ def foo368(): scala.Int = { /* compiled code */ }
+ def foo369(): scala.Int = { /* compiled code */ }
+ def foo370(): scala.Int = { /* compiled code */ }
+ def foo371(): scala.Int = { /* compiled code */ }
+ def foo372(): scala.Int = { /* compiled code */ }
+ def foo373(): scala.Int = { /* compiled code */ }
+ def foo374(): scala.Int = { /* compiled code */ }
+ def foo375(): scala.Int = { /* compiled code */ }
+ def foo376(): scala.Int = { /* compiled code */ }
+ def foo377(): scala.Int = { /* compiled code */ }
+ def foo378(): scala.Int = { /* compiled code */ }
+ def foo379(): scala.Int = { /* compiled code */ }
+ def foo380(): scala.Int = { /* compiled code */ }
+ def foo381(): scala.Int = { /* compiled code */ }
+ def foo382(): scala.Int = { /* compiled code */ }
+ def foo383(): scala.Int = { /* compiled code */ }
+ def foo384(): scala.Int = { /* compiled code */ }
+ def foo385(): scala.Int = { /* compiled code */ }
+ def foo386(): scala.Int = { /* compiled code */ }
+ def foo387(): scala.Int = { /* compiled code */ }
+ def foo388(): scala.Int = { /* compiled code */ }
+ def foo389(): scala.Int = { /* compiled code */ }
+ def foo390(): scala.Int = { /* compiled code */ }
+ def foo391(): scala.Int = { /* compiled code */ }
+ def foo392(): scala.Int = { /* compiled code */ }
+ def foo393(): scala.Int = { /* compiled code */ }
+ def foo394(): scala.Int = { /* compiled code */ }
+ def foo395(): scala.Int = { /* compiled code */ }
+ def foo396(): scala.Int = { /* compiled code */ }
+ def foo397(): scala.Int = { /* compiled code */ }
+ def foo398(): scala.Int = { /* compiled code */ }
+ def foo399(): scala.Int = { /* compiled code */ }
+ def foo400(): scala.Int = { /* compiled code */ }
+ def foo401(): scala.Int = { /* compiled code */ }
+ def foo402(): scala.Int = { /* compiled code */ }
+ def foo403(): scala.Int = { /* compiled code */ }
+ def foo404(): scala.Int = { /* compiled code */ }
+ def foo405(): scala.Int = { /* compiled code */ }
+ def foo406(): scala.Int = { /* compiled code */ }
+ def foo407(): scala.Int = { /* compiled code */ }
+ def foo408(): scala.Int = { /* compiled code */ }
+ def foo409(): scala.Int = { /* compiled code */ }
+ def foo410(): scala.Int = { /* compiled code */ }
+ def foo411(): scala.Int = { /* compiled code */ }
+ def foo412(): scala.Int = { /* compiled code */ }
+ def foo413(): scala.Int = { /* compiled code */ }
+ def foo414(): scala.Int = { /* compiled code */ }
+ def foo415(): scala.Int = { /* compiled code */ }
+ def foo416(): scala.Int = { /* compiled code */ }
+ def foo417(): scala.Int = { /* compiled code */ }
+ def foo418(): scala.Int = { /* compiled code */ }
+ def foo419(): scala.Int = { /* compiled code */ }
+ def foo420(): scala.Int = { /* compiled code */ }
+ def foo421(): scala.Int = { /* compiled code */ }
+ def foo422(): scala.Int = { /* compiled code */ }
+ def foo423(): scala.Int = { /* compiled code */ }
+ def foo424(): scala.Int = { /* compiled code */ }
+ def foo425(): scala.Int = { /* compiled code */ }
+ def foo426(): scala.Int = { /* compiled code */ }
+ def foo427(): scala.Int = { /* compiled code */ }
+ def foo428(): scala.Int = { /* compiled code */ }
+ def foo429(): scala.Int = { /* compiled code */ }
+ def foo430(): scala.Int = { /* compiled code */ }
+ def foo431(): scala.Int = { /* compiled code */ }
+ def foo432(): scala.Int = { /* compiled code */ }
+ def foo433(): scala.Int = { /* compiled code */ }
+ def foo434(): scala.Int = { /* compiled code */ }
+ def foo435(): scala.Int = { /* compiled code */ }
+ def foo436(): scala.Int = { /* compiled code */ }
+ def foo437(): scala.Int = { /* compiled code */ }
+ def foo438(): scala.Int = { /* compiled code */ }
+ def foo439(): scala.Int = { /* compiled code */ }
+ def foo440(): scala.Int = { /* compiled code */ }
+ def foo441(): scala.Int = { /* compiled code */ }
+ def foo442(): scala.Int = { /* compiled code */ }
+ def foo443(): scala.Int = { /* compiled code */ }
+ def foo444(): scala.Int = { /* compiled code */ }
+ def foo445(): scala.Int = { /* compiled code */ }
+ def foo446(): scala.Int = { /* compiled code */ }
+ def foo447(): scala.Int = { /* compiled code */ }
+ def foo448(): scala.Int = { /* compiled code */ }
+ def foo449(): scala.Int = { /* compiled code */ }
+ def foo450(): scala.Int = { /* compiled code */ }
+ def foo451(): scala.Int = { /* compiled code */ }
+ def foo452(): scala.Int = { /* compiled code */ }
+ def foo453(): scala.Int = { /* compiled code */ }
+ def foo454(): scala.Int = { /* compiled code */ }
+ def foo455(): scala.Int = { /* compiled code */ }
+ def foo456(): scala.Int = { /* compiled code */ }
+ def foo457(): scala.Int = { /* compiled code */ }
+ def foo458(): scala.Int = { /* compiled code */ }
+ def foo459(): scala.Int = { /* compiled code */ }
+ def foo460(): scala.Int = { /* compiled code */ }
+ def foo461(): scala.Int = { /* compiled code */ }
+ def foo462(): scala.Int = { /* compiled code */ }
+ def foo463(): scala.Int = { /* compiled code */ }
+ def foo464(): scala.Int = { /* compiled code */ }
+ def foo465(): scala.Int = { /* compiled code */ }
+ def foo466(): scala.Int = { /* compiled code */ }
+ def foo467(): scala.Int = { /* compiled code */ }
+ def foo468(): scala.Int = { /* compiled code */ }
+ def foo469(): scala.Int = { /* compiled code */ }
+ def foo470(): scala.Int = { /* compiled code */ }
+ def foo471(): scala.Int = { /* compiled code */ }
+ def foo472(): scala.Int = { /* compiled code */ }
+ def foo473(): scala.Int = { /* compiled code */ }
+ def foo474(): scala.Int = { /* compiled code */ }
+ def foo475(): scala.Int = { /* compiled code */ }
+ def foo476(): scala.Int = { /* compiled code */ }
+ def foo477(): scala.Int = { /* compiled code */ }
+ def foo478(): scala.Int = { /* compiled code */ }
+ def foo479(): scala.Int = { /* compiled code */ }
+ def foo480(): scala.Int = { /* compiled code */ }
+ def foo481(): scala.Int = { /* compiled code */ }
+ def foo482(): scala.Int = { /* compiled code */ }
+ def foo483(): scala.Int = { /* compiled code */ }
+ def foo484(): scala.Int = { /* compiled code */ }
+ def foo485(): scala.Int = { /* compiled code */ }
+ def foo486(): scala.Int = { /* compiled code */ }
+ def foo487(): scala.Int = { /* compiled code */ }
+ def foo488(): scala.Int = { /* compiled code */ }
+ def foo489(): scala.Int = { /* compiled code */ }
+ def foo490(): scala.Int = { /* compiled code */ }
+ def foo491(): scala.Int = { /* compiled code */ }
+ def foo492(): scala.Int = { /* compiled code */ }
+ def foo493(): scala.Int = { /* compiled code */ }
+ def foo494(): scala.Int = { /* compiled code */ }
+ def foo495(): scala.Int = { /* compiled code */ }
+ def foo496(): scala.Int = { /* compiled code */ }
+ def foo497(): scala.Int = { /* compiled code */ }
+ def foo498(): scala.Int = { /* compiled code */ }
+ def foo499(): scala.Int = { /* compiled code */ }
+ def foo500(): scala.Int = { /* compiled code */ }
+ def foo501(): scala.Int = { /* compiled code */ }
+ def foo502(): scala.Int = { /* compiled code */ }
+ def foo503(): scala.Int = { /* compiled code */ }
+ def foo504(): scala.Int = { /* compiled code */ }
+ def foo505(): scala.Int = { /* compiled code */ }
+ def foo506(): scala.Int = { /* compiled code */ }
+ def foo507(): scala.Int = { /* compiled code */ }
+ def foo508(): scala.Int = { /* compiled code */ }
+ def foo509(): scala.Int = { /* compiled code */ }
+ def foo510(): scala.Int = { /* compiled code */ }
+ def foo511(): scala.Int = { /* compiled code */ }
+ def foo512(): scala.Int = { /* compiled code */ }
+ def foo513(): scala.Int = { /* compiled code */ }
+ def foo514(): scala.Int = { /* compiled code */ }
+ def foo515(): scala.Int = { /* compiled code */ }
+ def foo516(): scala.Int = { /* compiled code */ }
+ def foo517(): scala.Int = { /* compiled code */ }
+ def foo518(): scala.Int = { /* compiled code */ }
+ def foo519(): scala.Int = { /* compiled code */ }
+ def foo520(): scala.Int = { /* compiled code */ }
+ def foo521(): scala.Int = { /* compiled code */ }
+ def foo522(): scala.Int = { /* compiled code */ }
+ def foo523(): scala.Int = { /* compiled code */ }
+ def foo524(): scala.Int = { /* compiled code */ }
+ def foo525(): scala.Int = { /* compiled code */ }
+ def foo526(): scala.Int = { /* compiled code */ }
+ def foo527(): scala.Int = { /* compiled code */ }
+ def foo528(): scala.Int = { /* compiled code */ }
+ def foo529(): scala.Int = { /* compiled code */ }
+ def foo530(): scala.Int = { /* compiled code */ }
+ def foo531(): scala.Int = { /* compiled code */ }
+ def foo532(): scala.Int = { /* compiled code */ }
+ def foo533(): scala.Int = { /* compiled code */ }
+ def foo534(): scala.Int = { /* compiled code */ }
+ def foo535(): scala.Int = { /* compiled code */ }
+ def foo536(): scala.Int = { /* compiled code */ }
+ def foo537(): scala.Int = { /* compiled code */ }
+ def foo538(): scala.Int = { /* compiled code */ }
+ def foo539(): scala.Int = { /* compiled code */ }
+ def foo540(): scala.Int = { /* compiled code */ }
+ def foo541(): scala.Int = { /* compiled code */ }
+ def foo542(): scala.Int = { /* compiled code */ }
+ def foo543(): scala.Int = { /* compiled code */ }
+ def foo544(): scala.Int = { /* compiled code */ }
+ def foo545(): scala.Int = { /* compiled code */ }
+ def foo546(): scala.Int = { /* compiled code */ }
+ def foo547(): scala.Int = { /* compiled code */ }
+ def foo548(): scala.Int = { /* compiled code */ }
+ def foo549(): scala.Int = { /* compiled code */ }
+ def foo550(): scala.Int = { /* compiled code */ }
+ def foo551(): scala.Int = { /* compiled code */ }
+ def foo552(): scala.Int = { /* compiled code */ }
+ def foo553(): scala.Int = { /* compiled code */ }
+ def foo554(): scala.Int = { /* compiled code */ }
+ def foo555(): scala.Int = { /* compiled code */ }
+ def foo556(): scala.Int = { /* compiled code */ }
+ def foo557(): scala.Int = { /* compiled code */ }
+ def foo558(): scala.Int = { /* compiled code */ }
+ def foo559(): scala.Int = { /* compiled code */ }
+ def foo560(): scala.Int = { /* compiled code */ }
+ def foo561(): scala.Int = { /* compiled code */ }
+ def foo562(): scala.Int = { /* compiled code */ }
+ def foo563(): scala.Int = { /* compiled code */ }
+ def foo564(): scala.Int = { /* compiled code */ }
+ def foo565(): scala.Int = { /* compiled code */ }
+ def foo566(): scala.Int = { /* compiled code */ }
+ def foo567(): scala.Int = { /* compiled code */ }
+ def foo568(): scala.Int = { /* compiled code */ }
+ def foo569(): scala.Int = { /* compiled code */ }
+ def foo570(): scala.Int = { /* compiled code */ }
+ def foo571(): scala.Int = { /* compiled code */ }
+ def foo572(): scala.Int = { /* compiled code */ }
+ def foo573(): scala.Int = { /* compiled code */ }
+ def foo574(): scala.Int = { /* compiled code */ }
+ def foo575(): scala.Int = { /* compiled code */ }
+ def foo576(): scala.Int = { /* compiled code */ }
+ def foo577(): scala.Int = { /* compiled code */ }
+ def foo578(): scala.Int = { /* compiled code */ }
+ def foo579(): scala.Int = { /* compiled code */ }
+ def foo580(): scala.Int = { /* compiled code */ }
+ def foo581(): scala.Int = { /* compiled code */ }
+ def foo582(): scala.Int = { /* compiled code */ }
+ def foo583(): scala.Int = { /* compiled code */ }
+ def foo584(): scala.Int = { /* compiled code */ }
+ def foo585(): scala.Int = { /* compiled code */ }
+ def foo586(): scala.Int = { /* compiled code */ }
+ def foo587(): scala.Int = { /* compiled code */ }
+ def foo588(): scala.Int = { /* compiled code */ }
+ def foo589(): scala.Int = { /* compiled code */ }
+ def foo590(): scala.Int = { /* compiled code */ }
+ def foo591(): scala.Int = { /* compiled code */ }
+ def foo592(): scala.Int = { /* compiled code */ }
+ def foo593(): scala.Int = { /* compiled code */ }
+ def foo594(): scala.Int = { /* compiled code */ }
+ def foo595(): scala.Int = { /* compiled code */ }
+ def foo596(): scala.Int = { /* compiled code */ }
+ def foo597(): scala.Int = { /* compiled code */ }
+ def foo598(): scala.Int = { /* compiled code */ }
+ def foo599(): scala.Int = { /* compiled code */ }
+ def foo600(): scala.Int = { /* compiled code */ }
+ def foo601(): scala.Int = { /* compiled code */ }
+ def foo602(): scala.Int = { /* compiled code */ }
+ def foo603(): scala.Int = { /* compiled code */ }
+ def foo604(): scala.Int = { /* compiled code */ }
+ def foo605(): scala.Int = { /* compiled code */ }
+ def foo606(): scala.Int = { /* compiled code */ }
+ def foo607(): scala.Int = { /* compiled code */ }
+ def foo608(): scala.Int = { /* compiled code */ }
+ def foo609(): scala.Int = { /* compiled code */ }
+ def foo610(): scala.Int = { /* compiled code */ }
+ def foo611(): scala.Int = { /* compiled code */ }
+ def foo612(): scala.Int = { /* compiled code */ }
+ def foo613(): scala.Int = { /* compiled code */ }
+ def foo614(): scala.Int = { /* compiled code */ }
+ def foo615(): scala.Int = { /* compiled code */ }
+ def foo616(): scala.Int = { /* compiled code */ }
+ def foo617(): scala.Int = { /* compiled code */ }
+ def foo618(): scala.Int = { /* compiled code */ }
+ def foo619(): scala.Int = { /* compiled code */ }
+ def foo620(): scala.Int = { /* compiled code */ }
+ def foo621(): scala.Int = { /* compiled code */ }
+ def foo622(): scala.Int = { /* compiled code */ }
+ def foo623(): scala.Int = { /* compiled code */ }
+ def foo624(): scala.Int = { /* compiled code */ }
+ def foo625(): scala.Int = { /* compiled code */ }
+ def foo626(): scala.Int = { /* compiled code */ }
+ def foo627(): scala.Int = { /* compiled code */ }
+ def foo628(): scala.Int = { /* compiled code */ }
+ def foo629(): scala.Int = { /* compiled code */ }
+ def foo630(): scala.Int = { /* compiled code */ }
+ def foo631(): scala.Int = { /* compiled code */ }
+ def foo632(): scala.Int = { /* compiled code */ }
+ def foo633(): scala.Int = { /* compiled code */ }
+ def foo634(): scala.Int = { /* compiled code */ }
+ def foo635(): scala.Int = { /* compiled code */ }
+ def foo636(): scala.Int = { /* compiled code */ }
+ def foo637(): scala.Int = { /* compiled code */ }
+ def foo638(): scala.Int = { /* compiled code */ }
+ def foo639(): scala.Int = { /* compiled code */ }
+ def foo640(): scala.Int = { /* compiled code */ }
+ def foo641(): scala.Int = { /* compiled code */ }
+ def foo642(): scala.Int = { /* compiled code */ }
+ def foo643(): scala.Int = { /* compiled code */ }
+ def foo644(): scala.Int = { /* compiled code */ }
+ def foo645(): scala.Int = { /* compiled code */ }
+ def foo646(): scala.Int = { /* compiled code */ }
+ def foo647(): scala.Int = { /* compiled code */ }
+ def foo648(): scala.Int = { /* compiled code */ }
+ def foo649(): scala.Int = { /* compiled code */ }
+ def foo650(): scala.Int = { /* compiled code */ }
+ def foo651(): scala.Int = { /* compiled code */ }
+ def foo652(): scala.Int = { /* compiled code */ }
+ def foo653(): scala.Int = { /* compiled code */ }
+ def foo654(): scala.Int = { /* compiled code */ }
+ def foo655(): scala.Int = { /* compiled code */ }
+ def foo656(): scala.Int = { /* compiled code */ }
+ def foo657(): scala.Int = { /* compiled code */ }
+ def foo658(): scala.Int = { /* compiled code */ }
+ def foo659(): scala.Int = { /* compiled code */ }
+ def foo660(): scala.Int = { /* compiled code */ }
+ def foo661(): scala.Int = { /* compiled code */ }
+ def foo662(): scala.Int = { /* compiled code */ }
+ def foo663(): scala.Int = { /* compiled code */ }
+ def foo664(): scala.Int = { /* compiled code */ }
+ def foo665(): scala.Int = { /* compiled code */ }
+ def foo666(): scala.Int = { /* compiled code */ }
+ def foo667(): scala.Int = { /* compiled code */ }
+ def foo668(): scala.Int = { /* compiled code */ }
+ def foo669(): scala.Int = { /* compiled code */ }
+ def foo670(): scala.Int = { /* compiled code */ }
+ def foo671(): scala.Int = { /* compiled code */ }
+ def foo672(): scala.Int = { /* compiled code */ }
+ def foo673(): scala.Int = { /* compiled code */ }
+ def foo674(): scala.Int = { /* compiled code */ }
+ def foo675(): scala.Int = { /* compiled code */ }
+ def foo676(): scala.Int = { /* compiled code */ }
+ def foo677(): scala.Int = { /* compiled code */ }
+ def foo678(): scala.Int = { /* compiled code */ }
+ def foo679(): scala.Int = { /* compiled code */ }
+ def foo680(): scala.Int = { /* compiled code */ }
+ def foo681(): scala.Int = { /* compiled code */ }
+ def foo682(): scala.Int = { /* compiled code */ }
+ def foo683(): scala.Int = { /* compiled code */ }
+ def foo684(): scala.Int = { /* compiled code */ }
+ def foo685(): scala.Int = { /* compiled code */ }
+ def foo686(): scala.Int = { /* compiled code */ }
+ def foo687(): scala.Int = { /* compiled code */ }
+ def foo688(): scala.Int = { /* compiled code */ }
+ def foo689(): scala.Int = { /* compiled code */ }
+ def foo690(): scala.Int = { /* compiled code */ }
+ def foo691(): scala.Int = { /* compiled code */ }
+ def foo692(): scala.Int = { /* compiled code */ }
+ def foo693(): scala.Int = { /* compiled code */ }
+ def foo694(): scala.Int = { /* compiled code */ }
+ def foo695(): scala.Int = { /* compiled code */ }
+ def foo696(): scala.Int = { /* compiled code */ }
+ def foo697(): scala.Int = { /* compiled code */ }
+ def foo698(): scala.Int = { /* compiled code */ }
+ def foo699(): scala.Int = { /* compiled code */ }
+ def foo700(): scala.Int = { /* compiled code */ }
+ def foo701(): scala.Int = { /* compiled code */ }
+ def foo702(): scala.Int = { /* compiled code */ }
+ def foo703(): scala.Int = { /* compiled code */ }
+ def foo704(): scala.Int = { /* compiled code */ }
+ def foo705(): scala.Int = { /* compiled code */ }
+ def foo706(): scala.Int = { /* compiled code */ }
+ def foo707(): scala.Int = { /* compiled code */ }
+ def foo708(): scala.Int = { /* compiled code */ }
+ def foo709(): scala.Int = { /* compiled code */ }
+ def foo710(): scala.Int = { /* compiled code */ }
+ def foo711(): scala.Int = { /* compiled code */ }
+ def foo712(): scala.Int = { /* compiled code */ }
+ def foo713(): scala.Int = { /* compiled code */ }
+ def foo714(): scala.Int = { /* compiled code */ }
+ def foo715(): scala.Int = { /* compiled code */ }
+ def foo716(): scala.Int = { /* compiled code */ }
+ def foo717(): scala.Int = { /* compiled code */ }
+ def foo718(): scala.Int = { /* compiled code */ }
+ def foo719(): scala.Int = { /* compiled code */ }
+ def foo720(): scala.Int = { /* compiled code */ }
+ def foo721(): scala.Int = { /* compiled code */ }
+ def foo722(): scala.Int = { /* compiled code */ }
+ def foo723(): scala.Int = { /* compiled code */ }
+ def foo724(): scala.Int = { /* compiled code */ }
+ def foo725(): scala.Int = { /* compiled code */ }
+ def foo726(): scala.Int = { /* compiled code */ }
+ def foo727(): scala.Int = { /* compiled code */ }
+ def foo728(): scala.Int = { /* compiled code */ }
+ def foo729(): scala.Int = { /* compiled code */ }
+ def foo730(): scala.Int = { /* compiled code */ }
+ def foo731(): scala.Int = { /* compiled code */ }
+ def foo732(): scala.Int = { /* compiled code */ }
+ def foo733(): scala.Int = { /* compiled code */ }
+ def foo734(): scala.Int = { /* compiled code */ }
+ def foo735(): scala.Int = { /* compiled code */ }
+ def foo736(): scala.Int = { /* compiled code */ }
+ def foo737(): scala.Int = { /* compiled code */ }
+ def foo738(): scala.Int = { /* compiled code */ }
+ def foo739(): scala.Int = { /* compiled code */ }
+ def foo740(): scala.Int = { /* compiled code */ }
+ def foo741(): scala.Int = { /* compiled code */ }
+ def foo742(): scala.Int = { /* compiled code */ }
+ def foo743(): scala.Int = { /* compiled code */ }
+ def foo744(): scala.Int = { /* compiled code */ }
+ def foo745(): scala.Int = { /* compiled code */ }
+ def foo746(): scala.Int = { /* compiled code */ }
+ def foo747(): scala.Int = { /* compiled code */ }
+ def foo748(): scala.Int = { /* compiled code */ }
+ def foo749(): scala.Int = { /* compiled code */ }
+ def foo750(): scala.Int = { /* compiled code */ }
+ def foo751(): scala.Int = { /* compiled code */ }
+ def foo752(): scala.Int = { /* compiled code */ }
+ def foo753(): scala.Int = { /* compiled code */ }
+ def foo754(): scala.Int = { /* compiled code */ }
+ def foo755(): scala.Int = { /* compiled code */ }
+ def foo756(): scala.Int = { /* compiled code */ }
+ def foo757(): scala.Int = { /* compiled code */ }
+ def foo758(): scala.Int = { /* compiled code */ }
+ def foo759(): scala.Int = { /* compiled code */ }
+ def foo760(): scala.Int = { /* compiled code */ }
+ def foo761(): scala.Int = { /* compiled code */ }
+ def foo762(): scala.Int = { /* compiled code */ }
+ def foo763(): scala.Int = { /* compiled code */ }
+ def foo764(): scala.Int = { /* compiled code */ }
+ def foo765(): scala.Int = { /* compiled code */ }
+ def foo766(): scala.Int = { /* compiled code */ }
+ def foo767(): scala.Int = { /* compiled code */ }
+ def foo768(): scala.Int = { /* compiled code */ }
+ def foo769(): scala.Int = { /* compiled code */ }
+ def foo770(): scala.Int = { /* compiled code */ }
+ def foo771(): scala.Int = { /* compiled code */ }
+ def foo772(): scala.Int = { /* compiled code */ }
+ def foo773(): scala.Int = { /* compiled code */ }
+ def foo774(): scala.Int = { /* compiled code */ }
+ def foo775(): scala.Int = { /* compiled code */ }
+ def foo776(): scala.Int = { /* compiled code */ }
+ def foo777(): scala.Int = { /* compiled code */ }
+ def foo778(): scala.Int = { /* compiled code */ }
+ def foo779(): scala.Int = { /* compiled code */ }
+ def foo780(): scala.Int = { /* compiled code */ }
+ def foo781(): scala.Int = { /* compiled code */ }
+ def foo782(): scala.Int = { /* compiled code */ }
+ def foo783(): scala.Int = { /* compiled code */ }
+ def foo784(): scala.Int = { /* compiled code */ }
+ def foo785(): scala.Int = { /* compiled code */ }
+ def foo786(): scala.Int = { /* compiled code */ }
+ def foo787(): scala.Int = { /* compiled code */ }
+ def foo788(): scala.Int = { /* compiled code */ }
+ def foo789(): scala.Int = { /* compiled code */ }
+ def foo790(): scala.Int = { /* compiled code */ }
+ def foo791(): scala.Int = { /* compiled code */ }
+ def foo792(): scala.Int = { /* compiled code */ }
+ def foo793(): scala.Int = { /* compiled code */ }
+ def foo794(): scala.Int = { /* compiled code */ }
+ def foo795(): scala.Int = { /* compiled code */ }
+ def foo796(): scala.Int = { /* compiled code */ }
+ def foo797(): scala.Int = { /* compiled code */ }
+ def foo798(): scala.Int = { /* compiled code */ }
+ def foo799(): scala.Int = { /* compiled code */ }
+ def foo800(): scala.Int = { /* compiled code */ }
+ def foo801(): scala.Int = { /* compiled code */ }
+ def foo802(): scala.Int = { /* compiled code */ }
+ def foo803(): scala.Int = { /* compiled code */ }
+ def foo804(): scala.Int = { /* compiled code */ }
+ def foo805(): scala.Int = { /* compiled code */ }
+ def foo806(): scala.Int = { /* compiled code */ }
+ def foo807(): scala.Int = { /* compiled code */ }
+ def foo808(): scala.Int = { /* compiled code */ }
+ def foo809(): scala.Int = { /* compiled code */ }
+ def foo810(): scala.Int = { /* compiled code */ }
+ def foo811(): scala.Int = { /* compiled code */ }
+ def foo812(): scala.Int = { /* compiled code */ }
+ def foo813(): scala.Int = { /* compiled code */ }
+ def foo814(): scala.Int = { /* compiled code */ }
+ def foo815(): scala.Int = { /* compiled code */ }
+ def foo816(): scala.Int = { /* compiled code */ }
+ def foo817(): scala.Int = { /* compiled code */ }
+ def foo818(): scala.Int = { /* compiled code */ }
+ def foo819(): scala.Int = { /* compiled code */ }
+ def foo820(): scala.Int = { /* compiled code */ }
+ def foo821(): scala.Int = { /* compiled code */ }
+ def foo822(): scala.Int = { /* compiled code */ }
+ def foo823(): scala.Int = { /* compiled code */ }
+ def foo824(): scala.Int = { /* compiled code */ }
+ def foo825(): scala.Int = { /* compiled code */ }
+ def foo826(): scala.Int = { /* compiled code */ }
+ def foo827(): scala.Int = { /* compiled code */ }
+ def foo828(): scala.Int = { /* compiled code */ }
+ def foo829(): scala.Int = { /* compiled code */ }
+ def foo830(): scala.Int = { /* compiled code */ }
+ def foo831(): scala.Int = { /* compiled code */ }
+ def foo832(): scala.Int = { /* compiled code */ }
+ def foo833(): scala.Int = { /* compiled code */ }
+ def foo834(): scala.Int = { /* compiled code */ }
+ def foo835(): scala.Int = { /* compiled code */ }
+ def foo836(): scala.Int = { /* compiled code */ }
+ def foo837(): scala.Int = { /* compiled code */ }
+ def foo838(): scala.Int = { /* compiled code */ }
+ def foo839(): scala.Int = { /* compiled code */ }
+ def foo840(): scala.Int = { /* compiled code */ }
+ def foo841(): scala.Int = { /* compiled code */ }
+ def foo842(): scala.Int = { /* compiled code */ }
+ def foo843(): scala.Int = { /* compiled code */ }
+ def foo844(): scala.Int = { /* compiled code */ }
+ def foo845(): scala.Int = { /* compiled code */ }
+ def foo846(): scala.Int = { /* compiled code */ }
+ def foo847(): scala.Int = { /* compiled code */ }
+ def foo848(): scala.Int = { /* compiled code */ }
+ def foo849(): scala.Int = { /* compiled code */ }
+ def foo850(): scala.Int = { /* compiled code */ }
+ def foo851(): scala.Int = { /* compiled code */ }
+ def foo852(): scala.Int = { /* compiled code */ }
+ def foo853(): scala.Int = { /* compiled code */ }
+ def foo854(): scala.Int = { /* compiled code */ }
+ def foo855(): scala.Int = { /* compiled code */ }
+ def foo856(): scala.Int = { /* compiled code */ }
+ def foo857(): scala.Int = { /* compiled code */ }
+ def foo858(): scala.Int = { /* compiled code */ }
+ def foo859(): scala.Int = { /* compiled code */ }
+ def foo860(): scala.Int = { /* compiled code */ }
+ def foo861(): scala.Int = { /* compiled code */ }
+ def foo862(): scala.Int = { /* compiled code */ }
+ def foo863(): scala.Int = { /* compiled code */ }
+ def foo864(): scala.Int = { /* compiled code */ }
+ def foo865(): scala.Int = { /* compiled code */ }
+ def foo866(): scala.Int = { /* compiled code */ }
+ def foo867(): scala.Int = { /* compiled code */ }
+ def foo868(): scala.Int = { /* compiled code */ }
+ def foo869(): scala.Int = { /* compiled code */ }
+ def foo870(): scala.Int = { /* compiled code */ }
+ def foo871(): scala.Int = { /* compiled code */ }
+ def foo872(): scala.Int = { /* compiled code */ }
+ def foo873(): scala.Int = { /* compiled code */ }
+ def foo874(): scala.Int = { /* compiled code */ }
+ def foo875(): scala.Int = { /* compiled code */ }
+ def foo876(): scala.Int = { /* compiled code */ }
+ def foo877(): scala.Int = { /* compiled code */ }
+ def foo878(): scala.Int = { /* compiled code */ }
+ def foo879(): scala.Int = { /* compiled code */ }
+ def foo880(): scala.Int = { /* compiled code */ }
+ def foo881(): scala.Int = { /* compiled code */ }
+ def foo882(): scala.Int = { /* compiled code */ }
+ def foo883(): scala.Int = { /* compiled code */ }
+ def foo884(): scala.Int = { /* compiled code */ }
+ def foo885(): scala.Int = { /* compiled code */ }
+ def foo886(): scala.Int = { /* compiled code */ }
+ def foo887(): scala.Int = { /* compiled code */ }
+ def foo888(): scala.Int = { /* compiled code */ }
+ def foo889(): scala.Int = { /* compiled code */ }
+ def foo890(): scala.Int = { /* compiled code */ }
+ def foo891(): scala.Int = { /* compiled code */ }
+ def foo892(): scala.Int = { /* compiled code */ }
+ def foo893(): scala.Int = { /* compiled code */ }
+ def foo894(): scala.Int = { /* compiled code */ }
+ def foo895(): scala.Int = { /* compiled code */ }
+ def foo896(): scala.Int = { /* compiled code */ }
+ def foo897(): scala.Int = { /* compiled code */ }
+ def foo898(): scala.Int = { /* compiled code */ }
+ def foo899(): scala.Int = { /* compiled code */ }
+ def foo900(): scala.Int = { /* compiled code */ }
+ def foo901(): scala.Int = { /* compiled code */ }
+ def foo902(): scala.Int = { /* compiled code */ }
+ def foo903(): scala.Int = { /* compiled code */ }
+ def foo904(): scala.Int = { /* compiled code */ }
+ def foo905(): scala.Int = { /* compiled code */ }
+ def foo906(): scala.Int = { /* compiled code */ }
+ def foo907(): scala.Int = { /* compiled code */ }
+ def foo908(): scala.Int = { /* compiled code */ }
+ def foo909(): scala.Int = { /* compiled code */ }
+ def foo910(): scala.Int = { /* compiled code */ }
+ def foo911(): scala.Int = { /* compiled code */ }
+ def foo912(): scala.Int = { /* compiled code */ }
+ def foo913(): scala.Int = { /* compiled code */ }
+ def foo914(): scala.Int = { /* compiled code */ }
+ def foo915(): scala.Int = { /* compiled code */ }
+ def foo916(): scala.Int = { /* compiled code */ }
+ def foo917(): scala.Int = { /* compiled code */ }
+ def foo918(): scala.Int = { /* compiled code */ }
+ def foo919(): scala.Int = { /* compiled code */ }
+ def foo920(): scala.Int = { /* compiled code */ }
+ def foo921(): scala.Int = { /* compiled code */ }
+ def foo922(): scala.Int = { /* compiled code */ }
+ def foo923(): scala.Int = { /* compiled code */ }
+ def foo924(): scala.Int = { /* compiled code */ }
+ def foo925(): scala.Int = { /* compiled code */ }
+ def foo926(): scala.Int = { /* compiled code */ }
+ def foo927(): scala.Int = { /* compiled code */ }
+ def foo928(): scala.Int = { /* compiled code */ }
+ def foo929(): scala.Int = { /* compiled code */ }
+ def foo930(): scala.Int = { /* compiled code */ }
+ def foo931(): scala.Int = { /* compiled code */ }
+ def foo932(): scala.Int = { /* compiled code */ }
+ def foo933(): scala.Int = { /* compiled code */ }
+ def foo934(): scala.Int = { /* compiled code */ }
+ def foo935(): scala.Int = { /* compiled code */ }
+ def foo936(): scala.Int = { /* compiled code */ }
+ def foo937(): scala.Int = { /* compiled code */ }
+ def foo938(): scala.Int = { /* compiled code */ }
+ def foo939(): scala.Int = { /* compiled code */ }
+ def foo940(): scala.Int = { /* compiled code */ }
+ def foo941(): scala.Int = { /* compiled code */ }
+ def foo942(): scala.Int = { /* compiled code */ }
+ def foo943(): scala.Int = { /* compiled code */ }
+ def foo944(): scala.Int = { /* compiled code */ }
+ def foo945(): scala.Int = { /* compiled code */ }
+ def foo946(): scala.Int = { /* compiled code */ }
+ def foo947(): scala.Int = { /* compiled code */ }
+ def foo948(): scala.Int = { /* compiled code */ }
+ def foo949(): scala.Int = { /* compiled code */ }
+ def foo950(): scala.Int = { /* compiled code */ }
+ def foo951(): scala.Int = { /* compiled code */ }
+ def foo952(): scala.Int = { /* compiled code */ }
+ def foo953(): scala.Int = { /* compiled code */ }
+ def foo954(): scala.Int = { /* compiled code */ }
+ def foo955(): scala.Int = { /* compiled code */ }
+ def foo956(): scala.Int = { /* compiled code */ }
+ def foo957(): scala.Int = { /* compiled code */ }
+ def foo958(): scala.Int = { /* compiled code */ }
+ def foo959(): scala.Int = { /* compiled code */ }
+ def foo960(): scala.Int = { /* compiled code */ }
+ def foo961(): scala.Int = { /* compiled code */ }
+ def foo962(): scala.Int = { /* compiled code */ }
+ def foo963(): scala.Int = { /* compiled code */ }
+ def foo964(): scala.Int = { /* compiled code */ }
+ def foo965(): scala.Int = { /* compiled code */ }
+ def foo966(): scala.Int = { /* compiled code */ }
+ def foo967(): scala.Int = { /* compiled code */ }
+ def foo968(): scala.Int = { /* compiled code */ }
+ def foo969(): scala.Int = { /* compiled code */ }
+ def foo970(): scala.Int = { /* compiled code */ }
+ def foo971(): scala.Int = { /* compiled code */ }
+ def foo972(): scala.Int = { /* compiled code */ }
+ def foo973(): scala.Int = { /* compiled code */ }
+ def foo974(): scala.Int = { /* compiled code */ }
+ def foo975(): scala.Int = { /* compiled code */ }
+ def foo976(): scala.Int = { /* compiled code */ }
+ def foo977(): scala.Int = { /* compiled code */ }
+ def foo978(): scala.Int = { /* compiled code */ }
+ def foo979(): scala.Int = { /* compiled code */ }
+ def foo980(): scala.Int = { /* compiled code */ }
+ def foo981(): scala.Int = { /* compiled code */ }
+ def foo982(): scala.Int = { /* compiled code */ }
+ def foo983(): scala.Int = { /* compiled code */ }
+ def foo984(): scala.Int = { /* compiled code */ }
+ def foo985(): scala.Int = { /* compiled code */ }
+ def foo986(): scala.Int = { /* compiled code */ }
+ def foo987(): scala.Int = { /* compiled code */ }
+ def foo988(): scala.Int = { /* compiled code */ }
+ def foo989(): scala.Int = { /* compiled code */ }
+ def foo990(): scala.Int = { /* compiled code */ }
+ def foo991(): scala.Int = { /* compiled code */ }
+ def foo992(): scala.Int = { /* compiled code */ }
+ def foo993(): scala.Int = { /* compiled code */ }
+ def foo994(): scala.Int = { /* compiled code */ }
+ def foo995(): scala.Int = { /* compiled code */ }
+ def foo996(): scala.Int = { /* compiled code */ }
+ def foo997(): scala.Int = { /* compiled code */ }
+ def foo998(): scala.Int = { /* compiled code */ }
+ def foo999(): scala.Int = { /* compiled code */ }
+ def foo1000(): scala.Int = { /* compiled code */ }
+ def foo1001(): scala.Int = { /* compiled code */ }
+ def foo1002(): scala.Int = { /* compiled code */ }
+ def foo1003(): scala.Int = { /* compiled code */ }
+ def foo1004(): scala.Int = { /* compiled code */ }
+ def foo1005(): scala.Int = { /* compiled code */ }
+ def foo1006(): scala.Int = { /* compiled code */ }
+ def foo1007(): scala.Int = { /* compiled code */ }
+ def foo1008(): scala.Int = { /* compiled code */ }
+ def foo1009(): scala.Int = { /* compiled code */ }
+ def foo1010(): scala.Int = { /* compiled code */ }
+ def foo1011(): scala.Int = { /* compiled code */ }
+ def foo1012(): scala.Int = { /* compiled code */ }
+ def foo1013(): scala.Int = { /* compiled code */ }
+ def foo1014(): scala.Int = { /* compiled code */ }
+ def foo1015(): scala.Int = { /* compiled code */ }
+ def foo1016(): scala.Int = { /* compiled code */ }
+ def foo1017(): scala.Int = { /* compiled code */ }
+ def foo1018(): scala.Int = { /* compiled code */ }
+ def foo1019(): scala.Int = { /* compiled code */ }
+ def foo1020(): scala.Int = { /* compiled code */ }
+ def foo1021(): scala.Int = { /* compiled code */ }
+ def foo1022(): scala.Int = { /* compiled code */ }
+ def foo1023(): scala.Int = { /* compiled code */ }
+ def foo1024(): scala.Int = { /* compiled code */ }
+ def foo1025(): scala.Int = { /* compiled code */ }
+ def foo1026(): scala.Int = { /* compiled code */ }
+ def foo1027(): scala.Int = { /* compiled code */ }
+ def foo1028(): scala.Int = { /* compiled code */ }
+ def foo1029(): scala.Int = { /* compiled code */ }
+ def foo1030(): scala.Int = { /* compiled code */ }
+ def foo1031(): scala.Int = { /* compiled code */ }
+ def foo1032(): scala.Int = { /* compiled code */ }
+ def foo1033(): scala.Int = { /* compiled code */ }
+ def foo1034(): scala.Int = { /* compiled code */ }
+ def foo1035(): scala.Int = { /* compiled code */ }
+ def foo1036(): scala.Int = { /* compiled code */ }
+ def foo1037(): scala.Int = { /* compiled code */ }
+ def foo1038(): scala.Int = { /* compiled code */ }
+ def foo1039(): scala.Int = { /* compiled code */ }
+ def foo1040(): scala.Int = { /* compiled code */ }
+ def foo1041(): scala.Int = { /* compiled code */ }
+ def foo1042(): scala.Int = { /* compiled code */ }
+ def foo1043(): scala.Int = { /* compiled code */ }
+ def foo1044(): scala.Int = { /* compiled code */ }
+ def foo1045(): scala.Int = { /* compiled code */ }
+ def foo1046(): scala.Int = { /* compiled code */ }
+ def foo1047(): scala.Int = { /* compiled code */ }
+ def foo1048(): scala.Int = { /* compiled code */ }
+ def foo1049(): scala.Int = { /* compiled code */ }
+ def foo1050(): scala.Int = { /* compiled code */ }
+ def foo1051(): scala.Int = { /* compiled code */ }
+ def foo1052(): scala.Int = { /* compiled code */ }
+ def foo1053(): scala.Int = { /* compiled code */ }
+ def foo1054(): scala.Int = { /* compiled code */ }
+ def foo1055(): scala.Int = { /* compiled code */ }
+ def foo1056(): scala.Int = { /* compiled code */ }
+ def foo1057(): scala.Int = { /* compiled code */ }
+ def foo1058(): scala.Int = { /* compiled code */ }
+ def foo1059(): scala.Int = { /* compiled code */ }
+ def foo1060(): scala.Int = { /* compiled code */ }
+ def foo1061(): scala.Int = { /* compiled code */ }
+ def foo1062(): scala.Int = { /* compiled code */ }
+ def foo1063(): scala.Int = { /* compiled code */ }
+ def foo1064(): scala.Int = { /* compiled code */ }
+ def foo1065(): scala.Int = { /* compiled code */ }
+ def foo1066(): scala.Int = { /* compiled code */ }
+ def foo1067(): scala.Int = { /* compiled code */ }
+ def foo1068(): scala.Int = { /* compiled code */ }
+ def foo1069(): scala.Int = { /* compiled code */ }
+ def foo1070(): scala.Int = { /* compiled code */ }
+ def foo1071(): scala.Int = { /* compiled code */ }
+ def foo1072(): scala.Int = { /* compiled code */ }
+ def foo1073(): scala.Int = { /* compiled code */ }
+ def foo1074(): scala.Int = { /* compiled code */ }
+ def foo1075(): scala.Int = { /* compiled code */ }
+ def foo1076(): scala.Int = { /* compiled code */ }
+ def foo1077(): scala.Int = { /* compiled code */ }
+ def foo1078(): scala.Int = { /* compiled code */ }
+ def foo1079(): scala.Int = { /* compiled code */ }
+ def foo1080(): scala.Int = { /* compiled code */ }
+ def foo1081(): scala.Int = { /* compiled code */ }
+ def foo1082(): scala.Int = { /* compiled code */ }
+ def foo1083(): scala.Int = { /* compiled code */ }
+ def foo1084(): scala.Int = { /* compiled code */ }
+ def foo1085(): scala.Int = { /* compiled code */ }
+ def foo1086(): scala.Int = { /* compiled code */ }
+ def foo1087(): scala.Int = { /* compiled code */ }
+ def foo1088(): scala.Int = { /* compiled code */ }
+ def foo1089(): scala.Int = { /* compiled code */ }
+ def foo1090(): scala.Int = { /* compiled code */ }
+ def foo1091(): scala.Int = { /* compiled code */ }
+ def foo1092(): scala.Int = { /* compiled code */ }
+ def foo1093(): scala.Int = { /* compiled code */ }
+ def foo1094(): scala.Int = { /* compiled code */ }
+ def foo1095(): scala.Int = { /* compiled code */ }
+ def foo1096(): scala.Int = { /* compiled code */ }
+ def foo1097(): scala.Int = { /* compiled code */ }
+ def foo1098(): scala.Int = { /* compiled code */ }
+ def foo1099(): scala.Int = { /* compiled code */ }
+ def foo1100(): scala.Int = { /* compiled code */ }
+ def foo1101(): scala.Int = { /* compiled code */ }
+ def foo1102(): scala.Int = { /* compiled code */ }
+ def foo1103(): scala.Int = { /* compiled code */ }
+ def foo1104(): scala.Int = { /* compiled code */ }
+ def foo1105(): scala.Int = { /* compiled code */ }
+ def foo1106(): scala.Int = { /* compiled code */ }
+ def foo1107(): scala.Int = { /* compiled code */ }
+ def foo1108(): scala.Int = { /* compiled code */ }
+ def foo1109(): scala.Int = { /* compiled code */ }
+ def foo1110(): scala.Int = { /* compiled code */ }
+ def foo1111(): scala.Int = { /* compiled code */ }
+ def foo1112(): scala.Int = { /* compiled code */ }
+ def foo1113(): scala.Int = { /* compiled code */ }
+ def foo1114(): scala.Int = { /* compiled code */ }
+ def foo1115(): scala.Int = { /* compiled code */ }
+ def foo1116(): scala.Int = { /* compiled code */ }
+ def foo1117(): scala.Int = { /* compiled code */ }
+ def foo1118(): scala.Int = { /* compiled code */ }
+ def foo1119(): scala.Int = { /* compiled code */ }
+ def foo1120(): scala.Int = { /* compiled code */ }
+ def foo1121(): scala.Int = { /* compiled code */ }
+ def foo1122(): scala.Int = { /* compiled code */ }
+ def foo1123(): scala.Int = { /* compiled code */ }
+ def foo1124(): scala.Int = { /* compiled code */ }
+ def foo1125(): scala.Int = { /* compiled code */ }
+ def foo1126(): scala.Int = { /* compiled code */ }
+ def foo1127(): scala.Int = { /* compiled code */ }
+ def foo1128(): scala.Int = { /* compiled code */ }
+ def foo1129(): scala.Int = { /* compiled code */ }
+ def foo1130(): scala.Int = { /* compiled code */ }
+ def foo1131(): scala.Int = { /* compiled code */ }
+ def foo1132(): scala.Int = { /* compiled code */ }
+ def foo1133(): scala.Int = { /* compiled code */ }
+ def foo1134(): scala.Int = { /* compiled code */ }
+ def foo1135(): scala.Int = { /* compiled code */ }
+ def foo1136(): scala.Int = { /* compiled code */ }
+ def foo1137(): scala.Int = { /* compiled code */ }
+ def foo1138(): scala.Int = { /* compiled code */ }
+ def foo1139(): scala.Int = { /* compiled code */ }
+ def foo1140(): scala.Int = { /* compiled code */ }
+ def foo1141(): scala.Int = { /* compiled code */ }
+ def foo1142(): scala.Int = { /* compiled code */ }
+ def foo1143(): scala.Int = { /* compiled code */ }
+ def foo1144(): scala.Int = { /* compiled code */ }
+ def foo1145(): scala.Int = { /* compiled code */ }
+ def foo1146(): scala.Int = { /* compiled code */ }
+ def foo1147(): scala.Int = { /* compiled code */ }
+ def foo1148(): scala.Int = { /* compiled code */ }
+ def foo1149(): scala.Int = { /* compiled code */ }
+ def foo1150(): scala.Int = { /* compiled code */ }
+ def foo1151(): scala.Int = { /* compiled code */ }
+ def foo1152(): scala.Int = { /* compiled code */ }
+ def foo1153(): scala.Int = { /* compiled code */ }
+ def foo1154(): scala.Int = { /* compiled code */ }
+ def foo1155(): scala.Int = { /* compiled code */ }
+ def foo1156(): scala.Int = { /* compiled code */ }
+ def foo1157(): scala.Int = { /* compiled code */ }
+ def foo1158(): scala.Int = { /* compiled code */ }
+ def foo1159(): scala.Int = { /* compiled code */ }
+ def foo1160(): scala.Int = { /* compiled code */ }
+ def foo1161(): scala.Int = { /* compiled code */ }
+ def foo1162(): scala.Int = { /* compiled code */ }
+ def foo1163(): scala.Int = { /* compiled code */ }
+ def foo1164(): scala.Int = { /* compiled code */ }
+ def foo1165(): scala.Int = { /* compiled code */ }
+ def foo1166(): scala.Int = { /* compiled code */ }
+ def foo1167(): scala.Int = { /* compiled code */ }
+ def foo1168(): scala.Int = { /* compiled code */ }
+ def foo1169(): scala.Int = { /* compiled code */ }
+ def foo1170(): scala.Int = { /* compiled code */ }
+ def foo1171(): scala.Int = { /* compiled code */ }
+ def foo1172(): scala.Int = { /* compiled code */ }
+ def foo1173(): scala.Int = { /* compiled code */ }
+ def foo1174(): scala.Int = { /* compiled code */ }
+ def foo1175(): scala.Int = { /* compiled code */ }
+ def foo1176(): scala.Int = { /* compiled code */ }
+ def foo1177(): scala.Int = { /* compiled code */ }
+ def foo1178(): scala.Int = { /* compiled code */ }
+ def foo1179(): scala.Int = { /* compiled code */ }
+ def foo1180(): scala.Int = { /* compiled code */ }
+ def foo1181(): scala.Int = { /* compiled code */ }
+ def foo1182(): scala.Int = { /* compiled code */ }
+ def foo1183(): scala.Int = { /* compiled code */ }
+ def foo1184(): scala.Int = { /* compiled code */ }
+ def foo1185(): scala.Int = { /* compiled code */ }
+ def foo1186(): scala.Int = { /* compiled code */ }
+ def foo1187(): scala.Int = { /* compiled code */ }
+ def foo1188(): scala.Int = { /* compiled code */ }
+ def foo1189(): scala.Int = { /* compiled code */ }
+ def foo1190(): scala.Int = { /* compiled code */ }
+ def foo1191(): scala.Int = { /* compiled code */ }
+ def foo1192(): scala.Int = { /* compiled code */ }
+ def foo1193(): scala.Int = { /* compiled code */ }
+ def foo1194(): scala.Int = { /* compiled code */ }
+ def foo1195(): scala.Int = { /* compiled code */ }
+ def foo1196(): scala.Int = { /* compiled code */ }
+ def foo1197(): scala.Int = { /* compiled code */ }
+ def foo1198(): scala.Int = { /* compiled code */ }
+ def foo1199(): scala.Int = { /* compiled code */ }
+ def foo1200(): scala.Int = { /* compiled code */ }
+ def foo1201(): scala.Int = { /* compiled code */ }
+ def foo1202(): scala.Int = { /* compiled code */ }
+ def foo1203(): scala.Int = { /* compiled code */ }
+ def foo1204(): scala.Int = { /* compiled code */ }
+ def foo1205(): scala.Int = { /* compiled code */ }
+ def foo1206(): scala.Int = { /* compiled code */ }
+ def foo1207(): scala.Int = { /* compiled code */ }
+ def foo1208(): scala.Int = { /* compiled code */ }
+ def foo1209(): scala.Int = { /* compiled code */ }
+ def foo1210(): scala.Int = { /* compiled code */ }
+ def foo1211(): scala.Int = { /* compiled code */ }
+ def foo1212(): scala.Int = { /* compiled code */ }
+ def foo1213(): scala.Int = { /* compiled code */ }
+ def foo1214(): scala.Int = { /* compiled code */ }
+ def foo1215(): scala.Int = { /* compiled code */ }
+ def foo1216(): scala.Int = { /* compiled code */ }
+ def foo1217(): scala.Int = { /* compiled code */ }
+ def foo1218(): scala.Int = { /* compiled code */ }
+ def foo1219(): scala.Int = { /* compiled code */ }
+ def foo1220(): scala.Int = { /* compiled code */ }
+ def foo1221(): scala.Int = { /* compiled code */ }
+ def foo1222(): scala.Int = { /* compiled code */ }
+ def foo1223(): scala.Int = { /* compiled code */ }
+ def foo1224(): scala.Int = { /* compiled code */ }
+ def foo1225(): scala.Int = { /* compiled code */ }
+ def foo1226(): scala.Int = { /* compiled code */ }
+ def foo1227(): scala.Int = { /* compiled code */ }
+ def foo1228(): scala.Int = { /* compiled code */ }
+ def foo1229(): scala.Int = { /* compiled code */ }
+ def foo1230(): scala.Int = { /* compiled code */ }
+ def foo1231(): scala.Int = { /* compiled code */ }
+ def foo1232(): scala.Int = { /* compiled code */ }
+ def foo1233(): scala.Int = { /* compiled code */ }
+ def foo1234(): scala.Int = { /* compiled code */ }
+ def foo1235(): scala.Int = { /* compiled code */ }
+ def foo1236(): scala.Int = { /* compiled code */ }
+ def foo1237(): scala.Int = { /* compiled code */ }
+ def foo1238(): scala.Int = { /* compiled code */ }
+ def foo1239(): scala.Int = { /* compiled code */ }
+ def foo1240(): scala.Int = { /* compiled code */ }
+ def foo1241(): scala.Int = { /* compiled code */ }
+ def foo1242(): scala.Int = { /* compiled code */ }
+ def foo1243(): scala.Int = { /* compiled code */ }
+ def foo1244(): scala.Int = { /* compiled code */ }
+ def foo1245(): scala.Int = { /* compiled code */ }
+ def foo1246(): scala.Int = { /* compiled code */ }
+ def foo1247(): scala.Int = { /* compiled code */ }
+ def foo1248(): scala.Int = { /* compiled code */ }
+ def foo1249(): scala.Int = { /* compiled code */ }
+ def foo1250(): scala.Int = { /* compiled code */ }
+ def foo1251(): scala.Int = { /* compiled code */ }
+ def foo1252(): scala.Int = { /* compiled code */ }
+ def foo1253(): scala.Int = { /* compiled code */ }
+ def foo1254(): scala.Int = { /* compiled code */ }
+ def foo1255(): scala.Int = { /* compiled code */ }
+ def foo1256(): scala.Int = { /* compiled code */ }
+ def foo1257(): scala.Int = { /* compiled code */ }
+ def foo1258(): scala.Int = { /* compiled code */ }
+ def foo1259(): scala.Int = { /* compiled code */ }
+ def foo1260(): scala.Int = { /* compiled code */ }
+ def foo1261(): scala.Int = { /* compiled code */ }
+ def foo1262(): scala.Int = { /* compiled code */ }
+ def foo1263(): scala.Int = { /* compiled code */ }
+ def foo1264(): scala.Int = { /* compiled code */ }
+ def foo1265(): scala.Int = { /* compiled code */ }
+ def foo1266(): scala.Int = { /* compiled code */ }
+ def foo1267(): scala.Int = { /* compiled code */ }
+ def foo1268(): scala.Int = { /* compiled code */ }
+ def foo1269(): scala.Int = { /* compiled code */ }
+ def foo1270(): scala.Int = { /* compiled code */ }
+ def foo1271(): scala.Int = { /* compiled code */ }
+ def foo1272(): scala.Int = { /* compiled code */ }
+ def foo1273(): scala.Int = { /* compiled code */ }
+ def foo1274(): scala.Int = { /* compiled code */ }
+ def foo1275(): scala.Int = { /* compiled code */ }
+ def foo1276(): scala.Int = { /* compiled code */ }
+ def foo1277(): scala.Int = { /* compiled code */ }
+ def foo1278(): scala.Int = { /* compiled code */ }
+ def foo1279(): scala.Int = { /* compiled code */ }
+ def foo1280(): scala.Int = { /* compiled code */ }
+ def foo1281(): scala.Int = { /* compiled code */ }
+ def foo1282(): scala.Int = { /* compiled code */ }
+ def foo1283(): scala.Int = { /* compiled code */ }
+ def foo1284(): scala.Int = { /* compiled code */ }
+ def foo1285(): scala.Int = { /* compiled code */ }
+ def foo1286(): scala.Int = { /* compiled code */ }
+ def foo1287(): scala.Int = { /* compiled code */ }
+ def foo1288(): scala.Int = { /* compiled code */ }
+ def foo1289(): scala.Int = { /* compiled code */ }
+ def foo1290(): scala.Int = { /* compiled code */ }
+ def foo1291(): scala.Int = { /* compiled code */ }
+ def foo1292(): scala.Int = { /* compiled code */ }
+ def foo1293(): scala.Int = { /* compiled code */ }
+ def foo1294(): scala.Int = { /* compiled code */ }
+ def foo1295(): scala.Int = { /* compiled code */ }
+ def foo1296(): scala.Int = { /* compiled code */ }
+ def foo1297(): scala.Int = { /* compiled code */ }
+ def foo1298(): scala.Int = { /* compiled code */ }
+ def foo1299(): scala.Int = { /* compiled code */ }
+ def foo1300(): scala.Int = { /* compiled code */ }
+ def foo1301(): scala.Int = { /* compiled code */ }
+ def foo1302(): scala.Int = { /* compiled code */ }
+ def foo1303(): scala.Int = { /* compiled code */ }
+ def foo1304(): scala.Int = { /* compiled code */ }
+ def foo1305(): scala.Int = { /* compiled code */ }
+ def foo1306(): scala.Int = { /* compiled code */ }
+ def foo1307(): scala.Int = { /* compiled code */ }
+ def foo1308(): scala.Int = { /* compiled code */ }
+ def foo1309(): scala.Int = { /* compiled code */ }
+ def foo1310(): scala.Int = { /* compiled code */ }
+ def foo1311(): scala.Int = { /* compiled code */ }
+ def foo1312(): scala.Int = { /* compiled code */ }
+ def foo1313(): scala.Int = { /* compiled code */ }
+ def foo1314(): scala.Int = { /* compiled code */ }
+ def foo1315(): scala.Int = { /* compiled code */ }
+ def foo1316(): scala.Int = { /* compiled code */ }
+ def foo1317(): scala.Int = { /* compiled code */ }
+ def foo1318(): scala.Int = { /* compiled code */ }
+ def foo1319(): scala.Int = { /* compiled code */ }
+ def foo1320(): scala.Int = { /* compiled code */ }
+ def foo1321(): scala.Int = { /* compiled code */ }
+ def foo1322(): scala.Int = { /* compiled code */ }
+ def foo1323(): scala.Int = { /* compiled code */ }
+ def foo1324(): scala.Int = { /* compiled code */ }
+ def foo1325(): scala.Int = { /* compiled code */ }
+ def foo1326(): scala.Int = { /* compiled code */ }
+ def foo1327(): scala.Int = { /* compiled code */ }
+ def foo1328(): scala.Int = { /* compiled code */ }
+ def foo1329(): scala.Int = { /* compiled code */ }
+ def foo1330(): scala.Int = { /* compiled code */ }
+ def foo1331(): scala.Int = { /* compiled code */ }
+ def foo1332(): scala.Int = { /* compiled code */ }
+ def foo1333(): scala.Int = { /* compiled code */ }
+ def foo1334(): scala.Int = { /* compiled code */ }
+ def foo1335(): scala.Int = { /* compiled code */ }
+ def foo1336(): scala.Int = { /* compiled code */ }
+ def foo1337(): scala.Int = { /* compiled code */ }
+ def foo1338(): scala.Int = { /* compiled code */ }
+ def foo1339(): scala.Int = { /* compiled code */ }
+ def foo1340(): scala.Int = { /* compiled code */ }
+ def foo1341(): scala.Int = { /* compiled code */ }
+ def foo1342(): scala.Int = { /* compiled code */ }
+ def foo1343(): scala.Int = { /* compiled code */ }
+ def foo1344(): scala.Int = { /* compiled code */ }
+ def foo1345(): scala.Int = { /* compiled code */ }
+ def foo1346(): scala.Int = { /* compiled code */ }
+ def foo1347(): scala.Int = { /* compiled code */ }
+ def foo1348(): scala.Int = { /* compiled code */ }
+ def foo1349(): scala.Int = { /* compiled code */ }
+ def foo1350(): scala.Int = { /* compiled code */ }
+ def foo1351(): scala.Int = { /* compiled code */ }
+ def foo1352(): scala.Int = { /* compiled code */ }
+ def foo1353(): scala.Int = { /* compiled code */ }
+ def foo1354(): scala.Int = { /* compiled code */ }
+ def foo1355(): scala.Int = { /* compiled code */ }
+ def foo1356(): scala.Int = { /* compiled code */ }
+ def foo1357(): scala.Int = { /* compiled code */ }
+ def foo1358(): scala.Int = { /* compiled code */ }
+ def foo1359(): scala.Int = { /* compiled code */ }
+ def foo1360(): scala.Int = { /* compiled code */ }
+ def foo1361(): scala.Int = { /* compiled code */ }
+ def foo1362(): scala.Int = { /* compiled code */ }
+ def foo1363(): scala.Int = { /* compiled code */ }
+ def foo1364(): scala.Int = { /* compiled code */ }
+ def foo1365(): scala.Int = { /* compiled code */ }
+ def foo1366(): scala.Int = { /* compiled code */ }
+ def foo1367(): scala.Int = { /* compiled code */ }
+ def foo1368(): scala.Int = { /* compiled code */ }
+ def foo1369(): scala.Int = { /* compiled code */ }
+ def foo1370(): scala.Int = { /* compiled code */ }
+ def foo1371(): scala.Int = { /* compiled code */ }
+ def foo1372(): scala.Int = { /* compiled code */ }
+ def foo1373(): scala.Int = { /* compiled code */ }
+ def foo1374(): scala.Int = { /* compiled code */ }
+ def foo1375(): scala.Int = { /* compiled code */ }
+ def foo1376(): scala.Int = { /* compiled code */ }
+ def foo1377(): scala.Int = { /* compiled code */ }
+ def foo1378(): scala.Int = { /* compiled code */ }
+ def foo1379(): scala.Int = { /* compiled code */ }
+ def foo1380(): scala.Int = { /* compiled code */ }
+ def foo1381(): scala.Int = { /* compiled code */ }
+ def foo1382(): scala.Int = { /* compiled code */ }
+ def foo1383(): scala.Int = { /* compiled code */ }
+ def foo1384(): scala.Int = { /* compiled code */ }
+ def foo1385(): scala.Int = { /* compiled code */ }
+ def foo1386(): scala.Int = { /* compiled code */ }
+ def foo1387(): scala.Int = { /* compiled code */ }
+ def foo1388(): scala.Int = { /* compiled code */ }
+ def foo1389(): scala.Int = { /* compiled code */ }
+ def foo1390(): scala.Int = { /* compiled code */ }
+ def foo1391(): scala.Int = { /* compiled code */ }
+ def foo1392(): scala.Int = { /* compiled code */ }
+ def foo1393(): scala.Int = { /* compiled code */ }
+ def foo1394(): scala.Int = { /* compiled code */ }
+ def foo1395(): scala.Int = { /* compiled code */ }
+ def foo1396(): scala.Int = { /* compiled code */ }
+ def foo1397(): scala.Int = { /* compiled code */ }
+ def foo1398(): scala.Int = { /* compiled code */ }
+ def foo1399(): scala.Int = { /* compiled code */ }
+ def foo1400(): scala.Int = { /* compiled code */ }
+ def foo1401(): scala.Int = { /* compiled code */ }
+ def foo1402(): scala.Int = { /* compiled code */ }
+ def foo1403(): scala.Int = { /* compiled code */ }
+ def foo1404(): scala.Int = { /* compiled code */ }
+ def foo1405(): scala.Int = { /* compiled code */ }
+ def foo1406(): scala.Int = { /* compiled code */ }
+ def foo1407(): scala.Int = { /* compiled code */ }
+ def foo1408(): scala.Int = { /* compiled code */ }
+ def foo1409(): scala.Int = { /* compiled code */ }
+ def foo1410(): scala.Int = { /* compiled code */ }
+ def foo1411(): scala.Int = { /* compiled code */ }
+ def foo1412(): scala.Int = { /* compiled code */ }
+ def foo1413(): scala.Int = { /* compiled code */ }
+ def foo1414(): scala.Int = { /* compiled code */ }
+ def foo1415(): scala.Int = { /* compiled code */ }
+ def foo1416(): scala.Int = { /* compiled code */ }
+ def foo1417(): scala.Int = { /* compiled code */ }
+ def foo1418(): scala.Int = { /* compiled code */ }
+ def foo1419(): scala.Int = { /* compiled code */ }
+ def foo1420(): scala.Int = { /* compiled code */ }
+ def foo1421(): scala.Int = { /* compiled code */ }
+ def foo1422(): scala.Int = { /* compiled code */ }
+ def foo1423(): scala.Int = { /* compiled code */ }
+ def foo1424(): scala.Int = { /* compiled code */ }
+ def foo1425(): scala.Int = { /* compiled code */ }
+ def foo1426(): scala.Int = { /* compiled code */ }
+ def foo1427(): scala.Int = { /* compiled code */ }
+ def foo1428(): scala.Int = { /* compiled code */ }
+ def foo1429(): scala.Int = { /* compiled code */ }
+ def foo1430(): scala.Int = { /* compiled code */ }
+ def foo1431(): scala.Int = { /* compiled code */ }
+ def foo1432(): scala.Int = { /* compiled code */ }
+ def foo1433(): scala.Int = { /* compiled code */ }
+ def foo1434(): scala.Int = { /* compiled code */ }
+ def foo1435(): scala.Int = { /* compiled code */ }
+ def foo1436(): scala.Int = { /* compiled code */ }
+ def foo1437(): scala.Int = { /* compiled code */ }
+ def foo1438(): scala.Int = { /* compiled code */ }
+ def foo1439(): scala.Int = { /* compiled code */ }
+ def foo1440(): scala.Int = { /* compiled code */ }
+ def foo1441(): scala.Int = { /* compiled code */ }
+ def foo1442(): scala.Int = { /* compiled code */ }
+ def foo1443(): scala.Int = { /* compiled code */ }
+ def foo1444(): scala.Int = { /* compiled code */ }
+ def foo1445(): scala.Int = { /* compiled code */ }
+ def foo1446(): scala.Int = { /* compiled code */ }
+ def foo1447(): scala.Int = { /* compiled code */ }
+ def foo1448(): scala.Int = { /* compiled code */ }
+ def foo1449(): scala.Int = { /* compiled code */ }
+ def foo1450(): scala.Int = { /* compiled code */ }
+ def foo1451(): scala.Int = { /* compiled code */ }
+ def foo1452(): scala.Int = { /* compiled code */ }
+ def foo1453(): scala.Int = { /* compiled code */ }
+ def foo1454(): scala.Int = { /* compiled code */ }
+ def foo1455(): scala.Int = { /* compiled code */ }
+ def foo1456(): scala.Int = { /* compiled code */ }
+ def foo1457(): scala.Int = { /* compiled code */ }
+ def foo1458(): scala.Int = { /* compiled code */ }
+ def foo1459(): scala.Int = { /* compiled code */ }
+ def foo1460(): scala.Int = { /* compiled code */ }
+ def foo1461(): scala.Int = { /* compiled code */ }
+ def foo1462(): scala.Int = { /* compiled code */ }
+ def foo1463(): scala.Int = { /* compiled code */ }
+ def foo1464(): scala.Int = { /* compiled code */ }
+ def foo1465(): scala.Int = { /* compiled code */ }
+ def foo1466(): scala.Int = { /* compiled code */ }
+ def foo1467(): scala.Int = { /* compiled code */ }
+ def foo1468(): scala.Int = { /* compiled code */ }
+ def foo1469(): scala.Int = { /* compiled code */ }
+ def foo1470(): scala.Int = { /* compiled code */ }
+ def foo1471(): scala.Int = { /* compiled code */ }
+ def foo1472(): scala.Int = { /* compiled code */ }
+ def foo1473(): scala.Int = { /* compiled code */ }
+ def foo1474(): scala.Int = { /* compiled code */ }
+ def foo1475(): scala.Int = { /* compiled code */ }
+ def foo1476(): scala.Int = { /* compiled code */ }
+ def foo1477(): scala.Int = { /* compiled code */ }
+ def foo1478(): scala.Int = { /* compiled code */ }
+ def foo1479(): scala.Int = { /* compiled code */ }
+ def foo1480(): scala.Int = { /* compiled code */ }
+ def foo1481(): scala.Int = { /* compiled code */ }
+ def foo1482(): scala.Int = { /* compiled code */ }
+ def foo1483(): scala.Int = { /* compiled code */ }
+ def foo1484(): scala.Int = { /* compiled code */ }
+ def foo1485(): scala.Int = { /* compiled code */ }
+ def foo1486(): scala.Int = { /* compiled code */ }
+ def foo1487(): scala.Int = { /* compiled code */ }
+ def foo1488(): scala.Int = { /* compiled code */ }
+ def foo1489(): scala.Int = { /* compiled code */ }
+ def foo1490(): scala.Int = { /* compiled code */ }
+ def foo1491(): scala.Int = { /* compiled code */ }
+ def foo1492(): scala.Int = { /* compiled code */ }
+ def foo1493(): scala.Int = { /* compiled code */ }
+ def foo1494(): scala.Int = { /* compiled code */ }
+ def foo1495(): scala.Int = { /* compiled code */ }
+ def foo1496(): scala.Int = { /* compiled code */ }
+ def foo1497(): scala.Int = { /* compiled code */ }
+ def foo1498(): scala.Int = { /* compiled code */ }
+ def foo1499(): scala.Int = { /* compiled code */ }
+ def foo1500(): scala.Int = { /* compiled code */ }
+ def foo1501(): scala.Int = { /* compiled code */ }
+ def foo1502(): scala.Int = { /* compiled code */ }
+ def foo1503(): scala.Int = { /* compiled code */ }
+ def foo1504(): scala.Int = { /* compiled code */ }
+ def foo1505(): scala.Int = { /* compiled code */ }
+ def foo1506(): scala.Int = { /* compiled code */ }
+ def foo1507(): scala.Int = { /* compiled code */ }
+ def foo1508(): scala.Int = { /* compiled code */ }
+ def foo1509(): scala.Int = { /* compiled code */ }
+ def foo1510(): scala.Int = { /* compiled code */ }
+ def foo1511(): scala.Int = { /* compiled code */ }
+ def foo1512(): scala.Int = { /* compiled code */ }
+ def foo1513(): scala.Int = { /* compiled code */ }
+ def foo1514(): scala.Int = { /* compiled code */ }
+ def foo1515(): scala.Int = { /* compiled code */ }
+ def foo1516(): scala.Int = { /* compiled code */ }
+ def foo1517(): scala.Int = { /* compiled code */ }
+ def foo1518(): scala.Int = { /* compiled code */ }
+ def foo1519(): scala.Int = { /* compiled code */ }
+ def foo1520(): scala.Int = { /* compiled code */ }
+ def foo1521(): scala.Int = { /* compiled code */ }
+ def foo1522(): scala.Int = { /* compiled code */ }
+ def foo1523(): scala.Int = { /* compiled code */ }
+ def foo1524(): scala.Int = { /* compiled code */ }
+ def foo1525(): scala.Int = { /* compiled code */ }
+ def foo1526(): scala.Int = { /* compiled code */ }
+ def foo1527(): scala.Int = { /* compiled code */ }
+ def foo1528(): scala.Int = { /* compiled code */ }
+ def foo1529(): scala.Int = { /* compiled code */ }
+ def foo1530(): scala.Int = { /* compiled code */ }
+ def foo1531(): scala.Int = { /* compiled code */ }
+ def foo1532(): scala.Int = { /* compiled code */ }
+ def foo1533(): scala.Int = { /* compiled code */ }
+ def foo1534(): scala.Int = { /* compiled code */ }
+ def foo1535(): scala.Int = { /* compiled code */ }
+ def foo1536(): scala.Int = { /* compiled code */ }
+ def foo1537(): scala.Int = { /* compiled code */ }
+ def foo1538(): scala.Int = { /* compiled code */ }
+ def foo1539(): scala.Int = { /* compiled code */ }
+ def foo1540(): scala.Int = { /* compiled code */ }
+ def foo1541(): scala.Int = { /* compiled code */ }
+ def foo1542(): scala.Int = { /* compiled code */ }
+ def foo1543(): scala.Int = { /* compiled code */ }
+ def foo1544(): scala.Int = { /* compiled code */ }
+ def foo1545(): scala.Int = { /* compiled code */ }
+ def foo1546(): scala.Int = { /* compiled code */ }
+ def foo1547(): scala.Int = { /* compiled code */ }
+ def foo1548(): scala.Int = { /* compiled code */ }
+ def foo1549(): scala.Int = { /* compiled code */ }
+ def foo1550(): scala.Int = { /* compiled code */ }
+ def foo1551(): scala.Int = { /* compiled code */ }
+ def foo1552(): scala.Int = { /* compiled code */ }
+ def foo1553(): scala.Int = { /* compiled code */ }
+ def foo1554(): scala.Int = { /* compiled code */ }
+ def foo1555(): scala.Int = { /* compiled code */ }
+ def foo1556(): scala.Int = { /* compiled code */ }
+ def foo1557(): scala.Int = { /* compiled code */ }
+ def foo1558(): scala.Int = { /* compiled code */ }
+ def foo1559(): scala.Int = { /* compiled code */ }
+ def foo1560(): scala.Int = { /* compiled code */ }
+ def foo1561(): scala.Int = { /* compiled code */ }
+ def foo1562(): scala.Int = { /* compiled code */ }
+ def foo1563(): scala.Int = { /* compiled code */ }
+ def foo1564(): scala.Int = { /* compiled code */ }
+ def foo1565(): scala.Int = { /* compiled code */ }
+ def foo1566(): scala.Int = { /* compiled code */ }
+ def foo1567(): scala.Int = { /* compiled code */ }
+ def foo1568(): scala.Int = { /* compiled code */ }
+ def foo1569(): scala.Int = { /* compiled code */ }
+ def foo1570(): scala.Int = { /* compiled code */ }
+ def foo1571(): scala.Int = { /* compiled code */ }
+ def foo1572(): scala.Int = { /* compiled code */ }
+ def foo1573(): scala.Int = { /* compiled code */ }
+ def foo1574(): scala.Int = { /* compiled code */ }
+ def foo1575(): scala.Int = { /* compiled code */ }
+ def foo1576(): scala.Int = { /* compiled code */ }
+ def foo1577(): scala.Int = { /* compiled code */ }
+ def foo1578(): scala.Int = { /* compiled code */ }
+ def foo1579(): scala.Int = { /* compiled code */ }
+ def foo1580(): scala.Int = { /* compiled code */ }
+ def foo1581(): scala.Int = { /* compiled code */ }
+ def foo1582(): scala.Int = { /* compiled code */ }
+ def foo1583(): scala.Int = { /* compiled code */ }
+ def foo1584(): scala.Int = { /* compiled code */ }
+ def foo1585(): scala.Int = { /* compiled code */ }
+ def foo1586(): scala.Int = { /* compiled code */ }
+ def foo1587(): scala.Int = { /* compiled code */ }
+ def foo1588(): scala.Int = { /* compiled code */ }
+ def foo1589(): scala.Int = { /* compiled code */ }
+ def foo1590(): scala.Int = { /* compiled code */ }
+ def foo1591(): scala.Int = { /* compiled code */ }
+ def foo1592(): scala.Int = { /* compiled code */ }
+ def foo1593(): scala.Int = { /* compiled code */ }
+ def foo1594(): scala.Int = { /* compiled code */ }
+ def foo1595(): scala.Int = { /* compiled code */ }
+ def foo1596(): scala.Int = { /* compiled code */ }
+ def foo1597(): scala.Int = { /* compiled code */ }
+ def foo1598(): scala.Int = { /* compiled code */ }
+ def foo1599(): scala.Int = { /* compiled code */ }
+ def foo1600(): scala.Int = { /* compiled code */ }
+ def foo1601(): scala.Int = { /* compiled code */ }
+ def foo1602(): scala.Int = { /* compiled code */ }
+ def foo1603(): scala.Int = { /* compiled code */ }
+ def foo1604(): scala.Int = { /* compiled code */ }
+ def foo1605(): scala.Int = { /* compiled code */ }
+ def foo1606(): scala.Int = { /* compiled code */ }
+ def foo1607(): scala.Int = { /* compiled code */ }
+ def foo1608(): scala.Int = { /* compiled code */ }
+ def foo1609(): scala.Int = { /* compiled code */ }
+ def foo1610(): scala.Int = { /* compiled code */ }
+ def foo1611(): scala.Int = { /* compiled code */ }
+ def foo1612(): scala.Int = { /* compiled code */ }
+ def foo1613(): scala.Int = { /* compiled code */ }
+ def foo1614(): scala.Int = { /* compiled code */ }
+ def foo1615(): scala.Int = { /* compiled code */ }
+ def foo1616(): scala.Int = { /* compiled code */ }
+ def foo1617(): scala.Int = { /* compiled code */ }
+ def foo1618(): scala.Int = { /* compiled code */ }
+ def foo1619(): scala.Int = { /* compiled code */ }
+ def foo1620(): scala.Int = { /* compiled code */ }
+ def foo1621(): scala.Int = { /* compiled code */ }
+ def foo1622(): scala.Int = { /* compiled code */ }
+ def foo1623(): scala.Int = { /* compiled code */ }
+ def foo1624(): scala.Int = { /* compiled code */ }
+ def foo1625(): scala.Int = { /* compiled code */ }
+ def foo1626(): scala.Int = { /* compiled code */ }
+ def foo1627(): scala.Int = { /* compiled code */ }
+ def foo1628(): scala.Int = { /* compiled code */ }
+ def foo1629(): scala.Int = { /* compiled code */ }
+ def foo1630(): scala.Int = { /* compiled code */ }
+ def foo1631(): scala.Int = { /* compiled code */ }
+ def foo1632(): scala.Int = { /* compiled code */ }
+ def foo1633(): scala.Int = { /* compiled code */ }
+ def foo1634(): scala.Int = { /* compiled code */ }
+ def foo1635(): scala.Int = { /* compiled code */ }
+ def foo1636(): scala.Int = { /* compiled code */ }
+ def foo1637(): scala.Int = { /* compiled code */ }
+ def foo1638(): scala.Int = { /* compiled code */ }
+ def foo1639(): scala.Int = { /* compiled code */ }
+ def foo1640(): scala.Int = { /* compiled code */ }
+ def foo1641(): scala.Int = { /* compiled code */ }
+ def foo1642(): scala.Int = { /* compiled code */ }
+ def foo1643(): scala.Int = { /* compiled code */ }
+ def foo1644(): scala.Int = { /* compiled code */ }
+ def foo1645(): scala.Int = { /* compiled code */ }
+ def foo1646(): scala.Int = { /* compiled code */ }
+ def foo1647(): scala.Int = { /* compiled code */ }
+ def foo1648(): scala.Int = { /* compiled code */ }
+ def foo1649(): scala.Int = { /* compiled code */ }
+ def foo1650(): scala.Int = { /* compiled code */ }
+ def foo1651(): scala.Int = { /* compiled code */ }
+ def foo1652(): scala.Int = { /* compiled code */ }
+ def foo1653(): scala.Int = { /* compiled code */ }
+ def foo1654(): scala.Int = { /* compiled code */ }
+ def foo1655(): scala.Int = { /* compiled code */ }
+ def foo1656(): scala.Int = { /* compiled code */ }
+ def foo1657(): scala.Int = { /* compiled code */ }
+ def foo1658(): scala.Int = { /* compiled code */ }
+ def foo1659(): scala.Int = { /* compiled code */ }
+ def foo1660(): scala.Int = { /* compiled code */ }
+ def foo1661(): scala.Int = { /* compiled code */ }
+ def foo1662(): scala.Int = { /* compiled code */ }
+ def foo1663(): scala.Int = { /* compiled code */ }
+ def foo1664(): scala.Int = { /* compiled code */ }
+ def foo1665(): scala.Int = { /* compiled code */ }
+ def foo1666(): scala.Int = { /* compiled code */ }
+ def foo1667(): scala.Int = { /* compiled code */ }
+ def foo1668(): scala.Int = { /* compiled code */ }
+ def foo1669(): scala.Int = { /* compiled code */ }
+ def foo1670(): scala.Int = { /* compiled code */ }
+ def foo1671(): scala.Int = { /* compiled code */ }
+ def foo1672(): scala.Int = { /* compiled code */ }
+ def foo1673(): scala.Int = { /* compiled code */ }
+ def foo1674(): scala.Int = { /* compiled code */ }
+ def foo1675(): scala.Int = { /* compiled code */ }
+ def foo1676(): scala.Int = { /* compiled code */ }
+ def foo1677(): scala.Int = { /* compiled code */ }
+ def foo1678(): scala.Int = { /* compiled code */ }
+ def foo1679(): scala.Int = { /* compiled code */ }
+ def foo1680(): scala.Int = { /* compiled code */ }
+ def foo1681(): scala.Int = { /* compiled code */ }
+ def foo1682(): scala.Int = { /* compiled code */ }
+ def foo1683(): scala.Int = { /* compiled code */ }
+ def foo1684(): scala.Int = { /* compiled code */ }
+ def foo1685(): scala.Int = { /* compiled code */ }
+ def foo1686(): scala.Int = { /* compiled code */ }
+ def foo1687(): scala.Int = { /* compiled code */ }
+ def foo1688(): scala.Int = { /* compiled code */ }
+ def foo1689(): scala.Int = { /* compiled code */ }
+ def foo1690(): scala.Int = { /* compiled code */ }
+ def foo1691(): scala.Int = { /* compiled code */ }
+ def foo1692(): scala.Int = { /* compiled code */ }
+ def foo1693(): scala.Int = { /* compiled code */ }
+ def foo1694(): scala.Int = { /* compiled code */ }
+ def foo1695(): scala.Int = { /* compiled code */ }
+ def foo1696(): scala.Int = { /* compiled code */ }
+ def foo1697(): scala.Int = { /* compiled code */ }
+ def foo1698(): scala.Int = { /* compiled code */ }
+ def foo1699(): scala.Int = { /* compiled code */ }
+ def foo1700(): scala.Int = { /* compiled code */ }
+ def foo1701(): scala.Int = { /* compiled code */ }
+ def foo1702(): scala.Int = { /* compiled code */ }
+ def foo1703(): scala.Int = { /* compiled code */ }
+ def foo1704(): scala.Int = { /* compiled code */ }
+ def foo1705(): scala.Int = { /* compiled code */ }
+ def foo1706(): scala.Int = { /* compiled code */ }
+ def foo1707(): scala.Int = { /* compiled code */ }
+ def foo1708(): scala.Int = { /* compiled code */ }
+ def foo1709(): scala.Int = { /* compiled code */ }
+ def foo1710(): scala.Int = { /* compiled code */ }
+ def foo1711(): scala.Int = { /* compiled code */ }
+ def foo1712(): scala.Int = { /* compiled code */ }
+ def foo1713(): scala.Int = { /* compiled code */ }
+ def foo1714(): scala.Int = { /* compiled code */ }
+ def foo1715(): scala.Int = { /* compiled code */ }
+ def foo1716(): scala.Int = { /* compiled code */ }
+ def foo1717(): scala.Int = { /* compiled code */ }
+ def foo1718(): scala.Int = { /* compiled code */ }
+ def foo1719(): scala.Int = { /* compiled code */ }
+ def foo1720(): scala.Int = { /* compiled code */ }
+ def foo1721(): scala.Int = { /* compiled code */ }
+ def foo1722(): scala.Int = { /* compiled code */ }
+ def foo1723(): scala.Int = { /* compiled code */ }
+ def foo1724(): scala.Int = { /* compiled code */ }
+ def foo1725(): scala.Int = { /* compiled code */ }
+ def foo1726(): scala.Int = { /* compiled code */ }
+ def foo1727(): scala.Int = { /* compiled code */ }
+ def foo1728(): scala.Int = { /* compiled code */ }
+ def foo1729(): scala.Int = { /* compiled code */ }
+ def foo1730(): scala.Int = { /* compiled code */ }
+ def foo1731(): scala.Int = { /* compiled code */ }
+ def foo1732(): scala.Int = { /* compiled code */ }
+ def foo1733(): scala.Int = { /* compiled code */ }
+ def foo1734(): scala.Int = { /* compiled code */ }
+ def foo1735(): scala.Int = { /* compiled code */ }
+ def foo1736(): scala.Int = { /* compiled code */ }
+ def foo1737(): scala.Int = { /* compiled code */ }
+ def foo1738(): scala.Int = { /* compiled code */ }
+ def foo1739(): scala.Int = { /* compiled code */ }
+ def foo1740(): scala.Int = { /* compiled code */ }
+ def foo1741(): scala.Int = { /* compiled code */ }
+ def foo1742(): scala.Int = { /* compiled code */ }
+ def foo1743(): scala.Int = { /* compiled code */ }
+ def foo1744(): scala.Int = { /* compiled code */ }
+ def foo1745(): scala.Int = { /* compiled code */ }
+ def foo1746(): scala.Int = { /* compiled code */ }
+ def foo1747(): scala.Int = { /* compiled code */ }
+ def foo1748(): scala.Int = { /* compiled code */ }
+ def foo1749(): scala.Int = { /* compiled code */ }
+ def foo1750(): scala.Int = { /* compiled code */ }
+ def foo1751(): scala.Int = { /* compiled code */ }
+ def foo1752(): scala.Int = { /* compiled code */ }
+ def foo1753(): scala.Int = { /* compiled code */ }
+ def foo1754(): scala.Int = { /* compiled code */ }
+ def foo1755(): scala.Int = { /* compiled code */ }
+ def foo1756(): scala.Int = { /* compiled code */ }
+ def foo1757(): scala.Int = { /* compiled code */ }
+ def foo1758(): scala.Int = { /* compiled code */ }
+ def foo1759(): scala.Int = { /* compiled code */ }
+ def foo1760(): scala.Int = { /* compiled code */ }
+ def foo1761(): scala.Int = { /* compiled code */ }
+ def foo1762(): scala.Int = { /* compiled code */ }
+ def foo1763(): scala.Int = { /* compiled code */ }
+ def foo1764(): scala.Int = { /* compiled code */ }
+ def foo1765(): scala.Int = { /* compiled code */ }
+ def foo1766(): scala.Int = { /* compiled code */ }
+ def foo1767(): scala.Int = { /* compiled code */ }
+ def foo1768(): scala.Int = { /* compiled code */ }
+ def foo1769(): scala.Int = { /* compiled code */ }
+ def foo1770(): scala.Int = { /* compiled code */ }
+ def foo1771(): scala.Int = { /* compiled code */ }
+ def foo1772(): scala.Int = { /* compiled code */ }
+ def foo1773(): scala.Int = { /* compiled code */ }
+ def foo1774(): scala.Int = { /* compiled code */ }
+ def foo1775(): scala.Int = { /* compiled code */ }
+ def foo1776(): scala.Int = { /* compiled code */ }
+ def foo1777(): scala.Int = { /* compiled code */ }
+ def foo1778(): scala.Int = { /* compiled code */ }
+ def foo1779(): scala.Int = { /* compiled code */ }
+ def foo1780(): scala.Int = { /* compiled code */ }
+ def foo1781(): scala.Int = { /* compiled code */ }
+ def foo1782(): scala.Int = { /* compiled code */ }
+ def foo1783(): scala.Int = { /* compiled code */ }
+ def foo1784(): scala.Int = { /* compiled code */ }
+ def foo1785(): scala.Int = { /* compiled code */ }
+ def foo1786(): scala.Int = { /* compiled code */ }
+ def foo1787(): scala.Int = { /* compiled code */ }
+ def foo1788(): scala.Int = { /* compiled code */ }
+ def foo1789(): scala.Int = { /* compiled code */ }
+ def foo1790(): scala.Int = { /* compiled code */ }
+ def foo1791(): scala.Int = { /* compiled code */ }
+ def foo1792(): scala.Int = { /* compiled code */ }
+ def foo1793(): scala.Int = { /* compiled code */ }
+ def foo1794(): scala.Int = { /* compiled code */ }
+ def foo1795(): scala.Int = { /* compiled code */ }
+ def foo1796(): scala.Int = { /* compiled code */ }
+ def foo1797(): scala.Int = { /* compiled code */ }
+ def foo1798(): scala.Int = { /* compiled code */ }
+ def foo1799(): scala.Int = { /* compiled code */ }
+ def foo1800(): scala.Int = { /* compiled code */ }
+ def foo1801(): scala.Int = { /* compiled code */ }
+ def foo1802(): scala.Int = { /* compiled code */ }
+ def foo1803(): scala.Int = { /* compiled code */ }
+ def foo1804(): scala.Int = { /* compiled code */ }
+ def foo1805(): scala.Int = { /* compiled code */ }
+ def foo1806(): scala.Int = { /* compiled code */ }
+ def foo1807(): scala.Int = { /* compiled code */ }
+ def foo1808(): scala.Int = { /* compiled code */ }
+ def foo1809(): scala.Int = { /* compiled code */ }
+ def foo1810(): scala.Int = { /* compiled code */ }
+ def foo1811(): scala.Int = { /* compiled code */ }
+ def foo1812(): scala.Int = { /* compiled code */ }
+ def foo1813(): scala.Int = { /* compiled code */ }
+ def foo1814(): scala.Int = { /* compiled code */ }
+ def foo1815(): scala.Int = { /* compiled code */ }
+ def foo1816(): scala.Int = { /* compiled code */ }
+ def foo1817(): scala.Int = { /* compiled code */ }
+ def foo1818(): scala.Int = { /* compiled code */ }
+ def foo1819(): scala.Int = { /* compiled code */ }
+ def foo1820(): scala.Int = { /* compiled code */ }
+ def foo1821(): scala.Int = { /* compiled code */ }
+ def foo1822(): scala.Int = { /* compiled code */ }
+ def foo1823(): scala.Int = { /* compiled code */ }
+ def foo1824(): scala.Int = { /* compiled code */ }
+ def foo1825(): scala.Int = { /* compiled code */ }
+ def foo1826(): scala.Int = { /* compiled code */ }
+ def foo1827(): scala.Int = { /* compiled code */ }
+ def foo1828(): scala.Int = { /* compiled code */ }
+ def foo1829(): scala.Int = { /* compiled code */ }
+ def foo1830(): scala.Int = { /* compiled code */ }
+ def foo1831(): scala.Int = { /* compiled code */ }
+ def foo1832(): scala.Int = { /* compiled code */ }
+ def foo1833(): scala.Int = { /* compiled code */ }
+ def foo1834(): scala.Int = { /* compiled code */ }
+ def foo1835(): scala.Int = { /* compiled code */ }
+ def foo1836(): scala.Int = { /* compiled code */ }
+ def foo1837(): scala.Int = { /* compiled code */ }
+ def foo1838(): scala.Int = { /* compiled code */ }
+ def foo1839(): scala.Int = { /* compiled code */ }
+ def foo1840(): scala.Int = { /* compiled code */ }
+ def foo1841(): scala.Int = { /* compiled code */ }
+ def foo1842(): scala.Int = { /* compiled code */ }
+ def foo1843(): scala.Int = { /* compiled code */ }
+ def foo1844(): scala.Int = { /* compiled code */ }
+ def foo1845(): scala.Int = { /* compiled code */ }
+ def foo1846(): scala.Int = { /* compiled code */ }
+ def foo1847(): scala.Int = { /* compiled code */ }
+ def foo1848(): scala.Int = { /* compiled code */ }
+ def foo1849(): scala.Int = { /* compiled code */ }
+ def foo1850(): scala.Int = { /* compiled code */ }
+ def foo1851(): scala.Int = { /* compiled code */ }
+ def foo1852(): scala.Int = { /* compiled code */ }
+ def foo1853(): scala.Int = { /* compiled code */ }
+ def foo1854(): scala.Int = { /* compiled code */ }
+ def foo1855(): scala.Int = { /* compiled code */ }
+ def foo1856(): scala.Int = { /* compiled code */ }
+ def foo1857(): scala.Int = { /* compiled code */ }
+ def foo1858(): scala.Int = { /* compiled code */ }
+ def foo1859(): scala.Int = { /* compiled code */ }
+ def foo1860(): scala.Int = { /* compiled code */ }
+ def foo1861(): scala.Int = { /* compiled code */ }
+ def foo1862(): scala.Int = { /* compiled code */ }
+ def foo1863(): scala.Int = { /* compiled code */ }
+ def foo1864(): scala.Int = { /* compiled code */ }
+ def foo1865(): scala.Int = { /* compiled code */ }
+ def foo1866(): scala.Int = { /* compiled code */ }
+ def foo1867(): scala.Int = { /* compiled code */ }
+ def foo1868(): scala.Int = { /* compiled code */ }
+ def foo1869(): scala.Int = { /* compiled code */ }
+ def foo1870(): scala.Int = { /* compiled code */ }
+ def foo1871(): scala.Int = { /* compiled code */ }
+ def foo1872(): scala.Int = { /* compiled code */ }
+ def foo1873(): scala.Int = { /* compiled code */ }
+ def foo1874(): scala.Int = { /* compiled code */ }
+ def foo1875(): scala.Int = { /* compiled code */ }
+ def foo1876(): scala.Int = { /* compiled code */ }
+ def foo1877(): scala.Int = { /* compiled code */ }
+ def foo1878(): scala.Int = { /* compiled code */ }
+ def foo1879(): scala.Int = { /* compiled code */ }
+ def foo1880(): scala.Int = { /* compiled code */ }
+ def foo1881(): scala.Int = { /* compiled code */ }
+ def foo1882(): scala.Int = { /* compiled code */ }
+ def foo1883(): scala.Int = { /* compiled code */ }
+ def foo1884(): scala.Int = { /* compiled code */ }
+ def foo1885(): scala.Int = { /* compiled code */ }
+ def foo1886(): scala.Int = { /* compiled code */ }
+ def foo1887(): scala.Int = { /* compiled code */ }
+ def foo1888(): scala.Int = { /* compiled code */ }
+ def foo1889(): scala.Int = { /* compiled code */ }
+ def foo1890(): scala.Int = { /* compiled code */ }
+ def foo1891(): scala.Int = { /* compiled code */ }
+ def foo1892(): scala.Int = { /* compiled code */ }
+ def foo1893(): scala.Int = { /* compiled code */ }
+ def foo1894(): scala.Int = { /* compiled code */ }
+ def foo1895(): scala.Int = { /* compiled code */ }
+ def foo1896(): scala.Int = { /* compiled code */ }
+ def foo1897(): scala.Int = { /* compiled code */ }
+ def foo1898(): scala.Int = { /* compiled code */ }
+ def foo1899(): scala.Int = { /* compiled code */ }
+ def foo1900(): scala.Int = { /* compiled code */ }
+ def foo1901(): scala.Int = { /* compiled code */ }
+ def foo1902(): scala.Int = { /* compiled code */ }
+ def foo1903(): scala.Int = { /* compiled code */ }
+ def foo1904(): scala.Int = { /* compiled code */ }
+ def foo1905(): scala.Int = { /* compiled code */ }
+ def foo1906(): scala.Int = { /* compiled code */ }
+ def foo1907(): scala.Int = { /* compiled code */ }
+ def foo1908(): scala.Int = { /* compiled code */ }
+ def foo1909(): scala.Int = { /* compiled code */ }
+ def foo1910(): scala.Int = { /* compiled code */ }
+ def foo1911(): scala.Int = { /* compiled code */ }
+ def foo1912(): scala.Int = { /* compiled code */ }
+ def foo1913(): scala.Int = { /* compiled code */ }
+ def foo1914(): scala.Int = { /* compiled code */ }
+ def foo1915(): scala.Int = { /* compiled code */ }
+ def foo1916(): scala.Int = { /* compiled code */ }
+ def foo1917(): scala.Int = { /* compiled code */ }
+ def foo1918(): scala.Int = { /* compiled code */ }
+ def foo1919(): scala.Int = { /* compiled code */ }
+ def foo1920(): scala.Int = { /* compiled code */ }
+ def foo1921(): scala.Int = { /* compiled code */ }
+ def foo1922(): scala.Int = { /* compiled code */ }
+ def foo1923(): scala.Int = { /* compiled code */ }
+ def foo1924(): scala.Int = { /* compiled code */ }
+ def foo1925(): scala.Int = { /* compiled code */ }
+ def foo1926(): scala.Int = { /* compiled code */ }
+ def foo1927(): scala.Int = { /* compiled code */ }
+ def foo1928(): scala.Int = { /* compiled code */ }
+ def foo1929(): scala.Int = { /* compiled code */ }
+ def foo1930(): scala.Int = { /* compiled code */ }
+ def foo1931(): scala.Int = { /* compiled code */ }
+ def foo1932(): scala.Int = { /* compiled code */ }
+ def foo1933(): scala.Int = { /* compiled code */ }
+ def foo1934(): scala.Int = { /* compiled code */ }
+ def foo1935(): scala.Int = { /* compiled code */ }
+ def foo1936(): scala.Int = { /* compiled code */ }
+ def foo1937(): scala.Int = { /* compiled code */ }
+ def foo1938(): scala.Int = { /* compiled code */ }
+ def foo1939(): scala.Int = { /* compiled code */ }
+ def foo1940(): scala.Int = { /* compiled code */ }
+ def foo1941(): scala.Int = { /* compiled code */ }
+ def foo1942(): scala.Int = { /* compiled code */ }
+ def foo1943(): scala.Int = { /* compiled code */ }
+ def foo1944(): scala.Int = { /* compiled code */ }
+ def foo1945(): scala.Int = { /* compiled code */ }
+ def foo1946(): scala.Int = { /* compiled code */ }
+ def foo1947(): scala.Int = { /* compiled code */ }
+ def foo1948(): scala.Int = { /* compiled code */ }
+ def foo1949(): scala.Int = { /* compiled code */ }
+ def foo1950(): scala.Int = { /* compiled code */ }
+ def foo1951(): scala.Int = { /* compiled code */ }
+ def foo1952(): scala.Int = { /* compiled code */ }
+ def foo1953(): scala.Int = { /* compiled code */ }
+ def foo1954(): scala.Int = { /* compiled code */ }
+ def foo1955(): scala.Int = { /* compiled code */ }
+ def foo1956(): scala.Int = { /* compiled code */ }
+ def foo1957(): scala.Int = { /* compiled code */ }
+ def foo1958(): scala.Int = { /* compiled code */ }
+ def foo1959(): scala.Int = { /* compiled code */ }
+ def foo1960(): scala.Int = { /* compiled code */ }
+ def foo1961(): scala.Int = { /* compiled code */ }
+ def foo1962(): scala.Int = { /* compiled code */ }
+ def foo1963(): scala.Int = { /* compiled code */ }
+ def foo1964(): scala.Int = { /* compiled code */ }
+ def foo1965(): scala.Int = { /* compiled code */ }
+ def foo1966(): scala.Int = { /* compiled code */ }
+ def foo1967(): scala.Int = { /* compiled code */ }
+ def foo1968(): scala.Int = { /* compiled code */ }
+ def foo1969(): scala.Int = { /* compiled code */ }
+ def foo1970(): scala.Int = { /* compiled code */ }
+ def foo1971(): scala.Int = { /* compiled code */ }
+ def foo1972(): scala.Int = { /* compiled code */ }
+ def foo1973(): scala.Int = { /* compiled code */ }
+ def foo1974(): scala.Int = { /* compiled code */ }
+ def foo1975(): scala.Int = { /* compiled code */ }
+ def foo1976(): scala.Int = { /* compiled code */ }
+ def foo1977(): scala.Int = { /* compiled code */ }
+ def foo1978(): scala.Int = { /* compiled code */ }
+ def foo1979(): scala.Int = { /* compiled code */ }
+ def foo1980(): scala.Int = { /* compiled code */ }
+ def foo1981(): scala.Int = { /* compiled code */ }
+ def foo1982(): scala.Int = { /* compiled code */ }
+ def foo1983(): scala.Int = { /* compiled code */ }
+ def foo1984(): scala.Int = { /* compiled code */ }
+ def foo1985(): scala.Int = { /* compiled code */ }
+ def foo1986(): scala.Int = { /* compiled code */ }
+ def foo1987(): scala.Int = { /* compiled code */ }
+ def foo1988(): scala.Int = { /* compiled code */ }
+ def foo1989(): scala.Int = { /* compiled code */ }
+ def foo1990(): scala.Int = { /* compiled code */ }
+ def foo1991(): scala.Int = { /* compiled code */ }
+ def foo1992(): scala.Int = { /* compiled code */ }
+ def foo1993(): scala.Int = { /* compiled code */ }
+ def foo1994(): scala.Int = { /* compiled code */ }
+ def foo1995(): scala.Int = { /* compiled code */ }
+ def foo1996(): scala.Int = { /* compiled code */ }
+ def foo1997(): scala.Int = { /* compiled code */ }
+ def foo1998(): scala.Int = { /* compiled code */ }
+ def foo1999(): scala.Int = { /* compiled code */ }
+ def foo2000(): scala.Int = { /* compiled code */ }
+ def foo2001(): scala.Int = { /* compiled code */ }
+ def foo2002(): scala.Int = { /* compiled code */ }
+ def foo2003(): scala.Int = { /* compiled code */ }
+ def foo2004(): scala.Int = { /* compiled code */ }
+ def foo2005(): scala.Int = { /* compiled code */ }
+ def foo2006(): scala.Int = { /* compiled code */ }
+ def foo2007(): scala.Int = { /* compiled code */ }
+ def foo2008(): scala.Int = { /* compiled code */ }
+ def foo2009(): scala.Int = { /* compiled code */ }
+ def foo2010(): scala.Int = { /* compiled code */ }
+ def foo2011(): scala.Int = { /* compiled code */ }
+ def foo2012(): scala.Int = { /* compiled code */ }
+ def foo2013(): scala.Int = { /* compiled code */ }
+ def foo2014(): scala.Int = { /* compiled code */ }
+ def foo2015(): scala.Int = { /* compiled code */ }
+ def foo2016(): scala.Int = { /* compiled code */ }
+ def foo2017(): scala.Int = { /* compiled code */ }
+ def foo2018(): scala.Int = { /* compiled code */ }
+ def foo2019(): scala.Int = { /* compiled code */ }
+ def foo2020(): scala.Int = { /* compiled code */ }
+ def foo2021(): scala.Int = { /* compiled code */ }
+ def foo2022(): scala.Int = { /* compiled code */ }
+ def foo2023(): scala.Int = { /* compiled code */ }
+ def foo2024(): scala.Int = { /* compiled code */ }
+ def foo2025(): scala.Int = { /* compiled code */ }
+ def foo2026(): scala.Int = { /* compiled code */ }
+ def foo2027(): scala.Int = { /* compiled code */ }
+ def foo2028(): scala.Int = { /* compiled code */ }
+ def foo2029(): scala.Int = { /* compiled code */ }
+ def foo2030(): scala.Int = { /* compiled code */ }
+ def foo2031(): scala.Int = { /* compiled code */ }
+ def foo2032(): scala.Int = { /* compiled code */ }
+ def foo2033(): scala.Int = { /* compiled code */ }
+ def foo2034(): scala.Int = { /* compiled code */ }
+ def foo2035(): scala.Int = { /* compiled code */ }
+ def foo2036(): scala.Int = { /* compiled code */ }
+ def foo2037(): scala.Int = { /* compiled code */ }
+ def foo2038(): scala.Int = { /* compiled code */ }
+ def foo2039(): scala.Int = { /* compiled code */ }
+ def foo2040(): scala.Int = { /* compiled code */ }
+ def foo2041(): scala.Int = { /* compiled code */ }
+ def foo2042(): scala.Int = { /* compiled code */ }
+ def foo2043(): scala.Int = { /* compiled code */ }
+ def foo2044(): scala.Int = { /* compiled code */ }
+ def foo2045(): scala.Int = { /* compiled code */ }
+ def foo2046(): scala.Int = { /* compiled code */ }
+ def foo2047(): scala.Int = { /* compiled code */ }
+ def foo2048(): scala.Int = { /* compiled code */ }
+ def foo2049(): scala.Int = { /* compiled code */ }
+ def foo2050(): scala.Int = { /* compiled code */ }
+ def foo2051(): scala.Int = { /* compiled code */ }
+ def foo2052(): scala.Int = { /* compiled code */ }
+ def foo2053(): scala.Int = { /* compiled code */ }
+ def foo2054(): scala.Int = { /* compiled code */ }
+ def foo2055(): scala.Int = { /* compiled code */ }
+ def foo2056(): scala.Int = { /* compiled code */ }
+ def foo2057(): scala.Int = { /* compiled code */ }
+ def foo2058(): scala.Int = { /* compiled code */ }
+ def foo2059(): scala.Int = { /* compiled code */ }
+ def foo2060(): scala.Int = { /* compiled code */ }
+ def foo2061(): scala.Int = { /* compiled code */ }
+ def foo2062(): scala.Int = { /* compiled code */ }
+ def foo2063(): scala.Int = { /* compiled code */ }
+ def foo2064(): scala.Int = { /* compiled code */ }
+ def foo2065(): scala.Int = { /* compiled code */ }
+ def foo2066(): scala.Int = { /* compiled code */ }
+ def foo2067(): scala.Int = { /* compiled code */ }
+ def foo2068(): scala.Int = { /* compiled code */ }
+ def foo2069(): scala.Int = { /* compiled code */ }
+ def foo2070(): scala.Int = { /* compiled code */ }
+ def foo2071(): scala.Int = { /* compiled code */ }
+ def foo2072(): scala.Int = { /* compiled code */ }
+ def foo2073(): scala.Int = { /* compiled code */ }
+ def foo2074(): scala.Int = { /* compiled code */ }
+ def foo2075(): scala.Int = { /* compiled code */ }
+ def foo2076(): scala.Int = { /* compiled code */ }
+ def foo2077(): scala.Int = { /* compiled code */ }
+ def foo2078(): scala.Int = { /* compiled code */ }
+ def foo2079(): scala.Int = { /* compiled code */ }
+ def foo2080(): scala.Int = { /* compiled code */ }
+ def foo2081(): scala.Int = { /* compiled code */ }
+ def foo2082(): scala.Int = { /* compiled code */ }
+ def foo2083(): scala.Int = { /* compiled code */ }
+ def foo2084(): scala.Int = { /* compiled code */ }
+ def foo2085(): scala.Int = { /* compiled code */ }
+ def foo2086(): scala.Int = { /* compiled code */ }
+ def foo2087(): scala.Int = { /* compiled code */ }
+ def foo2088(): scala.Int = { /* compiled code */ }
+ def foo2089(): scala.Int = { /* compiled code */ }
+ def foo2090(): scala.Int = { /* compiled code */ }
+ def foo2091(): scala.Int = { /* compiled code */ }
+ def foo2092(): scala.Int = { /* compiled code */ }
+ def foo2093(): scala.Int = { /* compiled code */ }
+ def foo2094(): scala.Int = { /* compiled code */ }
+ def foo2095(): scala.Int = { /* compiled code */ }
+ def foo2096(): scala.Int = { /* compiled code */ }
+ def foo2097(): scala.Int = { /* compiled code */ }
+ def foo2098(): scala.Int = { /* compiled code */ }
+ def foo2099(): scala.Int = { /* compiled code */ }
+ def foo2100(): scala.Int = { /* compiled code */ }
+ def foo2101(): scala.Int = { /* compiled code */ }
+ def foo2102(): scala.Int = { /* compiled code */ }
+ def foo2103(): scala.Int = { /* compiled code */ }
+ def foo2104(): scala.Int = { /* compiled code */ }
+ def foo2105(): scala.Int = { /* compiled code */ }
+ def foo2106(): scala.Int = { /* compiled code */ }
+ def foo2107(): scala.Int = { /* compiled code */ }
+ def foo2108(): scala.Int = { /* compiled code */ }
+ def foo2109(): scala.Int = { /* compiled code */ }
+ def foo2110(): scala.Int = { /* compiled code */ }
+ def foo2111(): scala.Int = { /* compiled code */ }
+ def foo2112(): scala.Int = { /* compiled code */ }
+ def foo2113(): scala.Int = { /* compiled code */ }
+ def foo2114(): scala.Int = { /* compiled code */ }
+ def foo2115(): scala.Int = { /* compiled code */ }
+ def foo2116(): scala.Int = { /* compiled code */ }
+ def foo2117(): scala.Int = { /* compiled code */ }
+ def foo2118(): scala.Int = { /* compiled code */ }
+ def foo2119(): scala.Int = { /* compiled code */ }
+ def foo2120(): scala.Int = { /* compiled code */ }
+ def foo2121(): scala.Int = { /* compiled code */ }
+ def foo2122(): scala.Int = { /* compiled code */ }
+ def foo2123(): scala.Int = { /* compiled code */ }
+ def foo2124(): scala.Int = { /* compiled code */ }
+ def foo2125(): scala.Int = { /* compiled code */ }
+ def foo2126(): scala.Int = { /* compiled code */ }
+ def foo2127(): scala.Int = { /* compiled code */ }
+ def foo2128(): scala.Int = { /* compiled code */ }
+ def foo2129(): scala.Int = { /* compiled code */ }
+ def foo2130(): scala.Int = { /* compiled code */ }
+ def foo2131(): scala.Int = { /* compiled code */ }
+ def foo2132(): scala.Int = { /* compiled code */ }
+ def foo2133(): scala.Int = { /* compiled code */ }
+ def foo2134(): scala.Int = { /* compiled code */ }
+ def foo2135(): scala.Int = { /* compiled code */ }
+ def foo2136(): scala.Int = { /* compiled code */ }
+ def foo2137(): scala.Int = { /* compiled code */ }
+ def foo2138(): scala.Int = { /* compiled code */ }
+ def foo2139(): scala.Int = { /* compiled code */ }
+ def foo2140(): scala.Int = { /* compiled code */ }
+ def foo2141(): scala.Int = { /* compiled code */ }
+ def foo2142(): scala.Int = { /* compiled code */ }
+ def foo2143(): scala.Int = { /* compiled code */ }
+ def foo2144(): scala.Int = { /* compiled code */ }
+ def foo2145(): scala.Int = { /* compiled code */ }
+ def foo2146(): scala.Int = { /* compiled code */ }
+ def foo2147(): scala.Int = { /* compiled code */ }
+ def foo2148(): scala.Int = { /* compiled code */ }
+ def foo2149(): scala.Int = { /* compiled code */ }
+ def foo2150(): scala.Int = { /* compiled code */ }
+ def foo2151(): scala.Int = { /* compiled code */ }
+ def foo2152(): scala.Int = { /* compiled code */ }
+ def foo2153(): scala.Int = { /* compiled code */ }
+ def foo2154(): scala.Int = { /* compiled code */ }
+ def foo2155(): scala.Int = { /* compiled code */ }
+ def foo2156(): scala.Int = { /* compiled code */ }
+ def foo2157(): scala.Int = { /* compiled code */ }
+ def foo2158(): scala.Int = { /* compiled code */ }
+ def foo2159(): scala.Int = { /* compiled code */ }
+ def foo2160(): scala.Int = { /* compiled code */ }
+ def foo2161(): scala.Int = { /* compiled code */ }
+ def foo2162(): scala.Int = { /* compiled code */ }
+ def foo2163(): scala.Int = { /* compiled code */ }
+ def foo2164(): scala.Int = { /* compiled code */ }
+ def foo2165(): scala.Int = { /* compiled code */ }
+ def foo2166(): scala.Int = { /* compiled code */ }
+ def foo2167(): scala.Int = { /* compiled code */ }
+ def foo2168(): scala.Int = { /* compiled code */ }
+ def foo2169(): scala.Int = { /* compiled code */ }
+ def foo2170(): scala.Int = { /* compiled code */ }
+ def foo2171(): scala.Int = { /* compiled code */ }
+ def foo2172(): scala.Int = { /* compiled code */ }
+ def foo2173(): scala.Int = { /* compiled code */ }
+ def foo2174(): scala.Int = { /* compiled code */ }
+ def foo2175(): scala.Int = { /* compiled code */ }
+ def foo2176(): scala.Int = { /* compiled code */ }
+ def foo2177(): scala.Int = { /* compiled code */ }
+ def foo2178(): scala.Int = { /* compiled code */ }
+ def foo2179(): scala.Int = { /* compiled code */ }
+ def foo2180(): scala.Int = { /* compiled code */ }
+ def foo2181(): scala.Int = { /* compiled code */ }
+ def foo2182(): scala.Int = { /* compiled code */ }
+ def foo2183(): scala.Int = { /* compiled code */ }
+ def foo2184(): scala.Int = { /* compiled code */ }
+ def foo2185(): scala.Int = { /* compiled code */ }
+ def foo2186(): scala.Int = { /* compiled code */ }
+ def foo2187(): scala.Int = { /* compiled code */ }
+ def foo2188(): scala.Int = { /* compiled code */ }
+ def foo2189(): scala.Int = { /* compiled code */ }
+ def foo2190(): scala.Int = { /* compiled code */ }
+ def foo2191(): scala.Int = { /* compiled code */ }
+ def foo2192(): scala.Int = { /* compiled code */ }
+ def foo2193(): scala.Int = { /* compiled code */ }
+ def foo2194(): scala.Int = { /* compiled code */ }
+ def foo2195(): scala.Int = { /* compiled code */ }
+ def foo2196(): scala.Int = { /* compiled code */ }
+ def foo2197(): scala.Int = { /* compiled code */ }
+ def foo2198(): scala.Int = { /* compiled code */ }
+ def foo2199(): scala.Int = { /* compiled code */ }
+ def foo2200(): scala.Int = { /* compiled code */ }
+ def foo2201(): scala.Int = { /* compiled code */ }
+ def foo2202(): scala.Int = { /* compiled code */ }
+ def foo2203(): scala.Int = { /* compiled code */ }
+ def foo2204(): scala.Int = { /* compiled code */ }
+ def foo2205(): scala.Int = { /* compiled code */ }
+ def foo2206(): scala.Int = { /* compiled code */ }
+ def foo2207(): scala.Int = { /* compiled code */ }
+ def foo2208(): scala.Int = { /* compiled code */ }
+ def foo2209(): scala.Int = { /* compiled code */ }
+ def foo2210(): scala.Int = { /* compiled code */ }
+ def foo2211(): scala.Int = { /* compiled code */ }
+ def foo2212(): scala.Int = { /* compiled code */ }
+ def foo2213(): scala.Int = { /* compiled code */ }
+ def foo2214(): scala.Int = { /* compiled code */ }
+ def foo2215(): scala.Int = { /* compiled code */ }
+ def foo2216(): scala.Int = { /* compiled code */ }
+ def foo2217(): scala.Int = { /* compiled code */ }
+ def foo2218(): scala.Int = { /* compiled code */ }
+ def foo2219(): scala.Int = { /* compiled code */ }
+ def foo2220(): scala.Int = { /* compiled code */ }
+ def foo2221(): scala.Int = { /* compiled code */ }
+ def foo2222(): scala.Int = { /* compiled code */ }
+ def foo2223(): scala.Int = { /* compiled code */ }
+ def foo2224(): scala.Int = { /* compiled code */ }
+ def foo2225(): scala.Int = { /* compiled code */ }
+ def foo2226(): scala.Int = { /* compiled code */ }
+ def foo2227(): scala.Int = { /* compiled code */ }
+ def foo2228(): scala.Int = { /* compiled code */ }
+ def foo2229(): scala.Int = { /* compiled code */ }
+ def foo2230(): scala.Int = { /* compiled code */ }
+ def foo2231(): scala.Int = { /* compiled code */ }
+ def foo2232(): scala.Int = { /* compiled code */ }
+ def foo2233(): scala.Int = { /* compiled code */ }
+ def foo2234(): scala.Int = { /* compiled code */ }
+ def foo2235(): scala.Int = { /* compiled code */ }
+ def foo2236(): scala.Int = { /* compiled code */ }
+ def foo2237(): scala.Int = { /* compiled code */ }
+ def foo2238(): scala.Int = { /* compiled code */ }
+ def foo2239(): scala.Int = { /* compiled code */ }
+ def foo2240(): scala.Int = { /* compiled code */ }
+ def foo2241(): scala.Int = { /* compiled code */ }
+ def foo2242(): scala.Int = { /* compiled code */ }
+ def foo2243(): scala.Int = { /* compiled code */ }
+ def foo2244(): scala.Int = { /* compiled code */ }
+ def foo2245(): scala.Int = { /* compiled code */ }
+ def foo2246(): scala.Int = { /* compiled code */ }
+ def foo2247(): scala.Int = { /* compiled code */ }
+ def foo2248(): scala.Int = { /* compiled code */ }
+ def foo2249(): scala.Int = { /* compiled code */ }
+ def foo2250(): scala.Int = { /* compiled code */ }
+ def foo2251(): scala.Int = { /* compiled code */ }
+ def foo2252(): scala.Int = { /* compiled code */ }
+ def foo2253(): scala.Int = { /* compiled code */ }
+ def foo2254(): scala.Int = { /* compiled code */ }
+ def foo2255(): scala.Int = { /* compiled code */ }
+ def foo2256(): scala.Int = { /* compiled code */ }
+ def foo2257(): scala.Int = { /* compiled code */ }
+ def foo2258(): scala.Int = { /* compiled code */ }
+ def foo2259(): scala.Int = { /* compiled code */ }
+ def foo2260(): scala.Int = { /* compiled code */ }
+ def foo2261(): scala.Int = { /* compiled code */ }
+ def foo2262(): scala.Int = { /* compiled code */ }
+ def foo2263(): scala.Int = { /* compiled code */ }
+ def foo2264(): scala.Int = { /* compiled code */ }
+ def foo2265(): scala.Int = { /* compiled code */ }
+ def foo2266(): scala.Int = { /* compiled code */ }
+ def foo2267(): scala.Int = { /* compiled code */ }
+ def foo2268(): scala.Int = { /* compiled code */ }
+ def foo2269(): scala.Int = { /* compiled code */ }
+ def foo2270(): scala.Int = { /* compiled code */ }
+ def foo2271(): scala.Int = { /* compiled code */ }
+ def foo2272(): scala.Int = { /* compiled code */ }
+ def foo2273(): scala.Int = { /* compiled code */ }
+ def foo2274(): scala.Int = { /* compiled code */ }
+ def foo2275(): scala.Int = { /* compiled code */ }
+ def foo2276(): scala.Int = { /* compiled code */ }
+ def foo2277(): scala.Int = { /* compiled code */ }
+ def foo2278(): scala.Int = { /* compiled code */ }
+ def foo2279(): scala.Int = { /* compiled code */ }
+ def foo2280(): scala.Int = { /* compiled code */ }
+ def foo2281(): scala.Int = { /* compiled code */ }
+ def foo2282(): scala.Int = { /* compiled code */ }
+ def foo2283(): scala.Int = { /* compiled code */ }
+ def foo2284(): scala.Int = { /* compiled code */ }
+ def foo2285(): scala.Int = { /* compiled code */ }
+ def foo2286(): scala.Int = { /* compiled code */ }
+ def foo2287(): scala.Int = { /* compiled code */ }
+ def foo2288(): scala.Int = { /* compiled code */ }
+ def foo2289(): scala.Int = { /* compiled code */ }
+ def foo2290(): scala.Int = { /* compiled code */ }
+ def foo2291(): scala.Int = { /* compiled code */ }
+ def foo2292(): scala.Int = { /* compiled code */ }
+ def foo2293(): scala.Int = { /* compiled code */ }
+ def foo2294(): scala.Int = { /* compiled code */ }
+ def foo2295(): scala.Int = { /* compiled code */ }
+ def foo2296(): scala.Int = { /* compiled code */ }
+ def foo2297(): scala.Int = { /* compiled code */ }
+ def foo2298(): scala.Int = { /* compiled code */ }
+ def foo2299(): scala.Int = { /* compiled code */ }
+ def foo2300(): scala.Int = { /* compiled code */ }
+ def foo2301(): scala.Int = { /* compiled code */ }
+ def foo2302(): scala.Int = { /* compiled code */ }
+ def foo2303(): scala.Int = { /* compiled code */ }
+ def foo2304(): scala.Int = { /* compiled code */ }
+ def foo2305(): scala.Int = { /* compiled code */ }
+ def foo2306(): scala.Int = { /* compiled code */ }
+ def foo2307(): scala.Int = { /* compiled code */ }
+ def foo2308(): scala.Int = { /* compiled code */ }
+ def foo2309(): scala.Int = { /* compiled code */ }
+ def foo2310(): scala.Int = { /* compiled code */ }
+ def foo2311(): scala.Int = { /* compiled code */ }
+ def foo2312(): scala.Int = { /* compiled code */ }
+ def foo2313(): scala.Int = { /* compiled code */ }
+ def foo2314(): scala.Int = { /* compiled code */ }
+ def foo2315(): scala.Int = { /* compiled code */ }
+ def foo2316(): scala.Int = { /* compiled code */ }
+ def foo2317(): scala.Int = { /* compiled code */ }
+ def foo2318(): scala.Int = { /* compiled code */ }
+ def foo2319(): scala.Int = { /* compiled code */ }
+ def foo2320(): scala.Int = { /* compiled code */ }
+ def foo2321(): scala.Int = { /* compiled code */ }
+ def foo2322(): scala.Int = { /* compiled code */ }
+ def foo2323(): scala.Int = { /* compiled code */ }
+ def foo2324(): scala.Int = { /* compiled code */ }
+ def foo2325(): scala.Int = { /* compiled code */ }
+ def foo2326(): scala.Int = { /* compiled code */ }
+ def foo2327(): scala.Int = { /* compiled code */ }
+ def foo2328(): scala.Int = { /* compiled code */ }
+ def foo2329(): scala.Int = { /* compiled code */ }
+ def foo2330(): scala.Int = { /* compiled code */ }
+ def foo2331(): scala.Int = { /* compiled code */ }
+ def foo2332(): scala.Int = { /* compiled code */ }
+ def foo2333(): scala.Int = { /* compiled code */ }
+ def foo2334(): scala.Int = { /* compiled code */ }
+ def foo2335(): scala.Int = { /* compiled code */ }
+ def foo2336(): scala.Int = { /* compiled code */ }
+ def foo2337(): scala.Int = { /* compiled code */ }
+ def foo2338(): scala.Int = { /* compiled code */ }
+ def foo2339(): scala.Int = { /* compiled code */ }
+ def foo2340(): scala.Int = { /* compiled code */ }
+ def foo2341(): scala.Int = { /* compiled code */ }
+ def foo2342(): scala.Int = { /* compiled code */ }
+ def foo2343(): scala.Int = { /* compiled code */ }
+ def foo2344(): scala.Int = { /* compiled code */ }
+ def foo2345(): scala.Int = { /* compiled code */ }
+ def foo2346(): scala.Int = { /* compiled code */ }
+ def foo2347(): scala.Int = { /* compiled code */ }
+ def foo2348(): scala.Int = { /* compiled code */ }
+ def foo2349(): scala.Int = { /* compiled code */ }
+ def foo2350(): scala.Int = { /* compiled code */ }
+ def foo2351(): scala.Int = { /* compiled code */ }
+ def foo2352(): scala.Int = { /* compiled code */ }
+ def foo2353(): scala.Int = { /* compiled code */ }
+ def foo2354(): scala.Int = { /* compiled code */ }
+ def foo2355(): scala.Int = { /* compiled code */ }
+ def foo2356(): scala.Int = { /* compiled code */ }
+ def foo2357(): scala.Int = { /* compiled code */ }
+ def foo2358(): scala.Int = { /* compiled code */ }
+ def foo2359(): scala.Int = { /* compiled code */ }
+ def foo2360(): scala.Int = { /* compiled code */ }
+ def foo2361(): scala.Int = { /* compiled code */ }
+ def foo2362(): scala.Int = { /* compiled code */ }
+ def foo2363(): scala.Int = { /* compiled code */ }
+ def foo2364(): scala.Int = { /* compiled code */ }
+ def foo2365(): scala.Int = { /* compiled code */ }
+ def foo2366(): scala.Int = { /* compiled code */ }
+ def foo2367(): scala.Int = { /* compiled code */ }
+ def foo2368(): scala.Int = { /* compiled code */ }
+ def foo2369(): scala.Int = { /* compiled code */ }
+ def foo2370(): scala.Int = { /* compiled code */ }
+ def foo2371(): scala.Int = { /* compiled code */ }
+ def foo2372(): scala.Int = { /* compiled code */ }
+ def foo2373(): scala.Int = { /* compiled code */ }
+ def foo2374(): scala.Int = { /* compiled code */ }
+ def foo2375(): scala.Int = { /* compiled code */ }
+ def foo2376(): scala.Int = { /* compiled code */ }
+ def foo2377(): scala.Int = { /* compiled code */ }
+ def foo2378(): scala.Int = { /* compiled code */ }
+ def foo2379(): scala.Int = { /* compiled code */ }
+ def foo2380(): scala.Int = { /* compiled code */ }
+ def foo2381(): scala.Int = { /* compiled code */ }
+ def foo2382(): scala.Int = { /* compiled code */ }
+ def foo2383(): scala.Int = { /* compiled code */ }
+ def foo2384(): scala.Int = { /* compiled code */ }
+ def foo2385(): scala.Int = { /* compiled code */ }
+ def foo2386(): scala.Int = { /* compiled code */ }
+ def foo2387(): scala.Int = { /* compiled code */ }
+ def foo2388(): scala.Int = { /* compiled code */ }
+ def foo2389(): scala.Int = { /* compiled code */ }
+ def foo2390(): scala.Int = { /* compiled code */ }
+ def foo2391(): scala.Int = { /* compiled code */ }
+ def foo2392(): scala.Int = { /* compiled code */ }
+ def foo2393(): scala.Int = { /* compiled code */ }
+ def foo2394(): scala.Int = { /* compiled code */ }
+ def foo2395(): scala.Int = { /* compiled code */ }
+ def foo2396(): scala.Int = { /* compiled code */ }
+ def foo2397(): scala.Int = { /* compiled code */ }
+ def foo2398(): scala.Int = { /* compiled code */ }
+ def foo2399(): scala.Int = { /* compiled code */ }
+ def foo2400(): scala.Int = { /* compiled code */ }
+ def foo2401(): scala.Int = { /* compiled code */ }
+ def foo2402(): scala.Int = { /* compiled code */ }
+ def foo2403(): scala.Int = { /* compiled code */ }
+ def foo2404(): scala.Int = { /* compiled code */ }
+ def foo2405(): scala.Int = { /* compiled code */ }
+ def foo2406(): scala.Int = { /* compiled code */ }
+ def foo2407(): scala.Int = { /* compiled code */ }
+ def foo2408(): scala.Int = { /* compiled code */ }
+ def foo2409(): scala.Int = { /* compiled code */ }
+ def foo2410(): scala.Int = { /* compiled code */ }
+ def foo2411(): scala.Int = { /* compiled code */ }
+ def foo2412(): scala.Int = { /* compiled code */ }
+ def foo2413(): scala.Int = { /* compiled code */ }
+ def foo2414(): scala.Int = { /* compiled code */ }
+ def foo2415(): scala.Int = { /* compiled code */ }
+ def foo2416(): scala.Int = { /* compiled code */ }
+ def foo2417(): scala.Int = { /* compiled code */ }
+ def foo2418(): scala.Int = { /* compiled code */ }
+ def foo2419(): scala.Int = { /* compiled code */ }
+ def foo2420(): scala.Int = { /* compiled code */ }
+ def foo2421(): scala.Int = { /* compiled code */ }
+ def foo2422(): scala.Int = { /* compiled code */ }
+ def foo2423(): scala.Int = { /* compiled code */ }
+ def foo2424(): scala.Int = { /* compiled code */ }
+ def foo2425(): scala.Int = { /* compiled code */ }
+ def foo2426(): scala.Int = { /* compiled code */ }
+ def foo2427(): scala.Int = { /* compiled code */ }
+ def foo2428(): scala.Int = { /* compiled code */ }
+ def foo2429(): scala.Int = { /* compiled code */ }
+ def foo2430(): scala.Int = { /* compiled code */ }
+ def foo2431(): scala.Int = { /* compiled code */ }
+ def foo2432(): scala.Int = { /* compiled code */ }
+ def foo2433(): scala.Int = { /* compiled code */ }
+ def foo2434(): scala.Int = { /* compiled code */ }
+ def foo2435(): scala.Int = { /* compiled code */ }
+ def foo2436(): scala.Int = { /* compiled code */ }
+ def foo2437(): scala.Int = { /* compiled code */ }
+ def foo2438(): scala.Int = { /* compiled code */ }
+ def foo2439(): scala.Int = { /* compiled code */ }
+ def foo2440(): scala.Int = { /* compiled code */ }
+ def foo2441(): scala.Int = { /* compiled code */ }
+ def foo2442(): scala.Int = { /* compiled code */ }
+ def foo2443(): scala.Int = { /* compiled code */ }
+ def foo2444(): scala.Int = { /* compiled code */ }
+ def foo2445(): scala.Int = { /* compiled code */ }
+ def foo2446(): scala.Int = { /* compiled code */ }
+ def foo2447(): scala.Int = { /* compiled code */ }
+ def foo2448(): scala.Int = { /* compiled code */ }
+ def foo2449(): scala.Int = { /* compiled code */ }
+ def foo2450(): scala.Int = { /* compiled code */ }
+ def foo2451(): scala.Int = { /* compiled code */ }
+ def foo2452(): scala.Int = { /* compiled code */ }
+ def foo2453(): scala.Int = { /* compiled code */ }
+ def foo2454(): scala.Int = { /* compiled code */ }
+ def foo2455(): scala.Int = { /* compiled code */ }
+ def foo2456(): scala.Int = { /* compiled code */ }
+ def foo2457(): scala.Int = { /* compiled code */ }
+ def foo2458(): scala.Int = { /* compiled code */ }
+ def foo2459(): scala.Int = { /* compiled code */ }
+ def foo2460(): scala.Int = { /* compiled code */ }
+ def foo2461(): scala.Int = { /* compiled code */ }
+ def foo2462(): scala.Int = { /* compiled code */ }
+ def foo2463(): scala.Int = { /* compiled code */ }
+ def foo2464(): scala.Int = { /* compiled code */ }
+ def foo2465(): scala.Int = { /* compiled code */ }
+ def foo2466(): scala.Int = { /* compiled code */ }
+ def foo2467(): scala.Int = { /* compiled code */ }
+ def foo2468(): scala.Int = { /* compiled code */ }
+ def foo2469(): scala.Int = { /* compiled code */ }
+ def foo2470(): scala.Int = { /* compiled code */ }
+ def foo2471(): scala.Int = { /* compiled code */ }
+ def foo2472(): scala.Int = { /* compiled code */ }
+ def foo2473(): scala.Int = { /* compiled code */ }
+ def foo2474(): scala.Int = { /* compiled code */ }
+ def foo2475(): scala.Int = { /* compiled code */ }
+ def foo2476(): scala.Int = { /* compiled code */ }
+ def foo2477(): scala.Int = { /* compiled code */ }
+ def foo2478(): scala.Int = { /* compiled code */ }
+ def foo2479(): scala.Int = { /* compiled code */ }
+ def foo2480(): scala.Int = { /* compiled code */ }
+ def foo2481(): scala.Int = { /* compiled code */ }
+ def foo2482(): scala.Int = { /* compiled code */ }
+ def foo2483(): scala.Int = { /* compiled code */ }
+ def foo2484(): scala.Int = { /* compiled code */ }
+ def foo2485(): scala.Int = { /* compiled code */ }
+ def foo2486(): scala.Int = { /* compiled code */ }
+ def foo2487(): scala.Int = { /* compiled code */ }
+ def foo2488(): scala.Int = { /* compiled code */ }
+ def foo2489(): scala.Int = { /* compiled code */ }
+ def foo2490(): scala.Int = { /* compiled code */ }
+ def foo2491(): scala.Int = { /* compiled code */ }
+ def foo2492(): scala.Int = { /* compiled code */ }
+ def foo2493(): scala.Int = { /* compiled code */ }
+ def foo2494(): scala.Int = { /* compiled code */ }
+ def foo2495(): scala.Int = { /* compiled code */ }
+ def foo2496(): scala.Int = { /* compiled code */ }
+ def foo2497(): scala.Int = { /* compiled code */ }
+ def foo2498(): scala.Int = { /* compiled code */ }
+ def foo2499(): scala.Int = { /* compiled code */ }
+ def foo2500(): scala.Int = { /* compiled code */ }
+ def foo2501(): scala.Int = { /* compiled code */ }
+ def foo2502(): scala.Int = { /* compiled code */ }
+ def foo2503(): scala.Int = { /* compiled code */ }
+ def foo2504(): scala.Int = { /* compiled code */ }
+ def foo2505(): scala.Int = { /* compiled code */ }
+ def foo2506(): scala.Int = { /* compiled code */ }
+ def foo2507(): scala.Int = { /* compiled code */ }
+ def foo2508(): scala.Int = { /* compiled code */ }
+ def foo2509(): scala.Int = { /* compiled code */ }
+ def foo2510(): scala.Int = { /* compiled code */ }
+ def foo2511(): scala.Int = { /* compiled code */ }
+ def foo2512(): scala.Int = { /* compiled code */ }
+ def foo2513(): scala.Int = { /* compiled code */ }
+ def foo2514(): scala.Int = { /* compiled code */ }
+ def foo2515(): scala.Int = { /* compiled code */ }
+ def foo2516(): scala.Int = { /* compiled code */ }
+ def foo2517(): scala.Int = { /* compiled code */ }
+ def foo2518(): scala.Int = { /* compiled code */ }
+ def foo2519(): scala.Int = { /* compiled code */ }
+ def foo2520(): scala.Int = { /* compiled code */ }
+ def foo2521(): scala.Int = { /* compiled code */ }
+ def foo2522(): scala.Int = { /* compiled code */ }
+ def foo2523(): scala.Int = { /* compiled code */ }
+ def foo2524(): scala.Int = { /* compiled code */ }
+ def foo2525(): scala.Int = { /* compiled code */ }
+ def foo2526(): scala.Int = { /* compiled code */ }
+ def foo2527(): scala.Int = { /* compiled code */ }
+ def foo2528(): scala.Int = { /* compiled code */ }
+ def foo2529(): scala.Int = { /* compiled code */ }
+ def foo2530(): scala.Int = { /* compiled code */ }
+ def foo2531(): scala.Int = { /* compiled code */ }
+ def foo2532(): scala.Int = { /* compiled code */ }
+ def foo2533(): scala.Int = { /* compiled code */ }
+ def foo2534(): scala.Int = { /* compiled code */ }
+ def foo2535(): scala.Int = { /* compiled code */ }
+ def foo2536(): scala.Int = { /* compiled code */ }
+ def foo2537(): scala.Int = { /* compiled code */ }
+ def foo2538(): scala.Int = { /* compiled code */ }
+ def foo2539(): scala.Int = { /* compiled code */ }
+ def foo2540(): scala.Int = { /* compiled code */ }
+ def foo2541(): scala.Int = { /* compiled code */ }
+ def foo2542(): scala.Int = { /* compiled code */ }
+ def foo2543(): scala.Int = { /* compiled code */ }
+ def foo2544(): scala.Int = { /* compiled code */ }
+ def foo2545(): scala.Int = { /* compiled code */ }
+ def foo2546(): scala.Int = { /* compiled code */ }
+ def foo2547(): scala.Int = { /* compiled code */ }
+ def foo2548(): scala.Int = { /* compiled code */ }
+ def foo2549(): scala.Int = { /* compiled code */ }
+ def foo2550(): scala.Int = { /* compiled code */ }
+ def foo2551(): scala.Int = { /* compiled code */ }
+ def foo2552(): scala.Int = { /* compiled code */ }
+ def foo2553(): scala.Int = { /* compiled code */ }
+ def foo2554(): scala.Int = { /* compiled code */ }
+ def foo2555(): scala.Int = { /* compiled code */ }
+ def foo2556(): scala.Int = { /* compiled code */ }
+ def foo2557(): scala.Int = { /* compiled code */ }
+ def foo2558(): scala.Int = { /* compiled code */ }
+ def foo2559(): scala.Int = { /* compiled code */ }
+ def foo2560(): scala.Int = { /* compiled code */ }
+ def foo2561(): scala.Int = { /* compiled code */ }
+ def foo2562(): scala.Int = { /* compiled code */ }
+ def foo2563(): scala.Int = { /* compiled code */ }
+ def foo2564(): scala.Int = { /* compiled code */ }
+ def foo2565(): scala.Int = { /* compiled code */ }
+ def foo2566(): scala.Int = { /* compiled code */ }
+ def foo2567(): scala.Int = { /* compiled code */ }
+ def foo2568(): scala.Int = { /* compiled code */ }
+ def foo2569(): scala.Int = { /* compiled code */ }
+ def foo2570(): scala.Int = { /* compiled code */ }
+ def foo2571(): scala.Int = { /* compiled code */ }
+ def foo2572(): scala.Int = { /* compiled code */ }
+ def foo2573(): scala.Int = { /* compiled code */ }
+ def foo2574(): scala.Int = { /* compiled code */ }
+ def foo2575(): scala.Int = { /* compiled code */ }
+ def foo2576(): scala.Int = { /* compiled code */ }
+ def foo2577(): scala.Int = { /* compiled code */ }
+ def foo2578(): scala.Int = { /* compiled code */ }
+ def foo2579(): scala.Int = { /* compiled code */ }
+ def foo2580(): scala.Int = { /* compiled code */ }
+ def foo2581(): scala.Int = { /* compiled code */ }
+ def foo2582(): scala.Int = { /* compiled code */ }
+ def foo2583(): scala.Int = { /* compiled code */ }
+ def foo2584(): scala.Int = { /* compiled code */ }
+ def foo2585(): scala.Int = { /* compiled code */ }
+ def foo2586(): scala.Int = { /* compiled code */ }
+ def foo2587(): scala.Int = { /* compiled code */ }
+ def foo2588(): scala.Int = { /* compiled code */ }
+ def foo2589(): scala.Int = { /* compiled code */ }
+ def foo2590(): scala.Int = { /* compiled code */ }
+ def foo2591(): scala.Int = { /* compiled code */ }
+ def foo2592(): scala.Int = { /* compiled code */ }
+ def foo2593(): scala.Int = { /* compiled code */ }
+ def foo2594(): scala.Int = { /* compiled code */ }
+ def foo2595(): scala.Int = { /* compiled code */ }
+ def foo2596(): scala.Int = { /* compiled code */ }
+ def foo2597(): scala.Int = { /* compiled code */ }
+ def foo2598(): scala.Int = { /* compiled code */ }
+ def foo2599(): scala.Int = { /* compiled code */ }
+ def foo2600(): scala.Int = { /* compiled code */ }
+ def foo2601(): scala.Int = { /* compiled code */ }
+ def foo2602(): scala.Int = { /* compiled code */ }
+ def foo2603(): scala.Int = { /* compiled code */ }
+ def foo2604(): scala.Int = { /* compiled code */ }
+ def foo2605(): scala.Int = { /* compiled code */ }
+ def foo2606(): scala.Int = { /* compiled code */ }
+ def foo2607(): scala.Int = { /* compiled code */ }
+ def foo2608(): scala.Int = { /* compiled code */ }
+ def foo2609(): scala.Int = { /* compiled code */ }
+ def foo2610(): scala.Int = { /* compiled code */ }
+ def foo2611(): scala.Int = { /* compiled code */ }
+ def foo2612(): scala.Int = { /* compiled code */ }
+ def foo2613(): scala.Int = { /* compiled code */ }
+ def foo2614(): scala.Int = { /* compiled code */ }
+ def foo2615(): scala.Int = { /* compiled code */ }
+ def foo2616(): scala.Int = { /* compiled code */ }
+ def foo2617(): scala.Int = { /* compiled code */ }
+ def foo2618(): scala.Int = { /* compiled code */ }
+ def foo2619(): scala.Int = { /* compiled code */ }
+ def foo2620(): scala.Int = { /* compiled code */ }
+ def foo2621(): scala.Int = { /* compiled code */ }
+ def foo2622(): scala.Int = { /* compiled code */ }
+ def foo2623(): scala.Int = { /* compiled code */ }
+ def foo2624(): scala.Int = { /* compiled code */ }
+ def foo2625(): scala.Int = { /* compiled code */ }
+ def foo2626(): scala.Int = { /* compiled code */ }
+ def foo2627(): scala.Int = { /* compiled code */ }
+ def foo2628(): scala.Int = { /* compiled code */ }
+ def foo2629(): scala.Int = { /* compiled code */ }
+ def foo2630(): scala.Int = { /* compiled code */ }
+ def foo2631(): scala.Int = { /* compiled code */ }
+ def foo2632(): scala.Int = { /* compiled code */ }
+ def foo2633(): scala.Int = { /* compiled code */ }
+ def foo2634(): scala.Int = { /* compiled code */ }
+ def foo2635(): scala.Int = { /* compiled code */ }
+ def foo2636(): scala.Int = { /* compiled code */ }
+ def foo2637(): scala.Int = { /* compiled code */ }
+ def foo2638(): scala.Int = { /* compiled code */ }
+ def foo2639(): scala.Int = { /* compiled code */ }
+ def foo2640(): scala.Int = { /* compiled code */ }
+ def foo2641(): scala.Int = { /* compiled code */ }
+ def foo2642(): scala.Int = { /* compiled code */ }
+ def foo2643(): scala.Int = { /* compiled code */ }
+ def foo2644(): scala.Int = { /* compiled code */ }
+ def foo2645(): scala.Int = { /* compiled code */ }
+ def foo2646(): scala.Int = { /* compiled code */ }
+ def foo2647(): scala.Int = { /* compiled code */ }
+ def foo2648(): scala.Int = { /* compiled code */ }
+ def foo2649(): scala.Int = { /* compiled code */ }
+ def foo2650(): scala.Int = { /* compiled code */ }
+ def foo2651(): scala.Int = { /* compiled code */ }
+ def foo2652(): scala.Int = { /* compiled code */ }
+ def foo2653(): scala.Int = { /* compiled code */ }
+ def foo2654(): scala.Int = { /* compiled code */ }
+ def foo2655(): scala.Int = { /* compiled code */ }
+ def foo2656(): scala.Int = { /* compiled code */ }
+ def foo2657(): scala.Int = { /* compiled code */ }
+ def foo2658(): scala.Int = { /* compiled code */ }
+ def foo2659(): scala.Int = { /* compiled code */ }
+ def foo2660(): scala.Int = { /* compiled code */ }
+ def foo2661(): scala.Int = { /* compiled code */ }
+ def foo2662(): scala.Int = { /* compiled code */ }
+ def foo2663(): scala.Int = { /* compiled code */ }
+ def foo2664(): scala.Int = { /* compiled code */ }
+ def foo2665(): scala.Int = { /* compiled code */ }
+ def foo2666(): scala.Int = { /* compiled code */ }
+ def foo2667(): scala.Int = { /* compiled code */ }
+ def foo2668(): scala.Int = { /* compiled code */ }
+ def foo2669(): scala.Int = { /* compiled code */ }
+ def foo2670(): scala.Int = { /* compiled code */ }
+ def foo2671(): scala.Int = { /* compiled code */ }
+ def foo2672(): scala.Int = { /* compiled code */ }
+ def foo2673(): scala.Int = { /* compiled code */ }
+ def foo2674(): scala.Int = { /* compiled code */ }
+ def foo2675(): scala.Int = { /* compiled code */ }
+ def foo2676(): scala.Int = { /* compiled code */ }
+ def foo2677(): scala.Int = { /* compiled code */ }
+ def foo2678(): scala.Int = { /* compiled code */ }
+ def foo2679(): scala.Int = { /* compiled code */ }
+ def foo2680(): scala.Int = { /* compiled code */ }
+ def foo2681(): scala.Int = { /* compiled code */ }
+ def foo2682(): scala.Int = { /* compiled code */ }
+ def foo2683(): scala.Int = { /* compiled code */ }
+ def foo2684(): scala.Int = { /* compiled code */ }
+ def foo2685(): scala.Int = { /* compiled code */ }
+ def foo2686(): scala.Int = { /* compiled code */ }
+ def foo2687(): scala.Int = { /* compiled code */ }
+ def foo2688(): scala.Int = { /* compiled code */ }
+ def foo2689(): scala.Int = { /* compiled code */ }
+ def foo2690(): scala.Int = { /* compiled code */ }
+ def foo2691(): scala.Int = { /* compiled code */ }
+ def foo2692(): scala.Int = { /* compiled code */ }
+ def foo2693(): scala.Int = { /* compiled code */ }
+ def foo2694(): scala.Int = { /* compiled code */ }
+ def foo2695(): scala.Int = { /* compiled code */ }
+ def foo2696(): scala.Int = { /* compiled code */ }
+ def foo2697(): scala.Int = { /* compiled code */ }
+ def foo2698(): scala.Int = { /* compiled code */ }
+ def foo2699(): scala.Int = { /* compiled code */ }
+ def foo2700(): scala.Int = { /* compiled code */ }
+ def foo2701(): scala.Int = { /* compiled code */ }
+ def foo2702(): scala.Int = { /* compiled code */ }
+ def foo2703(): scala.Int = { /* compiled code */ }
+ def foo2704(): scala.Int = { /* compiled code */ }
+ def foo2705(): scala.Int = { /* compiled code */ }
+ def foo2706(): scala.Int = { /* compiled code */ }
+ def foo2707(): scala.Int = { /* compiled code */ }
+ def foo2708(): scala.Int = { /* compiled code */ }
+ def foo2709(): scala.Int = { /* compiled code */ }
+ def foo2710(): scala.Int = { /* compiled code */ }
+ def foo2711(): scala.Int = { /* compiled code */ }
+ def foo2712(): scala.Int = { /* compiled code */ }
+ def foo2713(): scala.Int = { /* compiled code */ }
+ def foo2714(): scala.Int = { /* compiled code */ }
+ def foo2715(): scala.Int = { /* compiled code */ }
+ def foo2716(): scala.Int = { /* compiled code */ }
+ def foo2717(): scala.Int = { /* compiled code */ }
+ def foo2718(): scala.Int = { /* compiled code */ }
+ def foo2719(): scala.Int = { /* compiled code */ }
+ def foo2720(): scala.Int = { /* compiled code */ }
+ def foo2721(): scala.Int = { /* compiled code */ }
+ def foo2722(): scala.Int = { /* compiled code */ }
+ def foo2723(): scala.Int = { /* compiled code */ }
+ def foo2724(): scala.Int = { /* compiled code */ }
+ def foo2725(): scala.Int = { /* compiled code */ }
+ def foo2726(): scala.Int = { /* compiled code */ }
+ def foo2727(): scala.Int = { /* compiled code */ }
+ def foo2728(): scala.Int = { /* compiled code */ }
+ def foo2729(): scala.Int = { /* compiled code */ }
+ def foo2730(): scala.Int = { /* compiled code */ }
+ def foo2731(): scala.Int = { /* compiled code */ }
+ def foo2732(): scala.Int = { /* compiled code */ }
+ def foo2733(): scala.Int = { /* compiled code */ }
+ def foo2734(): scala.Int = { /* compiled code */ }
+ def foo2735(): scala.Int = { /* compiled code */ }
+ def foo2736(): scala.Int = { /* compiled code */ }
+ def foo2737(): scala.Int = { /* compiled code */ }
+ def foo2738(): scala.Int = { /* compiled code */ }
+ def foo2739(): scala.Int = { /* compiled code */ }
+ def foo2740(): scala.Int = { /* compiled code */ }
+ def foo2741(): scala.Int = { /* compiled code */ }
+ def foo2742(): scala.Int = { /* compiled code */ }
+ def foo2743(): scala.Int = { /* compiled code */ }
+ def foo2744(): scala.Int = { /* compiled code */ }
+ def foo2745(): scala.Int = { /* compiled code */ }
+ def foo2746(): scala.Int = { /* compiled code */ }
+ def foo2747(): scala.Int = { /* compiled code */ }
+ def foo2748(): scala.Int = { /* compiled code */ }
+ def foo2749(): scala.Int = { /* compiled code */ }
+ def foo2750(): scala.Int = { /* compiled code */ }
+ def foo2751(): scala.Int = { /* compiled code */ }
+ def foo2752(): scala.Int = { /* compiled code */ }
+ def foo2753(): scala.Int = { /* compiled code */ }
+ def foo2754(): scala.Int = { /* compiled code */ }
+ def foo2755(): scala.Int = { /* compiled code */ }
+ def foo2756(): scala.Int = { /* compiled code */ }
+ def foo2757(): scala.Int = { /* compiled code */ }
+ def foo2758(): scala.Int = { /* compiled code */ }
+ def foo2759(): scala.Int = { /* compiled code */ }
+ def foo2760(): scala.Int = { /* compiled code */ }
+ def foo2761(): scala.Int = { /* compiled code */ }
+ def foo2762(): scala.Int = { /* compiled code */ }
+ def foo2763(): scala.Int = { /* compiled code */ }
+ def foo2764(): scala.Int = { /* compiled code */ }
+ def foo2765(): scala.Int = { /* compiled code */ }
+ def foo2766(): scala.Int = { /* compiled code */ }
+ def foo2767(): scala.Int = { /* compiled code */ }
+ def foo2768(): scala.Int = { /* compiled code */ }
+ def foo2769(): scala.Int = { /* compiled code */ }
+ def foo2770(): scala.Int = { /* compiled code */ }
+ def foo2771(): scala.Int = { /* compiled code */ }
+ def foo2772(): scala.Int = { /* compiled code */ }
+ def foo2773(): scala.Int = { /* compiled code */ }
+ def foo2774(): scala.Int = { /* compiled code */ }
+ def foo2775(): scala.Int = { /* compiled code */ }
+ def foo2776(): scala.Int = { /* compiled code */ }
+ def foo2777(): scala.Int = { /* compiled code */ }
+ def foo2778(): scala.Int = { /* compiled code */ }
+ def foo2779(): scala.Int = { /* compiled code */ }
+ def foo2780(): scala.Int = { /* compiled code */ }
+ def foo2781(): scala.Int = { /* compiled code */ }
+ def foo2782(): scala.Int = { /* compiled code */ }
+ def foo2783(): scala.Int = { /* compiled code */ }
+ def foo2784(): scala.Int = { /* compiled code */ }
+ def foo2785(): scala.Int = { /* compiled code */ }
+ def foo2786(): scala.Int = { /* compiled code */ }
+ def foo2787(): scala.Int = { /* compiled code */ }
+ def foo2788(): scala.Int = { /* compiled code */ }
+ def foo2789(): scala.Int = { /* compiled code */ }
+ def foo2790(): scala.Int = { /* compiled code */ }
+ def foo2791(): scala.Int = { /* compiled code */ }
+ def foo2792(): scala.Int = { /* compiled code */ }
+ def foo2793(): scala.Int = { /* compiled code */ }
+ def foo2794(): scala.Int = { /* compiled code */ }
+ def foo2795(): scala.Int = { /* compiled code */ }
+ def foo2796(): scala.Int = { /* compiled code */ }
+ def foo2797(): scala.Int = { /* compiled code */ }
+ def foo2798(): scala.Int = { /* compiled code */ }
+ def foo2799(): scala.Int = { /* compiled code */ }
+ def foo2800(): scala.Int = { /* compiled code */ }
+ def foo2801(): scala.Int = { /* compiled code */ }
+ def foo2802(): scala.Int = { /* compiled code */ }
+ def foo2803(): scala.Int = { /* compiled code */ }
+ def foo2804(): scala.Int = { /* compiled code */ }
+ def foo2805(): scala.Int = { /* compiled code */ }
+ def foo2806(): scala.Int = { /* compiled code */ }
+ def foo2807(): scala.Int = { /* compiled code */ }
+ def foo2808(): scala.Int = { /* compiled code */ }
+ def foo2809(): scala.Int = { /* compiled code */ }
+ def foo2810(): scala.Int = { /* compiled code */ }
+ def foo2811(): scala.Int = { /* compiled code */ }
+ def foo2812(): scala.Int = { /* compiled code */ }
+ def foo2813(): scala.Int = { /* compiled code */ }
+ def foo2814(): scala.Int = { /* compiled code */ }
+ def foo2815(): scala.Int = { /* compiled code */ }
+ def foo2816(): scala.Int = { /* compiled code */ }
+ def foo2817(): scala.Int = { /* compiled code */ }
+ def foo2818(): scala.Int = { /* compiled code */ }
+ def foo2819(): scala.Int = { /* compiled code */ }
+ def foo2820(): scala.Int = { /* compiled code */ }
+ def foo2821(): scala.Int = { /* compiled code */ }
+ def foo2822(): scala.Int = { /* compiled code */ }
+ def foo2823(): scala.Int = { /* compiled code */ }
+ def foo2824(): scala.Int = { /* compiled code */ }
+ def foo2825(): scala.Int = { /* compiled code */ }
+ def foo2826(): scala.Int = { /* compiled code */ }
+ def foo2827(): scala.Int = { /* compiled code */ }
+ def foo2828(): scala.Int = { /* compiled code */ }
+ def foo2829(): scala.Int = { /* compiled code */ }
+ def foo2830(): scala.Int = { /* compiled code */ }
+ def foo2831(): scala.Int = { /* compiled code */ }
+ def foo2832(): scala.Int = { /* compiled code */ }
+ def foo2833(): scala.Int = { /* compiled code */ }
+ def foo2834(): scala.Int = { /* compiled code */ }
+ def foo2835(): scala.Int = { /* compiled code */ }
+ def foo2836(): scala.Int = { /* compiled code */ }
+ def foo2837(): scala.Int = { /* compiled code */ }
+ def foo2838(): scala.Int = { /* compiled code */ }
+ def foo2839(): scala.Int = { /* compiled code */ }
+ def foo2840(): scala.Int = { /* compiled code */ }
+ def foo2841(): scala.Int = { /* compiled code */ }
+ def foo2842(): scala.Int = { /* compiled code */ }
+ def foo2843(): scala.Int = { /* compiled code */ }
+ def foo2844(): scala.Int = { /* compiled code */ }
+ def foo2845(): scala.Int = { /* compiled code */ }
+ def foo2846(): scala.Int = { /* compiled code */ }
+ def foo2847(): scala.Int = { /* compiled code */ }
+ def foo2848(): scala.Int = { /* compiled code */ }
+ def foo2849(): scala.Int = { /* compiled code */ }
+ def foo2850(): scala.Int = { /* compiled code */ }
+ def foo2851(): scala.Int = { /* compiled code */ }
+ def foo2852(): scala.Int = { /* compiled code */ }
+ def foo2853(): scala.Int = { /* compiled code */ }
+ def foo2854(): scala.Int = { /* compiled code */ }
+ def foo2855(): scala.Int = { /* compiled code */ }
+ def foo2856(): scala.Int = { /* compiled code */ }
+ def foo2857(): scala.Int = { /* compiled code */ }
+ def foo2858(): scala.Int = { /* compiled code */ }
+ def foo2859(): scala.Int = { /* compiled code */ }
+ def foo2860(): scala.Int = { /* compiled code */ }
+ def foo2861(): scala.Int = { /* compiled code */ }
+ def foo2862(): scala.Int = { /* compiled code */ }
+ def foo2863(): scala.Int = { /* compiled code */ }
+ def foo2864(): scala.Int = { /* compiled code */ }
+ def foo2865(): scala.Int = { /* compiled code */ }
+ def foo2866(): scala.Int = { /* compiled code */ }
+ def foo2867(): scala.Int = { /* compiled code */ }
+ def foo2868(): scala.Int = { /* compiled code */ }
+ def foo2869(): scala.Int = { /* compiled code */ }
+ def foo2870(): scala.Int = { /* compiled code */ }
+ def foo2871(): scala.Int = { /* compiled code */ }
+ def foo2872(): scala.Int = { /* compiled code */ }
+ def foo2873(): scala.Int = { /* compiled code */ }
+ def foo2874(): scala.Int = { /* compiled code */ }
+ def foo2875(): scala.Int = { /* compiled code */ }
+ def foo2876(): scala.Int = { /* compiled code */ }
+ def foo2877(): scala.Int = { /* compiled code */ }
+ def foo2878(): scala.Int = { /* compiled code */ }
+ def foo2879(): scala.Int = { /* compiled code */ }
+ def foo2880(): scala.Int = { /* compiled code */ }
+ def foo2881(): scala.Int = { /* compiled code */ }
+ def foo2882(): scala.Int = { /* compiled code */ }
+ def foo2883(): scala.Int = { /* compiled code */ }
+ def foo2884(): scala.Int = { /* compiled code */ }
+ def foo2885(): scala.Int = { /* compiled code */ }
+ def foo2886(): scala.Int = { /* compiled code */ }
+ def foo2887(): scala.Int = { /* compiled code */ }
+ def foo2888(): scala.Int = { /* compiled code */ }
+ def foo2889(): scala.Int = { /* compiled code */ }
+ def foo2890(): scala.Int = { /* compiled code */ }
+ def foo2891(): scala.Int = { /* compiled code */ }
+ def foo2892(): scala.Int = { /* compiled code */ }
+ def foo2893(): scala.Int = { /* compiled code */ }
+ def foo2894(): scala.Int = { /* compiled code */ }
+ def foo2895(): scala.Int = { /* compiled code */ }
+ def foo2896(): scala.Int = { /* compiled code */ }
+ def foo2897(): scala.Int = { /* compiled code */ }
+ def foo2898(): scala.Int = { /* compiled code */ }
+ def foo2899(): scala.Int = { /* compiled code */ }
+ def foo2900(): scala.Int = { /* compiled code */ }
+ def foo2901(): scala.Int = { /* compiled code */ }
+ def foo2902(): scala.Int = { /* compiled code */ }
+ def foo2903(): scala.Int = { /* compiled code */ }
+ def foo2904(): scala.Int = { /* compiled code */ }
+ def foo2905(): scala.Int = { /* compiled code */ }
+ def foo2906(): scala.Int = { /* compiled code */ }
+ def foo2907(): scala.Int = { /* compiled code */ }
+ def foo2908(): scala.Int = { /* compiled code */ }
+ def foo2909(): scala.Int = { /* compiled code */ }
+ def foo2910(): scala.Int = { /* compiled code */ }
+ def foo2911(): scala.Int = { /* compiled code */ }
+ def foo2912(): scala.Int = { /* compiled code */ }
+ def foo2913(): scala.Int = { /* compiled code */ }
+ def foo2914(): scala.Int = { /* compiled code */ }
+ def foo2915(): scala.Int = { /* compiled code */ }
+ def foo2916(): scala.Int = { /* compiled code */ }
+ def foo2917(): scala.Int = { /* compiled code */ }
+ def foo2918(): scala.Int = { /* compiled code */ }
+ def foo2919(): scala.Int = { /* compiled code */ }
+ def foo2920(): scala.Int = { /* compiled code */ }
+ def foo2921(): scala.Int = { /* compiled code */ }
+ def foo2922(): scala.Int = { /* compiled code */ }
+ def foo2923(): scala.Int = { /* compiled code */ }
+ def foo2924(): scala.Int = { /* compiled code */ }
+ def foo2925(): scala.Int = { /* compiled code */ }
+ def foo2926(): scala.Int = { /* compiled code */ }
+ def foo2927(): scala.Int = { /* compiled code */ }
+ def foo2928(): scala.Int = { /* compiled code */ }
+ def foo2929(): scala.Int = { /* compiled code */ }
+ def foo2930(): scala.Int = { /* compiled code */ }
+ def foo2931(): scala.Int = { /* compiled code */ }
+ def foo2932(): scala.Int = { /* compiled code */ }
+ def foo2933(): scala.Int = { /* compiled code */ }
+ def foo2934(): scala.Int = { /* compiled code */ }
+ def foo2935(): scala.Int = { /* compiled code */ }
+ def foo2936(): scala.Int = { /* compiled code */ }
+ def foo2937(): scala.Int = { /* compiled code */ }
+ def foo2938(): scala.Int = { /* compiled code */ }
+ def foo2939(): scala.Int = { /* compiled code */ }
+ def foo2940(): scala.Int = { /* compiled code */ }
+ def foo2941(): scala.Int = { /* compiled code */ }
+ def foo2942(): scala.Int = { /* compiled code */ }
+ def foo2943(): scala.Int = { /* compiled code */ }
+ def foo2944(): scala.Int = { /* compiled code */ }
+ def foo2945(): scala.Int = { /* compiled code */ }
+ def foo2946(): scala.Int = { /* compiled code */ }
+ def foo2947(): scala.Int = { /* compiled code */ }
+ def foo2948(): scala.Int = { /* compiled code */ }
+ def foo2949(): scala.Int = { /* compiled code */ }
+ def foo2950(): scala.Int = { /* compiled code */ }
+ def foo2951(): scala.Int = { /* compiled code */ }
+ def foo2952(): scala.Int = { /* compiled code */ }
+ def foo2953(): scala.Int = { /* compiled code */ }
+ def foo2954(): scala.Int = { /* compiled code */ }
+ def foo2955(): scala.Int = { /* compiled code */ }
+ def foo2956(): scala.Int = { /* compiled code */ }
+ def foo2957(): scala.Int = { /* compiled code */ }
+ def foo2958(): scala.Int = { /* compiled code */ }
+ def foo2959(): scala.Int = { /* compiled code */ }
+ def foo2960(): scala.Int = { /* compiled code */ }
+ def foo2961(): scala.Int = { /* compiled code */ }
+ def foo2962(): scala.Int = { /* compiled code */ }
+ def foo2963(): scala.Int = { /* compiled code */ }
+ def foo2964(): scala.Int = { /* compiled code */ }
+ def foo2965(): scala.Int = { /* compiled code */ }
+ def foo2966(): scala.Int = { /* compiled code */ }
+ def foo2967(): scala.Int = { /* compiled code */ }
+ def foo2968(): scala.Int = { /* compiled code */ }
+ def foo2969(): scala.Int = { /* compiled code */ }
+ def foo2970(): scala.Int = { /* compiled code */ }
+ def foo2971(): scala.Int = { /* compiled code */ }
+ def foo2972(): scala.Int = { /* compiled code */ }
+ def foo2973(): scala.Int = { /* compiled code */ }
+ def foo2974(): scala.Int = { /* compiled code */ }
+ def foo2975(): scala.Int = { /* compiled code */ }
+ def foo2976(): scala.Int = { /* compiled code */ }
+ def foo2977(): scala.Int = { /* compiled code */ }
+ def foo2978(): scala.Int = { /* compiled code */ }
+ def foo2979(): scala.Int = { /* compiled code */ }
+ def foo2980(): scala.Int = { /* compiled code */ }
+ def foo2981(): scala.Int = { /* compiled code */ }
+ def foo2982(): scala.Int = { /* compiled code */ }
+ def foo2983(): scala.Int = { /* compiled code */ }
+ def foo2984(): scala.Int = { /* compiled code */ }
+ def foo2985(): scala.Int = { /* compiled code */ }
+ def foo2986(): scala.Int = { /* compiled code */ }
+ def foo2987(): scala.Int = { /* compiled code */ }
+ def foo2988(): scala.Int = { /* compiled code */ }
+ def foo2989(): scala.Int = { /* compiled code */ }
+ def foo2990(): scala.Int = { /* compiled code */ }
+ def foo2991(): scala.Int = { /* compiled code */ }
+ def foo2992(): scala.Int = { /* compiled code */ }
+ def foo2993(): scala.Int = { /* compiled code */ }
+ def foo2994(): scala.Int = { /* compiled code */ }
+ def foo2995(): scala.Int = { /* compiled code */ }
+ def foo2996(): scala.Int = { /* compiled code */ }
+ def foo2997(): scala.Int = { /* compiled code */ }
+ def foo2998(): scala.Int = { /* compiled code */ }
+ def foo2999(): scala.Int = { /* compiled code */ }
+ def foo3000(): scala.Int = { /* compiled code */ }
+ def foo3001(): scala.Int = { /* compiled code */ }
+ def foo3002(): scala.Int = { /* compiled code */ }
+ def foo3003(): scala.Int = { /* compiled code */ }
+ def foo3004(): scala.Int = { /* compiled code */ }
+ def foo3005(): scala.Int = { /* compiled code */ }
+ def foo3006(): scala.Int = { /* compiled code */ }
+ def foo3007(): scala.Int = { /* compiled code */ }
+ def foo3008(): scala.Int = { /* compiled code */ }
+ def foo3009(): scala.Int = { /* compiled code */ }
+ def foo3010(): scala.Int = { /* compiled code */ }
+ def foo3011(): scala.Int = { /* compiled code */ }
+ def foo3012(): scala.Int = { /* compiled code */ }
+ def foo3013(): scala.Int = { /* compiled code */ }
+ def foo3014(): scala.Int = { /* compiled code */ }
+ def foo3015(): scala.Int = { /* compiled code */ }
+ def foo3016(): scala.Int = { /* compiled code */ }
+ def foo3017(): scala.Int = { /* compiled code */ }
+ def foo3018(): scala.Int = { /* compiled code */ }
+ def foo3019(): scala.Int = { /* compiled code */ }
+ def foo3020(): scala.Int = { /* compiled code */ }
+ def foo3021(): scala.Int = { /* compiled code */ }
+ def foo3022(): scala.Int = { /* compiled code */ }
+ def foo3023(): scala.Int = { /* compiled code */ }
+ def foo3024(): scala.Int = { /* compiled code */ }
+ def foo3025(): scala.Int = { /* compiled code */ }
+ def foo3026(): scala.Int = { /* compiled code */ }
+ def foo3027(): scala.Int = { /* compiled code */ }
+ def foo3028(): scala.Int = { /* compiled code */ }
+ def foo3029(): scala.Int = { /* compiled code */ }
+ def foo3030(): scala.Int = { /* compiled code */ }
+ def foo3031(): scala.Int = { /* compiled code */ }
+ def foo3032(): scala.Int = { /* compiled code */ }
+ def foo3033(): scala.Int = { /* compiled code */ }
+ def foo3034(): scala.Int = { /* compiled code */ }
+ def foo3035(): scala.Int = { /* compiled code */ }
+ def foo3036(): scala.Int = { /* compiled code */ }
+ def foo3037(): scala.Int = { /* compiled code */ }
+ def foo3038(): scala.Int = { /* compiled code */ }
+ def foo3039(): scala.Int = { /* compiled code */ }
+ def foo3040(): scala.Int = { /* compiled code */ }
+ def foo3041(): scala.Int = { /* compiled code */ }
+ def foo3042(): scala.Int = { /* compiled code */ }
+ def foo3043(): scala.Int = { /* compiled code */ }
+ def foo3044(): scala.Int = { /* compiled code */ }
+ def foo3045(): scala.Int = { /* compiled code */ }
+ def foo3046(): scala.Int = { /* compiled code */ }
+ def foo3047(): scala.Int = { /* compiled code */ }
+ def foo3048(): scala.Int = { /* compiled code */ }
+ def foo3049(): scala.Int = { /* compiled code */ }
+ def foo3050(): scala.Int = { /* compiled code */ }
+ def foo3051(): scala.Int = { /* compiled code */ }
+ def foo3052(): scala.Int = { /* compiled code */ }
+ def foo3053(): scala.Int = { /* compiled code */ }
+ def foo3054(): scala.Int = { /* compiled code */ }
+ def foo3055(): scala.Int = { /* compiled code */ }
+ def foo3056(): scala.Int = { /* compiled code */ }
+ def foo3057(): scala.Int = { /* compiled code */ }
+ def foo3058(): scala.Int = { /* compiled code */ }
+ def foo3059(): scala.Int = { /* compiled code */ }
+ def foo3060(): scala.Int = { /* compiled code */ }
+ def foo3061(): scala.Int = { /* compiled code */ }
+ def foo3062(): scala.Int = { /* compiled code */ }
+ def foo3063(): scala.Int = { /* compiled code */ }
+ def foo3064(): scala.Int = { /* compiled code */ }
+ def foo3065(): scala.Int = { /* compiled code */ }
+ def foo3066(): scala.Int = { /* compiled code */ }
+ def foo3067(): scala.Int = { /* compiled code */ }
+ def foo3068(): scala.Int = { /* compiled code */ }
+ def foo3069(): scala.Int = { /* compiled code */ }
+ def foo3070(): scala.Int = { /* compiled code */ }
+ def foo3071(): scala.Int = { /* compiled code */ }
+ def foo3072(): scala.Int = { /* compiled code */ }
+ def foo3073(): scala.Int = { /* compiled code */ }
+ def foo3074(): scala.Int = { /* compiled code */ }
+ def foo3075(): scala.Int = { /* compiled code */ }
+ def foo3076(): scala.Int = { /* compiled code */ }
+ def foo3077(): scala.Int = { /* compiled code */ }
+ def foo3078(): scala.Int = { /* compiled code */ }
+ def foo3079(): scala.Int = { /* compiled code */ }
+ def foo3080(): scala.Int = { /* compiled code */ }
+ def foo3081(): scala.Int = { /* compiled code */ }
+ def foo3082(): scala.Int = { /* compiled code */ }
+ def foo3083(): scala.Int = { /* compiled code */ }
+ def foo3084(): scala.Int = { /* compiled code */ }
+ def foo3085(): scala.Int = { /* compiled code */ }
+ def foo3086(): scala.Int = { /* compiled code */ }
+ def foo3087(): scala.Int = { /* compiled code */ }
+ def foo3088(): scala.Int = { /* compiled code */ }
+ def foo3089(): scala.Int = { /* compiled code */ }
+ def foo3090(): scala.Int = { /* compiled code */ }
+ def foo3091(): scala.Int = { /* compiled code */ }
+ def foo3092(): scala.Int = { /* compiled code */ }
+ def foo3093(): scala.Int = { /* compiled code */ }
+ def foo3094(): scala.Int = { /* compiled code */ }
+ def foo3095(): scala.Int = { /* compiled code */ }
+ def foo3096(): scala.Int = { /* compiled code */ }
+ def foo3097(): scala.Int = { /* compiled code */ }
+ def foo3098(): scala.Int = { /* compiled code */ }
+ def foo3099(): scala.Int = { /* compiled code */ }
+ def foo3100(): scala.Int = { /* compiled code */ }
+ def foo3101(): scala.Int = { /* compiled code */ }
+ def foo3102(): scala.Int = { /* compiled code */ }
+ def foo3103(): scala.Int = { /* compiled code */ }
+ def foo3104(): scala.Int = { /* compiled code */ }
+ def foo3105(): scala.Int = { /* compiled code */ }
+ def foo3106(): scala.Int = { /* compiled code */ }
+ def foo3107(): scala.Int = { /* compiled code */ }
+ def foo3108(): scala.Int = { /* compiled code */ }
+ def foo3109(): scala.Int = { /* compiled code */ }
+ def foo3110(): scala.Int = { /* compiled code */ }
+ def foo3111(): scala.Int = { /* compiled code */ }
+ def foo3112(): scala.Int = { /* compiled code */ }
+ def foo3113(): scala.Int = { /* compiled code */ }
+ def foo3114(): scala.Int = { /* compiled code */ }
+ def foo3115(): scala.Int = { /* compiled code */ }
+ def foo3116(): scala.Int = { /* compiled code */ }
+ def foo3117(): scala.Int = { /* compiled code */ }
+ def foo3118(): scala.Int = { /* compiled code */ }
+ def foo3119(): scala.Int = { /* compiled code */ }
+ def foo3120(): scala.Int = { /* compiled code */ }
+ def foo3121(): scala.Int = { /* compiled code */ }
+ def foo3122(): scala.Int = { /* compiled code */ }
+ def foo3123(): scala.Int = { /* compiled code */ }
+ def foo3124(): scala.Int = { /* compiled code */ }
+ def foo3125(): scala.Int = { /* compiled code */ }
+ def foo3126(): scala.Int = { /* compiled code */ }
+ def foo3127(): scala.Int = { /* compiled code */ }
+ def foo3128(): scala.Int = { /* compiled code */ }
+ def foo3129(): scala.Int = { /* compiled code */ }
+ def foo3130(): scala.Int = { /* compiled code */ }
+ def foo3131(): scala.Int = { /* compiled code */ }
+ def foo3132(): scala.Int = { /* compiled code */ }
+ def foo3133(): scala.Int = { /* compiled code */ }
+ def foo3134(): scala.Int = { /* compiled code */ }
+ def foo3135(): scala.Int = { /* compiled code */ }
+ def foo3136(): scala.Int = { /* compiled code */ }
+ def foo3137(): scala.Int = { /* compiled code */ }
+ def foo3138(): scala.Int = { /* compiled code */ }
+ def foo3139(): scala.Int = { /* compiled code */ }
+ def foo3140(): scala.Int = { /* compiled code */ }
+ def foo3141(): scala.Int = { /* compiled code */ }
+ def foo3142(): scala.Int = { /* compiled code */ }
+ def foo3143(): scala.Int = { /* compiled code */ }
+ def foo3144(): scala.Int = { /* compiled code */ }
+ def foo3145(): scala.Int = { /* compiled code */ }
+ def foo3146(): scala.Int = { /* compiled code */ }
+ def foo3147(): scala.Int = { /* compiled code */ }
+ def foo3148(): scala.Int = { /* compiled code */ }
+ def foo3149(): scala.Int = { /* compiled code */ }
+ def foo3150(): scala.Int = { /* compiled code */ }
+ def foo3151(): scala.Int = { /* compiled code */ }
+ def foo3152(): scala.Int = { /* compiled code */ }
+ def foo3153(): scala.Int = { /* compiled code */ }
+ def foo3154(): scala.Int = { /* compiled code */ }
+ def foo3155(): scala.Int = { /* compiled code */ }
+ def foo3156(): scala.Int = { /* compiled code */ }
+ def foo3157(): scala.Int = { /* compiled code */ }
+ def foo3158(): scala.Int = { /* compiled code */ }
+ def foo3159(): scala.Int = { /* compiled code */ }
+ def foo3160(): scala.Int = { /* compiled code */ }
+ def foo3161(): scala.Int = { /* compiled code */ }
+ def foo3162(): scala.Int = { /* compiled code */ }
+ def foo3163(): scala.Int = { /* compiled code */ }
+ def foo3164(): scala.Int = { /* compiled code */ }
+ def foo3165(): scala.Int = { /* compiled code */ }
+ def foo3166(): scala.Int = { /* compiled code */ }
+ def foo3167(): scala.Int = { /* compiled code */ }
+ def foo3168(): scala.Int = { /* compiled code */ }
+ def foo3169(): scala.Int = { /* compiled code */ }
+ def foo3170(): scala.Int = { /* compiled code */ }
+ def foo3171(): scala.Int = { /* compiled code */ }
+ def foo3172(): scala.Int = { /* compiled code */ }
+ def foo3173(): scala.Int = { /* compiled code */ }
+ def foo3174(): scala.Int = { /* compiled code */ }
+ def foo3175(): scala.Int = { /* compiled code */ }
+ def foo3176(): scala.Int = { /* compiled code */ }
+ def foo3177(): scala.Int = { /* compiled code */ }
+ def foo3178(): scala.Int = { /* compiled code */ }
+ def foo3179(): scala.Int = { /* compiled code */ }
+ def foo3180(): scala.Int = { /* compiled code */ }
+ def foo3181(): scala.Int = { /* compiled code */ }
+ def foo3182(): scala.Int = { /* compiled code */ }
+ def foo3183(): scala.Int = { /* compiled code */ }
+ def foo3184(): scala.Int = { /* compiled code */ }
+ def foo3185(): scala.Int = { /* compiled code */ }
+ def foo3186(): scala.Int = { /* compiled code */ }
+ def foo3187(): scala.Int = { /* compiled code */ }
+ def foo3188(): scala.Int = { /* compiled code */ }
+ def foo3189(): scala.Int = { /* compiled code */ }
+ def foo3190(): scala.Int = { /* compiled code */ }
+ def foo3191(): scala.Int = { /* compiled code */ }
+ def foo3192(): scala.Int = { /* compiled code */ }
+ def foo3193(): scala.Int = { /* compiled code */ }
+ def foo3194(): scala.Int = { /* compiled code */ }
+ def foo3195(): scala.Int = { /* compiled code */ }
+ def foo3196(): scala.Int = { /* compiled code */ }
+ def foo3197(): scala.Int = { /* compiled code */ }
+ def foo3198(): scala.Int = { /* compiled code */ }
+ def foo3199(): scala.Int = { /* compiled code */ }
+ def foo3200(): scala.Int = { /* compiled code */ }
+ def foo3201(): scala.Int = { /* compiled code */ }
+ def foo3202(): scala.Int = { /* compiled code */ }
+ def foo3203(): scala.Int = { /* compiled code */ }
+ def foo3204(): scala.Int = { /* compiled code */ }
+ def foo3205(): scala.Int = { /* compiled code */ }
+ def foo3206(): scala.Int = { /* compiled code */ }
+ def foo3207(): scala.Int = { /* compiled code */ }
+ def foo3208(): scala.Int = { /* compiled code */ }
+ def foo3209(): scala.Int = { /* compiled code */ }
+ def foo3210(): scala.Int = { /* compiled code */ }
+ def foo3211(): scala.Int = { /* compiled code */ }
+ def foo3212(): scala.Int = { /* compiled code */ }
+ def foo3213(): scala.Int = { /* compiled code */ }
+ def foo3214(): scala.Int = { /* compiled code */ }
+ def foo3215(): scala.Int = { /* compiled code */ }
+ def foo3216(): scala.Int = { /* compiled code */ }
+ def foo3217(): scala.Int = { /* compiled code */ }
+ def foo3218(): scala.Int = { /* compiled code */ }
+ def foo3219(): scala.Int = { /* compiled code */ }
+ def foo3220(): scala.Int = { /* compiled code */ }
+ def foo3221(): scala.Int = { /* compiled code */ }
+ def foo3222(): scala.Int = { /* compiled code */ }
+ def foo3223(): scala.Int = { /* compiled code */ }
+ def foo3224(): scala.Int = { /* compiled code */ }
+ def foo3225(): scala.Int = { /* compiled code */ }
+ def foo3226(): scala.Int = { /* compiled code */ }
+ def foo3227(): scala.Int = { /* compiled code */ }
+ def foo3228(): scala.Int = { /* compiled code */ }
+ def foo3229(): scala.Int = { /* compiled code */ }
+ def foo3230(): scala.Int = { /* compiled code */ }
+ def foo3231(): scala.Int = { /* compiled code */ }
+ def foo3232(): scala.Int = { /* compiled code */ }
+ def foo3233(): scala.Int = { /* compiled code */ }
+ def foo3234(): scala.Int = { /* compiled code */ }
+ def foo3235(): scala.Int = { /* compiled code */ }
+ def foo3236(): scala.Int = { /* compiled code */ }
+ def foo3237(): scala.Int = { /* compiled code */ }
+ def foo3238(): scala.Int = { /* compiled code */ }
+ def foo3239(): scala.Int = { /* compiled code */ }
+ def foo3240(): scala.Int = { /* compiled code */ }
+ def foo3241(): scala.Int = { /* compiled code */ }
+ def foo3242(): scala.Int = { /* compiled code */ }
+ def foo3243(): scala.Int = { /* compiled code */ }
+ def foo3244(): scala.Int = { /* compiled code */ }
+ def foo3245(): scala.Int = { /* compiled code */ }
+ def foo3246(): scala.Int = { /* compiled code */ }
+ def foo3247(): scala.Int = { /* compiled code */ }
+ def foo3248(): scala.Int = { /* compiled code */ }
+ def foo3249(): scala.Int = { /* compiled code */ }
+ def foo3250(): scala.Int = { /* compiled code */ }
+ def foo3251(): scala.Int = { /* compiled code */ }
+ def foo3252(): scala.Int = { /* compiled code */ }
+ def foo3253(): scala.Int = { /* compiled code */ }
+ def foo3254(): scala.Int = { /* compiled code */ }
+ def foo3255(): scala.Int = { /* compiled code */ }
+ def foo3256(): scala.Int = { /* compiled code */ }
+ def foo3257(): scala.Int = { /* compiled code */ }
+ def foo3258(): scala.Int = { /* compiled code */ }
+ def foo3259(): scala.Int = { /* compiled code */ }
+ def foo3260(): scala.Int = { /* compiled code */ }
+ def foo3261(): scala.Int = { /* compiled code */ }
+ def foo3262(): scala.Int = { /* compiled code */ }
+ def foo3263(): scala.Int = { /* compiled code */ }
+ def foo3264(): scala.Int = { /* compiled code */ }
+ def foo3265(): scala.Int = { /* compiled code */ }
+ def foo3266(): scala.Int = { /* compiled code */ }
+ def foo3267(): scala.Int = { /* compiled code */ }
+ def foo3268(): scala.Int = { /* compiled code */ }
+ def foo3269(): scala.Int = { /* compiled code */ }
+ def foo3270(): scala.Int = { /* compiled code */ }
+ def foo3271(): scala.Int = { /* compiled code */ }
+ def foo3272(): scala.Int = { /* compiled code */ }
+ def foo3273(): scala.Int = { /* compiled code */ }
+ def foo3274(): scala.Int = { /* compiled code */ }
+ def foo3275(): scala.Int = { /* compiled code */ }
+ def foo3276(): scala.Int = { /* compiled code */ }
+ def foo3277(): scala.Int = { /* compiled code */ }
+ def foo3278(): scala.Int = { /* compiled code */ }
+ def foo3279(): scala.Int = { /* compiled code */ }
+ def foo3280(): scala.Int = { /* compiled code */ }
+ def foo3281(): scala.Int = { /* compiled code */ }
+ def foo3282(): scala.Int = { /* compiled code */ }
+ def foo3283(): scala.Int = { /* compiled code */ }
+ def foo3284(): scala.Int = { /* compiled code */ }
+ def foo3285(): scala.Int = { /* compiled code */ }
+ def foo3286(): scala.Int = { /* compiled code */ }
+ def foo3287(): scala.Int = { /* compiled code */ }
+ def foo3288(): scala.Int = { /* compiled code */ }
+ def foo3289(): scala.Int = { /* compiled code */ }
+ def foo3290(): scala.Int = { /* compiled code */ }
+ def foo3291(): scala.Int = { /* compiled code */ }
+ def foo3292(): scala.Int = { /* compiled code */ }
+ def foo3293(): scala.Int = { /* compiled code */ }
+ def foo3294(): scala.Int = { /* compiled code */ }
+ def foo3295(): scala.Int = { /* compiled code */ }
+ def foo3296(): scala.Int = { /* compiled code */ }
+ def foo3297(): scala.Int = { /* compiled code */ }
+ def foo3298(): scala.Int = { /* compiled code */ }
+ def foo3299(): scala.Int = { /* compiled code */ }
+ def foo3300(): scala.Int = { /* compiled code */ }
+ def foo3301(): scala.Int = { /* compiled code */ }
+ def foo3302(): scala.Int = { /* compiled code */ }
+ def foo3303(): scala.Int = { /* compiled code */ }
+ def foo3304(): scala.Int = { /* compiled code */ }
+ def foo3305(): scala.Int = { /* compiled code */ }
+ def foo3306(): scala.Int = { /* compiled code */ }
+ def foo3307(): scala.Int = { /* compiled code */ }
+ def foo3308(): scala.Int = { /* compiled code */ }
+ def foo3309(): scala.Int = { /* compiled code */ }
+ def foo3310(): scala.Int = { /* compiled code */ }
+ def foo3311(): scala.Int = { /* compiled code */ }
+ def foo3312(): scala.Int = { /* compiled code */ }
+ def foo3313(): scala.Int = { /* compiled code */ }
+ def foo3314(): scala.Int = { /* compiled code */ }
+ def foo3315(): scala.Int = { /* compiled code */ }
+ def foo3316(): scala.Int = { /* compiled code */ }
+ def foo3317(): scala.Int = { /* compiled code */ }
+ def foo3318(): scala.Int = { /* compiled code */ }
+ def foo3319(): scala.Int = { /* compiled code */ }
+ def foo3320(): scala.Int = { /* compiled code */ }
+ def foo3321(): scala.Int = { /* compiled code */ }
+ def foo3322(): scala.Int = { /* compiled code */ }
+ def foo3323(): scala.Int = { /* compiled code */ }
+ def foo3324(): scala.Int = { /* compiled code */ }
+ def foo3325(): scala.Int = { /* compiled code */ }
+ def foo3326(): scala.Int = { /* compiled code */ }
+ def foo3327(): scala.Int = { /* compiled code */ }
+ def foo3328(): scala.Int = { /* compiled code */ }
+ def foo3329(): scala.Int = { /* compiled code */ }
+ def foo3330(): scala.Int = { /* compiled code */ }
+ def foo3331(): scala.Int = { /* compiled code */ }
+ def foo3332(): scala.Int = { /* compiled code */ }
+ def foo3333(): scala.Int = { /* compiled code */ }
+ def foo3334(): scala.Int = { /* compiled code */ }
+ def foo3335(): scala.Int = { /* compiled code */ }
+ def foo3336(): scala.Int = { /* compiled code */ }
+ def foo3337(): scala.Int = { /* compiled code */ }
+ def foo3338(): scala.Int = { /* compiled code */ }
+ def foo3339(): scala.Int = { /* compiled code */ }
+ def foo3340(): scala.Int = { /* compiled code */ }
+ def foo3341(): scala.Int = { /* compiled code */ }
+ def foo3342(): scala.Int = { /* compiled code */ }
+ def foo3343(): scala.Int = { /* compiled code */ }
+ def foo3344(): scala.Int = { /* compiled code */ }
+ def foo3345(): scala.Int = { /* compiled code */ }
+ def foo3346(): scala.Int = { /* compiled code */ }
+ def foo3347(): scala.Int = { /* compiled code */ }
+ def foo3348(): scala.Int = { /* compiled code */ }
+ def foo3349(): scala.Int = { /* compiled code */ }
+ def foo3350(): scala.Int = { /* compiled code */ }
+ def foo3351(): scala.Int = { /* compiled code */ }
+ def foo3352(): scala.Int = { /* compiled code */ }
+ def foo3353(): scala.Int = { /* compiled code */ }
+ def foo3354(): scala.Int = { /* compiled code */ }
+ def foo3355(): scala.Int = { /* compiled code */ }
+ def foo3356(): scala.Int = { /* compiled code */ }
+ def foo3357(): scala.Int = { /* compiled code */ }
+ def foo3358(): scala.Int = { /* compiled code */ }
+ def foo3359(): scala.Int = { /* compiled code */ }
+ def foo3360(): scala.Int = { /* compiled code */ }
+ def foo3361(): scala.Int = { /* compiled code */ }
+ def foo3362(): scala.Int = { /* compiled code */ }
+ def foo3363(): scala.Int = { /* compiled code */ }
+ def foo3364(): scala.Int = { /* compiled code */ }
+ def foo3365(): scala.Int = { /* compiled code */ }
+ def foo3366(): scala.Int = { /* compiled code */ }
+ def foo3367(): scala.Int = { /* compiled code */ }
+ def foo3368(): scala.Int = { /* compiled code */ }
+ def foo3369(): scala.Int = { /* compiled code */ }
+ def foo3370(): scala.Int = { /* compiled code */ }
+ def foo3371(): scala.Int = { /* compiled code */ }
+ def foo3372(): scala.Int = { /* compiled code */ }
+ def foo3373(): scala.Int = { /* compiled code */ }
+ def foo3374(): scala.Int = { /* compiled code */ }
+ def foo3375(): scala.Int = { /* compiled code */ }
+ def foo3376(): scala.Int = { /* compiled code */ }
+ def foo3377(): scala.Int = { /* compiled code */ }
+ def foo3378(): scala.Int = { /* compiled code */ }
+ def foo3379(): scala.Int = { /* compiled code */ }
+ def foo3380(): scala.Int = { /* compiled code */ }
+ def foo3381(): scala.Int = { /* compiled code */ }
+ def foo3382(): scala.Int = { /* compiled code */ }
+ def foo3383(): scala.Int = { /* compiled code */ }
+ def foo3384(): scala.Int = { /* compiled code */ }
+ def foo3385(): scala.Int = { /* compiled code */ }
+ def foo3386(): scala.Int = { /* compiled code */ }
+ def foo3387(): scala.Int = { /* compiled code */ }
+ def foo3388(): scala.Int = { /* compiled code */ }
+ def foo3389(): scala.Int = { /* compiled code */ }
+ def foo3390(): scala.Int = { /* compiled code */ }
+ def foo3391(): scala.Int = { /* compiled code */ }
+ def foo3392(): scala.Int = { /* compiled code */ }
+ def foo3393(): scala.Int = { /* compiled code */ }
+ def foo3394(): scala.Int = { /* compiled code */ }
+ def foo3395(): scala.Int = { /* compiled code */ }
+ def foo3396(): scala.Int = { /* compiled code */ }
+ def foo3397(): scala.Int = { /* compiled code */ }
+ def foo3398(): scala.Int = { /* compiled code */ }
+ def foo3399(): scala.Int = { /* compiled code */ }
+ def foo3400(): scala.Int = { /* compiled code */ }
+ def foo3401(): scala.Int = { /* compiled code */ }
+ def foo3402(): scala.Int = { /* compiled code */ }
+ def foo3403(): scala.Int = { /* compiled code */ }
+ def foo3404(): scala.Int = { /* compiled code */ }
+ def foo3405(): scala.Int = { /* compiled code */ }
+ def foo3406(): scala.Int = { /* compiled code */ }
+ def foo3407(): scala.Int = { /* compiled code */ }
+ def foo3408(): scala.Int = { /* compiled code */ }
+ def foo3409(): scala.Int = { /* compiled code */ }
+ def foo3410(): scala.Int = { /* compiled code */ }
+ def foo3411(): scala.Int = { /* compiled code */ }
+ def foo3412(): scala.Int = { /* compiled code */ }
+ def foo3413(): scala.Int = { /* compiled code */ }
+ def foo3414(): scala.Int = { /* compiled code */ }
+ def foo3415(): scala.Int = { /* compiled code */ }
+ def foo3416(): scala.Int = { /* compiled code */ }
+ def foo3417(): scala.Int = { /* compiled code */ }
+ def foo3418(): scala.Int = { /* compiled code */ }
+ def foo3419(): scala.Int = { /* compiled code */ }
+ def foo3420(): scala.Int = { /* compiled code */ }
+ def foo3421(): scala.Int = { /* compiled code */ }
+ def foo3422(): scala.Int = { /* compiled code */ }
+ def foo3423(): scala.Int = { /* compiled code */ }
+ def foo3424(): scala.Int = { /* compiled code */ }
+ def foo3425(): scala.Int = { /* compiled code */ }
+ def foo3426(): scala.Int = { /* compiled code */ }
+ def foo3427(): scala.Int = { /* compiled code */ }
+ def foo3428(): scala.Int = { /* compiled code */ }
+ def foo3429(): scala.Int = { /* compiled code */ }
+ def foo3430(): scala.Int = { /* compiled code */ }
+ def foo3431(): scala.Int = { /* compiled code */ }
+ def foo3432(): scala.Int = { /* compiled code */ }
+ def foo3433(): scala.Int = { /* compiled code */ }
+ def foo3434(): scala.Int = { /* compiled code */ }
+ def foo3435(): scala.Int = { /* compiled code */ }
+ def foo3436(): scala.Int = { /* compiled code */ }
+ def foo3437(): scala.Int = { /* compiled code */ }
+ def foo3438(): scala.Int = { /* compiled code */ }
+ def foo3439(): scala.Int = { /* compiled code */ }
+ def foo3440(): scala.Int = { /* compiled code */ }
+ def foo3441(): scala.Int = { /* compiled code */ }
+ def foo3442(): scala.Int = { /* compiled code */ }
+ def foo3443(): scala.Int = { /* compiled code */ }
+ def foo3444(): scala.Int = { /* compiled code */ }
+ def foo3445(): scala.Int = { /* compiled code */ }
+ def foo3446(): scala.Int = { /* compiled code */ }
+ def foo3447(): scala.Int = { /* compiled code */ }
+ def foo3448(): scala.Int = { /* compiled code */ }
+ def foo3449(): scala.Int = { /* compiled code */ }
+ def foo3450(): scala.Int = { /* compiled code */ }
+ def foo3451(): scala.Int = { /* compiled code */ }
+ def foo3452(): scala.Int = { /* compiled code */ }
+ def foo3453(): scala.Int = { /* compiled code */ }
+ def foo3454(): scala.Int = { /* compiled code */ }
+ def foo3455(): scala.Int = { /* compiled code */ }
+ def foo3456(): scala.Int = { /* compiled code */ }
+ def foo3457(): scala.Int = { /* compiled code */ }
+ def foo3458(): scala.Int = { /* compiled code */ }
+ def foo3459(): scala.Int = { /* compiled code */ }
+ def foo3460(): scala.Int = { /* compiled code */ }
+ def foo3461(): scala.Int = { /* compiled code */ }
+ def foo3462(): scala.Int = { /* compiled code */ }
+ def foo3463(): scala.Int = { /* compiled code */ }
+ def foo3464(): scala.Int = { /* compiled code */ }
+ def foo3465(): scala.Int = { /* compiled code */ }
+ def foo3466(): scala.Int = { /* compiled code */ }
+ def foo3467(): scala.Int = { /* compiled code */ }
+ def foo3468(): scala.Int = { /* compiled code */ }
+ def foo3469(): scala.Int = { /* compiled code */ }
+ def foo3470(): scala.Int = { /* compiled code */ }
+ def foo3471(): scala.Int = { /* compiled code */ }
+ def foo3472(): scala.Int = { /* compiled code */ }
+ def foo3473(): scala.Int = { /* compiled code */ }
+ def foo3474(): scala.Int = { /* compiled code */ }
+ def foo3475(): scala.Int = { /* compiled code */ }
+ def foo3476(): scala.Int = { /* compiled code */ }
+ def foo3477(): scala.Int = { /* compiled code */ }
+ def foo3478(): scala.Int = { /* compiled code */ }
+ def foo3479(): scala.Int = { /* compiled code */ }
+ def foo3480(): scala.Int = { /* compiled code */ }
+ def foo3481(): scala.Int = { /* compiled code */ }
+ def foo3482(): scala.Int = { /* compiled code */ }
+ def foo3483(): scala.Int = { /* compiled code */ }
+ def foo3484(): scala.Int = { /* compiled code */ }
+ def foo3485(): scala.Int = { /* compiled code */ }
+ def foo3486(): scala.Int = { /* compiled code */ }
+ def foo3487(): scala.Int = { /* compiled code */ }
+ def foo3488(): scala.Int = { /* compiled code */ }
+ def foo3489(): scala.Int = { /* compiled code */ }
+ def foo3490(): scala.Int = { /* compiled code */ }
+ def foo3491(): scala.Int = { /* compiled code */ }
+ def foo3492(): scala.Int = { /* compiled code */ }
+ def foo3493(): scala.Int = { /* compiled code */ }
+ def foo3494(): scala.Int = { /* compiled code */ }
+ def foo3495(): scala.Int = { /* compiled code */ }
+ def foo3496(): scala.Int = { /* compiled code */ }
+ def foo3497(): scala.Int = { /* compiled code */ }
+ def foo3498(): scala.Int = { /* compiled code */ }
+ def foo3499(): scala.Int = { /* compiled code */ }
+ def foo3500(): scala.Int = { /* compiled code */ }
+}
diff --git a/test/files/scalap/t8679.scala b/test/files/scalap/t8679.scala
new file mode 100644
index 0000000000..f008a7af35
--- /dev/null
+++ b/test/files/scalap/t8679.scala
@@ -0,0 +1,3502 @@
+class T8679 {
+ def foo1(): Int = 3
+ def foo2(): Int = 4
+ def foo3(): Int = 5
+ def foo4(): Int = 6
+ def foo5(): Int = 7
+ def foo6(): Int = 8
+ def foo7(): Int = 9
+ def foo8(): Int = 10
+ def foo9(): Int = 11
+ def foo10(): Int = 12
+ def foo11(): Int = 13
+ def foo12(): Int = 14
+ def foo13(): Int = 15
+ def foo14(): Int = 16
+ def foo15(): Int = 17
+ def foo16(): Int = 18
+ def foo17(): Int = 19
+ def foo18(): Int = 20
+ def foo19(): Int = 21
+ def foo20(): Int = 22
+ def foo21(): Int = 23
+ def foo22(): Int = 24
+ def foo23(): Int = 25
+ def foo24(): Int = 26
+ def foo25(): Int = 27
+ def foo26(): Int = 28
+ def foo27(): Int = 29
+ def foo28(): Int = 30
+ def foo29(): Int = 31
+ def foo30(): Int = 32
+ def foo31(): Int = 33
+ def foo32(): Int = 34
+ def foo33(): Int = 35
+ def foo34(): Int = 36
+ def foo35(): Int = 37
+ def foo36(): Int = 38
+ def foo37(): Int = 39
+ def foo38(): Int = 40
+ def foo39(): Int = 41
+ def foo40(): Int = 42
+ def foo41(): Int = 43
+ def foo42(): Int = 44
+ def foo43(): Int = 45
+ def foo44(): Int = 46
+ def foo45(): Int = 47
+ def foo46(): Int = 48
+ def foo47(): Int = 49
+ def foo48(): Int = 50
+ def foo49(): Int = 51
+ def foo50(): Int = 52
+ def foo51(): Int = 53
+ def foo52(): Int = 54
+ def foo53(): Int = 55
+ def foo54(): Int = 56
+ def foo55(): Int = 57
+ def foo56(): Int = 58
+ def foo57(): Int = 59
+ def foo58(): Int = 60
+ def foo59(): Int = 61
+ def foo60(): Int = 62
+ def foo61(): Int = 63
+ def foo62(): Int = 64
+ def foo63(): Int = 65
+ def foo64(): Int = 66
+ def foo65(): Int = 67
+ def foo66(): Int = 68
+ def foo67(): Int = 69
+ def foo68(): Int = 70
+ def foo69(): Int = 71
+ def foo70(): Int = 72
+ def foo71(): Int = 73
+ def foo72(): Int = 74
+ def foo73(): Int = 75
+ def foo74(): Int = 76
+ def foo75(): Int = 77
+ def foo76(): Int = 78
+ def foo77(): Int = 79
+ def foo78(): Int = 80
+ def foo79(): Int = 81
+ def foo80(): Int = 82
+ def foo81(): Int = 83
+ def foo82(): Int = 84
+ def foo83(): Int = 85
+ def foo84(): Int = 86
+ def foo85(): Int = 87
+ def foo86(): Int = 88
+ def foo87(): Int = 89
+ def foo88(): Int = 90
+ def foo89(): Int = 91
+ def foo90(): Int = 92
+ def foo91(): Int = 93
+ def foo92(): Int = 94
+ def foo93(): Int = 95
+ def foo94(): Int = 96
+ def foo95(): Int = 97
+ def foo96(): Int = 98
+ def foo97(): Int = 99
+ def foo98(): Int = 100
+ def foo99(): Int = 101
+ def foo100(): Int = 102
+ def foo101(): Int = 103
+ def foo102(): Int = 104
+ def foo103(): Int = 105
+ def foo104(): Int = 106
+ def foo105(): Int = 107
+ def foo106(): Int = 108
+ def foo107(): Int = 109
+ def foo108(): Int = 110
+ def foo109(): Int = 111
+ def foo110(): Int = 112
+ def foo111(): Int = 113
+ def foo112(): Int = 114
+ def foo113(): Int = 115
+ def foo114(): Int = 116
+ def foo115(): Int = 117
+ def foo116(): Int = 118
+ def foo117(): Int = 119
+ def foo118(): Int = 120
+ def foo119(): Int = 121
+ def foo120(): Int = 122
+ def foo121(): Int = 123
+ def foo122(): Int = 124
+ def foo123(): Int = 125
+ def foo124(): Int = 126
+ def foo125(): Int = 127
+ def foo126(): Int = 128
+ def foo127(): Int = 129
+ def foo128(): Int = 130
+ def foo129(): Int = 131
+ def foo130(): Int = 132
+ def foo131(): Int = 133
+ def foo132(): Int = 134
+ def foo133(): Int = 135
+ def foo134(): Int = 136
+ def foo135(): Int = 137
+ def foo136(): Int = 138
+ def foo137(): Int = 139
+ def foo138(): Int = 140
+ def foo139(): Int = 141
+ def foo140(): Int = 142
+ def foo141(): Int = 143
+ def foo142(): Int = 144
+ def foo143(): Int = 145
+ def foo144(): Int = 146
+ def foo145(): Int = 147
+ def foo146(): Int = 148
+ def foo147(): Int = 149
+ def foo148(): Int = 150
+ def foo149(): Int = 151
+ def foo150(): Int = 152
+ def foo151(): Int = 153
+ def foo152(): Int = 154
+ def foo153(): Int = 155
+ def foo154(): Int = 156
+ def foo155(): Int = 157
+ def foo156(): Int = 158
+ def foo157(): Int = 159
+ def foo158(): Int = 160
+ def foo159(): Int = 161
+ def foo160(): Int = 162
+ def foo161(): Int = 163
+ def foo162(): Int = 164
+ def foo163(): Int = 165
+ def foo164(): Int = 166
+ def foo165(): Int = 167
+ def foo166(): Int = 168
+ def foo167(): Int = 169
+ def foo168(): Int = 170
+ def foo169(): Int = 171
+ def foo170(): Int = 172
+ def foo171(): Int = 173
+ def foo172(): Int = 174
+ def foo173(): Int = 175
+ def foo174(): Int = 176
+ def foo175(): Int = 177
+ def foo176(): Int = 178
+ def foo177(): Int = 179
+ def foo178(): Int = 180
+ def foo179(): Int = 181
+ def foo180(): Int = 182
+ def foo181(): Int = 183
+ def foo182(): Int = 184
+ def foo183(): Int = 185
+ def foo184(): Int = 186
+ def foo185(): Int = 187
+ def foo186(): Int = 188
+ def foo187(): Int = 189
+ def foo188(): Int = 190
+ def foo189(): Int = 191
+ def foo190(): Int = 192
+ def foo191(): Int = 193
+ def foo192(): Int = 194
+ def foo193(): Int = 195
+ def foo194(): Int = 196
+ def foo195(): Int = 197
+ def foo196(): Int = 198
+ def foo197(): Int = 199
+ def foo198(): Int = 200
+ def foo199(): Int = 201
+ def foo200(): Int = 202
+ def foo201(): Int = 203
+ def foo202(): Int = 204
+ def foo203(): Int = 205
+ def foo204(): Int = 206
+ def foo205(): Int = 207
+ def foo206(): Int = 208
+ def foo207(): Int = 209
+ def foo208(): Int = 210
+ def foo209(): Int = 211
+ def foo210(): Int = 212
+ def foo211(): Int = 213
+ def foo212(): Int = 214
+ def foo213(): Int = 215
+ def foo214(): Int = 216
+ def foo215(): Int = 217
+ def foo216(): Int = 218
+ def foo217(): Int = 219
+ def foo218(): Int = 220
+ def foo219(): Int = 221
+ def foo220(): Int = 222
+ def foo221(): Int = 223
+ def foo222(): Int = 224
+ def foo223(): Int = 225
+ def foo224(): Int = 226
+ def foo225(): Int = 227
+ def foo226(): Int = 228
+ def foo227(): Int = 229
+ def foo228(): Int = 230
+ def foo229(): Int = 231
+ def foo230(): Int = 232
+ def foo231(): Int = 233
+ def foo232(): Int = 234
+ def foo233(): Int = 235
+ def foo234(): Int = 236
+ def foo235(): Int = 237
+ def foo236(): Int = 238
+ def foo237(): Int = 239
+ def foo238(): Int = 240
+ def foo239(): Int = 241
+ def foo240(): Int = 242
+ def foo241(): Int = 243
+ def foo242(): Int = 244
+ def foo243(): Int = 245
+ def foo244(): Int = 246
+ def foo245(): Int = 247
+ def foo246(): Int = 248
+ def foo247(): Int = 249
+ def foo248(): Int = 250
+ def foo249(): Int = 251
+ def foo250(): Int = 252
+ def foo251(): Int = 253
+ def foo252(): Int = 254
+ def foo253(): Int = 255
+ def foo254(): Int = 256
+ def foo255(): Int = 257
+ def foo256(): Int = 258
+ def foo257(): Int = 259
+ def foo258(): Int = 260
+ def foo259(): Int = 261
+ def foo260(): Int = 262
+ def foo261(): Int = 263
+ def foo262(): Int = 264
+ def foo263(): Int = 265
+ def foo264(): Int = 266
+ def foo265(): Int = 267
+ def foo266(): Int = 268
+ def foo267(): Int = 269
+ def foo268(): Int = 270
+ def foo269(): Int = 271
+ def foo270(): Int = 272
+ def foo271(): Int = 273
+ def foo272(): Int = 274
+ def foo273(): Int = 275
+ def foo274(): Int = 276
+ def foo275(): Int = 277
+ def foo276(): Int = 278
+ def foo277(): Int = 279
+ def foo278(): Int = 280
+ def foo279(): Int = 281
+ def foo280(): Int = 282
+ def foo281(): Int = 283
+ def foo282(): Int = 284
+ def foo283(): Int = 285
+ def foo284(): Int = 286
+ def foo285(): Int = 287
+ def foo286(): Int = 288
+ def foo287(): Int = 289
+ def foo288(): Int = 290
+ def foo289(): Int = 291
+ def foo290(): Int = 292
+ def foo291(): Int = 293
+ def foo292(): Int = 294
+ def foo293(): Int = 295
+ def foo294(): Int = 296
+ def foo295(): Int = 297
+ def foo296(): Int = 298
+ def foo297(): Int = 299
+ def foo298(): Int = 300
+ def foo299(): Int = 301
+ def foo300(): Int = 302
+ def foo301(): Int = 303
+ def foo302(): Int = 304
+ def foo303(): Int = 305
+ def foo304(): Int = 306
+ def foo305(): Int = 307
+ def foo306(): Int = 308
+ def foo307(): Int = 309
+ def foo308(): Int = 310
+ def foo309(): Int = 311
+ def foo310(): Int = 312
+ def foo311(): Int = 313
+ def foo312(): Int = 314
+ def foo313(): Int = 315
+ def foo314(): Int = 316
+ def foo315(): Int = 317
+ def foo316(): Int = 318
+ def foo317(): Int = 319
+ def foo318(): Int = 320
+ def foo319(): Int = 321
+ def foo320(): Int = 322
+ def foo321(): Int = 323
+ def foo322(): Int = 324
+ def foo323(): Int = 325
+ def foo324(): Int = 326
+ def foo325(): Int = 327
+ def foo326(): Int = 328
+ def foo327(): Int = 329
+ def foo328(): Int = 330
+ def foo329(): Int = 331
+ def foo330(): Int = 332
+ def foo331(): Int = 333
+ def foo332(): Int = 334
+ def foo333(): Int = 335
+ def foo334(): Int = 336
+ def foo335(): Int = 337
+ def foo336(): Int = 338
+ def foo337(): Int = 339
+ def foo338(): Int = 340
+ def foo339(): Int = 341
+ def foo340(): Int = 342
+ def foo341(): Int = 343
+ def foo342(): Int = 344
+ def foo343(): Int = 345
+ def foo344(): Int = 346
+ def foo345(): Int = 347
+ def foo346(): Int = 348
+ def foo347(): Int = 349
+ def foo348(): Int = 350
+ def foo349(): Int = 351
+ def foo350(): Int = 352
+ def foo351(): Int = 353
+ def foo352(): Int = 354
+ def foo353(): Int = 355
+ def foo354(): Int = 356
+ def foo355(): Int = 357
+ def foo356(): Int = 358
+ def foo357(): Int = 359
+ def foo358(): Int = 360
+ def foo359(): Int = 361
+ def foo360(): Int = 362
+ def foo361(): Int = 363
+ def foo362(): Int = 364
+ def foo363(): Int = 365
+ def foo364(): Int = 366
+ def foo365(): Int = 367
+ def foo366(): Int = 368
+ def foo367(): Int = 369
+ def foo368(): Int = 370
+ def foo369(): Int = 371
+ def foo370(): Int = 372
+ def foo371(): Int = 373
+ def foo372(): Int = 374
+ def foo373(): Int = 375
+ def foo374(): Int = 376
+ def foo375(): Int = 377
+ def foo376(): Int = 378
+ def foo377(): Int = 379
+ def foo378(): Int = 380
+ def foo379(): Int = 381
+ def foo380(): Int = 382
+ def foo381(): Int = 383
+ def foo382(): Int = 384
+ def foo383(): Int = 385
+ def foo384(): Int = 386
+ def foo385(): Int = 387
+ def foo386(): Int = 388
+ def foo387(): Int = 389
+ def foo388(): Int = 390
+ def foo389(): Int = 391
+ def foo390(): Int = 392
+ def foo391(): Int = 393
+ def foo392(): Int = 394
+ def foo393(): Int = 395
+ def foo394(): Int = 396
+ def foo395(): Int = 397
+ def foo396(): Int = 398
+ def foo397(): Int = 399
+ def foo398(): Int = 400
+ def foo399(): Int = 401
+ def foo400(): Int = 402
+ def foo401(): Int = 403
+ def foo402(): Int = 404
+ def foo403(): Int = 405
+ def foo404(): Int = 406
+ def foo405(): Int = 407
+ def foo406(): Int = 408
+ def foo407(): Int = 409
+ def foo408(): Int = 410
+ def foo409(): Int = 411
+ def foo410(): Int = 412
+ def foo411(): Int = 413
+ def foo412(): Int = 414
+ def foo413(): Int = 415
+ def foo414(): Int = 416
+ def foo415(): Int = 417
+ def foo416(): Int = 418
+ def foo417(): Int = 419
+ def foo418(): Int = 420
+ def foo419(): Int = 421
+ def foo420(): Int = 422
+ def foo421(): Int = 423
+ def foo422(): Int = 424
+ def foo423(): Int = 425
+ def foo424(): Int = 426
+ def foo425(): Int = 427
+ def foo426(): Int = 428
+ def foo427(): Int = 429
+ def foo428(): Int = 430
+ def foo429(): Int = 431
+ def foo430(): Int = 432
+ def foo431(): Int = 433
+ def foo432(): Int = 434
+ def foo433(): Int = 435
+ def foo434(): Int = 436
+ def foo435(): Int = 437
+ def foo436(): Int = 438
+ def foo437(): Int = 439
+ def foo438(): Int = 440
+ def foo439(): Int = 441
+ def foo440(): Int = 442
+ def foo441(): Int = 443
+ def foo442(): Int = 444
+ def foo443(): Int = 445
+ def foo444(): Int = 446
+ def foo445(): Int = 447
+ def foo446(): Int = 448
+ def foo447(): Int = 449
+ def foo448(): Int = 450
+ def foo449(): Int = 451
+ def foo450(): Int = 452
+ def foo451(): Int = 453
+ def foo452(): Int = 454
+ def foo453(): Int = 455
+ def foo454(): Int = 456
+ def foo455(): Int = 457
+ def foo456(): Int = 458
+ def foo457(): Int = 459
+ def foo458(): Int = 460
+ def foo459(): Int = 461
+ def foo460(): Int = 462
+ def foo461(): Int = 463
+ def foo462(): Int = 464
+ def foo463(): Int = 465
+ def foo464(): Int = 466
+ def foo465(): Int = 467
+ def foo466(): Int = 468
+ def foo467(): Int = 469
+ def foo468(): Int = 470
+ def foo469(): Int = 471
+ def foo470(): Int = 472
+ def foo471(): Int = 473
+ def foo472(): Int = 474
+ def foo473(): Int = 475
+ def foo474(): Int = 476
+ def foo475(): Int = 477
+ def foo476(): Int = 478
+ def foo477(): Int = 479
+ def foo478(): Int = 480
+ def foo479(): Int = 481
+ def foo480(): Int = 482
+ def foo481(): Int = 483
+ def foo482(): Int = 484
+ def foo483(): Int = 485
+ def foo484(): Int = 486
+ def foo485(): Int = 487
+ def foo486(): Int = 488
+ def foo487(): Int = 489
+ def foo488(): Int = 490
+ def foo489(): Int = 491
+ def foo490(): Int = 492
+ def foo491(): Int = 493
+ def foo492(): Int = 494
+ def foo493(): Int = 495
+ def foo494(): Int = 496
+ def foo495(): Int = 497
+ def foo496(): Int = 498
+ def foo497(): Int = 499
+ def foo498(): Int = 500
+ def foo499(): Int = 501
+ def foo500(): Int = 502
+ def foo501(): Int = 503
+ def foo502(): Int = 504
+ def foo503(): Int = 505
+ def foo504(): Int = 506
+ def foo505(): Int = 507
+ def foo506(): Int = 508
+ def foo507(): Int = 509
+ def foo508(): Int = 510
+ def foo509(): Int = 511
+ def foo510(): Int = 512
+ def foo511(): Int = 513
+ def foo512(): Int = 514
+ def foo513(): Int = 515
+ def foo514(): Int = 516
+ def foo515(): Int = 517
+ def foo516(): Int = 518
+ def foo517(): Int = 519
+ def foo518(): Int = 520
+ def foo519(): Int = 521
+ def foo520(): Int = 522
+ def foo521(): Int = 523
+ def foo522(): Int = 524
+ def foo523(): Int = 525
+ def foo524(): Int = 526
+ def foo525(): Int = 527
+ def foo526(): Int = 528
+ def foo527(): Int = 529
+ def foo528(): Int = 530
+ def foo529(): Int = 531
+ def foo530(): Int = 532
+ def foo531(): Int = 533
+ def foo532(): Int = 534
+ def foo533(): Int = 535
+ def foo534(): Int = 536
+ def foo535(): Int = 537
+ def foo536(): Int = 538
+ def foo537(): Int = 539
+ def foo538(): Int = 540
+ def foo539(): Int = 541
+ def foo540(): Int = 542
+ def foo541(): Int = 543
+ def foo542(): Int = 544
+ def foo543(): Int = 545
+ def foo544(): Int = 546
+ def foo545(): Int = 547
+ def foo546(): Int = 548
+ def foo547(): Int = 549
+ def foo548(): Int = 550
+ def foo549(): Int = 551
+ def foo550(): Int = 552
+ def foo551(): Int = 553
+ def foo552(): Int = 554
+ def foo553(): Int = 555
+ def foo554(): Int = 556
+ def foo555(): Int = 557
+ def foo556(): Int = 558
+ def foo557(): Int = 559
+ def foo558(): Int = 560
+ def foo559(): Int = 561
+ def foo560(): Int = 562
+ def foo561(): Int = 563
+ def foo562(): Int = 564
+ def foo563(): Int = 565
+ def foo564(): Int = 566
+ def foo565(): Int = 567
+ def foo566(): Int = 568
+ def foo567(): Int = 569
+ def foo568(): Int = 570
+ def foo569(): Int = 571
+ def foo570(): Int = 572
+ def foo571(): Int = 573
+ def foo572(): Int = 574
+ def foo573(): Int = 575
+ def foo574(): Int = 576
+ def foo575(): Int = 577
+ def foo576(): Int = 578
+ def foo577(): Int = 579
+ def foo578(): Int = 580
+ def foo579(): Int = 581
+ def foo580(): Int = 582
+ def foo581(): Int = 583
+ def foo582(): Int = 584
+ def foo583(): Int = 585
+ def foo584(): Int = 586
+ def foo585(): Int = 587
+ def foo586(): Int = 588
+ def foo587(): Int = 589
+ def foo588(): Int = 590
+ def foo589(): Int = 591
+ def foo590(): Int = 592
+ def foo591(): Int = 593
+ def foo592(): Int = 594
+ def foo593(): Int = 595
+ def foo594(): Int = 596
+ def foo595(): Int = 597
+ def foo596(): Int = 598
+ def foo597(): Int = 599
+ def foo598(): Int = 600
+ def foo599(): Int = 601
+ def foo600(): Int = 602
+ def foo601(): Int = 603
+ def foo602(): Int = 604
+ def foo603(): Int = 605
+ def foo604(): Int = 606
+ def foo605(): Int = 607
+ def foo606(): Int = 608
+ def foo607(): Int = 609
+ def foo608(): Int = 610
+ def foo609(): Int = 611
+ def foo610(): Int = 612
+ def foo611(): Int = 613
+ def foo612(): Int = 614
+ def foo613(): Int = 615
+ def foo614(): Int = 616
+ def foo615(): Int = 617
+ def foo616(): Int = 618
+ def foo617(): Int = 619
+ def foo618(): Int = 620
+ def foo619(): Int = 621
+ def foo620(): Int = 622
+ def foo621(): Int = 623
+ def foo622(): Int = 624
+ def foo623(): Int = 625
+ def foo624(): Int = 626
+ def foo625(): Int = 627
+ def foo626(): Int = 628
+ def foo627(): Int = 629
+ def foo628(): Int = 630
+ def foo629(): Int = 631
+ def foo630(): Int = 632
+ def foo631(): Int = 633
+ def foo632(): Int = 634
+ def foo633(): Int = 635
+ def foo634(): Int = 636
+ def foo635(): Int = 637
+ def foo636(): Int = 638
+ def foo637(): Int = 639
+ def foo638(): Int = 640
+ def foo639(): Int = 641
+ def foo640(): Int = 642
+ def foo641(): Int = 643
+ def foo642(): Int = 644
+ def foo643(): Int = 645
+ def foo644(): Int = 646
+ def foo645(): Int = 647
+ def foo646(): Int = 648
+ def foo647(): Int = 649
+ def foo648(): Int = 650
+ def foo649(): Int = 651
+ def foo650(): Int = 652
+ def foo651(): Int = 653
+ def foo652(): Int = 654
+ def foo653(): Int = 655
+ def foo654(): Int = 656
+ def foo655(): Int = 657
+ def foo656(): Int = 658
+ def foo657(): Int = 659
+ def foo658(): Int = 660
+ def foo659(): Int = 661
+ def foo660(): Int = 662
+ def foo661(): Int = 663
+ def foo662(): Int = 664
+ def foo663(): Int = 665
+ def foo664(): Int = 666
+ def foo665(): Int = 667
+ def foo666(): Int = 668
+ def foo667(): Int = 669
+ def foo668(): Int = 670
+ def foo669(): Int = 671
+ def foo670(): Int = 672
+ def foo671(): Int = 673
+ def foo672(): Int = 674
+ def foo673(): Int = 675
+ def foo674(): Int = 676
+ def foo675(): Int = 677
+ def foo676(): Int = 678
+ def foo677(): Int = 679
+ def foo678(): Int = 680
+ def foo679(): Int = 681
+ def foo680(): Int = 682
+ def foo681(): Int = 683
+ def foo682(): Int = 684
+ def foo683(): Int = 685
+ def foo684(): Int = 686
+ def foo685(): Int = 687
+ def foo686(): Int = 688
+ def foo687(): Int = 689
+ def foo688(): Int = 690
+ def foo689(): Int = 691
+ def foo690(): Int = 692
+ def foo691(): Int = 693
+ def foo692(): Int = 694
+ def foo693(): Int = 695
+ def foo694(): Int = 696
+ def foo695(): Int = 697
+ def foo696(): Int = 698
+ def foo697(): Int = 699
+ def foo698(): Int = 700
+ def foo699(): Int = 701
+ def foo700(): Int = 702
+ def foo701(): Int = 703
+ def foo702(): Int = 704
+ def foo703(): Int = 705
+ def foo704(): Int = 706
+ def foo705(): Int = 707
+ def foo706(): Int = 708
+ def foo707(): Int = 709
+ def foo708(): Int = 710
+ def foo709(): Int = 711
+ def foo710(): Int = 712
+ def foo711(): Int = 713
+ def foo712(): Int = 714
+ def foo713(): Int = 715
+ def foo714(): Int = 716
+ def foo715(): Int = 717
+ def foo716(): Int = 718
+ def foo717(): Int = 719
+ def foo718(): Int = 720
+ def foo719(): Int = 721
+ def foo720(): Int = 722
+ def foo721(): Int = 723
+ def foo722(): Int = 724
+ def foo723(): Int = 725
+ def foo724(): Int = 726
+ def foo725(): Int = 727
+ def foo726(): Int = 728
+ def foo727(): Int = 729
+ def foo728(): Int = 730
+ def foo729(): Int = 731
+ def foo730(): Int = 732
+ def foo731(): Int = 733
+ def foo732(): Int = 734
+ def foo733(): Int = 735
+ def foo734(): Int = 736
+ def foo735(): Int = 737
+ def foo736(): Int = 738
+ def foo737(): Int = 739
+ def foo738(): Int = 740
+ def foo739(): Int = 741
+ def foo740(): Int = 742
+ def foo741(): Int = 743
+ def foo742(): Int = 744
+ def foo743(): Int = 745
+ def foo744(): Int = 746
+ def foo745(): Int = 747
+ def foo746(): Int = 748
+ def foo747(): Int = 749
+ def foo748(): Int = 750
+ def foo749(): Int = 751
+ def foo750(): Int = 752
+ def foo751(): Int = 753
+ def foo752(): Int = 754
+ def foo753(): Int = 755
+ def foo754(): Int = 756
+ def foo755(): Int = 757
+ def foo756(): Int = 758
+ def foo757(): Int = 759
+ def foo758(): Int = 760
+ def foo759(): Int = 761
+ def foo760(): Int = 762
+ def foo761(): Int = 763
+ def foo762(): Int = 764
+ def foo763(): Int = 765
+ def foo764(): Int = 766
+ def foo765(): Int = 767
+ def foo766(): Int = 768
+ def foo767(): Int = 769
+ def foo768(): Int = 770
+ def foo769(): Int = 771
+ def foo770(): Int = 772
+ def foo771(): Int = 773
+ def foo772(): Int = 774
+ def foo773(): Int = 775
+ def foo774(): Int = 776
+ def foo775(): Int = 777
+ def foo776(): Int = 778
+ def foo777(): Int = 779
+ def foo778(): Int = 780
+ def foo779(): Int = 781
+ def foo780(): Int = 782
+ def foo781(): Int = 783
+ def foo782(): Int = 784
+ def foo783(): Int = 785
+ def foo784(): Int = 786
+ def foo785(): Int = 787
+ def foo786(): Int = 788
+ def foo787(): Int = 789
+ def foo788(): Int = 790
+ def foo789(): Int = 791
+ def foo790(): Int = 792
+ def foo791(): Int = 793
+ def foo792(): Int = 794
+ def foo793(): Int = 795
+ def foo794(): Int = 796
+ def foo795(): Int = 797
+ def foo796(): Int = 798
+ def foo797(): Int = 799
+ def foo798(): Int = 800
+ def foo799(): Int = 801
+ def foo800(): Int = 802
+ def foo801(): Int = 803
+ def foo802(): Int = 804
+ def foo803(): Int = 805
+ def foo804(): Int = 806
+ def foo805(): Int = 807
+ def foo806(): Int = 808
+ def foo807(): Int = 809
+ def foo808(): Int = 810
+ def foo809(): Int = 811
+ def foo810(): Int = 812
+ def foo811(): Int = 813
+ def foo812(): Int = 814
+ def foo813(): Int = 815
+ def foo814(): Int = 816
+ def foo815(): Int = 817
+ def foo816(): Int = 818
+ def foo817(): Int = 819
+ def foo818(): Int = 820
+ def foo819(): Int = 821
+ def foo820(): Int = 822
+ def foo821(): Int = 823
+ def foo822(): Int = 824
+ def foo823(): Int = 825
+ def foo824(): Int = 826
+ def foo825(): Int = 827
+ def foo826(): Int = 828
+ def foo827(): Int = 829
+ def foo828(): Int = 830
+ def foo829(): Int = 831
+ def foo830(): Int = 832
+ def foo831(): Int = 833
+ def foo832(): Int = 834
+ def foo833(): Int = 835
+ def foo834(): Int = 836
+ def foo835(): Int = 837
+ def foo836(): Int = 838
+ def foo837(): Int = 839
+ def foo838(): Int = 840
+ def foo839(): Int = 841
+ def foo840(): Int = 842
+ def foo841(): Int = 843
+ def foo842(): Int = 844
+ def foo843(): Int = 845
+ def foo844(): Int = 846
+ def foo845(): Int = 847
+ def foo846(): Int = 848
+ def foo847(): Int = 849
+ def foo848(): Int = 850
+ def foo849(): Int = 851
+ def foo850(): Int = 852
+ def foo851(): Int = 853
+ def foo852(): Int = 854
+ def foo853(): Int = 855
+ def foo854(): Int = 856
+ def foo855(): Int = 857
+ def foo856(): Int = 858
+ def foo857(): Int = 859
+ def foo858(): Int = 860
+ def foo859(): Int = 861
+ def foo860(): Int = 862
+ def foo861(): Int = 863
+ def foo862(): Int = 864
+ def foo863(): Int = 865
+ def foo864(): Int = 866
+ def foo865(): Int = 867
+ def foo866(): Int = 868
+ def foo867(): Int = 869
+ def foo868(): Int = 870
+ def foo869(): Int = 871
+ def foo870(): Int = 872
+ def foo871(): Int = 873
+ def foo872(): Int = 874
+ def foo873(): Int = 875
+ def foo874(): Int = 876
+ def foo875(): Int = 877
+ def foo876(): Int = 878
+ def foo877(): Int = 879
+ def foo878(): Int = 880
+ def foo879(): Int = 881
+ def foo880(): Int = 882
+ def foo881(): Int = 883
+ def foo882(): Int = 884
+ def foo883(): Int = 885
+ def foo884(): Int = 886
+ def foo885(): Int = 887
+ def foo886(): Int = 888
+ def foo887(): Int = 889
+ def foo888(): Int = 890
+ def foo889(): Int = 891
+ def foo890(): Int = 892
+ def foo891(): Int = 893
+ def foo892(): Int = 894
+ def foo893(): Int = 895
+ def foo894(): Int = 896
+ def foo895(): Int = 897
+ def foo896(): Int = 898
+ def foo897(): Int = 899
+ def foo898(): Int = 900
+ def foo899(): Int = 901
+ def foo900(): Int = 902
+ def foo901(): Int = 903
+ def foo902(): Int = 904
+ def foo903(): Int = 905
+ def foo904(): Int = 906
+ def foo905(): Int = 907
+ def foo906(): Int = 908
+ def foo907(): Int = 909
+ def foo908(): Int = 910
+ def foo909(): Int = 911
+ def foo910(): Int = 912
+ def foo911(): Int = 913
+ def foo912(): Int = 914
+ def foo913(): Int = 915
+ def foo914(): Int = 916
+ def foo915(): Int = 917
+ def foo916(): Int = 918
+ def foo917(): Int = 919
+ def foo918(): Int = 920
+ def foo919(): Int = 921
+ def foo920(): Int = 922
+ def foo921(): Int = 923
+ def foo922(): Int = 924
+ def foo923(): Int = 925
+ def foo924(): Int = 926
+ def foo925(): Int = 927
+ def foo926(): Int = 928
+ def foo927(): Int = 929
+ def foo928(): Int = 930
+ def foo929(): Int = 931
+ def foo930(): Int = 932
+ def foo931(): Int = 933
+ def foo932(): Int = 934
+ def foo933(): Int = 935
+ def foo934(): Int = 936
+ def foo935(): Int = 937
+ def foo936(): Int = 938
+ def foo937(): Int = 939
+ def foo938(): Int = 940
+ def foo939(): Int = 941
+ def foo940(): Int = 942
+ def foo941(): Int = 943
+ def foo942(): Int = 944
+ def foo943(): Int = 945
+ def foo944(): Int = 946
+ def foo945(): Int = 947
+ def foo946(): Int = 948
+ def foo947(): Int = 949
+ def foo948(): Int = 950
+ def foo949(): Int = 951
+ def foo950(): Int = 952
+ def foo951(): Int = 953
+ def foo952(): Int = 954
+ def foo953(): Int = 955
+ def foo954(): Int = 956
+ def foo955(): Int = 957
+ def foo956(): Int = 958
+ def foo957(): Int = 959
+ def foo958(): Int = 960
+ def foo959(): Int = 961
+ def foo960(): Int = 962
+ def foo961(): Int = 963
+ def foo962(): Int = 964
+ def foo963(): Int = 965
+ def foo964(): Int = 966
+ def foo965(): Int = 967
+ def foo966(): Int = 968
+ def foo967(): Int = 969
+ def foo968(): Int = 970
+ def foo969(): Int = 971
+ def foo970(): Int = 972
+ def foo971(): Int = 973
+ def foo972(): Int = 974
+ def foo973(): Int = 975
+ def foo974(): Int = 976
+ def foo975(): Int = 977
+ def foo976(): Int = 978
+ def foo977(): Int = 979
+ def foo978(): Int = 980
+ def foo979(): Int = 981
+ def foo980(): Int = 982
+ def foo981(): Int = 983
+ def foo982(): Int = 984
+ def foo983(): Int = 985
+ def foo984(): Int = 986
+ def foo985(): Int = 987
+ def foo986(): Int = 988
+ def foo987(): Int = 989
+ def foo988(): Int = 990
+ def foo989(): Int = 991
+ def foo990(): Int = 992
+ def foo991(): Int = 993
+ def foo992(): Int = 994
+ def foo993(): Int = 995
+ def foo994(): Int = 996
+ def foo995(): Int = 997
+ def foo996(): Int = 998
+ def foo997(): Int = 999
+ def foo998(): Int = 1000
+ def foo999(): Int = 1001
+ def foo1000(): Int = 1002
+ def foo1001(): Int = 1003
+ def foo1002(): Int = 1004
+ def foo1003(): Int = 1005
+ def foo1004(): Int = 1006
+ def foo1005(): Int = 1007
+ def foo1006(): Int = 1008
+ def foo1007(): Int = 1009
+ def foo1008(): Int = 1010
+ def foo1009(): Int = 1011
+ def foo1010(): Int = 1012
+ def foo1011(): Int = 1013
+ def foo1012(): Int = 1014
+ def foo1013(): Int = 1015
+ def foo1014(): Int = 1016
+ def foo1015(): Int = 1017
+ def foo1016(): Int = 1018
+ def foo1017(): Int = 1019
+ def foo1018(): Int = 1020
+ def foo1019(): Int = 1021
+ def foo1020(): Int = 1022
+ def foo1021(): Int = 1023
+ def foo1022(): Int = 1024
+ def foo1023(): Int = 1025
+ def foo1024(): Int = 1026
+ def foo1025(): Int = 1027
+ def foo1026(): Int = 1028
+ def foo1027(): Int = 1029
+ def foo1028(): Int = 1030
+ def foo1029(): Int = 1031
+ def foo1030(): Int = 1032
+ def foo1031(): Int = 1033
+ def foo1032(): Int = 1034
+ def foo1033(): Int = 1035
+ def foo1034(): Int = 1036
+ def foo1035(): Int = 1037
+ def foo1036(): Int = 1038
+ def foo1037(): Int = 1039
+ def foo1038(): Int = 1040
+ def foo1039(): Int = 1041
+ def foo1040(): Int = 1042
+ def foo1041(): Int = 1043
+ def foo1042(): Int = 1044
+ def foo1043(): Int = 1045
+ def foo1044(): Int = 1046
+ def foo1045(): Int = 1047
+ def foo1046(): Int = 1048
+ def foo1047(): Int = 1049
+ def foo1048(): Int = 1050
+ def foo1049(): Int = 1051
+ def foo1050(): Int = 1052
+ def foo1051(): Int = 1053
+ def foo1052(): Int = 1054
+ def foo1053(): Int = 1055
+ def foo1054(): Int = 1056
+ def foo1055(): Int = 1057
+ def foo1056(): Int = 1058
+ def foo1057(): Int = 1059
+ def foo1058(): Int = 1060
+ def foo1059(): Int = 1061
+ def foo1060(): Int = 1062
+ def foo1061(): Int = 1063
+ def foo1062(): Int = 1064
+ def foo1063(): Int = 1065
+ def foo1064(): Int = 1066
+ def foo1065(): Int = 1067
+ def foo1066(): Int = 1068
+ def foo1067(): Int = 1069
+ def foo1068(): Int = 1070
+ def foo1069(): Int = 1071
+ def foo1070(): Int = 1072
+ def foo1071(): Int = 1073
+ def foo1072(): Int = 1074
+ def foo1073(): Int = 1075
+ def foo1074(): Int = 1076
+ def foo1075(): Int = 1077
+ def foo1076(): Int = 1078
+ def foo1077(): Int = 1079
+ def foo1078(): Int = 1080
+ def foo1079(): Int = 1081
+ def foo1080(): Int = 1082
+ def foo1081(): Int = 1083
+ def foo1082(): Int = 1084
+ def foo1083(): Int = 1085
+ def foo1084(): Int = 1086
+ def foo1085(): Int = 1087
+ def foo1086(): Int = 1088
+ def foo1087(): Int = 1089
+ def foo1088(): Int = 1090
+ def foo1089(): Int = 1091
+ def foo1090(): Int = 1092
+ def foo1091(): Int = 1093
+ def foo1092(): Int = 1094
+ def foo1093(): Int = 1095
+ def foo1094(): Int = 1096
+ def foo1095(): Int = 1097
+ def foo1096(): Int = 1098
+ def foo1097(): Int = 1099
+ def foo1098(): Int = 1100
+ def foo1099(): Int = 1101
+ def foo1100(): Int = 1102
+ def foo1101(): Int = 1103
+ def foo1102(): Int = 1104
+ def foo1103(): Int = 1105
+ def foo1104(): Int = 1106
+ def foo1105(): Int = 1107
+ def foo1106(): Int = 1108
+ def foo1107(): Int = 1109
+ def foo1108(): Int = 1110
+ def foo1109(): Int = 1111
+ def foo1110(): Int = 1112
+ def foo1111(): Int = 1113
+ def foo1112(): Int = 1114
+ def foo1113(): Int = 1115
+ def foo1114(): Int = 1116
+ def foo1115(): Int = 1117
+ def foo1116(): Int = 1118
+ def foo1117(): Int = 1119
+ def foo1118(): Int = 1120
+ def foo1119(): Int = 1121
+ def foo1120(): Int = 1122
+ def foo1121(): Int = 1123
+ def foo1122(): Int = 1124
+ def foo1123(): Int = 1125
+ def foo1124(): Int = 1126
+ def foo1125(): Int = 1127
+ def foo1126(): Int = 1128
+ def foo1127(): Int = 1129
+ def foo1128(): Int = 1130
+ def foo1129(): Int = 1131
+ def foo1130(): Int = 1132
+ def foo1131(): Int = 1133
+ def foo1132(): Int = 1134
+ def foo1133(): Int = 1135
+ def foo1134(): Int = 1136
+ def foo1135(): Int = 1137
+ def foo1136(): Int = 1138
+ def foo1137(): Int = 1139
+ def foo1138(): Int = 1140
+ def foo1139(): Int = 1141
+ def foo1140(): Int = 1142
+ def foo1141(): Int = 1143
+ def foo1142(): Int = 1144
+ def foo1143(): Int = 1145
+ def foo1144(): Int = 1146
+ def foo1145(): Int = 1147
+ def foo1146(): Int = 1148
+ def foo1147(): Int = 1149
+ def foo1148(): Int = 1150
+ def foo1149(): Int = 1151
+ def foo1150(): Int = 1152
+ def foo1151(): Int = 1153
+ def foo1152(): Int = 1154
+ def foo1153(): Int = 1155
+ def foo1154(): Int = 1156
+ def foo1155(): Int = 1157
+ def foo1156(): Int = 1158
+ def foo1157(): Int = 1159
+ def foo1158(): Int = 1160
+ def foo1159(): Int = 1161
+ def foo1160(): Int = 1162
+ def foo1161(): Int = 1163
+ def foo1162(): Int = 1164
+ def foo1163(): Int = 1165
+ def foo1164(): Int = 1166
+ def foo1165(): Int = 1167
+ def foo1166(): Int = 1168
+ def foo1167(): Int = 1169
+ def foo1168(): Int = 1170
+ def foo1169(): Int = 1171
+ def foo1170(): Int = 1172
+ def foo1171(): Int = 1173
+ def foo1172(): Int = 1174
+ def foo1173(): Int = 1175
+ def foo1174(): Int = 1176
+ def foo1175(): Int = 1177
+ def foo1176(): Int = 1178
+ def foo1177(): Int = 1179
+ def foo1178(): Int = 1180
+ def foo1179(): Int = 1181
+ def foo1180(): Int = 1182
+ def foo1181(): Int = 1183
+ def foo1182(): Int = 1184
+ def foo1183(): Int = 1185
+ def foo1184(): Int = 1186
+ def foo1185(): Int = 1187
+ def foo1186(): Int = 1188
+ def foo1187(): Int = 1189
+ def foo1188(): Int = 1190
+ def foo1189(): Int = 1191
+ def foo1190(): Int = 1192
+ def foo1191(): Int = 1193
+ def foo1192(): Int = 1194
+ def foo1193(): Int = 1195
+ def foo1194(): Int = 1196
+ def foo1195(): Int = 1197
+ def foo1196(): Int = 1198
+ def foo1197(): Int = 1199
+ def foo1198(): Int = 1200
+ def foo1199(): Int = 1201
+ def foo1200(): Int = 1202
+ def foo1201(): Int = 1203
+ def foo1202(): Int = 1204
+ def foo1203(): Int = 1205
+ def foo1204(): Int = 1206
+ def foo1205(): Int = 1207
+ def foo1206(): Int = 1208
+ def foo1207(): Int = 1209
+ def foo1208(): Int = 1210
+ def foo1209(): Int = 1211
+ def foo1210(): Int = 1212
+ def foo1211(): Int = 1213
+ def foo1212(): Int = 1214
+ def foo1213(): Int = 1215
+ def foo1214(): Int = 1216
+ def foo1215(): Int = 1217
+ def foo1216(): Int = 1218
+ def foo1217(): Int = 1219
+ def foo1218(): Int = 1220
+ def foo1219(): Int = 1221
+ def foo1220(): Int = 1222
+ def foo1221(): Int = 1223
+ def foo1222(): Int = 1224
+ def foo1223(): Int = 1225
+ def foo1224(): Int = 1226
+ def foo1225(): Int = 1227
+ def foo1226(): Int = 1228
+ def foo1227(): Int = 1229
+ def foo1228(): Int = 1230
+ def foo1229(): Int = 1231
+ def foo1230(): Int = 1232
+ def foo1231(): Int = 1233
+ def foo1232(): Int = 1234
+ def foo1233(): Int = 1235
+ def foo1234(): Int = 1236
+ def foo1235(): Int = 1237
+ def foo1236(): Int = 1238
+ def foo1237(): Int = 1239
+ def foo1238(): Int = 1240
+ def foo1239(): Int = 1241
+ def foo1240(): Int = 1242
+ def foo1241(): Int = 1243
+ def foo1242(): Int = 1244
+ def foo1243(): Int = 1245
+ def foo1244(): Int = 1246
+ def foo1245(): Int = 1247
+ def foo1246(): Int = 1248
+ def foo1247(): Int = 1249
+ def foo1248(): Int = 1250
+ def foo1249(): Int = 1251
+ def foo1250(): Int = 1252
+ def foo1251(): Int = 1253
+ def foo1252(): Int = 1254
+ def foo1253(): Int = 1255
+ def foo1254(): Int = 1256
+ def foo1255(): Int = 1257
+ def foo1256(): Int = 1258
+ def foo1257(): Int = 1259
+ def foo1258(): Int = 1260
+ def foo1259(): Int = 1261
+ def foo1260(): Int = 1262
+ def foo1261(): Int = 1263
+ def foo1262(): Int = 1264
+ def foo1263(): Int = 1265
+ def foo1264(): Int = 1266
+ def foo1265(): Int = 1267
+ def foo1266(): Int = 1268
+ def foo1267(): Int = 1269
+ def foo1268(): Int = 1270
+ def foo1269(): Int = 1271
+ def foo1270(): Int = 1272
+ def foo1271(): Int = 1273
+ def foo1272(): Int = 1274
+ def foo1273(): Int = 1275
+ def foo1274(): Int = 1276
+ def foo1275(): Int = 1277
+ def foo1276(): Int = 1278
+ def foo1277(): Int = 1279
+ def foo1278(): Int = 1280
+ def foo1279(): Int = 1281
+ def foo1280(): Int = 1282
+ def foo1281(): Int = 1283
+ def foo1282(): Int = 1284
+ def foo1283(): Int = 1285
+ def foo1284(): Int = 1286
+ def foo1285(): Int = 1287
+ def foo1286(): Int = 1288
+ def foo1287(): Int = 1289
+ def foo1288(): Int = 1290
+ def foo1289(): Int = 1291
+ def foo1290(): Int = 1292
+ def foo1291(): Int = 1293
+ def foo1292(): Int = 1294
+ def foo1293(): Int = 1295
+ def foo1294(): Int = 1296
+ def foo1295(): Int = 1297
+ def foo1296(): Int = 1298
+ def foo1297(): Int = 1299
+ def foo1298(): Int = 1300
+ def foo1299(): Int = 1301
+ def foo1300(): Int = 1302
+ def foo1301(): Int = 1303
+ def foo1302(): Int = 1304
+ def foo1303(): Int = 1305
+ def foo1304(): Int = 1306
+ def foo1305(): Int = 1307
+ def foo1306(): Int = 1308
+ def foo1307(): Int = 1309
+ def foo1308(): Int = 1310
+ def foo1309(): Int = 1311
+ def foo1310(): Int = 1312
+ def foo1311(): Int = 1313
+ def foo1312(): Int = 1314
+ def foo1313(): Int = 1315
+ def foo1314(): Int = 1316
+ def foo1315(): Int = 1317
+ def foo1316(): Int = 1318
+ def foo1317(): Int = 1319
+ def foo1318(): Int = 1320
+ def foo1319(): Int = 1321
+ def foo1320(): Int = 1322
+ def foo1321(): Int = 1323
+ def foo1322(): Int = 1324
+ def foo1323(): Int = 1325
+ def foo1324(): Int = 1326
+ def foo1325(): Int = 1327
+ def foo1326(): Int = 1328
+ def foo1327(): Int = 1329
+ def foo1328(): Int = 1330
+ def foo1329(): Int = 1331
+ def foo1330(): Int = 1332
+ def foo1331(): Int = 1333
+ def foo1332(): Int = 1334
+ def foo1333(): Int = 1335
+ def foo1334(): Int = 1336
+ def foo1335(): Int = 1337
+ def foo1336(): Int = 1338
+ def foo1337(): Int = 1339
+ def foo1338(): Int = 1340
+ def foo1339(): Int = 1341
+ def foo1340(): Int = 1342
+ def foo1341(): Int = 1343
+ def foo1342(): Int = 1344
+ def foo1343(): Int = 1345
+ def foo1344(): Int = 1346
+ def foo1345(): Int = 1347
+ def foo1346(): Int = 1348
+ def foo1347(): Int = 1349
+ def foo1348(): Int = 1350
+ def foo1349(): Int = 1351
+ def foo1350(): Int = 1352
+ def foo1351(): Int = 1353
+ def foo1352(): Int = 1354
+ def foo1353(): Int = 1355
+ def foo1354(): Int = 1356
+ def foo1355(): Int = 1357
+ def foo1356(): Int = 1358
+ def foo1357(): Int = 1359
+ def foo1358(): Int = 1360
+ def foo1359(): Int = 1361
+ def foo1360(): Int = 1362
+ def foo1361(): Int = 1363
+ def foo1362(): Int = 1364
+ def foo1363(): Int = 1365
+ def foo1364(): Int = 1366
+ def foo1365(): Int = 1367
+ def foo1366(): Int = 1368
+ def foo1367(): Int = 1369
+ def foo1368(): Int = 1370
+ def foo1369(): Int = 1371
+ def foo1370(): Int = 1372
+ def foo1371(): Int = 1373
+ def foo1372(): Int = 1374
+ def foo1373(): Int = 1375
+ def foo1374(): Int = 1376
+ def foo1375(): Int = 1377
+ def foo1376(): Int = 1378
+ def foo1377(): Int = 1379
+ def foo1378(): Int = 1380
+ def foo1379(): Int = 1381
+ def foo1380(): Int = 1382
+ def foo1381(): Int = 1383
+ def foo1382(): Int = 1384
+ def foo1383(): Int = 1385
+ def foo1384(): Int = 1386
+ def foo1385(): Int = 1387
+ def foo1386(): Int = 1388
+ def foo1387(): Int = 1389
+ def foo1388(): Int = 1390
+ def foo1389(): Int = 1391
+ def foo1390(): Int = 1392
+ def foo1391(): Int = 1393
+ def foo1392(): Int = 1394
+ def foo1393(): Int = 1395
+ def foo1394(): Int = 1396
+ def foo1395(): Int = 1397
+ def foo1396(): Int = 1398
+ def foo1397(): Int = 1399
+ def foo1398(): Int = 1400
+ def foo1399(): Int = 1401
+ def foo1400(): Int = 1402
+ def foo1401(): Int = 1403
+ def foo1402(): Int = 1404
+ def foo1403(): Int = 1405
+ def foo1404(): Int = 1406
+ def foo1405(): Int = 1407
+ def foo1406(): Int = 1408
+ def foo1407(): Int = 1409
+ def foo1408(): Int = 1410
+ def foo1409(): Int = 1411
+ def foo1410(): Int = 1412
+ def foo1411(): Int = 1413
+ def foo1412(): Int = 1414
+ def foo1413(): Int = 1415
+ def foo1414(): Int = 1416
+ def foo1415(): Int = 1417
+ def foo1416(): Int = 1418
+ def foo1417(): Int = 1419
+ def foo1418(): Int = 1420
+ def foo1419(): Int = 1421
+ def foo1420(): Int = 1422
+ def foo1421(): Int = 1423
+ def foo1422(): Int = 1424
+ def foo1423(): Int = 1425
+ def foo1424(): Int = 1426
+ def foo1425(): Int = 1427
+ def foo1426(): Int = 1428
+ def foo1427(): Int = 1429
+ def foo1428(): Int = 1430
+ def foo1429(): Int = 1431
+ def foo1430(): Int = 1432
+ def foo1431(): Int = 1433
+ def foo1432(): Int = 1434
+ def foo1433(): Int = 1435
+ def foo1434(): Int = 1436
+ def foo1435(): Int = 1437
+ def foo1436(): Int = 1438
+ def foo1437(): Int = 1439
+ def foo1438(): Int = 1440
+ def foo1439(): Int = 1441
+ def foo1440(): Int = 1442
+ def foo1441(): Int = 1443
+ def foo1442(): Int = 1444
+ def foo1443(): Int = 1445
+ def foo1444(): Int = 1446
+ def foo1445(): Int = 1447
+ def foo1446(): Int = 1448
+ def foo1447(): Int = 1449
+ def foo1448(): Int = 1450
+ def foo1449(): Int = 1451
+ def foo1450(): Int = 1452
+ def foo1451(): Int = 1453
+ def foo1452(): Int = 1454
+ def foo1453(): Int = 1455
+ def foo1454(): Int = 1456
+ def foo1455(): Int = 1457
+ def foo1456(): Int = 1458
+ def foo1457(): Int = 1459
+ def foo1458(): Int = 1460
+ def foo1459(): Int = 1461
+ def foo1460(): Int = 1462
+ def foo1461(): Int = 1463
+ def foo1462(): Int = 1464
+ def foo1463(): Int = 1465
+ def foo1464(): Int = 1466
+ def foo1465(): Int = 1467
+ def foo1466(): Int = 1468
+ def foo1467(): Int = 1469
+ def foo1468(): Int = 1470
+ def foo1469(): Int = 1471
+ def foo1470(): Int = 1472
+ def foo1471(): Int = 1473
+ def foo1472(): Int = 1474
+ def foo1473(): Int = 1475
+ def foo1474(): Int = 1476
+ def foo1475(): Int = 1477
+ def foo1476(): Int = 1478
+ def foo1477(): Int = 1479
+ def foo1478(): Int = 1480
+ def foo1479(): Int = 1481
+ def foo1480(): Int = 1482
+ def foo1481(): Int = 1483
+ def foo1482(): Int = 1484
+ def foo1483(): Int = 1485
+ def foo1484(): Int = 1486
+ def foo1485(): Int = 1487
+ def foo1486(): Int = 1488
+ def foo1487(): Int = 1489
+ def foo1488(): Int = 1490
+ def foo1489(): Int = 1491
+ def foo1490(): Int = 1492
+ def foo1491(): Int = 1493
+ def foo1492(): Int = 1494
+ def foo1493(): Int = 1495
+ def foo1494(): Int = 1496
+ def foo1495(): Int = 1497
+ def foo1496(): Int = 1498
+ def foo1497(): Int = 1499
+ def foo1498(): Int = 1500
+ def foo1499(): Int = 1501
+ def foo1500(): Int = 1502
+ def foo1501(): Int = 1503
+ def foo1502(): Int = 1504
+ def foo1503(): Int = 1505
+ def foo1504(): Int = 1506
+ def foo1505(): Int = 1507
+ def foo1506(): Int = 1508
+ def foo1507(): Int = 1509
+ def foo1508(): Int = 1510
+ def foo1509(): Int = 1511
+ def foo1510(): Int = 1512
+ def foo1511(): Int = 1513
+ def foo1512(): Int = 1514
+ def foo1513(): Int = 1515
+ def foo1514(): Int = 1516
+ def foo1515(): Int = 1517
+ def foo1516(): Int = 1518
+ def foo1517(): Int = 1519
+ def foo1518(): Int = 1520
+ def foo1519(): Int = 1521
+ def foo1520(): Int = 1522
+ def foo1521(): Int = 1523
+ def foo1522(): Int = 1524
+ def foo1523(): Int = 1525
+ def foo1524(): Int = 1526
+ def foo1525(): Int = 1527
+ def foo1526(): Int = 1528
+ def foo1527(): Int = 1529
+ def foo1528(): Int = 1530
+ def foo1529(): Int = 1531
+ def foo1530(): Int = 1532
+ def foo1531(): Int = 1533
+ def foo1532(): Int = 1534
+ def foo1533(): Int = 1535
+ def foo1534(): Int = 1536
+ def foo1535(): Int = 1537
+ def foo1536(): Int = 1538
+ def foo1537(): Int = 1539
+ def foo1538(): Int = 1540
+ def foo1539(): Int = 1541
+ def foo1540(): Int = 1542
+ def foo1541(): Int = 1543
+ def foo1542(): Int = 1544
+ def foo1543(): Int = 1545
+ def foo1544(): Int = 1546
+ def foo1545(): Int = 1547
+ def foo1546(): Int = 1548
+ def foo1547(): Int = 1549
+ def foo1548(): Int = 1550
+ def foo1549(): Int = 1551
+ def foo1550(): Int = 1552
+ def foo1551(): Int = 1553
+ def foo1552(): Int = 1554
+ def foo1553(): Int = 1555
+ def foo1554(): Int = 1556
+ def foo1555(): Int = 1557
+ def foo1556(): Int = 1558
+ def foo1557(): Int = 1559
+ def foo1558(): Int = 1560
+ def foo1559(): Int = 1561
+ def foo1560(): Int = 1562
+ def foo1561(): Int = 1563
+ def foo1562(): Int = 1564
+ def foo1563(): Int = 1565
+ def foo1564(): Int = 1566
+ def foo1565(): Int = 1567
+ def foo1566(): Int = 1568
+ def foo1567(): Int = 1569
+ def foo1568(): Int = 1570
+ def foo1569(): Int = 1571
+ def foo1570(): Int = 1572
+ def foo1571(): Int = 1573
+ def foo1572(): Int = 1574
+ def foo1573(): Int = 1575
+ def foo1574(): Int = 1576
+ def foo1575(): Int = 1577
+ def foo1576(): Int = 1578
+ def foo1577(): Int = 1579
+ def foo1578(): Int = 1580
+ def foo1579(): Int = 1581
+ def foo1580(): Int = 1582
+ def foo1581(): Int = 1583
+ def foo1582(): Int = 1584
+ def foo1583(): Int = 1585
+ def foo1584(): Int = 1586
+ def foo1585(): Int = 1587
+ def foo1586(): Int = 1588
+ def foo1587(): Int = 1589
+ def foo1588(): Int = 1590
+ def foo1589(): Int = 1591
+ def foo1590(): Int = 1592
+ def foo1591(): Int = 1593
+ def foo1592(): Int = 1594
+ def foo1593(): Int = 1595
+ def foo1594(): Int = 1596
+ def foo1595(): Int = 1597
+ def foo1596(): Int = 1598
+ def foo1597(): Int = 1599
+ def foo1598(): Int = 1600
+ def foo1599(): Int = 1601
+ def foo1600(): Int = 1602
+ def foo1601(): Int = 1603
+ def foo1602(): Int = 1604
+ def foo1603(): Int = 1605
+ def foo1604(): Int = 1606
+ def foo1605(): Int = 1607
+ def foo1606(): Int = 1608
+ def foo1607(): Int = 1609
+ def foo1608(): Int = 1610
+ def foo1609(): Int = 1611
+ def foo1610(): Int = 1612
+ def foo1611(): Int = 1613
+ def foo1612(): Int = 1614
+ def foo1613(): Int = 1615
+ def foo1614(): Int = 1616
+ def foo1615(): Int = 1617
+ def foo1616(): Int = 1618
+ def foo1617(): Int = 1619
+ def foo1618(): Int = 1620
+ def foo1619(): Int = 1621
+ def foo1620(): Int = 1622
+ def foo1621(): Int = 1623
+ def foo1622(): Int = 1624
+ def foo1623(): Int = 1625
+ def foo1624(): Int = 1626
+ def foo1625(): Int = 1627
+ def foo1626(): Int = 1628
+ def foo1627(): Int = 1629
+ def foo1628(): Int = 1630
+ def foo1629(): Int = 1631
+ def foo1630(): Int = 1632
+ def foo1631(): Int = 1633
+ def foo1632(): Int = 1634
+ def foo1633(): Int = 1635
+ def foo1634(): Int = 1636
+ def foo1635(): Int = 1637
+ def foo1636(): Int = 1638
+ def foo1637(): Int = 1639
+ def foo1638(): Int = 1640
+ def foo1639(): Int = 1641
+ def foo1640(): Int = 1642
+ def foo1641(): Int = 1643
+ def foo1642(): Int = 1644
+ def foo1643(): Int = 1645
+ def foo1644(): Int = 1646
+ def foo1645(): Int = 1647
+ def foo1646(): Int = 1648
+ def foo1647(): Int = 1649
+ def foo1648(): Int = 1650
+ def foo1649(): Int = 1651
+ def foo1650(): Int = 1652
+ def foo1651(): Int = 1653
+ def foo1652(): Int = 1654
+ def foo1653(): Int = 1655
+ def foo1654(): Int = 1656
+ def foo1655(): Int = 1657
+ def foo1656(): Int = 1658
+ def foo1657(): Int = 1659
+ def foo1658(): Int = 1660
+ def foo1659(): Int = 1661
+ def foo1660(): Int = 1662
+ def foo1661(): Int = 1663
+ def foo1662(): Int = 1664
+ def foo1663(): Int = 1665
+ def foo1664(): Int = 1666
+ def foo1665(): Int = 1667
+ def foo1666(): Int = 1668
+ def foo1667(): Int = 1669
+ def foo1668(): Int = 1670
+ def foo1669(): Int = 1671
+ def foo1670(): Int = 1672
+ def foo1671(): Int = 1673
+ def foo1672(): Int = 1674
+ def foo1673(): Int = 1675
+ def foo1674(): Int = 1676
+ def foo1675(): Int = 1677
+ def foo1676(): Int = 1678
+ def foo1677(): Int = 1679
+ def foo1678(): Int = 1680
+ def foo1679(): Int = 1681
+ def foo1680(): Int = 1682
+ def foo1681(): Int = 1683
+ def foo1682(): Int = 1684
+ def foo1683(): Int = 1685
+ def foo1684(): Int = 1686
+ def foo1685(): Int = 1687
+ def foo1686(): Int = 1688
+ def foo1687(): Int = 1689
+ def foo1688(): Int = 1690
+ def foo1689(): Int = 1691
+ def foo1690(): Int = 1692
+ def foo1691(): Int = 1693
+ def foo1692(): Int = 1694
+ def foo1693(): Int = 1695
+ def foo1694(): Int = 1696
+ def foo1695(): Int = 1697
+ def foo1696(): Int = 1698
+ def foo1697(): Int = 1699
+ def foo1698(): Int = 1700
+ def foo1699(): Int = 1701
+ def foo1700(): Int = 1702
+ def foo1701(): Int = 1703
+ def foo1702(): Int = 1704
+ def foo1703(): Int = 1705
+ def foo1704(): Int = 1706
+ def foo1705(): Int = 1707
+ def foo1706(): Int = 1708
+ def foo1707(): Int = 1709
+ def foo1708(): Int = 1710
+ def foo1709(): Int = 1711
+ def foo1710(): Int = 1712
+ def foo1711(): Int = 1713
+ def foo1712(): Int = 1714
+ def foo1713(): Int = 1715
+ def foo1714(): Int = 1716
+ def foo1715(): Int = 1717
+ def foo1716(): Int = 1718
+ def foo1717(): Int = 1719
+ def foo1718(): Int = 1720
+ def foo1719(): Int = 1721
+ def foo1720(): Int = 1722
+ def foo1721(): Int = 1723
+ def foo1722(): Int = 1724
+ def foo1723(): Int = 1725
+ def foo1724(): Int = 1726
+ def foo1725(): Int = 1727
+ def foo1726(): Int = 1728
+ def foo1727(): Int = 1729
+ def foo1728(): Int = 1730
+ def foo1729(): Int = 1731
+ def foo1730(): Int = 1732
+ def foo1731(): Int = 1733
+ def foo1732(): Int = 1734
+ def foo1733(): Int = 1735
+ def foo1734(): Int = 1736
+ def foo1735(): Int = 1737
+ def foo1736(): Int = 1738
+ def foo1737(): Int = 1739
+ def foo1738(): Int = 1740
+ def foo1739(): Int = 1741
+ def foo1740(): Int = 1742
+ def foo1741(): Int = 1743
+ def foo1742(): Int = 1744
+ def foo1743(): Int = 1745
+ def foo1744(): Int = 1746
+ def foo1745(): Int = 1747
+ def foo1746(): Int = 1748
+ def foo1747(): Int = 1749
+ def foo1748(): Int = 1750
+ def foo1749(): Int = 1751
+ def foo1750(): Int = 1752
+ def foo1751(): Int = 1753
+ def foo1752(): Int = 1754
+ def foo1753(): Int = 1755
+ def foo1754(): Int = 1756
+ def foo1755(): Int = 1757
+ def foo1756(): Int = 1758
+ def foo1757(): Int = 1759
+ def foo1758(): Int = 1760
+ def foo1759(): Int = 1761
+ def foo1760(): Int = 1762
+ def foo1761(): Int = 1763
+ def foo1762(): Int = 1764
+ def foo1763(): Int = 1765
+ def foo1764(): Int = 1766
+ def foo1765(): Int = 1767
+ def foo1766(): Int = 1768
+ def foo1767(): Int = 1769
+ def foo1768(): Int = 1770
+ def foo1769(): Int = 1771
+ def foo1770(): Int = 1772
+ def foo1771(): Int = 1773
+ def foo1772(): Int = 1774
+ def foo1773(): Int = 1775
+ def foo1774(): Int = 1776
+ def foo1775(): Int = 1777
+ def foo1776(): Int = 1778
+ def foo1777(): Int = 1779
+ def foo1778(): Int = 1780
+ def foo1779(): Int = 1781
+ def foo1780(): Int = 1782
+ def foo1781(): Int = 1783
+ def foo1782(): Int = 1784
+ def foo1783(): Int = 1785
+ def foo1784(): Int = 1786
+ def foo1785(): Int = 1787
+ def foo1786(): Int = 1788
+ def foo1787(): Int = 1789
+ def foo1788(): Int = 1790
+ def foo1789(): Int = 1791
+ def foo1790(): Int = 1792
+ def foo1791(): Int = 1793
+ def foo1792(): Int = 1794
+ def foo1793(): Int = 1795
+ def foo1794(): Int = 1796
+ def foo1795(): Int = 1797
+ def foo1796(): Int = 1798
+ def foo1797(): Int = 1799
+ def foo1798(): Int = 1800
+ def foo1799(): Int = 1801
+ def foo1800(): Int = 1802
+ def foo1801(): Int = 1803
+ def foo1802(): Int = 1804
+ def foo1803(): Int = 1805
+ def foo1804(): Int = 1806
+ def foo1805(): Int = 1807
+ def foo1806(): Int = 1808
+ def foo1807(): Int = 1809
+ def foo1808(): Int = 1810
+ def foo1809(): Int = 1811
+ def foo1810(): Int = 1812
+ def foo1811(): Int = 1813
+ def foo1812(): Int = 1814
+ def foo1813(): Int = 1815
+ def foo1814(): Int = 1816
+ def foo1815(): Int = 1817
+ def foo1816(): Int = 1818
+ def foo1817(): Int = 1819
+ def foo1818(): Int = 1820
+ def foo1819(): Int = 1821
+ def foo1820(): Int = 1822
+ def foo1821(): Int = 1823
+ def foo1822(): Int = 1824
+ def foo1823(): Int = 1825
+ def foo1824(): Int = 1826
+ def foo1825(): Int = 1827
+ def foo1826(): Int = 1828
+ def foo1827(): Int = 1829
+ def foo1828(): Int = 1830
+ def foo1829(): Int = 1831
+ def foo1830(): Int = 1832
+ def foo1831(): Int = 1833
+ def foo1832(): Int = 1834
+ def foo1833(): Int = 1835
+ def foo1834(): Int = 1836
+ def foo1835(): Int = 1837
+ def foo1836(): Int = 1838
+ def foo1837(): Int = 1839
+ def foo1838(): Int = 1840
+ def foo1839(): Int = 1841
+ def foo1840(): Int = 1842
+ def foo1841(): Int = 1843
+ def foo1842(): Int = 1844
+ def foo1843(): Int = 1845
+ def foo1844(): Int = 1846
+ def foo1845(): Int = 1847
+ def foo1846(): Int = 1848
+ def foo1847(): Int = 1849
+ def foo1848(): Int = 1850
+ def foo1849(): Int = 1851
+ def foo1850(): Int = 1852
+ def foo1851(): Int = 1853
+ def foo1852(): Int = 1854
+ def foo1853(): Int = 1855
+ def foo1854(): Int = 1856
+ def foo1855(): Int = 1857
+ def foo1856(): Int = 1858
+ def foo1857(): Int = 1859
+ def foo1858(): Int = 1860
+ def foo1859(): Int = 1861
+ def foo1860(): Int = 1862
+ def foo1861(): Int = 1863
+ def foo1862(): Int = 1864
+ def foo1863(): Int = 1865
+ def foo1864(): Int = 1866
+ def foo1865(): Int = 1867
+ def foo1866(): Int = 1868
+ def foo1867(): Int = 1869
+ def foo1868(): Int = 1870
+ def foo1869(): Int = 1871
+ def foo1870(): Int = 1872
+ def foo1871(): Int = 1873
+ def foo1872(): Int = 1874
+ def foo1873(): Int = 1875
+ def foo1874(): Int = 1876
+ def foo1875(): Int = 1877
+ def foo1876(): Int = 1878
+ def foo1877(): Int = 1879
+ def foo1878(): Int = 1880
+ def foo1879(): Int = 1881
+ def foo1880(): Int = 1882
+ def foo1881(): Int = 1883
+ def foo1882(): Int = 1884
+ def foo1883(): Int = 1885
+ def foo1884(): Int = 1886
+ def foo1885(): Int = 1887
+ def foo1886(): Int = 1888
+ def foo1887(): Int = 1889
+ def foo1888(): Int = 1890
+ def foo1889(): Int = 1891
+ def foo1890(): Int = 1892
+ def foo1891(): Int = 1893
+ def foo1892(): Int = 1894
+ def foo1893(): Int = 1895
+ def foo1894(): Int = 1896
+ def foo1895(): Int = 1897
+ def foo1896(): Int = 1898
+ def foo1897(): Int = 1899
+ def foo1898(): Int = 1900
+ def foo1899(): Int = 1901
+ def foo1900(): Int = 1902
+ def foo1901(): Int = 1903
+ def foo1902(): Int = 1904
+ def foo1903(): Int = 1905
+ def foo1904(): Int = 1906
+ def foo1905(): Int = 1907
+ def foo1906(): Int = 1908
+ def foo1907(): Int = 1909
+ def foo1908(): Int = 1910
+ def foo1909(): Int = 1911
+ def foo1910(): Int = 1912
+ def foo1911(): Int = 1913
+ def foo1912(): Int = 1914
+ def foo1913(): Int = 1915
+ def foo1914(): Int = 1916
+ def foo1915(): Int = 1917
+ def foo1916(): Int = 1918
+ def foo1917(): Int = 1919
+ def foo1918(): Int = 1920
+ def foo1919(): Int = 1921
+ def foo1920(): Int = 1922
+ def foo1921(): Int = 1923
+ def foo1922(): Int = 1924
+ def foo1923(): Int = 1925
+ def foo1924(): Int = 1926
+ def foo1925(): Int = 1927
+ def foo1926(): Int = 1928
+ def foo1927(): Int = 1929
+ def foo1928(): Int = 1930
+ def foo1929(): Int = 1931
+ def foo1930(): Int = 1932
+ def foo1931(): Int = 1933
+ def foo1932(): Int = 1934
+ def foo1933(): Int = 1935
+ def foo1934(): Int = 1936
+ def foo1935(): Int = 1937
+ def foo1936(): Int = 1938
+ def foo1937(): Int = 1939
+ def foo1938(): Int = 1940
+ def foo1939(): Int = 1941
+ def foo1940(): Int = 1942
+ def foo1941(): Int = 1943
+ def foo1942(): Int = 1944
+ def foo1943(): Int = 1945
+ def foo1944(): Int = 1946
+ def foo1945(): Int = 1947
+ def foo1946(): Int = 1948
+ def foo1947(): Int = 1949
+ def foo1948(): Int = 1950
+ def foo1949(): Int = 1951
+ def foo1950(): Int = 1952
+ def foo1951(): Int = 1953
+ def foo1952(): Int = 1954
+ def foo1953(): Int = 1955
+ def foo1954(): Int = 1956
+ def foo1955(): Int = 1957
+ def foo1956(): Int = 1958
+ def foo1957(): Int = 1959
+ def foo1958(): Int = 1960
+ def foo1959(): Int = 1961
+ def foo1960(): Int = 1962
+ def foo1961(): Int = 1963
+ def foo1962(): Int = 1964
+ def foo1963(): Int = 1965
+ def foo1964(): Int = 1966
+ def foo1965(): Int = 1967
+ def foo1966(): Int = 1968
+ def foo1967(): Int = 1969
+ def foo1968(): Int = 1970
+ def foo1969(): Int = 1971
+ def foo1970(): Int = 1972
+ def foo1971(): Int = 1973
+ def foo1972(): Int = 1974
+ def foo1973(): Int = 1975
+ def foo1974(): Int = 1976
+ def foo1975(): Int = 1977
+ def foo1976(): Int = 1978
+ def foo1977(): Int = 1979
+ def foo1978(): Int = 1980
+ def foo1979(): Int = 1981
+ def foo1980(): Int = 1982
+ def foo1981(): Int = 1983
+ def foo1982(): Int = 1984
+ def foo1983(): Int = 1985
+ def foo1984(): Int = 1986
+ def foo1985(): Int = 1987
+ def foo1986(): Int = 1988
+ def foo1987(): Int = 1989
+ def foo1988(): Int = 1990
+ def foo1989(): Int = 1991
+ def foo1990(): Int = 1992
+ def foo1991(): Int = 1993
+ def foo1992(): Int = 1994
+ def foo1993(): Int = 1995
+ def foo1994(): Int = 1996
+ def foo1995(): Int = 1997
+ def foo1996(): Int = 1998
+ def foo1997(): Int = 1999
+ def foo1998(): Int = 2000
+ def foo1999(): Int = 2001
+ def foo2000(): Int = 2002
+ def foo2001(): Int = 2003
+ def foo2002(): Int = 2004
+ def foo2003(): Int = 2005
+ def foo2004(): Int = 2006
+ def foo2005(): Int = 2007
+ def foo2006(): Int = 2008
+ def foo2007(): Int = 2009
+ def foo2008(): Int = 2010
+ def foo2009(): Int = 2011
+ def foo2010(): Int = 2012
+ def foo2011(): Int = 2013
+ def foo2012(): Int = 2014
+ def foo2013(): Int = 2015
+ def foo2014(): Int = 2016
+ def foo2015(): Int = 2017
+ def foo2016(): Int = 2018
+ def foo2017(): Int = 2019
+ def foo2018(): Int = 2020
+ def foo2019(): Int = 2021
+ def foo2020(): Int = 2022
+ def foo2021(): Int = 2023
+ def foo2022(): Int = 2024
+ def foo2023(): Int = 2025
+ def foo2024(): Int = 2026
+ def foo2025(): Int = 2027
+ def foo2026(): Int = 2028
+ def foo2027(): Int = 2029
+ def foo2028(): Int = 2030
+ def foo2029(): Int = 2031
+ def foo2030(): Int = 2032
+ def foo2031(): Int = 2033
+ def foo2032(): Int = 2034
+ def foo2033(): Int = 2035
+ def foo2034(): Int = 2036
+ def foo2035(): Int = 2037
+ def foo2036(): Int = 2038
+ def foo2037(): Int = 2039
+ def foo2038(): Int = 2040
+ def foo2039(): Int = 2041
+ def foo2040(): Int = 2042
+ def foo2041(): Int = 2043
+ def foo2042(): Int = 2044
+ def foo2043(): Int = 2045
+ def foo2044(): Int = 2046
+ def foo2045(): Int = 2047
+ def foo2046(): Int = 2048
+ def foo2047(): Int = 2049
+ def foo2048(): Int = 2050
+ def foo2049(): Int = 2051
+ def foo2050(): Int = 2052
+ def foo2051(): Int = 2053
+ def foo2052(): Int = 2054
+ def foo2053(): Int = 2055
+ def foo2054(): Int = 2056
+ def foo2055(): Int = 2057
+ def foo2056(): Int = 2058
+ def foo2057(): Int = 2059
+ def foo2058(): Int = 2060
+ def foo2059(): Int = 2061
+ def foo2060(): Int = 2062
+ def foo2061(): Int = 2063
+ def foo2062(): Int = 2064
+ def foo2063(): Int = 2065
+ def foo2064(): Int = 2066
+ def foo2065(): Int = 2067
+ def foo2066(): Int = 2068
+ def foo2067(): Int = 2069
+ def foo2068(): Int = 2070
+ def foo2069(): Int = 2071
+ def foo2070(): Int = 2072
+ def foo2071(): Int = 2073
+ def foo2072(): Int = 2074
+ def foo2073(): Int = 2075
+ def foo2074(): Int = 2076
+ def foo2075(): Int = 2077
+ def foo2076(): Int = 2078
+ def foo2077(): Int = 2079
+ def foo2078(): Int = 2080
+ def foo2079(): Int = 2081
+ def foo2080(): Int = 2082
+ def foo2081(): Int = 2083
+ def foo2082(): Int = 2084
+ def foo2083(): Int = 2085
+ def foo2084(): Int = 2086
+ def foo2085(): Int = 2087
+ def foo2086(): Int = 2088
+ def foo2087(): Int = 2089
+ def foo2088(): Int = 2090
+ def foo2089(): Int = 2091
+ def foo2090(): Int = 2092
+ def foo2091(): Int = 2093
+ def foo2092(): Int = 2094
+ def foo2093(): Int = 2095
+ def foo2094(): Int = 2096
+ def foo2095(): Int = 2097
+ def foo2096(): Int = 2098
+ def foo2097(): Int = 2099
+ def foo2098(): Int = 2100
+ def foo2099(): Int = 2101
+ def foo2100(): Int = 2102
+ def foo2101(): Int = 2103
+ def foo2102(): Int = 2104
+ def foo2103(): Int = 2105
+ def foo2104(): Int = 2106
+ def foo2105(): Int = 2107
+ def foo2106(): Int = 2108
+ def foo2107(): Int = 2109
+ def foo2108(): Int = 2110
+ def foo2109(): Int = 2111
+ def foo2110(): Int = 2112
+ def foo2111(): Int = 2113
+ def foo2112(): Int = 2114
+ def foo2113(): Int = 2115
+ def foo2114(): Int = 2116
+ def foo2115(): Int = 2117
+ def foo2116(): Int = 2118
+ def foo2117(): Int = 2119
+ def foo2118(): Int = 2120
+ def foo2119(): Int = 2121
+ def foo2120(): Int = 2122
+ def foo2121(): Int = 2123
+ def foo2122(): Int = 2124
+ def foo2123(): Int = 2125
+ def foo2124(): Int = 2126
+ def foo2125(): Int = 2127
+ def foo2126(): Int = 2128
+ def foo2127(): Int = 2129
+ def foo2128(): Int = 2130
+ def foo2129(): Int = 2131
+ def foo2130(): Int = 2132
+ def foo2131(): Int = 2133
+ def foo2132(): Int = 2134
+ def foo2133(): Int = 2135
+ def foo2134(): Int = 2136
+ def foo2135(): Int = 2137
+ def foo2136(): Int = 2138
+ def foo2137(): Int = 2139
+ def foo2138(): Int = 2140
+ def foo2139(): Int = 2141
+ def foo2140(): Int = 2142
+ def foo2141(): Int = 2143
+ def foo2142(): Int = 2144
+ def foo2143(): Int = 2145
+ def foo2144(): Int = 2146
+ def foo2145(): Int = 2147
+ def foo2146(): Int = 2148
+ def foo2147(): Int = 2149
+ def foo2148(): Int = 2150
+ def foo2149(): Int = 2151
+ def foo2150(): Int = 2152
+ def foo2151(): Int = 2153
+ def foo2152(): Int = 2154
+ def foo2153(): Int = 2155
+ def foo2154(): Int = 2156
+ def foo2155(): Int = 2157
+ def foo2156(): Int = 2158
+ def foo2157(): Int = 2159
+ def foo2158(): Int = 2160
+ def foo2159(): Int = 2161
+ def foo2160(): Int = 2162
+ def foo2161(): Int = 2163
+ def foo2162(): Int = 2164
+ def foo2163(): Int = 2165
+ def foo2164(): Int = 2166
+ def foo2165(): Int = 2167
+ def foo2166(): Int = 2168
+ def foo2167(): Int = 2169
+ def foo2168(): Int = 2170
+ def foo2169(): Int = 2171
+ def foo2170(): Int = 2172
+ def foo2171(): Int = 2173
+ def foo2172(): Int = 2174
+ def foo2173(): Int = 2175
+ def foo2174(): Int = 2176
+ def foo2175(): Int = 2177
+ def foo2176(): Int = 2178
+ def foo2177(): Int = 2179
+ def foo2178(): Int = 2180
+ def foo2179(): Int = 2181
+ def foo2180(): Int = 2182
+ def foo2181(): Int = 2183
+ def foo2182(): Int = 2184
+ def foo2183(): Int = 2185
+ def foo2184(): Int = 2186
+ def foo2185(): Int = 2187
+ def foo2186(): Int = 2188
+ def foo2187(): Int = 2189
+ def foo2188(): Int = 2190
+ def foo2189(): Int = 2191
+ def foo2190(): Int = 2192
+ def foo2191(): Int = 2193
+ def foo2192(): Int = 2194
+ def foo2193(): Int = 2195
+ def foo2194(): Int = 2196
+ def foo2195(): Int = 2197
+ def foo2196(): Int = 2198
+ def foo2197(): Int = 2199
+ def foo2198(): Int = 2200
+ def foo2199(): Int = 2201
+ def foo2200(): Int = 2202
+ def foo2201(): Int = 2203
+ def foo2202(): Int = 2204
+ def foo2203(): Int = 2205
+ def foo2204(): Int = 2206
+ def foo2205(): Int = 2207
+ def foo2206(): Int = 2208
+ def foo2207(): Int = 2209
+ def foo2208(): Int = 2210
+ def foo2209(): Int = 2211
+ def foo2210(): Int = 2212
+ def foo2211(): Int = 2213
+ def foo2212(): Int = 2214
+ def foo2213(): Int = 2215
+ def foo2214(): Int = 2216
+ def foo2215(): Int = 2217
+ def foo2216(): Int = 2218
+ def foo2217(): Int = 2219
+ def foo2218(): Int = 2220
+ def foo2219(): Int = 2221
+ def foo2220(): Int = 2222
+ def foo2221(): Int = 2223
+ def foo2222(): Int = 2224
+ def foo2223(): Int = 2225
+ def foo2224(): Int = 2226
+ def foo2225(): Int = 2227
+ def foo2226(): Int = 2228
+ def foo2227(): Int = 2229
+ def foo2228(): Int = 2230
+ def foo2229(): Int = 2231
+ def foo2230(): Int = 2232
+ def foo2231(): Int = 2233
+ def foo2232(): Int = 2234
+ def foo2233(): Int = 2235
+ def foo2234(): Int = 2236
+ def foo2235(): Int = 2237
+ def foo2236(): Int = 2238
+ def foo2237(): Int = 2239
+ def foo2238(): Int = 2240
+ def foo2239(): Int = 2241
+ def foo2240(): Int = 2242
+ def foo2241(): Int = 2243
+ def foo2242(): Int = 2244
+ def foo2243(): Int = 2245
+ def foo2244(): Int = 2246
+ def foo2245(): Int = 2247
+ def foo2246(): Int = 2248
+ def foo2247(): Int = 2249
+ def foo2248(): Int = 2250
+ def foo2249(): Int = 2251
+ def foo2250(): Int = 2252
+ def foo2251(): Int = 2253
+ def foo2252(): Int = 2254
+ def foo2253(): Int = 2255
+ def foo2254(): Int = 2256
+ def foo2255(): Int = 2257
+ def foo2256(): Int = 2258
+ def foo2257(): Int = 2259
+ def foo2258(): Int = 2260
+ def foo2259(): Int = 2261
+ def foo2260(): Int = 2262
+ def foo2261(): Int = 2263
+ def foo2262(): Int = 2264
+ def foo2263(): Int = 2265
+ def foo2264(): Int = 2266
+ def foo2265(): Int = 2267
+ def foo2266(): Int = 2268
+ def foo2267(): Int = 2269
+ def foo2268(): Int = 2270
+ def foo2269(): Int = 2271
+ def foo2270(): Int = 2272
+ def foo2271(): Int = 2273
+ def foo2272(): Int = 2274
+ def foo2273(): Int = 2275
+ def foo2274(): Int = 2276
+ def foo2275(): Int = 2277
+ def foo2276(): Int = 2278
+ def foo2277(): Int = 2279
+ def foo2278(): Int = 2280
+ def foo2279(): Int = 2281
+ def foo2280(): Int = 2282
+ def foo2281(): Int = 2283
+ def foo2282(): Int = 2284
+ def foo2283(): Int = 2285
+ def foo2284(): Int = 2286
+ def foo2285(): Int = 2287
+ def foo2286(): Int = 2288
+ def foo2287(): Int = 2289
+ def foo2288(): Int = 2290
+ def foo2289(): Int = 2291
+ def foo2290(): Int = 2292
+ def foo2291(): Int = 2293
+ def foo2292(): Int = 2294
+ def foo2293(): Int = 2295
+ def foo2294(): Int = 2296
+ def foo2295(): Int = 2297
+ def foo2296(): Int = 2298
+ def foo2297(): Int = 2299
+ def foo2298(): Int = 2300
+ def foo2299(): Int = 2301
+ def foo2300(): Int = 2302
+ def foo2301(): Int = 2303
+ def foo2302(): Int = 2304
+ def foo2303(): Int = 2305
+ def foo2304(): Int = 2306
+ def foo2305(): Int = 2307
+ def foo2306(): Int = 2308
+ def foo2307(): Int = 2309
+ def foo2308(): Int = 2310
+ def foo2309(): Int = 2311
+ def foo2310(): Int = 2312
+ def foo2311(): Int = 2313
+ def foo2312(): Int = 2314
+ def foo2313(): Int = 2315
+ def foo2314(): Int = 2316
+ def foo2315(): Int = 2317
+ def foo2316(): Int = 2318
+ def foo2317(): Int = 2319
+ def foo2318(): Int = 2320
+ def foo2319(): Int = 2321
+ def foo2320(): Int = 2322
+ def foo2321(): Int = 2323
+ def foo2322(): Int = 2324
+ def foo2323(): Int = 2325
+ def foo2324(): Int = 2326
+ def foo2325(): Int = 2327
+ def foo2326(): Int = 2328
+ def foo2327(): Int = 2329
+ def foo2328(): Int = 2330
+ def foo2329(): Int = 2331
+ def foo2330(): Int = 2332
+ def foo2331(): Int = 2333
+ def foo2332(): Int = 2334
+ def foo2333(): Int = 2335
+ def foo2334(): Int = 2336
+ def foo2335(): Int = 2337
+ def foo2336(): Int = 2338
+ def foo2337(): Int = 2339
+ def foo2338(): Int = 2340
+ def foo2339(): Int = 2341
+ def foo2340(): Int = 2342
+ def foo2341(): Int = 2343
+ def foo2342(): Int = 2344
+ def foo2343(): Int = 2345
+ def foo2344(): Int = 2346
+ def foo2345(): Int = 2347
+ def foo2346(): Int = 2348
+ def foo2347(): Int = 2349
+ def foo2348(): Int = 2350
+ def foo2349(): Int = 2351
+ def foo2350(): Int = 2352
+ def foo2351(): Int = 2353
+ def foo2352(): Int = 2354
+ def foo2353(): Int = 2355
+ def foo2354(): Int = 2356
+ def foo2355(): Int = 2357
+ def foo2356(): Int = 2358
+ def foo2357(): Int = 2359
+ def foo2358(): Int = 2360
+ def foo2359(): Int = 2361
+ def foo2360(): Int = 2362
+ def foo2361(): Int = 2363
+ def foo2362(): Int = 2364
+ def foo2363(): Int = 2365
+ def foo2364(): Int = 2366
+ def foo2365(): Int = 2367
+ def foo2366(): Int = 2368
+ def foo2367(): Int = 2369
+ def foo2368(): Int = 2370
+ def foo2369(): Int = 2371
+ def foo2370(): Int = 2372
+ def foo2371(): Int = 2373
+ def foo2372(): Int = 2374
+ def foo2373(): Int = 2375
+ def foo2374(): Int = 2376
+ def foo2375(): Int = 2377
+ def foo2376(): Int = 2378
+ def foo2377(): Int = 2379
+ def foo2378(): Int = 2380
+ def foo2379(): Int = 2381
+ def foo2380(): Int = 2382
+ def foo2381(): Int = 2383
+ def foo2382(): Int = 2384
+ def foo2383(): Int = 2385
+ def foo2384(): Int = 2386
+ def foo2385(): Int = 2387
+ def foo2386(): Int = 2388
+ def foo2387(): Int = 2389
+ def foo2388(): Int = 2390
+ def foo2389(): Int = 2391
+ def foo2390(): Int = 2392
+ def foo2391(): Int = 2393
+ def foo2392(): Int = 2394
+ def foo2393(): Int = 2395
+ def foo2394(): Int = 2396
+ def foo2395(): Int = 2397
+ def foo2396(): Int = 2398
+ def foo2397(): Int = 2399
+ def foo2398(): Int = 2400
+ def foo2399(): Int = 2401
+ def foo2400(): Int = 2402
+ def foo2401(): Int = 2403
+ def foo2402(): Int = 2404
+ def foo2403(): Int = 2405
+ def foo2404(): Int = 2406
+ def foo2405(): Int = 2407
+ def foo2406(): Int = 2408
+ def foo2407(): Int = 2409
+ def foo2408(): Int = 2410
+ def foo2409(): Int = 2411
+ def foo2410(): Int = 2412
+ def foo2411(): Int = 2413
+ def foo2412(): Int = 2414
+ def foo2413(): Int = 2415
+ def foo2414(): Int = 2416
+ def foo2415(): Int = 2417
+ def foo2416(): Int = 2418
+ def foo2417(): Int = 2419
+ def foo2418(): Int = 2420
+ def foo2419(): Int = 2421
+ def foo2420(): Int = 2422
+ def foo2421(): Int = 2423
+ def foo2422(): Int = 2424
+ def foo2423(): Int = 2425
+ def foo2424(): Int = 2426
+ def foo2425(): Int = 2427
+ def foo2426(): Int = 2428
+ def foo2427(): Int = 2429
+ def foo2428(): Int = 2430
+ def foo2429(): Int = 2431
+ def foo2430(): Int = 2432
+ def foo2431(): Int = 2433
+ def foo2432(): Int = 2434
+ def foo2433(): Int = 2435
+ def foo2434(): Int = 2436
+ def foo2435(): Int = 2437
+ def foo2436(): Int = 2438
+ def foo2437(): Int = 2439
+ def foo2438(): Int = 2440
+ def foo2439(): Int = 2441
+ def foo2440(): Int = 2442
+ def foo2441(): Int = 2443
+ def foo2442(): Int = 2444
+ def foo2443(): Int = 2445
+ def foo2444(): Int = 2446
+ def foo2445(): Int = 2447
+ def foo2446(): Int = 2448
+ def foo2447(): Int = 2449
+ def foo2448(): Int = 2450
+ def foo2449(): Int = 2451
+ def foo2450(): Int = 2452
+ def foo2451(): Int = 2453
+ def foo2452(): Int = 2454
+ def foo2453(): Int = 2455
+ def foo2454(): Int = 2456
+ def foo2455(): Int = 2457
+ def foo2456(): Int = 2458
+ def foo2457(): Int = 2459
+ def foo2458(): Int = 2460
+ def foo2459(): Int = 2461
+ def foo2460(): Int = 2462
+ def foo2461(): Int = 2463
+ def foo2462(): Int = 2464
+ def foo2463(): Int = 2465
+ def foo2464(): Int = 2466
+ def foo2465(): Int = 2467
+ def foo2466(): Int = 2468
+ def foo2467(): Int = 2469
+ def foo2468(): Int = 2470
+ def foo2469(): Int = 2471
+ def foo2470(): Int = 2472
+ def foo2471(): Int = 2473
+ def foo2472(): Int = 2474
+ def foo2473(): Int = 2475
+ def foo2474(): Int = 2476
+ def foo2475(): Int = 2477
+ def foo2476(): Int = 2478
+ def foo2477(): Int = 2479
+ def foo2478(): Int = 2480
+ def foo2479(): Int = 2481
+ def foo2480(): Int = 2482
+ def foo2481(): Int = 2483
+ def foo2482(): Int = 2484
+ def foo2483(): Int = 2485
+ def foo2484(): Int = 2486
+ def foo2485(): Int = 2487
+ def foo2486(): Int = 2488
+ def foo2487(): Int = 2489
+ def foo2488(): Int = 2490
+ def foo2489(): Int = 2491
+ def foo2490(): Int = 2492
+ def foo2491(): Int = 2493
+ def foo2492(): Int = 2494
+ def foo2493(): Int = 2495
+ def foo2494(): Int = 2496
+ def foo2495(): Int = 2497
+ def foo2496(): Int = 2498
+ def foo2497(): Int = 2499
+ def foo2498(): Int = 2500
+ def foo2499(): Int = 2501
+ def foo2500(): Int = 2502
+ def foo2501(): Int = 2503
+ def foo2502(): Int = 2504
+ def foo2503(): Int = 2505
+ def foo2504(): Int = 2506
+ def foo2505(): Int = 2507
+ def foo2506(): Int = 2508
+ def foo2507(): Int = 2509
+ def foo2508(): Int = 2510
+ def foo2509(): Int = 2511
+ def foo2510(): Int = 2512
+ def foo2511(): Int = 2513
+ def foo2512(): Int = 2514
+ def foo2513(): Int = 2515
+ def foo2514(): Int = 2516
+ def foo2515(): Int = 2517
+ def foo2516(): Int = 2518
+ def foo2517(): Int = 2519
+ def foo2518(): Int = 2520
+ def foo2519(): Int = 2521
+ def foo2520(): Int = 2522
+ def foo2521(): Int = 2523
+ def foo2522(): Int = 2524
+ def foo2523(): Int = 2525
+ def foo2524(): Int = 2526
+ def foo2525(): Int = 2527
+ def foo2526(): Int = 2528
+ def foo2527(): Int = 2529
+ def foo2528(): Int = 2530
+ def foo2529(): Int = 2531
+ def foo2530(): Int = 2532
+ def foo2531(): Int = 2533
+ def foo2532(): Int = 2534
+ def foo2533(): Int = 2535
+ def foo2534(): Int = 2536
+ def foo2535(): Int = 2537
+ def foo2536(): Int = 2538
+ def foo2537(): Int = 2539
+ def foo2538(): Int = 2540
+ def foo2539(): Int = 2541
+ def foo2540(): Int = 2542
+ def foo2541(): Int = 2543
+ def foo2542(): Int = 2544
+ def foo2543(): Int = 2545
+ def foo2544(): Int = 2546
+ def foo2545(): Int = 2547
+ def foo2546(): Int = 2548
+ def foo2547(): Int = 2549
+ def foo2548(): Int = 2550
+ def foo2549(): Int = 2551
+ def foo2550(): Int = 2552
+ def foo2551(): Int = 2553
+ def foo2552(): Int = 2554
+ def foo2553(): Int = 2555
+ def foo2554(): Int = 2556
+ def foo2555(): Int = 2557
+ def foo2556(): Int = 2558
+ def foo2557(): Int = 2559
+ def foo2558(): Int = 2560
+ def foo2559(): Int = 2561
+ def foo2560(): Int = 2562
+ def foo2561(): Int = 2563
+ def foo2562(): Int = 2564
+ def foo2563(): Int = 2565
+ def foo2564(): Int = 2566
+ def foo2565(): Int = 2567
+ def foo2566(): Int = 2568
+ def foo2567(): Int = 2569
+ def foo2568(): Int = 2570
+ def foo2569(): Int = 2571
+ def foo2570(): Int = 2572
+ def foo2571(): Int = 2573
+ def foo2572(): Int = 2574
+ def foo2573(): Int = 2575
+ def foo2574(): Int = 2576
+ def foo2575(): Int = 2577
+ def foo2576(): Int = 2578
+ def foo2577(): Int = 2579
+ def foo2578(): Int = 2580
+ def foo2579(): Int = 2581
+ def foo2580(): Int = 2582
+ def foo2581(): Int = 2583
+ def foo2582(): Int = 2584
+ def foo2583(): Int = 2585
+ def foo2584(): Int = 2586
+ def foo2585(): Int = 2587
+ def foo2586(): Int = 2588
+ def foo2587(): Int = 2589
+ def foo2588(): Int = 2590
+ def foo2589(): Int = 2591
+ def foo2590(): Int = 2592
+ def foo2591(): Int = 2593
+ def foo2592(): Int = 2594
+ def foo2593(): Int = 2595
+ def foo2594(): Int = 2596
+ def foo2595(): Int = 2597
+ def foo2596(): Int = 2598
+ def foo2597(): Int = 2599
+ def foo2598(): Int = 2600
+ def foo2599(): Int = 2601
+ def foo2600(): Int = 2602
+ def foo2601(): Int = 2603
+ def foo2602(): Int = 2604
+ def foo2603(): Int = 2605
+ def foo2604(): Int = 2606
+ def foo2605(): Int = 2607
+ def foo2606(): Int = 2608
+ def foo2607(): Int = 2609
+ def foo2608(): Int = 2610
+ def foo2609(): Int = 2611
+ def foo2610(): Int = 2612
+ def foo2611(): Int = 2613
+ def foo2612(): Int = 2614
+ def foo2613(): Int = 2615
+ def foo2614(): Int = 2616
+ def foo2615(): Int = 2617
+ def foo2616(): Int = 2618
+ def foo2617(): Int = 2619
+ def foo2618(): Int = 2620
+ def foo2619(): Int = 2621
+ def foo2620(): Int = 2622
+ def foo2621(): Int = 2623
+ def foo2622(): Int = 2624
+ def foo2623(): Int = 2625
+ def foo2624(): Int = 2626
+ def foo2625(): Int = 2627
+ def foo2626(): Int = 2628
+ def foo2627(): Int = 2629
+ def foo2628(): Int = 2630
+ def foo2629(): Int = 2631
+ def foo2630(): Int = 2632
+ def foo2631(): Int = 2633
+ def foo2632(): Int = 2634
+ def foo2633(): Int = 2635
+ def foo2634(): Int = 2636
+ def foo2635(): Int = 2637
+ def foo2636(): Int = 2638
+ def foo2637(): Int = 2639
+ def foo2638(): Int = 2640
+ def foo2639(): Int = 2641
+ def foo2640(): Int = 2642
+ def foo2641(): Int = 2643
+ def foo2642(): Int = 2644
+ def foo2643(): Int = 2645
+ def foo2644(): Int = 2646
+ def foo2645(): Int = 2647
+ def foo2646(): Int = 2648
+ def foo2647(): Int = 2649
+ def foo2648(): Int = 2650
+ def foo2649(): Int = 2651
+ def foo2650(): Int = 2652
+ def foo2651(): Int = 2653
+ def foo2652(): Int = 2654
+ def foo2653(): Int = 2655
+ def foo2654(): Int = 2656
+ def foo2655(): Int = 2657
+ def foo2656(): Int = 2658
+ def foo2657(): Int = 2659
+ def foo2658(): Int = 2660
+ def foo2659(): Int = 2661
+ def foo2660(): Int = 2662
+ def foo2661(): Int = 2663
+ def foo2662(): Int = 2664
+ def foo2663(): Int = 2665
+ def foo2664(): Int = 2666
+ def foo2665(): Int = 2667
+ def foo2666(): Int = 2668
+ def foo2667(): Int = 2669
+ def foo2668(): Int = 2670
+ def foo2669(): Int = 2671
+ def foo2670(): Int = 2672
+ def foo2671(): Int = 2673
+ def foo2672(): Int = 2674
+ def foo2673(): Int = 2675
+ def foo2674(): Int = 2676
+ def foo2675(): Int = 2677
+ def foo2676(): Int = 2678
+ def foo2677(): Int = 2679
+ def foo2678(): Int = 2680
+ def foo2679(): Int = 2681
+ def foo2680(): Int = 2682
+ def foo2681(): Int = 2683
+ def foo2682(): Int = 2684
+ def foo2683(): Int = 2685
+ def foo2684(): Int = 2686
+ def foo2685(): Int = 2687
+ def foo2686(): Int = 2688
+ def foo2687(): Int = 2689
+ def foo2688(): Int = 2690
+ def foo2689(): Int = 2691
+ def foo2690(): Int = 2692
+ def foo2691(): Int = 2693
+ def foo2692(): Int = 2694
+ def foo2693(): Int = 2695
+ def foo2694(): Int = 2696
+ def foo2695(): Int = 2697
+ def foo2696(): Int = 2698
+ def foo2697(): Int = 2699
+ def foo2698(): Int = 2700
+ def foo2699(): Int = 2701
+ def foo2700(): Int = 2702
+ def foo2701(): Int = 2703
+ def foo2702(): Int = 2704
+ def foo2703(): Int = 2705
+ def foo2704(): Int = 2706
+ def foo2705(): Int = 2707
+ def foo2706(): Int = 2708
+ def foo2707(): Int = 2709
+ def foo2708(): Int = 2710
+ def foo2709(): Int = 2711
+ def foo2710(): Int = 2712
+ def foo2711(): Int = 2713
+ def foo2712(): Int = 2714
+ def foo2713(): Int = 2715
+ def foo2714(): Int = 2716
+ def foo2715(): Int = 2717
+ def foo2716(): Int = 2718
+ def foo2717(): Int = 2719
+ def foo2718(): Int = 2720
+ def foo2719(): Int = 2721
+ def foo2720(): Int = 2722
+ def foo2721(): Int = 2723
+ def foo2722(): Int = 2724
+ def foo2723(): Int = 2725
+ def foo2724(): Int = 2726
+ def foo2725(): Int = 2727
+ def foo2726(): Int = 2728
+ def foo2727(): Int = 2729
+ def foo2728(): Int = 2730
+ def foo2729(): Int = 2731
+ def foo2730(): Int = 2732
+ def foo2731(): Int = 2733
+ def foo2732(): Int = 2734
+ def foo2733(): Int = 2735
+ def foo2734(): Int = 2736
+ def foo2735(): Int = 2737
+ def foo2736(): Int = 2738
+ def foo2737(): Int = 2739
+ def foo2738(): Int = 2740
+ def foo2739(): Int = 2741
+ def foo2740(): Int = 2742
+ def foo2741(): Int = 2743
+ def foo2742(): Int = 2744
+ def foo2743(): Int = 2745
+ def foo2744(): Int = 2746
+ def foo2745(): Int = 2747
+ def foo2746(): Int = 2748
+ def foo2747(): Int = 2749
+ def foo2748(): Int = 2750
+ def foo2749(): Int = 2751
+ def foo2750(): Int = 2752
+ def foo2751(): Int = 2753
+ def foo2752(): Int = 2754
+ def foo2753(): Int = 2755
+ def foo2754(): Int = 2756
+ def foo2755(): Int = 2757
+ def foo2756(): Int = 2758
+ def foo2757(): Int = 2759
+ def foo2758(): Int = 2760
+ def foo2759(): Int = 2761
+ def foo2760(): Int = 2762
+ def foo2761(): Int = 2763
+ def foo2762(): Int = 2764
+ def foo2763(): Int = 2765
+ def foo2764(): Int = 2766
+ def foo2765(): Int = 2767
+ def foo2766(): Int = 2768
+ def foo2767(): Int = 2769
+ def foo2768(): Int = 2770
+ def foo2769(): Int = 2771
+ def foo2770(): Int = 2772
+ def foo2771(): Int = 2773
+ def foo2772(): Int = 2774
+ def foo2773(): Int = 2775
+ def foo2774(): Int = 2776
+ def foo2775(): Int = 2777
+ def foo2776(): Int = 2778
+ def foo2777(): Int = 2779
+ def foo2778(): Int = 2780
+ def foo2779(): Int = 2781
+ def foo2780(): Int = 2782
+ def foo2781(): Int = 2783
+ def foo2782(): Int = 2784
+ def foo2783(): Int = 2785
+ def foo2784(): Int = 2786
+ def foo2785(): Int = 2787
+ def foo2786(): Int = 2788
+ def foo2787(): Int = 2789
+ def foo2788(): Int = 2790
+ def foo2789(): Int = 2791
+ def foo2790(): Int = 2792
+ def foo2791(): Int = 2793
+ def foo2792(): Int = 2794
+ def foo2793(): Int = 2795
+ def foo2794(): Int = 2796
+ def foo2795(): Int = 2797
+ def foo2796(): Int = 2798
+ def foo2797(): Int = 2799
+ def foo2798(): Int = 2800
+ def foo2799(): Int = 2801
+ def foo2800(): Int = 2802
+ def foo2801(): Int = 2803
+ def foo2802(): Int = 2804
+ def foo2803(): Int = 2805
+ def foo2804(): Int = 2806
+ def foo2805(): Int = 2807
+ def foo2806(): Int = 2808
+ def foo2807(): Int = 2809
+ def foo2808(): Int = 2810
+ def foo2809(): Int = 2811
+ def foo2810(): Int = 2812
+ def foo2811(): Int = 2813
+ def foo2812(): Int = 2814
+ def foo2813(): Int = 2815
+ def foo2814(): Int = 2816
+ def foo2815(): Int = 2817
+ def foo2816(): Int = 2818
+ def foo2817(): Int = 2819
+ def foo2818(): Int = 2820
+ def foo2819(): Int = 2821
+ def foo2820(): Int = 2822
+ def foo2821(): Int = 2823
+ def foo2822(): Int = 2824
+ def foo2823(): Int = 2825
+ def foo2824(): Int = 2826
+ def foo2825(): Int = 2827
+ def foo2826(): Int = 2828
+ def foo2827(): Int = 2829
+ def foo2828(): Int = 2830
+ def foo2829(): Int = 2831
+ def foo2830(): Int = 2832
+ def foo2831(): Int = 2833
+ def foo2832(): Int = 2834
+ def foo2833(): Int = 2835
+ def foo2834(): Int = 2836
+ def foo2835(): Int = 2837
+ def foo2836(): Int = 2838
+ def foo2837(): Int = 2839
+ def foo2838(): Int = 2840
+ def foo2839(): Int = 2841
+ def foo2840(): Int = 2842
+ def foo2841(): Int = 2843
+ def foo2842(): Int = 2844
+ def foo2843(): Int = 2845
+ def foo2844(): Int = 2846
+ def foo2845(): Int = 2847
+ def foo2846(): Int = 2848
+ def foo2847(): Int = 2849
+ def foo2848(): Int = 2850
+ def foo2849(): Int = 2851
+ def foo2850(): Int = 2852
+ def foo2851(): Int = 2853
+ def foo2852(): Int = 2854
+ def foo2853(): Int = 2855
+ def foo2854(): Int = 2856
+ def foo2855(): Int = 2857
+ def foo2856(): Int = 2858
+ def foo2857(): Int = 2859
+ def foo2858(): Int = 2860
+ def foo2859(): Int = 2861
+ def foo2860(): Int = 2862
+ def foo2861(): Int = 2863
+ def foo2862(): Int = 2864
+ def foo2863(): Int = 2865
+ def foo2864(): Int = 2866
+ def foo2865(): Int = 2867
+ def foo2866(): Int = 2868
+ def foo2867(): Int = 2869
+ def foo2868(): Int = 2870
+ def foo2869(): Int = 2871
+ def foo2870(): Int = 2872
+ def foo2871(): Int = 2873
+ def foo2872(): Int = 2874
+ def foo2873(): Int = 2875
+ def foo2874(): Int = 2876
+ def foo2875(): Int = 2877
+ def foo2876(): Int = 2878
+ def foo2877(): Int = 2879
+ def foo2878(): Int = 2880
+ def foo2879(): Int = 2881
+ def foo2880(): Int = 2882
+ def foo2881(): Int = 2883
+ def foo2882(): Int = 2884
+ def foo2883(): Int = 2885
+ def foo2884(): Int = 2886
+ def foo2885(): Int = 2887
+ def foo2886(): Int = 2888
+ def foo2887(): Int = 2889
+ def foo2888(): Int = 2890
+ def foo2889(): Int = 2891
+ def foo2890(): Int = 2892
+ def foo2891(): Int = 2893
+ def foo2892(): Int = 2894
+ def foo2893(): Int = 2895
+ def foo2894(): Int = 2896
+ def foo2895(): Int = 2897
+ def foo2896(): Int = 2898
+ def foo2897(): Int = 2899
+ def foo2898(): Int = 2900
+ def foo2899(): Int = 2901
+ def foo2900(): Int = 2902
+ def foo2901(): Int = 2903
+ def foo2902(): Int = 2904
+ def foo2903(): Int = 2905
+ def foo2904(): Int = 2906
+ def foo2905(): Int = 2907
+ def foo2906(): Int = 2908
+ def foo2907(): Int = 2909
+ def foo2908(): Int = 2910
+ def foo2909(): Int = 2911
+ def foo2910(): Int = 2912
+ def foo2911(): Int = 2913
+ def foo2912(): Int = 2914
+ def foo2913(): Int = 2915
+ def foo2914(): Int = 2916
+ def foo2915(): Int = 2917
+ def foo2916(): Int = 2918
+ def foo2917(): Int = 2919
+ def foo2918(): Int = 2920
+ def foo2919(): Int = 2921
+ def foo2920(): Int = 2922
+ def foo2921(): Int = 2923
+ def foo2922(): Int = 2924
+ def foo2923(): Int = 2925
+ def foo2924(): Int = 2926
+ def foo2925(): Int = 2927
+ def foo2926(): Int = 2928
+ def foo2927(): Int = 2929
+ def foo2928(): Int = 2930
+ def foo2929(): Int = 2931
+ def foo2930(): Int = 2932
+ def foo2931(): Int = 2933
+ def foo2932(): Int = 2934
+ def foo2933(): Int = 2935
+ def foo2934(): Int = 2936
+ def foo2935(): Int = 2937
+ def foo2936(): Int = 2938
+ def foo2937(): Int = 2939
+ def foo2938(): Int = 2940
+ def foo2939(): Int = 2941
+ def foo2940(): Int = 2942
+ def foo2941(): Int = 2943
+ def foo2942(): Int = 2944
+ def foo2943(): Int = 2945
+ def foo2944(): Int = 2946
+ def foo2945(): Int = 2947
+ def foo2946(): Int = 2948
+ def foo2947(): Int = 2949
+ def foo2948(): Int = 2950
+ def foo2949(): Int = 2951
+ def foo2950(): Int = 2952
+ def foo2951(): Int = 2953
+ def foo2952(): Int = 2954
+ def foo2953(): Int = 2955
+ def foo2954(): Int = 2956
+ def foo2955(): Int = 2957
+ def foo2956(): Int = 2958
+ def foo2957(): Int = 2959
+ def foo2958(): Int = 2960
+ def foo2959(): Int = 2961
+ def foo2960(): Int = 2962
+ def foo2961(): Int = 2963
+ def foo2962(): Int = 2964
+ def foo2963(): Int = 2965
+ def foo2964(): Int = 2966
+ def foo2965(): Int = 2967
+ def foo2966(): Int = 2968
+ def foo2967(): Int = 2969
+ def foo2968(): Int = 2970
+ def foo2969(): Int = 2971
+ def foo2970(): Int = 2972
+ def foo2971(): Int = 2973
+ def foo2972(): Int = 2974
+ def foo2973(): Int = 2975
+ def foo2974(): Int = 2976
+ def foo2975(): Int = 2977
+ def foo2976(): Int = 2978
+ def foo2977(): Int = 2979
+ def foo2978(): Int = 2980
+ def foo2979(): Int = 2981
+ def foo2980(): Int = 2982
+ def foo2981(): Int = 2983
+ def foo2982(): Int = 2984
+ def foo2983(): Int = 2985
+ def foo2984(): Int = 2986
+ def foo2985(): Int = 2987
+ def foo2986(): Int = 2988
+ def foo2987(): Int = 2989
+ def foo2988(): Int = 2990
+ def foo2989(): Int = 2991
+ def foo2990(): Int = 2992
+ def foo2991(): Int = 2993
+ def foo2992(): Int = 2994
+ def foo2993(): Int = 2995
+ def foo2994(): Int = 2996
+ def foo2995(): Int = 2997
+ def foo2996(): Int = 2998
+ def foo2997(): Int = 2999
+ def foo2998(): Int = 3000
+ def foo2999(): Int = 3001
+ def foo3000(): Int = 3002
+ def foo3001(): Int = 3003
+ def foo3002(): Int = 3004
+ def foo3003(): Int = 3005
+ def foo3004(): Int = 3006
+ def foo3005(): Int = 3007
+ def foo3006(): Int = 3008
+ def foo3007(): Int = 3009
+ def foo3008(): Int = 3010
+ def foo3009(): Int = 3011
+ def foo3010(): Int = 3012
+ def foo3011(): Int = 3013
+ def foo3012(): Int = 3014
+ def foo3013(): Int = 3015
+ def foo3014(): Int = 3016
+ def foo3015(): Int = 3017
+ def foo3016(): Int = 3018
+ def foo3017(): Int = 3019
+ def foo3018(): Int = 3020
+ def foo3019(): Int = 3021
+ def foo3020(): Int = 3022
+ def foo3021(): Int = 3023
+ def foo3022(): Int = 3024
+ def foo3023(): Int = 3025
+ def foo3024(): Int = 3026
+ def foo3025(): Int = 3027
+ def foo3026(): Int = 3028
+ def foo3027(): Int = 3029
+ def foo3028(): Int = 3030
+ def foo3029(): Int = 3031
+ def foo3030(): Int = 3032
+ def foo3031(): Int = 3033
+ def foo3032(): Int = 3034
+ def foo3033(): Int = 3035
+ def foo3034(): Int = 3036
+ def foo3035(): Int = 3037
+ def foo3036(): Int = 3038
+ def foo3037(): Int = 3039
+ def foo3038(): Int = 3040
+ def foo3039(): Int = 3041
+ def foo3040(): Int = 3042
+ def foo3041(): Int = 3043
+ def foo3042(): Int = 3044
+ def foo3043(): Int = 3045
+ def foo3044(): Int = 3046
+ def foo3045(): Int = 3047
+ def foo3046(): Int = 3048
+ def foo3047(): Int = 3049
+ def foo3048(): Int = 3050
+ def foo3049(): Int = 3051
+ def foo3050(): Int = 3052
+ def foo3051(): Int = 3053
+ def foo3052(): Int = 3054
+ def foo3053(): Int = 3055
+ def foo3054(): Int = 3056
+ def foo3055(): Int = 3057
+ def foo3056(): Int = 3058
+ def foo3057(): Int = 3059
+ def foo3058(): Int = 3060
+ def foo3059(): Int = 3061
+ def foo3060(): Int = 3062
+ def foo3061(): Int = 3063
+ def foo3062(): Int = 3064
+ def foo3063(): Int = 3065
+ def foo3064(): Int = 3066
+ def foo3065(): Int = 3067
+ def foo3066(): Int = 3068
+ def foo3067(): Int = 3069
+ def foo3068(): Int = 3070
+ def foo3069(): Int = 3071
+ def foo3070(): Int = 3072
+ def foo3071(): Int = 3073
+ def foo3072(): Int = 3074
+ def foo3073(): Int = 3075
+ def foo3074(): Int = 3076
+ def foo3075(): Int = 3077
+ def foo3076(): Int = 3078
+ def foo3077(): Int = 3079
+ def foo3078(): Int = 3080
+ def foo3079(): Int = 3081
+ def foo3080(): Int = 3082
+ def foo3081(): Int = 3083
+ def foo3082(): Int = 3084
+ def foo3083(): Int = 3085
+ def foo3084(): Int = 3086
+ def foo3085(): Int = 3087
+ def foo3086(): Int = 3088
+ def foo3087(): Int = 3089
+ def foo3088(): Int = 3090
+ def foo3089(): Int = 3091
+ def foo3090(): Int = 3092
+ def foo3091(): Int = 3093
+ def foo3092(): Int = 3094
+ def foo3093(): Int = 3095
+ def foo3094(): Int = 3096
+ def foo3095(): Int = 3097
+ def foo3096(): Int = 3098
+ def foo3097(): Int = 3099
+ def foo3098(): Int = 3100
+ def foo3099(): Int = 3101
+ def foo3100(): Int = 3102
+ def foo3101(): Int = 3103
+ def foo3102(): Int = 3104
+ def foo3103(): Int = 3105
+ def foo3104(): Int = 3106
+ def foo3105(): Int = 3107
+ def foo3106(): Int = 3108
+ def foo3107(): Int = 3109
+ def foo3108(): Int = 3110
+ def foo3109(): Int = 3111
+ def foo3110(): Int = 3112
+ def foo3111(): Int = 3113
+ def foo3112(): Int = 3114
+ def foo3113(): Int = 3115
+ def foo3114(): Int = 3116
+ def foo3115(): Int = 3117
+ def foo3116(): Int = 3118
+ def foo3117(): Int = 3119
+ def foo3118(): Int = 3120
+ def foo3119(): Int = 3121
+ def foo3120(): Int = 3122
+ def foo3121(): Int = 3123
+ def foo3122(): Int = 3124
+ def foo3123(): Int = 3125
+ def foo3124(): Int = 3126
+ def foo3125(): Int = 3127
+ def foo3126(): Int = 3128
+ def foo3127(): Int = 3129
+ def foo3128(): Int = 3130
+ def foo3129(): Int = 3131
+ def foo3130(): Int = 3132
+ def foo3131(): Int = 3133
+ def foo3132(): Int = 3134
+ def foo3133(): Int = 3135
+ def foo3134(): Int = 3136
+ def foo3135(): Int = 3137
+ def foo3136(): Int = 3138
+ def foo3137(): Int = 3139
+ def foo3138(): Int = 3140
+ def foo3139(): Int = 3141
+ def foo3140(): Int = 3142
+ def foo3141(): Int = 3143
+ def foo3142(): Int = 3144
+ def foo3143(): Int = 3145
+ def foo3144(): Int = 3146
+ def foo3145(): Int = 3147
+ def foo3146(): Int = 3148
+ def foo3147(): Int = 3149
+ def foo3148(): Int = 3150
+ def foo3149(): Int = 3151
+ def foo3150(): Int = 3152
+ def foo3151(): Int = 3153
+ def foo3152(): Int = 3154
+ def foo3153(): Int = 3155
+ def foo3154(): Int = 3156
+ def foo3155(): Int = 3157
+ def foo3156(): Int = 3158
+ def foo3157(): Int = 3159
+ def foo3158(): Int = 3160
+ def foo3159(): Int = 3161
+ def foo3160(): Int = 3162
+ def foo3161(): Int = 3163
+ def foo3162(): Int = 3164
+ def foo3163(): Int = 3165
+ def foo3164(): Int = 3166
+ def foo3165(): Int = 3167
+ def foo3166(): Int = 3168
+ def foo3167(): Int = 3169
+ def foo3168(): Int = 3170
+ def foo3169(): Int = 3171
+ def foo3170(): Int = 3172
+ def foo3171(): Int = 3173
+ def foo3172(): Int = 3174
+ def foo3173(): Int = 3175
+ def foo3174(): Int = 3176
+ def foo3175(): Int = 3177
+ def foo3176(): Int = 3178
+ def foo3177(): Int = 3179
+ def foo3178(): Int = 3180
+ def foo3179(): Int = 3181
+ def foo3180(): Int = 3182
+ def foo3181(): Int = 3183
+ def foo3182(): Int = 3184
+ def foo3183(): Int = 3185
+ def foo3184(): Int = 3186
+ def foo3185(): Int = 3187
+ def foo3186(): Int = 3188
+ def foo3187(): Int = 3189
+ def foo3188(): Int = 3190
+ def foo3189(): Int = 3191
+ def foo3190(): Int = 3192
+ def foo3191(): Int = 3193
+ def foo3192(): Int = 3194
+ def foo3193(): Int = 3195
+ def foo3194(): Int = 3196
+ def foo3195(): Int = 3197
+ def foo3196(): Int = 3198
+ def foo3197(): Int = 3199
+ def foo3198(): Int = 3200
+ def foo3199(): Int = 3201
+ def foo3200(): Int = 3202
+ def foo3201(): Int = 3203
+ def foo3202(): Int = 3204
+ def foo3203(): Int = 3205
+ def foo3204(): Int = 3206
+ def foo3205(): Int = 3207
+ def foo3206(): Int = 3208
+ def foo3207(): Int = 3209
+ def foo3208(): Int = 3210
+ def foo3209(): Int = 3211
+ def foo3210(): Int = 3212
+ def foo3211(): Int = 3213
+ def foo3212(): Int = 3214
+ def foo3213(): Int = 3215
+ def foo3214(): Int = 3216
+ def foo3215(): Int = 3217
+ def foo3216(): Int = 3218
+ def foo3217(): Int = 3219
+ def foo3218(): Int = 3220
+ def foo3219(): Int = 3221
+ def foo3220(): Int = 3222
+ def foo3221(): Int = 3223
+ def foo3222(): Int = 3224
+ def foo3223(): Int = 3225
+ def foo3224(): Int = 3226
+ def foo3225(): Int = 3227
+ def foo3226(): Int = 3228
+ def foo3227(): Int = 3229
+ def foo3228(): Int = 3230
+ def foo3229(): Int = 3231
+ def foo3230(): Int = 3232
+ def foo3231(): Int = 3233
+ def foo3232(): Int = 3234
+ def foo3233(): Int = 3235
+ def foo3234(): Int = 3236
+ def foo3235(): Int = 3237
+ def foo3236(): Int = 3238
+ def foo3237(): Int = 3239
+ def foo3238(): Int = 3240
+ def foo3239(): Int = 3241
+ def foo3240(): Int = 3242
+ def foo3241(): Int = 3243
+ def foo3242(): Int = 3244
+ def foo3243(): Int = 3245
+ def foo3244(): Int = 3246
+ def foo3245(): Int = 3247
+ def foo3246(): Int = 3248
+ def foo3247(): Int = 3249
+ def foo3248(): Int = 3250
+ def foo3249(): Int = 3251
+ def foo3250(): Int = 3252
+ def foo3251(): Int = 3253
+ def foo3252(): Int = 3254
+ def foo3253(): Int = 3255
+ def foo3254(): Int = 3256
+ def foo3255(): Int = 3257
+ def foo3256(): Int = 3258
+ def foo3257(): Int = 3259
+ def foo3258(): Int = 3260
+ def foo3259(): Int = 3261
+ def foo3260(): Int = 3262
+ def foo3261(): Int = 3263
+ def foo3262(): Int = 3264
+ def foo3263(): Int = 3265
+ def foo3264(): Int = 3266
+ def foo3265(): Int = 3267
+ def foo3266(): Int = 3268
+ def foo3267(): Int = 3269
+ def foo3268(): Int = 3270
+ def foo3269(): Int = 3271
+ def foo3270(): Int = 3272
+ def foo3271(): Int = 3273
+ def foo3272(): Int = 3274
+ def foo3273(): Int = 3275
+ def foo3274(): Int = 3276
+ def foo3275(): Int = 3277
+ def foo3276(): Int = 3278
+ def foo3277(): Int = 3279
+ def foo3278(): Int = 3280
+ def foo3279(): Int = 3281
+ def foo3280(): Int = 3282
+ def foo3281(): Int = 3283
+ def foo3282(): Int = 3284
+ def foo3283(): Int = 3285
+ def foo3284(): Int = 3286
+ def foo3285(): Int = 3287
+ def foo3286(): Int = 3288
+ def foo3287(): Int = 3289
+ def foo3288(): Int = 3290
+ def foo3289(): Int = 3291
+ def foo3290(): Int = 3292
+ def foo3291(): Int = 3293
+ def foo3292(): Int = 3294
+ def foo3293(): Int = 3295
+ def foo3294(): Int = 3296
+ def foo3295(): Int = 3297
+ def foo3296(): Int = 3298
+ def foo3297(): Int = 3299
+ def foo3298(): Int = 3300
+ def foo3299(): Int = 3301
+ def foo3300(): Int = 3302
+ def foo3301(): Int = 3303
+ def foo3302(): Int = 3304
+ def foo3303(): Int = 3305
+ def foo3304(): Int = 3306
+ def foo3305(): Int = 3307
+ def foo3306(): Int = 3308
+ def foo3307(): Int = 3309
+ def foo3308(): Int = 3310
+ def foo3309(): Int = 3311
+ def foo3310(): Int = 3312
+ def foo3311(): Int = 3313
+ def foo3312(): Int = 3314
+ def foo3313(): Int = 3315
+ def foo3314(): Int = 3316
+ def foo3315(): Int = 3317
+ def foo3316(): Int = 3318
+ def foo3317(): Int = 3319
+ def foo3318(): Int = 3320
+ def foo3319(): Int = 3321
+ def foo3320(): Int = 3322
+ def foo3321(): Int = 3323
+ def foo3322(): Int = 3324
+ def foo3323(): Int = 3325
+ def foo3324(): Int = 3326
+ def foo3325(): Int = 3327
+ def foo3326(): Int = 3328
+ def foo3327(): Int = 3329
+ def foo3328(): Int = 3330
+ def foo3329(): Int = 3331
+ def foo3330(): Int = 3332
+ def foo3331(): Int = 3333
+ def foo3332(): Int = 3334
+ def foo3333(): Int = 3335
+ def foo3334(): Int = 3336
+ def foo3335(): Int = 3337
+ def foo3336(): Int = 3338
+ def foo3337(): Int = 3339
+ def foo3338(): Int = 3340
+ def foo3339(): Int = 3341
+ def foo3340(): Int = 3342
+ def foo3341(): Int = 3343
+ def foo3342(): Int = 3344
+ def foo3343(): Int = 3345
+ def foo3344(): Int = 3346
+ def foo3345(): Int = 3347
+ def foo3346(): Int = 3348
+ def foo3347(): Int = 3349
+ def foo3348(): Int = 3350
+ def foo3349(): Int = 3351
+ def foo3350(): Int = 3352
+ def foo3351(): Int = 3353
+ def foo3352(): Int = 3354
+ def foo3353(): Int = 3355
+ def foo3354(): Int = 3356
+ def foo3355(): Int = 3357
+ def foo3356(): Int = 3358
+ def foo3357(): Int = 3359
+ def foo3358(): Int = 3360
+ def foo3359(): Int = 3361
+ def foo3360(): Int = 3362
+ def foo3361(): Int = 3363
+ def foo3362(): Int = 3364
+ def foo3363(): Int = 3365
+ def foo3364(): Int = 3366
+ def foo3365(): Int = 3367
+ def foo3366(): Int = 3368
+ def foo3367(): Int = 3369
+ def foo3368(): Int = 3370
+ def foo3369(): Int = 3371
+ def foo3370(): Int = 3372
+ def foo3371(): Int = 3373
+ def foo3372(): Int = 3374
+ def foo3373(): Int = 3375
+ def foo3374(): Int = 3376
+ def foo3375(): Int = 3377
+ def foo3376(): Int = 3378
+ def foo3377(): Int = 3379
+ def foo3378(): Int = 3380
+ def foo3379(): Int = 3381
+ def foo3380(): Int = 3382
+ def foo3381(): Int = 3383
+ def foo3382(): Int = 3384
+ def foo3383(): Int = 3385
+ def foo3384(): Int = 3386
+ def foo3385(): Int = 3387
+ def foo3386(): Int = 3388
+ def foo3387(): Int = 3389
+ def foo3388(): Int = 3390
+ def foo3389(): Int = 3391
+ def foo3390(): Int = 3392
+ def foo3391(): Int = 3393
+ def foo3392(): Int = 3394
+ def foo3393(): Int = 3395
+ def foo3394(): Int = 3396
+ def foo3395(): Int = 3397
+ def foo3396(): Int = 3398
+ def foo3397(): Int = 3399
+ def foo3398(): Int = 3400
+ def foo3399(): Int = 3401
+ def foo3400(): Int = 3402
+ def foo3401(): Int = 3403
+ def foo3402(): Int = 3404
+ def foo3403(): Int = 3405
+ def foo3404(): Int = 3406
+ def foo3405(): Int = 3407
+ def foo3406(): Int = 3408
+ def foo3407(): Int = 3409
+ def foo3408(): Int = 3410
+ def foo3409(): Int = 3411
+ def foo3410(): Int = 3412
+ def foo3411(): Int = 3413
+ def foo3412(): Int = 3414
+ def foo3413(): Int = 3415
+ def foo3414(): Int = 3416
+ def foo3415(): Int = 3417
+ def foo3416(): Int = 3418
+ def foo3417(): Int = 3419
+ def foo3418(): Int = 3420
+ def foo3419(): Int = 3421
+ def foo3420(): Int = 3422
+ def foo3421(): Int = 3423
+ def foo3422(): Int = 3424
+ def foo3423(): Int = 3425
+ def foo3424(): Int = 3426
+ def foo3425(): Int = 3427
+ def foo3426(): Int = 3428
+ def foo3427(): Int = 3429
+ def foo3428(): Int = 3430
+ def foo3429(): Int = 3431
+ def foo3430(): Int = 3432
+ def foo3431(): Int = 3433
+ def foo3432(): Int = 3434
+ def foo3433(): Int = 3435
+ def foo3434(): Int = 3436
+ def foo3435(): Int = 3437
+ def foo3436(): Int = 3438
+ def foo3437(): Int = 3439
+ def foo3438(): Int = 3440
+ def foo3439(): Int = 3441
+ def foo3440(): Int = 3442
+ def foo3441(): Int = 3443
+ def foo3442(): Int = 3444
+ def foo3443(): Int = 3445
+ def foo3444(): Int = 3446
+ def foo3445(): Int = 3447
+ def foo3446(): Int = 3448
+ def foo3447(): Int = 3449
+ def foo3448(): Int = 3450
+ def foo3449(): Int = 3451
+ def foo3450(): Int = 3452
+ def foo3451(): Int = 3453
+ def foo3452(): Int = 3454
+ def foo3453(): Int = 3455
+ def foo3454(): Int = 3456
+ def foo3455(): Int = 3457
+ def foo3456(): Int = 3458
+ def foo3457(): Int = 3459
+ def foo3458(): Int = 3460
+ def foo3459(): Int = 3461
+ def foo3460(): Int = 3462
+ def foo3461(): Int = 3463
+ def foo3462(): Int = 3464
+ def foo3463(): Int = 3465
+ def foo3464(): Int = 3466
+ def foo3465(): Int = 3467
+ def foo3466(): Int = 3468
+ def foo3467(): Int = 3469
+ def foo3468(): Int = 3470
+ def foo3469(): Int = 3471
+ def foo3470(): Int = 3472
+ def foo3471(): Int = 3473
+ def foo3472(): Int = 3474
+ def foo3473(): Int = 3475
+ def foo3474(): Int = 3476
+ def foo3475(): Int = 3477
+ def foo3476(): Int = 3478
+ def foo3477(): Int = 3479
+ def foo3478(): Int = 3480
+ def foo3479(): Int = 3481
+ def foo3480(): Int = 3482
+ def foo3481(): Int = 3483
+ def foo3482(): Int = 3484
+ def foo3483(): Int = 3485
+ def foo3484(): Int = 3486
+ def foo3485(): Int = 3487
+ def foo3486(): Int = 3488
+ def foo3487(): Int = 3489
+ def foo3488(): Int = 3490
+ def foo3489(): Int = 3491
+ def foo3490(): Int = 3492
+ def foo3491(): Int = 3493
+ def foo3492(): Int = 3494
+ def foo3493(): Int = 3495
+ def foo3494(): Int = 3496
+ def foo3495(): Int = 3497
+ def foo3496(): Int = 3498
+ def foo3497(): Int = 3499
+ def foo3498(): Int = 3500
+ def foo3499(): Int = 3501
+ def foo3500(): Int = 3502
+}
diff --git a/test/files/t8449/Client.scala b/test/files/t8449/Client.scala
new file mode 100644
index 0000000000..5d273f06b2
--- /dev/null
+++ b/test/files/t8449/Client.scala
@@ -0,0 +1,3 @@
+object Client {
+ def foo: Any = new Test().foo
+}
diff --git a/test/files/t8449/Test.java b/test/files/t8449/Test.java
new file mode 100644
index 0000000000..ecb1711b24
--- /dev/null
+++ b/test/files/t8449/Test.java
@@ -0,0 +1,10 @@
+public class Test {
+ // Raw type over a Scala type constructor
+ public scala.Function1 foo() { return null; }
+ // scalac reported:
+ // % scalac-hash v2.11.2 -d /tmp sandbox/{Test.java,Client.scala}
+ // sandbox/Test.java:2: error: trait Function1 takes type parameters
+ // public scala.Function1 foo() { return null; }
+ // ^
+ // one error found
+}
diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
index e474ae737c..6b45a4e9f3 100644
--- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala
+++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
@@ -261,7 +261,7 @@ object ScalaRunTime {
*
* The primary motivation for this method is to provide a means for
* correctly obtaining a String representation of a value, while
- * avoiding the pitfalls of naïvely calling toString on said value.
+ * avoiding the pitfalls of naively calling toString on said value.
* In particular, it addresses the fact that (a) toString cannot be
* called on null and (b) depending on the apparent type of an
* array, toString may or may not print it in a human-readable form.
diff --git a/test/junit/scala/StringContextTest.scala b/test/junit/scala/StringContextTest.scala
new file mode 100644
index 0000000000..7e9e775d58
--- /dev/null
+++ b/test/junit/scala/StringContextTest.scala
@@ -0,0 +1,87 @@
+
+package scala
+
+import org.junit.Test
+import org.junit.Assert._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil._
+
+@RunWith(classOf[JUnit4])
+class StringContextTest {
+
+ import StringContext._
+
+ @Test def noEscape() = {
+ val s = "string"
+ val res = processEscapes(s)
+ assertEquals(s, res)
+ }
+ @Test def tabbed() = {
+ val s = """a\tb"""
+ val res = processEscapes(s)
+ assertEquals("a\tb", res)
+ }
+ @Test def quoted() = {
+ val s = """hello, \"world\""""
+ val res = processEscapes(s)
+ assertEquals("""hello, "world"""", res)
+ }
+ @Test def octal() = {
+ val s = """\123cala"""
+ val res = treatEscapes(s)
+ assertEquals("Scala", res)
+ }
+ @Test def doubled() = {
+ val s = """\123cala\123yntax"""
+ val res = treatEscapes(s)
+ assertEquals("ScalaSyntax", res)
+ }
+ @Test def badly() = assertThrows[InvalidEscapeException] {
+ val s = """Scala\"""
+ val res = treatEscapes(s)
+ assertEquals("Scala", res)
+ }
+ @Test def noOctal() = assertThrows[InvalidEscapeException] {
+ val s = """\123cala"""
+ val res = processEscapes(s)
+ assertEquals("Scala", res)
+ }
+
+ @Test def t6631_baseline() = assertEquals("\f\r\n\t", s"""\f\r\n\t""")
+
+ @Test def t6631_badEscape() = assertThrows[InvalidEscapeException] {
+ s"""\x"""
+ }
+
+ // verifying that the standard interpolators can be supplanted
+ @Test def antiHijack_?() = {
+ object AllYourStringsAreBelongToMe { case class StringContext(args: Any*) { def s(args: Any) = "!!!!" } }
+ import AllYourStringsAreBelongToMe._
+ //assertEquals("????", s"????")
+ assertEquals("!!!!", s"????") // OK to hijack core interpolator ids
+ }
+
+ @Test def fIf() = {
+ val res = f"${if (true) 2.5 else 2.5}%.2f"
+ val expected = formatUsingCurrentLocale(2.50)
+ assertEquals(expected, res)
+ }
+
+ @Test def fIfNot() = {
+ val res = f"${if (false) 2.5 else 3.5}%.2f"
+ val expected = formatUsingCurrentLocale(3.50)
+ assertEquals(expected, res)
+ }
+
+ @Test def fHeteroArgs() = {
+ val res = f"${3.14}%.2f rounds to ${3}%d"
+ val expected = formatUsingCurrentLocale(3.14) + " rounds to 3"
+ assertEquals(expected, res)
+ }
+
+ // Use this method to avoid problems with a locale-dependent decimal mark.
+ // The string interpolation is not used here intentionally as this method is used to test string interpolation.
+ private def formatUsingCurrentLocale(number: Double, decimalPlaces: Int = 2) = ("%." + decimalPlaces + "f").format(number)
+}
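Aside (not part of the patch): the formatUsingCurrentLocale helper above exists because "%.2f" yields a locale-dependent decimal mark. A minimal Scala sketch of that dependence; the object name LocaleFormatDemo is made up for illustration:

import java.util.Locale

object LocaleFormatDemo extends App {
  // The same format string produces different decimal marks per locale:
  println(String.format(Locale.US, "%.2f", Double.box(3.14)))      // 3.14
  println(String.format(Locale.GERMANY, "%.2f", Double.box(3.14))) // 3,14
}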
diff --git a/test/junit/scala/collection/IndexedSeqOptimizedTest.scala b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
new file mode 100644
index 0000000000..419e1454cb
--- /dev/null
+++ b/test/junit/scala/collection/IndexedSeqOptimizedTest.scala
@@ -0,0 +1,29 @@
+package scala.collection
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class IndexedSeqOptimizedTest {
+
+ @Test
+ def notThrowsAnExceptionInLastIndexOf() {
+ assertEquals(0, (Array(2): collection.mutable.WrappedArray[Int]).lastIndexWhere(_ => true, 1))
+ assertEquals(2, "abc123".lastIndexWhere(_.isLetter, 6))
+ }
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ assertEquals("", "abc" take Int.MinValue)
+ assertEquals("", "abc" takeRight Int.MinValue)
+ assertEquals("abc", "abc" drop Int.MinValue)
+ assertEquals("abc", "abc" dropRight Int.MinValue)
+
+ assertArrayEquals(Array.empty[Int], Array(1, 2, 3) take Int.MinValue)
+ assertArrayEquals(Array.empty[Int], Array(1, 2, 3) takeRight Int.MinValue)
+ assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) drop Int.MinValue)
+ assertArrayEquals(Array(1, 2, 3), Array(1, 2, 3) dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/collection/IterableViewLikeTest.scala b/test/junit/scala/collection/IterableViewLikeTest.scala
new file mode 100644
index 0000000000..435a43c215
--- /dev/null
+++ b/test/junit/scala/collection/IterableViewLikeTest.scala
@@ -0,0 +1,22 @@
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import language.postfixOps
+
+@RunWith(classOf[JUnit4])
+class IterableViewLikeTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val iter = Iterable(1, 2, 3)
+
+ import scala.language.postfixOps
+ assertEquals(Iterable.empty[Int], iter.view take Int.MinValue force)
+ assertEquals(Iterable.empty[Int], iter.view takeRight Int.MinValue force)
+ assertEquals(iter, iter.view drop Int.MinValue force)
+ assertEquals(iter, iter.view dropRight Int.MinValue force)
+ }
+}
diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala
new file mode 100644
index 0000000000..d5389afd0c
--- /dev/null
+++ b/test/junit/scala/collection/IteratorTest.scala
@@ -0,0 +1,157 @@
+
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil._
+
+import Seq.empty
+
+@RunWith(classOf[JUnit4])
+class IteratorTest {
+
+ @Test def groupedIteratorShouldNotAskForUnneededElement(): Unit = {
+ var counter = 0
+ val it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next = { i += 1; i } }
+ val slidingIt = it sliding 2
+ slidingIt.next
+ assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter)
+ }
+
+ @Test def groupedIteratorIsLazyWhenPadded(): Unit = {
+ var counter = 0
+ def it = new Iterator[Int] { var i = 0 ; def hasNext = { counter = i; true } ; def next = { i += 1; i } }
+ val slidingIt = it sliding 2 withPadding -1
+ slidingIt.next
+ assertEquals("Counter should be one, that means we didn't look further than needed", 1, counter)
+ }
+
+ @Test def dropDoesNotGrowStack(): Unit = {
+ def it = new Iterator[Throwable] { def hasNext = true ; def next = new Throwable }
+
+ assertEquals(it.drop(1).next.getStackTrace.length, it.drop(1).drop(1).next.getStackTrace.length)
+ }
+
+ @Test def dropIsChainable(): Unit = {
+ assertSameElements(1 to 4, Iterator from 0 take 5 drop 1)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 3)
+ assertSameElements(empty, Iterator from 0 take 5 drop 5)
+ assertSameElements(empty, Iterator from 0 take 5 drop 10)
+ assertSameElements(0 to 4, Iterator from 0 take 5 drop 0)
+ assertSameElements(0 to 4, Iterator from 0 take 5 drop -1)
+ assertSameElements(2 to 8 by 2, Iterator from 0 take 5 drop 1 map (2 * _))
+ assertSameElements(2 to 8 by 2, Iterator from 0 take 5 map (2 * _) drop 1)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 1 drop 2)
+ assertSameElements(3 to 4, Iterator from 0 take 5 drop 2 drop 1)
+ }
+
+ @Test def sliceIsChainable(): Unit = {
+ assertSameElements(3 to 6, Iterator from 0 slice (3, 7))
+ assertSameElements(empty, Iterator from 0 slice (3, 3))
+ assertSameElements(0 to 2, Iterator from 0 slice (-1, 3))
+ assertSameElements(empty, Iterator from 0 slice (3, -1))
+ assertSameElements(6 to 12 by 2, Iterator from 0 slice (3, 7) map (2 * _))
+ assertSameElements(6 to 12 by 2, Iterator from 0 map (2 * _) slice (3, 7))
+ assertSameElements(4 to 6, Iterator from 0 slice (3, 7) drop 1)
+ assertSameElements(4 to 7, Iterator from 0 drop 1 slice (3, 7))
+ assertSameElements(4 to 5, Iterator from 0 slice (3, 7) slice (1, 3))
+ assertSameElements(4 to 6, Iterator from 0 slice (3, 7) slice (1, 10))
+ }
+
+ // test/files/run/iterator-concat.scala
+ @Test def concatIsStackFriendly(): Unit = {
+ // Create `size` Function0s, each of which evaluates to an Iterator
+ // which produces 1. Then fold them over ++ to get a single iterator,
+ // which should sum to "size".
+ def mk(size: Int): Iterator[Int] = {
+ //val closures = (1 to size).toList.map(x => (() => Iterator(1)))
+ //closures.foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+ List.fill(size)(() => Iterator(1)).foldLeft(Iterator.empty: Iterator[Int])((res, f) => res ++ f())
+ }
+ assertEquals(100, mk(100).sum)
+ assertEquals(1000, mk(1000).sum)
+ assertEquals(10000, mk(10000).sum)
+ assertEquals(100000, mk(100000).sum)
+ }
+
+ @Test def from(): Unit = {
+ val it1 = Iterator.from(-1)
+ val it2 = Iterator.from(0, -1)
+ assertEquals(-1, it1.next())
+ assertEquals(0, it2.next())
+ }
+ @Test def range(): Unit = {
+ assertEquals(5, Iterator.range(0, 10, 2).size)
+ assertEquals(0, Iterator.range(0, 10, -2).size)
+ assertEquals(5, Iterator.range(10, 0, -2).size)
+ assertEquals(0, Iterator.range(10, 0, 2).size)
+ assertEquals(1, Iterator.range(0, 10, 11).size)
+ assertEquals(10, Iterator.range(0, 10, 1).size)
+ assertEquals(10, Iterator.range(10, 0, -1).size)
+ }
+ @Test def range3(): Unit = {
+ val r1 = Iterator.range(0, 10)
+ assertTrue(r1 contains 5)
+ assertTrue(r1 contains 6)
+ assertFalse(r1 contains 4)
+ val r2a = Iterator.range(0, 10, 2)
+ assertFalse(r2a contains 5)
+ val r2b = Iterator.range(0, 10, 2)
+ assertTrue(r2b contains 6)
+ val r3 = Iterator.range(0, 10, 11)
+ assertFalse(r3 contains 5)
+ assertTrue(r3.isEmpty)
+ }
+ @Test def take(): Unit = {
+ assertEquals(10, (Iterator from 0 take 10).size)
+ }
+ @Test def foreach(): Unit = {
+ val it1 = Iterator.from(0) take 20
+ var n = 0
+ it1 foreach { n += _ }
+ assertEquals(190, n)
+ }
+ // ticket #429
+ @Test def fromArray(): Unit = {
+ val a = List(1, 2, 3, 4).toArray
+ var xs0 = a.iterator.toList;
+ var xs1 = a.slice(0, 1).iterator
+ var xs2 = a.slice(0, 2).iterator
+ var xs3 = a.slice(0, 3).iterator
+ var xs4 = a.slice(0, 4).iterator
+ assertEquals(14, xs0.size + xs1.size + xs2.size + xs3.size + xs4.size)
+ }
+ @Test def toSeq(): Unit = {
+ assertEquals("1x2x3x4x5", List(1, 2, 3, 4, 5).iterator.mkString("x"))
+ }
+ @Test def indexOf(): Unit = {
+ assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexOf(4))
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexOf(16))
+ }
+ @Test def indexWhere(): Unit = {
+ assertEquals(3, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 4 })
+ assertEquals(-1, List(1, 2, 3, 4, 5).iterator.indexWhere { x: Int => x >= 16 })
+ }
+ // iterator-iterate-lazy.scala
+ // was java.lang.UnsupportedOperationException: tail of empty list
+ @Test def iterateIsSufficientlyLazy(): Unit = {
+ //Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).toList // suffices
+ Iterator.iterate((1 to 5).toList)(_.tail).takeWhile(_.nonEmpty).map(_.head).toList
+ }
+ // SI-3516
+ @Test def toStreamIsSufficientlyLazy(): Unit = {
+ val results = collection.mutable.ListBuffer.empty[Int]
+ def mkIterator = (1 to 5).iterator map (x => { results += x ; x })
+ def mkInfinite = Iterator continually { results += 1 ; 1 }
+
+ // Stream is strict in its head so we should see 1 from each of them.
+ val s1 = mkIterator.toStream
+ val s2 = mkInfinite.toStream
+ // back and forth without slipping into nontermination.
+ results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next()
+ assertSameElements(List(1,1,21), results)
+ }
+}
diff --git a/test/junit/scala/collection/PagedSeq.scala b/test/junit/scala/collection/PagedSeq.scala
deleted file mode 100644
index 5f83cf6f31..0000000000
--- a/test/junit/scala/collection/PagedSeq.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.collection.immutable
-
-import org.junit.runner.RunWith
-import org.junit.runners.JUnit4
-import org.junit.Test
-import org.junit.Assert._
-
-/* Test for SI-6615 */
-@RunWith(classOf[JUnit4])
-class PagedSeqTest {
- @Test
- def rovingDoesNotNPE(): Unit = {
- // should not NPE, and should equal the given Seq
- assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097))
- }
-}
diff --git a/test/junit/scala/collection/ParallelConsistencyTest.scala b/test/junit/scala/collection/ParallelConsistencyTest.scala
new file mode 100644
index 0000000000..da96362413
--- /dev/null
+++ b/test/junit/scala/collection/ParallelConsistencyTest.scala
@@ -0,0 +1,44 @@
+package scala.collection.immutable
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ParallelConsistencyTest {
+
+ private val theSeq = Seq(1,2,3)
+
+ // This collection will throw an exception if you do anything but call .length or .seq
+ private val mustCallSeq: collection.GenSeq[Int] = new collection.parallel.ParSeq[Int] {
+ def length = 3
+
+ // This method is surely sequential & safe -- want all access to go through here
+ def seq = theSeq
+
+ def notSeq = throw new Exception("Access to parallel collection not via .seq")
+
+ // These methods could possibly be used dangerously explicitly or internally
+ // (apply could also be used safely; if it is, do test with mustCallSeq)
+ def apply(i: Int) = notSeq
+ def splitter = notSeq
+ }
+
+ // Test Vector ++ with a small parallel collection concatenation (SI-9072).
+ @Test
+ def testPlusPlus(): Unit = {
+ assert((Vector.empty ++ mustCallSeq) == theSeq, "Vector ++ unsafe with parallel vectors")
+ }
+
+ // SI-9126, 1 of 2
+ @Test
+ def testTranspose(): Unit = {
+ assert(List(mustCallSeq).transpose.flatten == theSeq, "Transposing inner parallel collection unsafe")
+ }
+
+ // SI-9126, 2 of 2
+ @Test
+ def testList_flatMap(): Unit = {
+ assert(List(1).flatMap(_ => mustCallSeq) == theSeq, "List#flatMap on inner parallel collection unsafe")
+ }
+}
diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala
index eed6007eef..261c11a98b 100644
--- a/test/junit/scala/collection/SetMapConsistencyTest.scala
+++ b/test/junit/scala/collection/SetMapConsistencyTest.scala
@@ -514,4 +514,19 @@ class SetMapConsistencyTest {
assert( hs.toList.toSet == hs )
assert( hs == hs.toList.toSet )
}
+
+ @Test
+ def testSI8815() {
+ val lm = new scala.collection.mutable.LongMap[String]
+ lm += (Long.MinValue, "min")
+ lm += (-1, "neg-one")
+ lm += (0, "zero")
+ lm += (Long.MaxValue, "max")
+ var nit = 0
+ lm.iterator.foreach(_ => nit += 1)
+ var nfe = 0
+ lm.foreach(_ => nfe += 1)
+ assert(nit == 4)
+ assert(nfe == 4)
+ }
}
diff --git a/test/junit/scala/collection/TraversableOnceTest.scala b/test/junit/scala/collection/TraversableOnceTest.scala
index 56d8312336..196174c199 100644
--- a/test/junit/scala/collection/TraversableOnceTest.scala
+++ b/test/junit/scala/collection/TraversableOnceTest.scala
@@ -43,8 +43,8 @@ class TraversableOnceTest {
def testReturnTheFirstMatch() = {
val d = List(1, 2, 3, 4, 5, 6, 7, 8)
def f(x: Int) = x % 3;
- assert(d.maxBy(f) == 2, "If multiple elements evaluted to the largest value, maxBy should return the first one.")
- assert(d.minBy(f) == 3, "If multiple elements evaluted to the largest value, minBy should return the first one.")
+ assert(d.maxBy(f) == 2, "If multiple elements evaluated to the largest value, maxBy should return the first one.")
+ assert(d.minBy(f) == 3, "If multiple elements evaluated to the largest value, minBy should return the first one.")
}
// Make sure it evaluates f no more than list.length times.
@@ -56,7 +56,7 @@ class TraversableOnceTest {
evaluatedCountOfMaxBy += 1
x * 10
})
- assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluted $evaluatedCountOfMaxBy times.")
+ assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMaxBy times.")
var evaluatedCountOfMinBy = 0
@@ -64,7 +64,7 @@ class TraversableOnceTest {
evaluatedCountOfMinBy += 1
x * 10
})
- assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluted $evaluatedCountOfMinBy times.")
+ assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMinBy times.")
}
}
diff --git a/test/junit/scala/collection/convert/MapWrapperTest.scala b/test/junit/scala/collection/convert/MapWrapperTest.scala
index 060b6b5937..22eaf858ea 100644
--- a/test/junit/scala/collection/convert/MapWrapperTest.scala
+++ b/test/junit/scala/collection/convert/MapWrapperTest.scala
@@ -46,4 +46,14 @@ class MapWrapperTest {
assertFalse(javaMap.containsKey(null)) // negative test, null key
assertEquals(4, scalaMap.containsCounter)
}
+
+ // test for SI-8504
+ @Test
+ def testHashCode() {
+ import scala.collection.JavaConverters._
+ val javaMap = Map(1 -> null).asJava
+
+ // Before the fix for SI-8504, this threw an NPE
+ javaMap.hashCode
+ }
}
diff --git a/test/junit/scala/collection/immutable/ListTest.scala b/test/junit/scala/collection/immutable/ListTest.scala
new file mode 100644
index 0000000000..1006801029
--- /dev/null
+++ b/test/junit/scala/collection/immutable/ListTest.scala
@@ -0,0 +1,49 @@
+package scala.collection.immutable
+
+import org.junit.{Assert, Test}
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.ref.WeakReference
+
+@RunWith(classOf[JUnit4])
+class ListTest {
+ /**
+ * Test that empty iterator does not hold reference
+ * to complete List
+ */
+ @Test
+ def testIteratorGC(): Unit = {
+ var num = 0
+ var emptyIterators = Seq.empty[(Iterator[Int], WeakReference[List[Int]])]
+
+ do {
+ val list = List.fill(10000)(num)
+ val ref = WeakReference(list)
+
+ val i = list.iterator
+
+ while (i.hasNext) i.next()
+
+ emptyIterators = (i, ref) +: emptyIterators
+
+ num+=1
+ } while (emptyIterators.forall(_._2.get.isDefined) && num<1000)
+
+ // check something about each iterator so the JIT cannot optimize the references away
+ for ((i, _) <- emptyIterators) {
+ Assert.assertTrue(i.isEmpty)
+ }
+
+ // await gc up to ~5 seconds
+ var forceLoops = 50
+ while (emptyIterators.forall(_._2.get.isDefined) && forceLoops>0) {
+ System.gc()
+ Thread.sleep(100)
+ forceLoops -= 1
+ }
+
+ // real assertion
+ Assert.assertTrue(emptyIterators.exists(_._2.get.isEmpty))
+ }
+}
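Aside (not part of the patch): the test above keeps weak references to the lists and polls them after System.gc(). A minimal sketch of that scala.ref.WeakReference technique; WeakRefDemo is a made-up name:

import scala.ref.WeakReference

object WeakRefDemo extends App {
  var data: Array[Byte] = new Array[Byte](1 << 20)
  val ref = WeakReference(data)
  data = null               // drop the only strong reference
  System.gc()               // a request only; collection is not guaranteed
  println(ref.get.isEmpty)  // typically true once the array has been collected
}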
diff --git a/test/junit/scala/collection/immutable/PagedSeqTest.scala b/test/junit/scala/collection/immutable/PagedSeqTest.scala
new file mode 100644
index 0000000000..2b576a3655
--- /dev/null
+++ b/test/junit/scala/collection/immutable/PagedSeqTest.scala
@@ -0,0 +1,28 @@
+package scala.collection.immutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+@RunWith(classOf[JUnit4])
+class PagedSeqTest {
+ // should not NPE, and should equal the given Seq
+ @Test
+ def test_SI6615(): Unit = {
+ assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097))
+ }
+
+ // Slices shouldn't read outside where they belong
+ @Test
+ def test_SI6519 {
+ var readAttempt = 0
+ val sideEffectingIterator = new Iterator[Int] {
+ def hasNext = readAttempt < 65536
+ def next = { readAttempt += 1; readAttempt }
+ }
+ val s = PagedSeq.fromIterator(sideEffectingIterator).slice(0,2).mkString
+ assertEquals(s, "12")
+ assert(readAttempt <= 4096)
+ }
+}
diff --git a/test/junit/scala/collection/QueueTest.scala b/test/junit/scala/collection/immutable/QueueTest.scala
index 9a40d8fc90..9a40d8fc90 100644
--- a/test/junit/scala/collection/QueueTest.scala
+++ b/test/junit/scala/collection/immutable/QueueTest.scala
diff --git a/test/junit/scala/collection/NumericRangeTest.scala b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala
index 3980c31577..3980c31577 100644
--- a/test/junit/scala/collection/NumericRangeTest.scala
+++ b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala
diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala
new file mode 100644
index 0000000000..3722bdfe4d
--- /dev/null
+++ b/test/junit/scala/collection/immutable/StringLikeTest.scala
@@ -0,0 +1,37 @@
+package scala.collection.immutable
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil
+import scala.util.Random
+
+/* Test for SI-8988 */
+@RunWith(classOf[JUnit4])
+class StringLikeTest {
+ @Test
+ def testStringSplitWithChar: Unit = {
+ val chars = (0 to 255).map(_.toChar)
+ def randString = Random.nextString(30)
+
+ for (c <- chars) {
+ val s = randString
+ val jString = new java.lang.String(s)
+
+ // make sure we get the same result as Java's split on the quoted literal character
+ val jSplit = jString.split("\\Q" + c.toString + "\\E")
+ val sSplit = s.split(c)
+ AssertUtil.assertSameElements(jSplit, sSplit, s"Not same result as Java split for char $c in string $s")
+ }
+ }
+
+ @Test
+ def testSplitEdgeCases: Unit = {
+ AssertUtil.assertSameElements("abcd".split('d'), Array("abc")) // not Array("abc", "")
+ AssertUtil.assertSameElements("abccc".split('c'), Array("ab")) // not Array("ab", "", "", "")
+ AssertUtil.assertSameElements("xxx".split('x'), Array[String]()) // not Array("", "", "", "")
+ AssertUtil.assertSameElements("".split('x'), Array("")) // not Array()
+ AssertUtil.assertSameElements("--ch--omp--".split("-"), Array("", "", "ch", "", "omp")) // All the cases!
+ }
+}
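Aside (not part of the patch): the "\\Q" + c + "\\E" construction above is the regex literal-quoting that java.util.regex.Pattern.quote produces, so metacharacters are matched verbatim. A minimal sketch; QuoteDemo is a made-up name:

import java.util.regex.Pattern

object QuoteDemo extends App {
  println(Pattern.quote("."))                        // \Q.\E
  println("a.b.c".split(Pattern.quote(".")).toList)  // List(a, b, c)
}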
diff --git a/test/junit/scala/collection/immutable/TreeMapTest.scala b/test/junit/scala/collection/immutable/TreeMapTest.scala
new file mode 100644
index 0000000000..4c21b94b24
--- /dev/null
+++ b/test/junit/scala/collection/immutable/TreeMapTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class TreeMapTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val tree = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
+
+ assertEquals(TreeMap.empty[Int, String], tree take Int.MinValue)
+ assertEquals(TreeMap.empty[Int, String], tree takeRight Int.MinValue)
+ assertEquals(tree, tree drop Int.MinValue)
+ assertEquals(tree, tree dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/collection/immutable/TreeSetTest.scala b/test/junit/scala/collection/immutable/TreeSetTest.scala
new file mode 100644
index 0000000000..8efe1bfeb8
--- /dev/null
+++ b/test/junit/scala/collection/immutable/TreeSetTest.scala
@@ -0,0 +1,20 @@
+package scala.collection.immutable
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class TreeSetTest {
+
+ @Test
+ def hasCorrectDropAndTakeMethods() {
+ val set = TreeSet(1, 2, 3)
+
+ assertEquals(TreeSet.empty[Int], set take Int.MinValue)
+ assertEquals(TreeSet.empty[Int], set takeRight Int.MinValue)
+ assertEquals(set, set drop Int.MinValue)
+ assertEquals(set, set dropRight Int.MinValue)
+ }
+}
diff --git a/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/test/junit/scala/collection/mutable/ArrayBufferTest.scala
new file mode 100644
index 0000000000..8c83164027
--- /dev/null
+++ b/test/junit/scala/collection/mutable/ArrayBufferTest.scala
@@ -0,0 +1,36 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.{Assert, Test}
+
+import scala.tools.testing.AssertUtil
+
+/* Test for SI-9043 */
+@RunWith(classOf[JUnit4])
+class ArrayBufferTest {
+ @Test
+ def testInsertAll: Unit = {
+ val traver = ArrayBuffer(2, 4, 5, 7)
+ val testSeq = List(1, 3, 6, 9)
+
+ def insertAt(x: Int) = {
+ val clone = traver.clone()
+ clone.insertAll(x, testSeq)
+ clone
+ }
+
+ // Just insert some at position 0
+ Assert.assertEquals(ArrayBuffer(1, 3, 6, 9, 2, 4, 5, 7), insertAt(0))
+
+ // Insert in the middle
+ Assert.assertEquals(ArrayBuffer(2, 4, 1, 3, 6, 9, 5, 7), insertAt(2))
+
+ // No strange last position weirdness
+ Assert.assertEquals(ArrayBuffer(2, 4, 5, 7, 1, 3, 6, 9), insertAt(traver.size))
+
+ // Overflow is caught
+ AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(-1) }
+ AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(traver.size + 10) }
+ }
+}
diff --git a/test/junit/scala/collection/ArraySortingTest.scala b/test/junit/scala/collection/mutable/ArraySortingTest.scala
index 4e54b39ce7..4e54b39ce7 100644
--- a/test/junit/scala/collection/ArraySortingTest.scala
+++ b/test/junit/scala/collection/mutable/ArraySortingTest.scala
diff --git a/test/junit/scala/collection/mutable/BitSetTest.scala b/test/junit/scala/collection/mutable/BitSetTest.scala
new file mode 100644
index 0000000000..d56cc45601
--- /dev/null
+++ b/test/junit/scala/collection/mutable/BitSetTest.scala
@@ -0,0 +1,31 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.{Test, Ignore}
+
+@RunWith(classOf[JUnit4])
+class BitSetTest {
+ // Test for SI-8910
+ @Test def capacityExpansionTest() {
+ val bitSet = BitSet.empty
+ val size = bitSet.toBitMask.length
+ bitSet ^= bitSet
+ assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after ^=")
+ bitSet |= bitSet
+ assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after |=")
+ bitSet &= bitSet
+ assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after &=")
+ bitSet &~= bitSet
+ assert(bitSet.toBitMask.length == size, "Capacity of bitset changed after &~=")
+ }
+
+ @Test def test_SI8917() {
+ val bigBitSet = BitSet(1, 100, 10000)
+ val littleBitSet = BitSet(100)
+ bigBitSet &= littleBitSet
+ assert(!(bigBitSet contains 10000), "&= not applied to the full bitset")
+ littleBitSet &= bigBitSet
+ assert(littleBitSet.toBitMask.length < bigBitSet.toBitMask.length, "Needlessly extended the size of bitset on &=")
+ }
+}
diff --git a/test/junit/scala/collection/mutable/LinkedHashMapTest.scala b/test/junit/scala/collection/mutable/LinkedHashMapTest.scala
new file mode 100644
index 0000000000..37dcd028a5
--- /dev/null
+++ b/test/junit/scala/collection/mutable/LinkedHashMapTest.scala
@@ -0,0 +1,25 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.{ Assert, Test }
+
+import scala.collection.mutable
+
+/* Test for SI-9095 */
+@RunWith(classOf[JUnit4])
+class LinkedHashMapTest {
+ class TestClass extends mutable.LinkedHashMap[String, Int] {
+ def lastItemRef = lastEntry
+ }
+
+ @Test
+ def testClear: Unit = {
+ val lhm = new TestClass
+ Seq("a" -> 8, "b" -> 9).foreach(kv => lhm.put(kv._1, kv._2))
+
+ Assert.assertNotNull(lhm.lastItemRef)
+ lhm.clear()
+ Assert.assertNull(lhm.lastItemRef)
+ }
+}
diff --git a/test/junit/scala/collection/mutable/LinkedHashSetTest.scala b/test/junit/scala/collection/mutable/LinkedHashSetTest.scala
new file mode 100644
index 0000000000..b419ad37ec
--- /dev/null
+++ b/test/junit/scala/collection/mutable/LinkedHashSetTest.scala
@@ -0,0 +1,25 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.{ Assert, Test }
+
+import scala.collection.mutable
+
+/* Test for SI-9095 */
+@RunWith(classOf[JUnit4])
+class LinkedHashSetTest {
+ class TestClass extends mutable.LinkedHashSet[String] {
+ def lastItemRef = lastEntry
+ }
+
+ @Test
+ def testClear: Unit = {
+ val lhs = new TestClass
+ Seq("a", "b").foreach(k => lhs.add(k))
+
+ Assert.assertNotNull(lhs.lastItemRef)
+ lhs.clear()
+ Assert.assertNull(lhs.lastItemRef)
+ }
+}
diff --git a/test/junit/scala/collection/mutable/MutableListTest.scala b/test/junit/scala/collection/mutable/MutableListTest.scala
new file mode 100644
index 0000000000..ac6d30def0
--- /dev/null
+++ b/test/junit/scala/collection/mutable/MutableListTest.scala
@@ -0,0 +1,37 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+
+import scala.tools.testing.AssertUtil._
+
+@RunWith(classOf[JUnit4])
+class MutableListTest {
+
+ // Tests SI-8976
+ @Test def tailIteratorMustTerminateAtLength(): Unit = {
+ val is = MutableList(1,2,3)
+ val tl = is.tail
+ assertEquals(tl.length, tl.iterator.length)
+ is += 5
+ assertEquals(tl.length, tl.iterator.length)
+ assertSameElements(tl, tl.iterator)
+ }
+ @Test def iteratorMustFailEventually(): Unit = assertThrows[NoSuchElementException] {
+ MutableList[Unit]().iterator.next()
+ }
+ // was: Root empty iterator held reference
+ @Test def iteratorMustNotHoldOntoLast(): Unit = {
+ val is = MutableList(Some(1), Some(2))
+ val it = is.iterator
+ val x = Some(3)
+ is += x
+ assertNotReachable(x, it) {
+ it.next()
+ it.next()
+ }
+ assertTrue(it.isEmpty)
+ }
+}
diff --git a/test/junit/scala/collection/PriorityQueueTest.scala b/test/junit/scala/collection/mutable/PriorityQueueTest.scala
index a14f1bf4c8..a14f1bf4c8 100644
--- a/test/junit/scala/collection/PriorityQueueTest.scala
+++ b/test/junit/scala/collection/mutable/PriorityQueueTest.scala
diff --git a/test/junit/scala/collection/mutable/UnrolledBufferTest.scala b/test/junit/scala/collection/mutable/UnrolledBufferTest.scala
new file mode 100644
index 0000000000..8660b6cbc1
--- /dev/null
+++ b/test/junit/scala/collection/mutable/UnrolledBufferTest.scala
@@ -0,0 +1,25 @@
+package scala.collection.mutable
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+@RunWith(classOf[JUnit4])
+class UnrolledBufferTest {
+ @Test
+ def test_SI9254_original() {
+ val b = new UnrolledBuffer[Int]()
+ (1 to 16).foreach(i => b append i)
+ b.insert(0,-1)
+ b append 17
+ assert(b sameElements (Seq(-1) ++ (1 to 16) ++ Seq(17)))
+ }
+
+ @Test
+ def test_SI9254_additional() {
+ val b = new UnrolledBuffer[Int]()
+ (1 to 100).foreach(i => b append i)
+ b.insert(40, -1)
+ assert(b sameElements((1 to 40) ++ Seq(-1) ++ (41 to 100)))
+ }
+}
diff --git a/test/junit/scala/collection/VectorTest.scala b/test/junit/scala/collection/mutable/VectorTest.scala
index e9c4d44a72..b3219d1b02 100644
--- a/test/junit/scala/collection/VectorTest.scala
+++ b/test/junit/scala/collection/mutable/VectorTest.scala
@@ -38,7 +38,6 @@ class VectorTest {
def iteratorCat() {
def its = vecs.map(_.toList.toIterator)
val cats = vecs.map(a => its.map(a ++ _))
- println(cats)
assert( cats == ans )
}
diff --git a/test/junit/scala/concurrent/duration/SerializationTest.scala b/test/junit/scala/concurrent/duration/SerializationTest.scala
new file mode 100644
index 0000000000..af90a10984
--- /dev/null
+++ b/test/junit/scala/concurrent/duration/SerializationTest.scala
@@ -0,0 +1,24 @@
+package scala.concurrent.duration
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+
+
+@RunWith(classOf[JUnit4])
+class SerializationTest {
+ @Test
+ def test_SI9197 {
+ def ser(a: AnyRef): Array[Byte] = {
+ val bais = new java.io.ByteArrayOutputStream
+ (new java.io.ObjectOutputStream(bais)).writeObject(a)
+ bais.toByteArray
+ }
+ def des(ab: Array[Byte]): AnyRef =
+ (new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(ab))).readObject
+
+ assert(Duration.Undefined eq des(ser(Duration.Undefined)))
+ assert(Duration.Inf eq des(ser(Duration.Inf)))
+ assert(Duration.MinusInf eq des(ser(Duration.MinusInf)))
+ }
+}
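Aside (not part of the patch): the eq assertions above can only hold if deserialization hands back the original singleton rather than a fresh copy, which Java serialization supports through readResolve. A minimal sketch of that idiom with a hypothetical Marker class, not the actual Duration implementation:

@SerialVersionUID(1L)
class Marker private () extends Serializable {
  // Invoked reflectively by ObjectInputStream after deserialization;
  // returning the canonical instance preserves reference identity.
  protected def readResolve(): AnyRef = Marker.instance
}
object Marker { val instance = new Marker }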
diff --git a/test/junit/scala/io/SourceTest.scala b/test/junit/scala/io/SourceTest.scala
new file mode 100644
index 0000000000..3138a4589c
--- /dev/null
+++ b/test/junit/scala/io/SourceTest.scala
@@ -0,0 +1,86 @@
+
+package scala.io
+
+import org.junit.Test
+import org.junit.Assert._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil._
+
+import java.io.{ Console => _, _ }
+
+@RunWith(classOf[JUnit4])
+class SourceTest {
+
+ private implicit val `our codec` = Codec.UTF8
+ private val charSet = Codec.UTF8.charSet.name
+
+ private def sampler = """
+ |Big-endian and little-endian approaches aren't
+ |readily interchangeable in general, because the
+ |laws of arithmetic send signals leftward from
+ |the bits that are "least significant."
+ |""".stripMargin.trim
+
+ private def in = new ByteArrayInputStream(sampler.getBytes)
+
+ @Test def canIterateLines() = {
+ assertEquals(sampler.lines.size, (Source fromString sampler).getLines.size)
+ }
+ @Test def canCustomizeReporting() = {
+ class CapitalReporting(is: InputStream) extends BufferedSource(is) {
+ override def report(pos: Int, msg: String, out: PrintStream): Unit = {
+ out print f"$pos%04x: ${msg.toUpperCase}"
+ }
+ class OffsetPositioner extends Positioner(null) {
+ override def next(): Char = {
+ ch = iter.next()
+ pos = pos + 1
+ ch
+ }
+ }
+ withPositioning(new OffsetPositioner)
+ }
+ val s = new CapitalReporting(in)
+ // skip to next line and report an error
+ do {
+ val c = s.next()
+ } while (s.ch != '\n')
+ s.next()
+ val out = new ByteArrayOutputStream
+ val ps = new PrintStream(out, true, charSet)
+ s.reportError(s.pos, "That doesn't sound right.", ps)
+ assertEquals("0030: THAT DOESN'T SOUND RIGHT.", out.toString(charSet))
+ }
+ @Test def canAltCustomizeReporting() = {
+ class CapitalReporting(is: InputStream)(implicit codec: Codec) extends Source {
+ override val iter = {
+ val r = new InputStreamReader(is, codec.decoder)
+ Iterator continually (codec wrap r.read()) takeWhile (_ != -1) map (_.toChar)
+ }
+ override def report(pos: Int, msg: String, out: PrintStream): Unit = {
+ out print f"$pos%04x: ${msg.toUpperCase}"
+ }
+ private[this] var _pos: Int = _
+ override def pos = _pos
+ private[this] var _ch: Char = _
+ override def ch = _ch
+ override def next = {
+ _ch = iter.next()
+ _pos += 1
+ _ch
+ }
+ }
+ val s = new CapitalReporting(in)
+ // skip to next line and report an error
+ do {
+ val c = s.next()
+ } while (s.ch != '\n')
+ s.next()
+ val out = new ByteArrayOutputStream
+ val ps = new PrintStream(out, true, charSet)
+ s.reportError(s.pos, "That doesn't sound right.", ps)
+ assertEquals("0030: THAT DOESN'T SOUND RIGHT.", out.toString(charSet))
+ }
+}
diff --git a/test/junit/scala/issues/BytecodeTests.scala b/test/junit/scala/issues/BytecodeTests.scala
new file mode 100644
index 0000000000..d4ed063a03
--- /dev/null
+++ b/test/junit/scala/issues/BytecodeTests.scala
@@ -0,0 +1,80 @@
+package scala.issues
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes
+import scala.tools.nsc.backend.jvm.AsmUtils
+import scala.tools.nsc.backend.jvm.CodeGenTools._
+import org.junit.Assert._
+import scala.collection.JavaConverters._
+import scala.tools.partest.ASMConverters._
+
+@RunWith(classOf[JUnit4])
+class BytecodeTests {
+ val compiler = newCompiler()
+
+ @Test
+ def t8731(): Unit = {
+ val code =
+ """class C {
+ | def f(x: Int) = (x: @annotation.switch) match {
+ | case 1 => 0
+ | case 2 => 1
+ | case 3 => 2
+ | }
+ | final val K = 10
+ | def g(x: Int) = (x: @annotation.switch) match {
+ | case K => 0
+ | case 1 => 10
+ | case 2 => 20
+ | }
+ |}
+ """.stripMargin
+
+ val List(c) = compileClasses(compiler)(code)
+
+ assertTrue(getSingleMethod(c, "f").instructions.count(_.isInstanceOf[TableSwitch]) == 1)
+ assertTrue(getSingleMethod(c, "g").instructions.count(_.isInstanceOf[LookupSwitch]) == 1)
+ }
+
+ @Test
+ def t8926(): Unit = {
+ import scala.reflect.internal.util.BatchSourceFile
+
+ // This test cannot be implemented using partest because of its mixed-mode compilation strategy:
+ // partest first compiles all files with scalac, then the java files, and then the scala files
+ // again using the output classpath. This shadows the bug SI-8926.
+
+ val annotA =
+ """import java.lang.annotation.Retention;
+ |import java.lang.annotation.RetentionPolicy;
+ |@Retention(RetentionPolicy.RUNTIME)
+ |public @interface AnnotA { }
+ """.stripMargin
+ val annotB = "public @interface AnnotB { }"
+
+ val scalaSrc =
+ """@AnnotA class A
+ |@AnnotB class B
+ """.stripMargin
+
+ val compiler = newCompiler()
+ val run = new compiler.Run()
+ run.compileSources(List(new BatchSourceFile("AnnotA.java", annotA), new BatchSourceFile("AnnotB.java", annotB), new BatchSourceFile("Test.scala", scalaSrc)))
+ val outDir = compiler.settings.outputDirs.getSingleOutput.get
+ val outfiles = (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList
+
+ def check(classfile: String, annotName: String) = {
+ val f = (outfiles collect { case (`classfile`, bytes) => AsmUtils.readClass(bytes) }).head
+ val descs = f.visibleAnnotations.asScala.map(_.desc).toList
+ assertTrue(descs.toString, descs exists (_ contains annotName))
+ }
+
+ check("A.class", "AnnotA")
+
+ // known issue SI-8928: the visibility of AnnotB should be CLASS, but annotation classes without
+ // a @Retention annotation are currently emitted as RUNTIME.
+ check("B.class", "AnnotB")
+ }
+}
diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala
index d1ba96fcc8..c7a63da890 100644
--- a/test/junit/scala/math/BigDecimalTest.scala
+++ b/test/junit/scala/math/BigDecimalTest.scala
@@ -222,4 +222,10 @@ class BigDecimalTest {
for (a <- different; b <- different if (a ne b))
assert(a != b, "BigDecimal representations of Double mistakenly conflated")
}
+
+ // Make sure hash code agrees with decimal representation of Double
+ @Test
+ def test_SI8970() {
+ assert((0.1).## == BigDecimal(0.1).##)
+ }
}
diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala
index 4f0657f471..9bf7d4f1e4 100644
--- a/test/junit/scala/math/NumericTest.scala
+++ b/test/junit/scala/math/NumericTest.scala
@@ -1,4 +1,4 @@
-
+package scala.math
import org.junit.Assert._
import org.junit.Test
diff --git a/test/junit/scala/math/OrderingTest.scala b/test/junit/scala/math/OrderingTest.scala
new file mode 100644
index 0000000000..218622b8b4
--- /dev/null
+++ b/test/junit/scala/math/OrderingTest.scala
@@ -0,0 +1,61 @@
+package scala.math
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class OrderingTest {
+
+ /* Test for SI-9077 */
+ @Test
+ def testReverseOrdering {
+ def check[T: Ordering](t1: T, t2: T): Unit = {
+ val O = Ordering[T]
+ val R = O.reverse
+ assertEquals(O.min(t1, t2), R.max(t1, t2))
+ assertEquals(O.max(t1, t2), R.min(t1, t2))
+
+ assertEquals(O.lteq(t1, t2), R.lteq(t2, t1))
+ assertEquals(O.lt(t1, t2), R.lt(t2, t1))
+ assertEquals(O.gteq(t1, t2), R.gteq(t2, t1))
+ assertEquals(O.gt(t1, t2), R.gt(t2, t1))
+ assertEquals(O.compare(t1, t2), R.compare(t2, t1))
+
+ assertEquals(O.equiv(t1, t2), R.equiv(t1, t2))
+
+ assertEquals(O.on((x: T) => x).min(t1, t2), R.on((x: T) => x).max(t1, t2))
+
+ assertEquals(O.tryCompare(t1, t2), R.tryCompare(t2, t1))
+
+ assertEquals(O.mkOrderingOps(t1).<(t2), R.mkOrderingOps(t2).<(t1))
+ assertEquals(O.mkOrderingOps(t1).<=(t2), R.mkOrderingOps(t2).<=(t1))
+ assertEquals(O.mkOrderingOps(t1).>(t2), R.mkOrderingOps(t2).>(t1))
+ assertEquals(O.mkOrderingOps(t1).>=(t2), R.mkOrderingOps(t2).>=(t1))
+
+ assertEquals(O.mkOrderingOps(t1).min(t2), R.mkOrderingOps(t1).max(t2))
+ assertEquals(O.mkOrderingOps(t1).max(t2), R.mkOrderingOps(t1).min(t2))
+ }
+ def checkAll[T: Ordering](ts: T*): Unit = {
+ for (t1 <- ts; t2 <- ts) check(t1, t2)
+ }
+ checkAll[Unit](())
+ checkAll[Boolean](true, false)
+ checkAll[Byte](Byte.MinValue, -1.toByte, 0.toByte, 1.toByte, Byte.MaxValue)
+ checkAll[Char](Char.MinValue, -1.toChar, 0.toChar, 1.toChar, Char.MaxValue)
+ checkAll[Short](Short.MinValue, -1, 0, 1, Short.MaxValue)
+ checkAll[Int](Int.MinValue, -1, 0, 1, Int.MaxValue)
+ checkAll[Double](Double.MinValue, -1, -0, 0, 1, Double.MaxValue)
+ checkAll[Float](Float.MinValue, -1, -0, 0, 1, Float.MaxValue)
+
+ checkAll[BigInt](Int.MinValue, -1, 0, 1, Int.MaxValue)
+ checkAll[BigDecimal](Int.MinValue, -1, -0, 1, Int.MaxValue)
+ checkAll[String]("", "a", "b", "bb")
+ checkAll[String]("", "a", "b", "bb")
+ checkAll[Option[Int]](None, Some(1), Some(2))
+ checkAll[Iterable[Int]](Nil, List(1), List(1, 2))
+ checkAll[(Int, Int)]((1, 2), (1, 3), (4, 5))
+ }
+}
+
diff --git a/test/junit/scala/reflect/ClassTag.scala b/test/junit/scala/reflect/ClassTag.scala
new file mode 100644
index 0000000000..90cc981fc1
--- /dev/null
+++ b/test/junit/scala/reflect/ClassTag.scala
@@ -0,0 +1,29 @@
+package scala.reflect
+
+import org.junit.Test
+import org.junit.Assert._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil._
+
+class Misc
+
+@RunWith(classOf[JUnit4])
+class ClassTagTest {
+ def checkNotString[A: ClassTag](a: Any) = a match { case x: String => false case x: A => true case _ => false }
+ def checkNotInt[A: ClassTag](a: Any) = a match { case x: Int => false case x: A => true case _ => false }
+ def checkNotLong[A: ClassTag](a: Any) = a match { case x: Long => false case x: A => true case _ => false }
+
+ @Test def checkMisc = assertTrue(checkNotString[Misc](new Misc))
+ @Test def checkString = assertTrue(checkNotInt[String] ("woele"))
+ @Test def checkByte = assertTrue(checkNotInt[Byte] (0.toByte))
+ @Test def checkShort = assertTrue(checkNotInt[Short] (0.toShort))
+ @Test def checkChar = assertTrue(checkNotInt[Char] (0.toChar))
+ @Test def checkInt = assertTrue(checkNotLong[Int] (0.toInt))
+ @Test def checkLong = assertTrue(checkNotInt[Long] (0.toLong))
+ @Test def checkFloat = assertTrue(checkNotInt[Float] (0.toFloat))
+ @Test def checkDouble = assertTrue(checkNotInt[Double] (0.toDouble))
+ @Test def checkBoolean = assertTrue(checkNotInt[Boolean](false))
+ @Test def checkUnit = assertTrue(checkNotInt[Unit] ({}))
+} \ No newline at end of file
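Aside (not part of the patch): these checks rely on the fact that an implicit ClassTag turns an otherwise-unchecked `case x: A` pattern into a runtime-checked match. A minimal sketch; firstOfType and ClassTagDemo are made-up names:

import scala.reflect.ClassTag

object ClassTagDemo extends App {
  // The type test on A is checked at runtime via the implicit ClassTag.
  def firstOfType[A: ClassTag](xs: List[Any]): Option[A] =
    xs.collectFirst { case a: A => a }

  println(firstOfType[String](List(1, 2.0, "three")))  // Some(three)
  println(firstOfType[Int](List("a", "b")))            // None
}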
diff --git a/test/junit/scala/reflect/QTest.scala b/test/junit/scala/reflect/QTest.scala
new file mode 100644
index 0000000000..24c35dc401
--- /dev/null
+++ b/test/junit/scala/reflect/QTest.scala
@@ -0,0 +1,23 @@
+
+package scala.reflect
+
+import org.junit.Test
+import org.junit.Assert._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil._
+
+@RunWith(classOf[JUnit4])
+class QTest {
+
+ import reflect.runtime._
+ import universe._
+ @Test def qConstantsNotHomogenized() = {
+ //Apply(Select(Literal(Constant(1.0)), TermName("$plus")), List(Literal(Constant(1.0))))
+ val t = q"${1} + ${1.0}"
+ val Apply(Select(Literal(Constant(i)), TermName("$plus")), List(Literal(Constant(j)))) = t
+ assertEquals(1, i)
+ assertEquals(1.0, j)
+ }
+}
diff --git a/test/junit/scala/reflect/internal/NamesTest.scala b/test/junit/scala/reflect/internal/NamesTest.scala
new file mode 100644
index 0000000000..549c10abed
--- /dev/null
+++ b/test/junit/scala/reflect/internal/NamesTest.scala
@@ -0,0 +1,95 @@
+package scala.reflect.internal
+
+import scala.tools.testing.AssertUtil._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import org.junit.Assert._
+import scala.tools.nsc.symtab.SymbolTableForUnitTesting
+
+@RunWith(classOf[JUnit4])
+class NamesTest {
+ object symbolTable extends SymbolTableForUnitTesting
+ import symbolTable._
+
+ val h1 = newTermName("hai")
+ val h2 = newTermName("hai")
+ val f = newTermName("fisch")
+
+ val h1y = h1.toTypeName
+ val h2y = newTypeName("hai")
+ val fy = newTypeName("fisch")
+
+ val uy = newTypeName("uhu")
+ val u = uy.toTermName // calling toTermName after constructing a typeName. This tests the fact
+ // that creating a typeName always also first creates a termName. There is
+ // an assertion for that in toTermName.
+
+ @Test
+ def termNamesAreHashConsed() {
+ assertTrue(h1 eq h2)
+ assertEquals(h1, h2)
+ assertTrue(h1 ne f)
+ assertTrue(h1 != f)
+ }
+
+ @Test
+ def termNamesNotEqualsTypeNames() {
+ assert(h1 ne h1y)
+ assert(h1 != h1y)
+ assert(h2 ne h2y)
+ assert(h2 != h2y)
+ }
+
+ @Test
+ def termNamesTypeNamesSameRange() {
+ assert(h1.start == h1y.start && h1.length == h1y.length)
+ assert(h2.start == h2y.start && h2.length == h2y.length)
+ assert(u.start == uy.start && u.length == uy.length)
+ }
+
+ @Test
+ def testLookupTypeName() {
+ assert(lookupTypeName("hai".toCharArray) eq h1y)
+ assert(lookupTypeName("fisch".toCharArray) eq fy)
+ assert(lookupTypeName("uhu".toCharArray) eq uy)
+
+ assertThrows[AssertionError](lookupTypeName("dog".toCharArray), _ contains "not yet created")
+ val d = newTermName("dog")
+ assertThrows[AssertionError](lookupTypeName("dog".toCharArray), _ contains "not yet created")
+ val dy = d.toTypeName
+ assert(lookupTypeName("dog".toCharArray) eq dy)
+ }
+
+ @Test
+ def emptyName() {
+ val z = newTermName("")
+ val zy = z.toTypeName
+ assertEquals(z.toString, "")
+ assertEquals(zy.toString, "")
+ assert(z eq newTermName(""))
+ assert(zy eq newTypeName(""))
+ }
+
+ @Test
+ def subNameTest() {
+ val i = f.subName(1, f.length)
+ assert(i.start == (f.start + 1) && i.length == (f.length - 1))
+ assert(f.subName(0, f.length) eq f)
+
+ val iy = fy.subName(1, fy.length)
+ assert(iy.start == (fy.start + 1) && iy.length == (fy.length - 1))
+ assert(fy.subName(0, fy.length) eq fy)
+
+ assert(f.subName(1,1) eq newTermName(""))
+ assert(f.subName(1, 0) eq newTermName(""))
+
+ assertThrows[IllegalArgumentException](f.subName(0 - f.start - 1, 1))
+ }
+
+ @Test
+ def stringEqualsTest() {
+ assert(h1 string_== h2)
+ assert(h1 string_== h1y)
+ }
+}
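Aside (not part of the patch): the eq/ne assertions above depend on the symbol table hash-consing names, i.e. equal spellings share one canonical instance, analogous to java.lang.String interning. A minimal sketch; InternDemo is a made-up name:

object InternDemo extends App {
  val a = new String("hai")  // a fresh object, not the pooled literal
  val b = "hai"
  println(a == b)            // true: structural equality
  println(a eq b)            // false: different objects
  println(a.intern eq b)     // true: intern returns the canonical instance
}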
diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala
index 4587417a99..9bfe6eecb8 100644
--- a/test/junit/scala/reflect/internal/PrintersTest.scala
+++ b/test/junit/scala/reflect/internal/PrintersTest.scala
@@ -24,19 +24,19 @@ object PrinterHelper {
resultCode.lines mkString s"$LF"
def assertResultCode(code: String)(parsedCode: String = "", typedCode: String = "", wrap: Boolean = false, printRoot: Boolean = false) = {
- def toolboxTree(tree: => Tree) = try{
+ def toolboxTree(tree: => Tree) = try {
tree
} catch {
- case e:scala.tools.reflect.ToolBoxError => throw new Exception(e.getMessage + ": " + code)
+ case e:scala.tools.reflect.ToolBoxError => throw new Exception(e.getMessage + ": " + code, e)
}
def wrapCode(source: String) = {
val context = sm"""
|trait PrintersContext {
- | class baz extends scala.annotation.StaticAnnotation;
- | class foo1[A, B] extends scala.annotation.StaticAnnotation;
- | class foo2[A, B](a: scala.Int)(b: scala.Int) extends scala.annotation.StaticAnnotation;
- | class foo3[Af, Bf](a: scala.Int)(b: scala.Float, c: PrintersContext.this.foo1[Af, Bf]) extends scala.annotation.StaticAnnotation;
+ | class baz extends scala.annotation.Annotation with scala.annotation.StaticAnnotation;
+ | class foo1[A, B] extends scala.annotation.Annotation with scala.annotation.StaticAnnotation;
+ | class foo2[A, B](a: scala.Int)(b: scala.Int) extends scala.annotation.Annotation with scala.annotation.StaticAnnotation;
+ | class foo3[Af, Bf](a: scala.Int)(b: scala.Float, c: PrintersContext.this.foo1[Af, Bf]) extends scala.annotation.Annotation with scala.annotation.StaticAnnotation;
| trait A1;
| trait B1;
|${source.trim.lines map {" " + _} mkString s"$LF"}
@@ -54,8 +54,12 @@ object PrinterHelper {
}
}
- def assertTreeCode(tree: Tree)(code: String) = {
- assertEquals("using quasiquote or given tree"+LF, code.trim, normalizeEOL(showCode(tree)))
+ def assertTreeCode(tree: Tree, typecheck: Boolean = false)(code: String) = {
+ if (typecheck) {
+ assertEquals("using quasiquote or given tree (typechecked)"+LF, code.trim, normalizeEOL(showCode(toolbox.typecheck(tree))))
+ } else {
+ assertEquals("using quasiquote or given tree"+LF, code.trim, normalizeEOL(showCode(tree)))
+ }
}
def assertPrintedCode(source: String, checkTypedTree: Boolean = true, wrapCode: Boolean = false) = {
@@ -121,6 +125,8 @@ trait BasePrintTests {
@Test def testName19 = assertPrintedCode("""class `class`""")
@Test def testName20 = assertPrintedCode("""class `test name`""")
+
+ @Test def testName21 = assertPrintedCode("""class `test.name`""")
@Test def testIfExpr1 = assertResultCode(code = sm"""
|val a = 1
@@ -312,17 +318,17 @@ trait BasePrintTests {
@Test def testFunc1 = assertResultCode(
code = "List(1, 2, 3).map((i: Int) => i - 1)")(
parsedCode = "List(1, 2, 3).map(((i: Int) => i.-(1)))",
- typedCode = sm"scala.collection.immutable.List.apply(1, 2, 3).map(((i: scala.Int) => i.-(1)))(scala.collection.immutable.List.canBuildFrom)")
+ typedCode = sm"scala.collection.immutable.List.apply[Int](1, 2, 3).map[Int, List[Int]](((i: scala.Int) => i.-(1)))(scala.collection.immutable.List.canBuildFrom[Int])")
@Test def testFunc2 = assertResultCode(
code = "val sum: Seq[Int] => Int = _ reduceLeft (_+_)")(
parsedCode = "val sum: _root_.scala.Function1[Seq[Int], Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))",
- typedCode = "val sum: _root_.scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))")
+ typedCode = "val sum: _root_.scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1: Seq[Int]) => x$1.reduceLeft[Int](((x$2: Int, x$3: Int) => x$2.+(x$3))))")
@Test def testFunc3 = assertResultCode(
code = "List(1, 2, 3) map (_ - 1)")(
parsedCode = "List(1, 2, 3).map(((x$1) => x$1.-(1))) ",
- typedCode = "scala.collection.immutable.List.apply(1, 2, 3).map(((x$1) => x$1.-(1)))(scala.collection.immutable.List.canBuildFrom)")
+ typedCode = "scala.collection.immutable.List.apply[Int](1, 2, 3).map[Int, List[Int]](((x$1: Int) => x$1.-(1)))(scala.collection.immutable.List.canBuildFrom[Int])")
@Test def testFunc4 = assertResultCode(
code = "val x: String => Int = ((str: String) => 1)")(
@@ -350,6 +356,13 @@ trait ClassPrintTests {
| def y = "test"
|}""")
+ @Test def testClassConstructorModifiers = assertPrintedCode("class X private (x: scala.Int)")
+
+ @Test def testClassConstructorModifierVisibility = assertPrintedCode(sm"""
+ |object A {
+ | class X protected[A] (x: scala.Int)
+ |}""")
+
@Test def testClassWithPublicParams = assertPrintedCode("class X(val x: scala.Int, val s: scala.Predef.String)")
@Test def testClassWithParams1 = assertPrintedCode("class X(x: scala.Int, s: scala.Predef.String)")
@@ -401,7 +414,8 @@ trait ClassPrintTests {
@Test def testClassWithImplicitParams = assertPrintedCode("class X(var i: scala.Int)(implicit val d: scala.Double, var f: scala.Float)")
- @Test def testClassWithEarly = assertPrintedCode(sm"""
+ @Test def testClassWithEarly =
+ assertPrintedCode(sm"""
|class X(var i: scala.Int) extends {
| val a = i;
| type B
@@ -419,15 +433,22 @@ trait ClassPrintTests {
| throw Throw2.this.e
|}""")
- /*
- class Test {
- val (a, b) = (1, 2)
- }
- */
- @Test def testClassWithAssignmentWithTuple1 = assertPrintedCode(sm"""
+ @Test def testClassWithAssignmentWithTuple1 = assertResultCode(sm"""
+ |class Test {
+ | val (a, b) = (1, 2)
+ |}""")(
+ parsedCode = sm"""
+ |class Test {
+ | private[this] val x$$1 = (scala.Tuple2(1, 2): @scala.unchecked) match {
+ | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2(a, b)
+ | };
+ | val a = x$$1._1;
+ | val b = x$$1._2
+ |}""",
+ typedCode = sm"""
|class Test {
- | private[this] val x$$1 = (scala.Tuple2.apply(1, 2): @scala.unchecked) match {
- | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply(a, b)
+ | private[this] val x$$1 = (scala.Tuple2.apply[Int, Int](1, 2): @scala.unchecked) match {
+ | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply[Int, Int](a, b)
| };
| val a = Test.this.x$$1._1;
| val b = Test.this.x$$1._2
@@ -448,8 +469,8 @@ trait ClassPrintTests {
|}""",
typedCode = sm"""
|class Test {
- | private[this] val x$$1 = (scala.Predef.ArrowAssoc(1).->(2): @scala.unchecked) match {
- | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply(a, b)
+ | private[this] val x$$1 = (scala.Predef.ArrowAssoc[Int](1).->[Int](2): @scala.unchecked) match {
+ | case scala.Tuple2((a @ _), (b @ _)) => scala.Tuple2.apply[Int, Int](a, b)
| };
| val a = Test.this.x$$1._1;
| val b = Test.this.x$$1._2
@@ -462,8 +483,8 @@ trait ClassPrintTests {
*/
@Test def testClassWithPatternMatchInAssignment = assertPrintedCode(sm"""
|class Test {
- | private[this] val x$$1 = (scala.collection.immutable.List.apply(1, 3, 5): @scala.unchecked) match {
- | case scala.collection.immutable.List((one @ _), (three @ _), (five @ _)) => scala.Tuple3.apply(one, three, five)
+ | private[this] val x$$1 = (scala.collection.immutable.List.apply[scala.Int](1, 3, 5): @scala.unchecked) match {
+ | case scala.collection.immutable.List((one @ _), (three @ _), (five @ _)) => scala.Tuple3.apply[scala.Int, scala.Int, scala.Int](one, three, five)
| };
| val one = Test.this.x$$1._1;
| val three = Test.this.x$$1._2;
@@ -626,7 +647,7 @@ trait ClassPrintTests {
@Test def testObjectWithPatternMatch1 = assertPrintedCode(sm"""
|object PM1 {
- | scala.collection.immutable.List.apply(1, 2) match {
+ | scala.collection.immutable.List.apply[scala.Int](1, 2) match {
| case (i @ _) => i
| }
|}""")
@@ -715,7 +736,7 @@ trait ClassPrintTests {
|}""",
typedCode = sm"""
|object PM5 {
- | scala.collection.immutable.List.apply(1, 2) match {
+ | scala.collection.immutable.List.apply[Int](1, 2) match {
| case scala.`package`.::((x @ _), (xs @ _)) => x
| }
|}""")
@@ -756,7 +777,7 @@ trait ClassPrintTests {
@Test def testObjectWithPatternMatch8 = assertPrintedCode(sm"""
|{
| object Extractor {
- | def unapply(i: scala.Int) = scala.Some.apply(i)
+ | def unapply(i: scala.Int) = scala.Some.apply[scala.Int](i)
| };
| object PM9 {
| 42 match {
@@ -991,7 +1012,7 @@ trait ValAndDefPrintTests {
@Test def testDefWithLazyVal2 = assertPrintedCode(sm"""
|def a = {
- | lazy val test = {
+ | lazy val test: Unit = {
| scala.Predef.println();
| scala.Predef.println()
| };
@@ -1161,4 +1182,17 @@ trait QuasiTreesPrintTests {
|case class X(x: Int, s: String) {
| def y = "test"
|}""")
-}
\ No newline at end of file
+
+ @Test def testQuasiCaseClassWithTypes1 = assertTreeCode(q"""case class X(x: ${typeOf[Int]}, s: ${typeOf[String]}){ def y = "test" }""")(sm"""
+ |case class X(x: Int, s: String) {
+ | def y = "test"
+ |}""")
+
+ @Test def testQuasiCaseClassWithTypes2 = assertTreeCode(q"""case class X(x: ${typeOf[Int]}, s: ${typeOf[String]}){ def y = "test" }""", typecheck = true)(sm"""
+ |{
+ | case class X(x: Int, s: String) {
+ | def y = "test"
+ | };
+ | ()
+ |}""")
+}
diff --git a/test/junit/scala/reflect/internal/ScopeTest.scala b/test/junit/scala/reflect/internal/ScopeTest.scala
new file mode 100644
index 0000000000..1ab24facac
--- /dev/null
+++ b/test/junit/scala/reflect/internal/ScopeTest.scala
@@ -0,0 +1,54 @@
+package scala.reflect.internal
+
+import scala.tools.nsc.symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil.assertThrows
+import scala.tools.nsc.symtab.SymbolTableForUnitTesting
+
+@RunWith(classOf[JUnit4])
+class ScopeTest {
+ object symbolTable extends SymbolTableForUnitTesting
+
+ import symbolTable._
+
+ @Test
+ def testNestedScopeSmall(): Unit = testNestedScope(0)
+ @Test
+ def testNestedScopeLarge(): Unit = testNestedScope(64) // exceeding MIN_HASH
+
+ private def testNestedScope(initSize: Int) {
+ def sym(termName: String): Symbol = NoSymbol.newValue(TermName(termName))
+ val foo = sym("foo")
+ val bar = sym("bar")
+
+ val outerElems = List.tabulate(initSize)(i => sym(i.toString))
+ val outer = newScopeWith(outerElems ++ List(foo, bar) : _*)
+ assertTrue(outer.containsName(foo.name))
+ assertTrue(outer.containsName(bar.name))
+
+ val baz = sym("baz")
+ val nested = newNestedScope(outer)
+
+ // Entries from the outer scope are entered in the nested.
+ assertTrue(outer.containsName(foo.name))
+ assertTrue(outer.containsName(bar.name))
+
+ // Nested scopes structurally share ScopeEntry-s with the outer.
+ assertSame(outer.lookupEntry(foo.name), nested.lookupEntry(foo.name))
+ nested.enter(baz)
+
+ // Symbols entered in the nested scope aren't visible in the outer.
+ assertTrue(nested.containsName(baz.name))
+ assertTrue(!outer.containsName(baz.name))
+
+ // Unlinking a symbol in the inner scope doesn't modify the outer
+ nested.unlink(bar)
+ assert(!nested.containsName(bar.name))
+ assert(outer.containsName(bar.name))
+ }
+}
diff --git a/test/junit/scala/reflect/internal/TypesTest.scala b/test/junit/scala/reflect/internal/TypesTest.scala
new file mode 100644
index 0000000000..95194ef0a4
--- /dev/null
+++ b/test/junit/scala/reflect/internal/TypesTest.scala
@@ -0,0 +1,35 @@
+package scala.reflect.internal
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.tools.nsc.symtab.SymbolTableForUnitTesting
+
+@RunWith(classOf[JUnit4])
+class TypesTest {
+
+ object symbolTable extends SymbolTableForUnitTesting
+ import symbolTable._, definitions._
+
+ @Test
+ def testRefinedTypeSI8611(): Unit = {
+ def stringNarrowed = StringTpe.narrow
+ assert(stringNarrowed != stringNarrowed)
+ assert(!(stringNarrowed =:= stringNarrowed))
+
+ def boolWithString = refinedType(BooleanTpe :: StringTpe :: Nil, NoSymbol)
+ assert(boolWithString != boolWithString)
+ assert(boolWithString =:= boolWithString)
+
+ val boolWithString1 = boolWithString
+ val boolWithString1narrow1 = boolWithString1.narrow
+ val boolWithString1narrow2 = boolWithString1.narrow
+ // Two narrowings of the same refinement end up =:=. This was the root
+ // cause of SI-8611. See `narrowUniquely` in `Logic` for the workaround.
+ assert(boolWithString1narrow1 =:= boolWithString1narrow2)
+ val uniquelyNarrowed1 = refinedType(boolWithString1narrow1 :: Nil, NoSymbol)
+ val uniquelyNarrowed2 = refinedType(boolWithString1narrow2 :: Nil, NoSymbol)
+ assert(uniquelyNarrowed1 =:= uniquelyNarrowed2)
+ }
+}
diff --git a/test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala b/test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala
new file mode 100644
index 0000000000..a2537ddab7
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/AbstractFileClassLoaderTest.scala
@@ -0,0 +1,138 @@
+package scala.reflect.internal.util
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class AbstractFileClassLoaderTest {
+
+ import scala.reflect.io._
+ import scala.io.Source
+ import scala.io.Codec.UTF8
+ import scala.reflect.io.Streamable
+ import java.net.{ URLClassLoader, URL }
+
+ implicit def `we love utf8` = UTF8
+ implicit class `abs file ops`(f: AbstractFile) {
+ def writeContent(s: String): Unit = Streamable.closing(f.bufferedOutput)(os => os write s.getBytes(UTF8.charSet))
+ }
+ implicit class `url slurp`(url: URL) {
+ def slurp(): String = Streamable.slurp(url)
+ }
+
+ val NoClassLoader: ClassLoader = null
+
+ def fuzzBuzzBooz: (AbstractFile, AbstractFile) = {
+ val fuzz = new VirtualDirectory("fuzz", None)
+ val buzz = fuzz subdirectoryNamed "buzz"
+ val booz = buzz fileNamed "booz.class"
+ (fuzz, booz)
+ }
+
+ @Test
+ def afclGetsParent(): Unit = {
+ val p = new URLClassLoader(Array.empty[URL])
+ val d = new VirtualDirectory("vd", None)
+ val x = new AbstractFileClassLoader(d, p)
+ assertSame(p, x.getParent)
+ }
+
+ @Test
+ def afclGetsResource(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ val x = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val r = x.getResource("buzz/booz.class")
+ assertNotNull(r)
+ assertEquals("hello, world", r.slurp())
+ }
+
+ @Test
+ def afclGetsResourceFromParent(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ val (fuzz_, booz_) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ booz_ writeContent "hello, world_"
+ val p = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val x = new AbstractFileClassLoader(fuzz_, p)
+ val r = x.getResource("buzz/booz.class")
+ assertNotNull(r)
+ assertEquals("hello, world", r.slurp())
+ }
+
+ @Test
+ def afclGetsResourceInDefaultPackage(): Unit = {
+ val fuzz = new VirtualDirectory("fuzz", None)
+ val booz = fuzz fileNamed "booz.class"
+ val bass = fuzz fileNamed "bass"
+ booz writeContent "hello, world"
+ bass writeContent "lo tone"
+ val x = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val r = x.getResource("booz.class")
+ assertNotNull(r)
+ assertEquals("hello, world", r.slurp())
+ assertEquals("lo tone", (x getResource "bass").slurp())
+ }
+
+ // SI-8843
+ @Test
+ def afclGetsResources(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ val x = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val e = x.getResources("buzz/booz.class")
+ assertTrue(e.hasMoreElements)
+ assertEquals("hello, world", e.nextElement.slurp())
+ assertFalse(e.hasMoreElements)
+ }
+
+ @Test
+ def afclGetsResourcesFromParent(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ val (fuzz_, booz_) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ booz_ writeContent "hello, world_"
+ val p = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val x = new AbstractFileClassLoader(fuzz_, p)
+ val e = x.getResources("buzz/booz.class")
+ assertTrue(e.hasMoreElements)
+ assertEquals("hello, world", e.nextElement.slurp())
+ assertTrue(e.hasMoreElements)
+ assertEquals("hello, world_", e.nextElement.slurp())
+ assertFalse(e.hasMoreElements)
+ }
+
+ @Test
+ def afclGetsResourceAsStream(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ val x = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val r = x.getResourceAsStream("buzz/booz.class")
+ assertNotNull(r)
+ assertEquals("hello, world", Streamable.closing(r)(is => Source.fromInputStream(is).mkString))
+ }
+
+ @Test
+ def afclGetsClassBytes(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ val x = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val b = x.classBytes("buzz/booz.class")
+ assertEquals("hello, world", new String(b, UTF8.charSet))
+ }
+
+ @Test
+ def afclGetsClassBytesFromParent(): Unit = {
+ val (fuzz, booz) = fuzzBuzzBooz
+ val (fuzz_, booz_) = fuzzBuzzBooz
+ booz writeContent "hello, world"
+ booz_ writeContent "hello, world_"
+
+ val p = new AbstractFileClassLoader(fuzz, NoClassLoader)
+ val x = new AbstractFileClassLoader(fuzz_, p)
+ val b = x.classBytes("buzz/booz.class")
+ assertEquals("hello, world", new String(b, UTF8.charSet))
+ }
+}
diff --git a/test/junit/scala/reflect/internal/util/SourceFileTest.scala b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
index 903e705ba2..cad23eba14 100644
--- a/test/junit/scala/reflect/internal/util/SourceFileTest.scala
+++ b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
@@ -17,6 +17,11 @@ class SourceFileTest {
assertFalse(file.isEndOfLine(Int.MaxValue))
}
+ @Test def si8630_lineToString(): Unit = {
+ val code = "abc "
+ assertEquals(code, new BatchSourceFile("", code).lineToString(0))
+ }
+
@Test
def si8205_lineToString(): Unit = {
assertEquals("", lineContentOf("", 0))
diff --git a/test/junit/scala/tools/nsc/SampleTest.scala b/test/junit/scala/tools/nsc/SampleTest.scala
index 8e026da1ea..60bb09e98f 100644
--- a/test/junit/scala/tools/nsc/SampleTest.scala
+++ b/test/junit/scala/tools/nsc/SampleTest.scala
@@ -1,5 +1,4 @@
package scala.tools.nsc
-package test
import org.junit.Assert._
import org.junit.Test
@@ -12,6 +11,6 @@ import org.junit.runners.JUnit4
class SampleTest {
@Test
def testMath: Unit = {
- assert(2+2 == 4, "you didn't get the math right fellow")
+ assertTrue("you didn't get the math right fellow", 2 + 2 == 4)
}
}
diff --git a/test/junit/scala/tools/nsc/ScriptRunnerTest.scala b/test/junit/scala/tools/nsc/ScriptRunnerTest.scala
new file mode 100644
index 0000000000..9bae7a0487
--- /dev/null
+++ b/test/junit/scala/tools/nsc/ScriptRunnerTest.scala
@@ -0,0 +1,23 @@
+package scala.tools.nsc
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ScriptRunnerTest {
+ @Test
+ def testEmptyScriptSucceeds: Unit = {
+ val s = new GenericRunnerSettings(s => ())
+ s.nc.value = true
+ s.usejavacp.value = true
+
+ // scala -nc -e ''
+ assertTrue(ScriptRunner.runCommand(s, "", Nil))
+
+ // scala -nc -save -e ''
+ s.save.value = true
+ assertTrue(ScriptRunner.runCommand(s, "", Nil))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
new file mode 100644
index 0000000000..6ada0e20fb
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala
@@ -0,0 +1,96 @@
+package scala.tools.nsc
+package backend.jvm
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes
+import org.junit.Assert._
+
+import scala.tools.nsc.backend.jvm.CodeGenTools._
+import scala.tools.testing.ClearAfterClass
+
+object BTypesTest extends ClearAfterClass.Clearable {
+ var compiler = {
+ val comp = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
+ new comp.Run() // initializes some of the compiler
+ comp.exitingDelambdafy(comp.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
+ comp.exitingDelambdafy(comp.genBCode.bTypes.initializeCoreBTypes())
+ comp
+ }
+ def clear(): Unit = { compiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class BTypesTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = BTypesTest
+
+ val compiler = BTypesTest.compiler
+ import compiler.genBCode.bTypes._
+
+ def classBTFS(sym: compiler.Symbol) = compiler.exitingDelambdafy(classBTypeFromSymbol(sym))
+
+ def jlo = compiler.definitions.ObjectClass
+ def jls = compiler.definitions.StringClass
+ def o = classBTFS(jlo)
+ def s = classBTFS(jls)
+ def oArr = ArrayBType(o)
+ def method = MethodBType(List(oArr, INT, DOUBLE, s), UNIT)
+
+ @Test
+ def classBTypesEquality() {
+ val s1 = classBTFS(jls)
+ val s2 = classBTFS(jls)
+ val o = classBTFS(jlo)
+ assertEquals(s1, s2)
+ assertEquals(s1.hashCode, s2.hashCode)
+ assert(s1 != o)
+ assert(s2 != o)
+ }
+
+ @Test
+ def typedOpcodes() {
+ assert(UNIT.typedOpcode(Opcodes.IALOAD) == Opcodes.IALOAD)
+ assert(INT.typedOpcode(Opcodes.IALOAD) == Opcodes.IALOAD)
+ assert(BOOL.typedOpcode(Opcodes.IALOAD) == Opcodes.BALOAD)
+ assert(BYTE.typedOpcode(Opcodes.IALOAD) == Opcodes.BALOAD)
+ assert(CHAR.typedOpcode(Opcodes.IALOAD) == Opcodes.CALOAD)
+ assert(SHORT.typedOpcode(Opcodes.IALOAD) == Opcodes.SALOAD)
+ assert(FLOAT.typedOpcode(Opcodes.IALOAD) == Opcodes.FALOAD)
+ assert(LONG.typedOpcode(Opcodes.IALOAD) == Opcodes.LALOAD)
+ assert(DOUBLE.typedOpcode(Opcodes.IALOAD) == Opcodes.DALOAD)
+ assert(classBTFS(jls).typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD)
+
+ assert(UNIT.typedOpcode(Opcodes.IRETURN) == Opcodes.RETURN)
+ assert(BOOL.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN)
+ assert(CHAR.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN)
+ assert(BYTE.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN)
+ assert(SHORT.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN)
+ assert(INT.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN)
+ assert(FLOAT.typedOpcode(Opcodes.IRETURN) == Opcodes.FRETURN)
+ assert(LONG.typedOpcode(Opcodes.IRETURN) == Opcodes.LRETURN)
+ assert(DOUBLE.typedOpcode(Opcodes.IRETURN) == Opcodes.DRETURN)
+ assert(classBTFS(jls).typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN)
+ }
+
+ @Test
+ def descriptors() {
+ assert(o.descriptor == "Ljava/lang/Object;")
+ assert(s.descriptor == "Ljava/lang/String;")
+ assert(oArr.descriptor == "[Ljava/lang/Object;")
+ assert(method.descriptor == "([Ljava/lang/Object;IDLjava/lang/String;)V")
+ }
+
+ @Test
+ def toAsmTypeTest() {
+ for (t <- List(o, s, oArr, method, INT, UNIT, DOUBLE)) {
+ assertEquals(t.descriptor, t.toASMType.getDescriptor)
+ }
+ }
+
+ // TODO @lry do more tests
+ @Test
+ def maxTypeTest() {
+
+ }
+}
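For readers unfamiliar with JVM type descriptors, the string asserted in descriptors() above can be read as follows (an informal annotation, not part of the committed test; the breakdown follows the standard JVM descriptor grammar):

  // ([Ljava/lang/Object;IDLjava/lang/String;)V  corresponds to MethodBType(List(oArr, INT, DOUBLE, s), UNIT)
  //   (                    opens the parameter list
  //   [Ljava/lang/Object;  Array[Object]   (oArr)
  //   I                    Int             (INT)
  //   D                    Double          (DOUBLE)
  //   Ljava/lang/String;   String          (s)
  //   )V                   void return     (UNIT)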
diff --git a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
new file mode 100644
index 0000000000..d0ffd06b01
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala
@@ -0,0 +1,166 @@
+package scala.tools.nsc.backend.jvm
+
+import org.junit.Assert._
+
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.BatchSourceFile
+import scala.reflect.io.VirtualDirectory
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree.{ClassNode, MethodNode}
+import scala.tools.cmd.CommandLineParser
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.reporters.StoreReporter
+import scala.tools.nsc.settings.MutableSettings
+import scala.tools.nsc.{Settings, Global}
+import scala.tools.partest.ASMConverters
+import scala.collection.JavaConverters._
+import scala.tools.testing.TempDir
+
+object CodeGenTools {
+ import ASMConverters._
+
+ def genMethod( flags: Int = Opcodes.ACC_PUBLIC,
+ name: String = "m",
+ descriptor: String = "()V",
+ genericSignature: String = null,
+ throwsExceptions: Array[String] = null,
+ handlers: List[ExceptionHandler] = Nil,
+ localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = {
+ val node = new MethodNode(flags, name, descriptor, genericSignature, throwsExceptions)
+ applyToMethod(node, Method(body.toList, handlers, localVars))
+ node
+ }
+
+ def wrapInClass(method: MethodNode): ClassNode = {
+ val cls = new ClassNode()
+ cls.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "C", null, "java/lang/Object", null)
+ cls.methods.add(method)
+ cls
+ }
+
+ private def resetOutput(compiler: Global): Unit = {
+ compiler.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None))
+ }
+
+ def newCompiler(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = {
+ val compiler = newCompilerWithoutVirtualOutdir(defaultArgs, extraArgs)
+ resetOutput(compiler)
+ compiler
+ }
+
+ def newCompilerWithoutVirtualOutdir(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = {
+ val settings = new Settings()
+ val args = (CommandLineParser tokenize defaultArgs) ++ (CommandLineParser tokenize extraArgs)
+ settings.processArguments(args, processAll = true)
+ new Global(settings, new StoreReporter)
+ }
+
+ def newRun(compiler: Global): compiler.Run = {
+ compiler.reporter.reset()
+ resetOutput(compiler)
+ new compiler.Run()
+ }
+
+ def reporter(compiler: Global) = compiler.reporter.asInstanceOf[StoreReporter]
+
+ def makeSourceFile(code: String, filename: String): BatchSourceFile = new BatchSourceFile(filename, code)
+
+ def getGeneratedClassfiles(outDir: AbstractFile): List[(String, Array[Byte])] = {
+ def files(dir: AbstractFile): List[(String, Array[Byte])] = {
+ val res = ListBuffer.empty[(String, Array[Byte])]
+ for (f <- dir.iterator) {
+ if (!f.isDirectory) res += ((f.name, f.toByteArray))
+ else if (f.name != "." && f.name != "..") res ++= files(f)
+ }
+ res.toList
+ }
+ files(outDir)
+ }
+
+ def checkReport(compiler: Global, allowMessage: StoreReporter#Info => Boolean = _ => false): Unit = {
+ val disallowed = reporter(compiler).infos.toList.filter(!allowMessage(_)) // toList prevents an infer-non-wildcard-existential warning.
+ if (disallowed.nonEmpty) {
+ val msg = disallowed.mkString("\n")
+ assert(false, "The compiler issued non-allowed warnings or errors:\n" + msg)
+ }
+ }
+
+ def compile(compiler: Global)(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = {
+ val run = newRun(compiler)
+ run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2)))
+ checkReport(compiler, allowMessage)
+ getGeneratedClassfiles(compiler.settings.outputDirs.getSingleOutput.get)
+ }
+
+ /**
+ * Compile multiple Scala files separately into a single output directory.
+ *
+ * Note that a new compiler instance is created for compiling each file because symbols survive
+ * across runs. This makes separate compilation slower.
+ *
+ * The output directory is a physical directory; I have not figured out if / how it's possible to
+ * add a VirtualDirectory to the classpath of a compiler.
+ */
+ def compileSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[(String, Array[Byte])] = {
+ val outDir = AbstractFile.getDirectory(TempDir.createTempDir())
+ val outDirPath = outDir.canonicalPath
+ val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath"
+
+ for (code <- codes) {
+ val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir)
+ new compiler.Run().compileSources(List(makeSourceFile(code, "unitTestSource.scala")))
+ checkReport(compiler, allowMessage)
+ afterEach(outDir)
+ }
+
+ val classfiles = getGeneratedClassfiles(outDir)
+ outDir.delete()
+ classfiles
+ }
+
+ def compileClassesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()) = {
+ readAsmClasses(compileSeparately(codes, extraArgs, allowMessage, afterEach))
+ }
+
+ def readAsmClasses(classfiles: List[(String, Array[Byte])]) = {
+ classfiles.map(p => AsmUtils.readClass(p._2)).sortBy(_.name)
+ }
+
+ def compileClasses(compiler: Global)(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
+ readAsmClasses(compile(compiler)(code, javaCode, allowMessage))
+ }
+
+ def compileMethods(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = {
+ compileClasses(compiler)(s"class C { $code }", allowMessage = allowMessage).head.methods.asScala.toList.filterNot(_.name == "<init>")
+ }
+
+ def singleMethodInstructions(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = {
+ val List(m) = compileMethods(compiler)(code, allowMessage = allowMessage)
+ instructionsFromMethod(m)
+ }
+
+ def singleMethod(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = {
+ val List(m) = compileMethods(compiler)(code, allowMessage = allowMessage)
+ convertMethod(m)
+ }
+
+ def assertSameCode(actual: List[Instruction], expected: List[Instruction]): Unit = {
+ assertTrue(s"\nExpected: $expected\nActual : $actual", actual === expected)
+ }
+
+ def getSingleMethod(classNode: ClassNode, name: String): Method =
+ convertMethod(classNode.methods.asScala.toList.find(_.name == name).get)
+
+ def assertHandlerLabelPostions(h: ExceptionHandler, instructions: List[Instruction], startIndex: Int, endIndex: Int, handlerIndex: Int): Unit = {
+ val insVec = instructions.toVector
+ assertTrue(h.start == insVec(startIndex) && h.end == insVec(endIndex) && h.handler == insVec(handlerIndex))
+ }
+
+ import scala.language.implicitConversions
+
+ implicit def aliveInstruction(ins: Instruction): (Instruction, Boolean) = (ins, true)
+
+ implicit class MortalInstruction(val ins: Instruction) extends AnyVal {
+ def dead: (Instruction, Boolean) = (ins, false)
+ }
+}
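As an informal usage sketch of the helpers defined above (not part of the patch; the compiler flags mirror the tests below, and the snippet string and expected opcodes are illustrative only):

  import scala.tools.nsc.backend.jvm.CodeGenTools._
  import scala.tools.partest.ASMConverters._

  // create one GenBCode compiler, compile a snippet, and inspect the generated bytecode
  val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
  val List(m)  = compileMethods(compiler)("def answer = 42") // compileMethods wraps the snippet in `class C { ... }`
  val ops      = instructionsFromMethod(m).dropNonOp         // something like List(IntOp(BIPUSH, 42), Op(IRETURN))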
diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
new file mode 100644
index 0000000000..4086f7dd7b
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala
@@ -0,0 +1,98 @@
+package scala.tools.nsc.backend.jvm
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Assert._
+import CodeGenTools._
+import scala.tools.asm.Opcodes._
+import scala.tools.partest.ASMConverters._
+import scala.tools.testing.ClearAfterClass
+
+object DirectCompileTest extends ClearAfterClass.Clearable {
+ var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:method")
+ def clear(): Unit = { compiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class DirectCompileTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = DirectCompileTest
+
+ val compiler = DirectCompileTest.compiler
+
+ @Test
+ def testCompile(): Unit = {
+ val List(("C.class", bytes)) = compile(compiler)(
+ """class C {
+ | def f = 1
+ |}
+ """.stripMargin)
+ def s(i: Int, n: Int) = (bytes(i) & 0xff) << n
+ assertTrue((s(0, 24) | s(1, 16) | s(2, 8) | s(3, 0)) == 0xcafebabe) // mocha java latte macchiato supreme dark roasted espresso
+ }
+
+ @Test
+ def testCompileClasses(): Unit = {
+ val List(cClass, cModuleClass) = compileClasses(compiler)("class C; object C")
+
+ assertTrue(cClass.name == "C")
+ assertTrue(cModuleClass.name == "C$")
+
+ val List(dMirror, dModuleClass) = compileClasses(compiler)("object D")
+
+ assertTrue(dMirror.name == "D")
+ assertTrue(dModuleClass.name == "D$")
+ }
+
+ @Test
+ def testCompileMethods(): Unit = {
+ val List(f, g) = compileMethods(compiler)(
+ """def f = 10
+ |def g = f
+ """.stripMargin)
+ assertTrue(f.name == "f")
+ assertTrue(g.name == "g")
+
+ assertSameCode(instructionsFromMethod(f).dropNonOp,
+ List(IntOp(BIPUSH, 10), Op(IRETURN)))
+
+ assertSameCode(instructionsFromMethod(g).dropNonOp,
+ List(VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "f", "()I", itf = false), Op(IRETURN)))
+ }
+
+ @Test
+ def testDropNonOpAliveLabels(): Unit = {
+ // makes sure that dropNoOp doesn't drop labels that are being used
+ val List(f) = compileMethods(compiler)("""def f(x: Int) = if (x == 0) "a" else "b"""")
+ assertSameCode(instructionsFromMethod(f).dropLinesFrames, List(
+ Label(0),
+ VarOp(ILOAD, 1),
+ Op(ICONST_0),
+ Jump(IF_ICMPNE,
+ Label(7)),
+ Ldc(LDC, "a"),
+ Op(ARETURN),
+ Label(7),
+ Ldc(LDC, "b"),
+ Op(ARETURN),
+ Label(11)
+ ))
+ }
+
+ @Test
+ def testSeparateCompilation(): Unit = {
+ val codeA = "class A { def f = 1 }"
+ val codeB = "class B extends A { def g = f }"
+ val List(a, b) = compileClassesSeparately(List(codeA, codeB))
+ val ins = getSingleMethod(b, "g").instructions
+ assert(ins exists {
+ case Invoke(_, "B", "f", _, _) => true
+ case _ => false
+ }, ins)
+ }
+
+ @Test
+ def compileErroneous(): Unit = {
+ compileClasses(compiler)("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch")
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala
new file mode 100644
index 0000000000..1b6c080234
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala
@@ -0,0 +1,103 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.nsc.backend.jvm.BTypes.InternalName
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+
+import BackendReporting._
+
+import scala.collection.convert.decorateAsScala._
+
+@RunWith(classOf[JUnit4])
+class BTypesFromClassfileTest {
+ // inliner enabled -> inlineInfos are collected (and compared) in ClassBTypes
+ val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:inline-global")
+
+ import compiler._
+ import definitions._
+ import genBCode.bTypes
+ import bTypes._
+
+ def duringBackend[T](f: => T) = compiler.exitingDelambdafy(f)
+
+ val run = new compiler.Run() // initializes some of the compiler
+ duringBackend(compiler.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler
+ duringBackend(bTypes.initializeCoreBTypes())
+
+ def clearCache() = bTypes.classBTypeFromInternalName.clear()
+
+ def sameBType(fromSym: ClassBType, fromClassfile: ClassBType, checked: Set[InternalName] = Set.empty): Set[InternalName] = {
+ if (checked(fromSym.internalName)) checked
+ else {
+ assert(fromSym == fromClassfile, s"$fromSym != $fromClassfile")
+ sameInfo(fromSym.info.get, fromClassfile.info.get, checked + fromSym.internalName)
+ }
+ }
+
+ def sameBTypes(fromSyms: Iterable[ClassBType], fromClassfiles: Iterable[ClassBType], checked: Set[InternalName]): Set[InternalName] = {
+ assert(fromSyms.size == fromClassfiles.size, s"\n$fromSyms\n$fromClassfiles")
+ (fromSyms, fromClassfiles).zipped.foldLeft(checked) {
+ case (chk, (fromSym, fromClassfile)) => sameBType(fromSym, fromClassfile, chk)
+ }
+ }
+
+ def sameInfo(fromSym: ClassInfo, fromClassfile: ClassInfo, checked: Set[InternalName]): Set[InternalName] = {
+ assert({
+ // Nested class symbols can undergo makeNotPrivate (ExplicitOuter). But this is only applied
+ // for class symbols that are being compiled, not those read from a pickle.
+ // So a class may be public in bytecode, but the symbol still says private.
+ if (fromSym.nestedInfo.isEmpty) fromSym.flags == fromClassfile.flags
+ else (fromSym.flags | ACC_PRIVATE | ACC_PUBLIC) == (fromClassfile.flags | ACC_PRIVATE | ACC_PUBLIC)
+ }, s"class flags differ\n$fromSym\n$fromClassfile")
+
+ // we don't compare InlineInfos in this test: in both cases (from symbol and from classfile) they
+ // are actually created by looking at the classfile members, not the symbol's. InlineInfos are only
+ // built from symbols for classes that are being compiled, which is not the case here. Instead
+ // there's a separate InlineInfoTest.
+
+ val chk1 = sameBTypes(fromSym.superClass, fromClassfile.superClass, checked)
+ val chk2 = sameBTypes(fromSym.interfaces, fromClassfile.interfaces, chk1)
+
+ // The fromSym info has only member classes, no local or anonymous. The symbol is read from the
+ // Scala pickle data and only member classes are created / entered.
+ // (This is different for symbols that are being compiled, where flatten will enter all local
+ // and anonymous classes as members of the outer class. But not for unpickled symbols).
+ // The fromClassfile info has all nested classes, including anonymous and local. So we filter
+ // them out: member classes are identified by having the `outerName` defined.
+ val memberClassesFromClassfile = fromClassfile.nestedClasses.filter(_.info.get.nestedInfo.get.outerName.isDefined)
+ // Sorting is required: the backend sorts all InnerClass entries by internalName before writing
+ // them to the classfile (to make it deterministic: the entries are collected in a Set during
+ // code generation).
+ val chk3 = sameBTypes(fromSym.nestedClasses.sortBy(_.internalName), memberClassesFromClassfile.sortBy(_.internalName), chk2)
+ sameBTypes(fromSym.nestedInfo.map(_.enclosingClass), fromClassfile.nestedInfo.map(_.enclosingClass), chk3)
+ }
+
+ def check(classSym: Symbol): Unit = duringBackend {
+ clearCache()
+ val fromSymbol = classBTypeFromSymbol(classSym)
+ clearCache()
+ val fromClassfile = bTypes.classBTypeFromParsedClassfile(fromSymbol.internalName)
+ sameBType(fromSymbol, fromClassfile)
+ }
+
+ @Test
+ def compareClassBTypes(): Unit = {
+ // Note that not only these classes are tested, but also all their parents and all nested
+ // classes in their InnerClass attributes.
+ check(ObjectClass)
+ check(JavaNumberClass)
+ check(ConsClass)
+ check(ListModule.moduleClass)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
new file mode 100644
index 0000000000..9fda034a04
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala
@@ -0,0 +1,152 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.generic.Clearable
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis._
+import scala.tools.nsc.reporters.StoreReporter
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import AsmUtils._
+import BackendReporting._
+
+import scala.collection.convert.decorateAsScala._
+
+@RunWith(classOf[JUnit4])
+class CallGraphTest {
+ val compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:inline-global -Yopt-warnings")
+ import compiler.genBCode.bTypes._
+
+ // allows inspecting the caches after a compilation run
+ val notPerRun: List[Clearable] = List(classBTypeFromInternalName, byteCodeRepository.classes, callGraph.callsites)
+ notPerRun foreach compiler.perRunCaches.unrecordCache
+
+ def compile(code: String, allowMessage: StoreReporter#Info => Boolean): List[ClassNode] = {
+ notPerRun.foreach(_.clear())
+ compileClasses(compiler)(code, allowMessage = allowMessage)
+ }
+
+ def callsInMethod(methodNode: MethodNode): List[MethodInsnNode] = methodNode.instructions.iterator.asScala.collect({
+ case call: MethodInsnNode => call
+ }).toList
+
+ @Test
+ def callGraphStructure(): Unit = {
+ val code =
+ """class C {
+ | // try-catch prevents inlining - we want to analyze the callsite
+ | def f1 = try { 0 } catch { case _: Throwable => 1 }
+ | final def f2 = try { 0 } catch { case _: Throwable => 1 }
+ |
+ | @inline def f3 = try { 0 } catch { case _: Throwable => 1 }
+ | @inline final def f4 = try { 0 } catch { case _: Throwable => 1 }
+ |
+ | @noinline def f5 = try { 0 } catch { case _: Throwable => 1 }
+ | @noinline final def f6 = try { 0 } catch { case _: Throwable => 1 }
+ |
+ | @inline @noinline def f7 = try { 0 } catch { case _: Throwable => 1 }
+ |}
+ |class D extends C {
+ | @inline override def f1 = try { 0 } catch { case _: Throwable => 1 }
+ | override final def f3 = try { 0 } catch { case _: Throwable => 1 }
+ |}
+ |object C {
+ | def g1 = try { 0 } catch { case _: Throwable => 1 }
+ |}
+ |class Test {
+ | def t1(c: C) = c.f1 + c.f2 + c.f3 + c.f4 + c.f5 + c.f6 + c.f7 + C.g1
+ | def t2(d: D) = d.f1 + d.f2 + d.f3 + d.f4 + d.f5 + d.f6 + d.f7 + C.g1
+ |}
+ """.stripMargin
+
+ // Get the ClassNodes from the code repo (don't use the unparsed ClassNodes returned by compile).
+ // The callGraph.callsites map is indexed by instructions of those ClassNodes.
+
+ val ok = Set(
+ "D::f1()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", // only one warning for D.f1: C.f1 is not annotated @inline
+ "C::f3()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", // only one warning for C.f3: D.f3 does not have @inline (and it would also be safe to inline)
+ "C::f7()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", // two warnings (the error message mentions C.f7 even if the receiver type is D, because f7 is inherited from C)
+ "operand stack at the callsite in Test::t1(LC;)I contains more values",
+ "operand stack at the callsite in Test::t2(LD;)I contains more values")
+ var msgCount = 0
+ val checkMsg = (m: StoreReporter#Info) => {
+ msgCount += 1
+ ok exists (m.msg contains _)
+ }
+ val List(cCls, cMod, dCls, testCls) = compile(code, checkMsg).map(c => byteCodeRepository.classNode(c.name).get)
+ assert(msgCount == 6, msgCount)
+
+ val List(cf1, cf2, cf3, cf4, cf5, cf6, cf7) = cCls.methods.iterator.asScala.filter(_.name.startsWith("f")).toList.sortBy(_.name)
+ val List(df1, df3) = dCls.methods.iterator.asScala.filter(_.name.startsWith("f")).toList.sortBy(_.name)
+ val g1 = cMod.methods.iterator.asScala.find(_.name == "g1").get
+ val List(t1, t2) = testCls.methods.iterator.asScala.filter(_.name.startsWith("t")).toList.sortBy(_.name)
+
+ val List(cf1Call, cf2Call, cf3Call, cf4Call, cf5Call, cf6Call, cf7Call, cg1Call) = callsInMethod(t1)
+ val List(df1Call, df2Call, df3Call, df4Call, df5Call, df6Call, df7Call, dg1Call) = callsInMethod(t2)
+
+ def checkCallsite(callsite: callGraph.Callsite,
+ call: MethodInsnNode, callsiteMethod: MethodNode, target: MethodNode, calleeDeclClass: ClassBType,
+ safeToInline: Boolean, atInline: Boolean, atNoInline: Boolean) = try {
+ assert(callsite.callsiteInstruction == call)
+ assert(callsite.callsiteMethod == callsiteMethod)
+ val callee = callsite.callee.get
+ assert(callee.callee == target)
+ assert(callee.calleeDeclarationClass == calleeDeclClass)
+ assert(callee.safeToInline == safeToInline)
+ assert(callee.annotatedInline == atInline)
+ assert(callee.annotatedNoInline == atNoInline)
+
+ assert(callsite.argInfos == List()) // not defined yet
+ } catch {
+ case e: Throwable => println(callsite); throw e
+ }
+
+ val cClassBType = classBTypeFromClassNode(cCls)
+ val cMClassBType = classBTypeFromClassNode(cMod)
+ val dClassBType = classBTypeFromClassNode(dCls)
+
+ checkCallsite(callGraph.callsites(cf1Call),
+ cf1Call, t1, cf1, cClassBType, false, false, false)
+ checkCallsite(callGraph.callsites(cf2Call),
+ cf2Call, t1, cf2, cClassBType, true, false, false)
+ checkCallsite(callGraph.callsites(cf3Call),
+ cf3Call, t1, cf3, cClassBType, false, true, false)
+ checkCallsite(callGraph.callsites(cf4Call),
+ cf4Call, t1, cf4, cClassBType, true, true, false)
+ checkCallsite(callGraph.callsites(cf5Call),
+ cf5Call, t1, cf5, cClassBType, false, false, true)
+ checkCallsite(callGraph.callsites(cf6Call),
+ cf6Call, t1, cf6, cClassBType, true, false, true)
+ checkCallsite(callGraph.callsites(cf7Call),
+ cf7Call, t1, cf7, cClassBType, false, true, true)
+ checkCallsite(callGraph.callsites(cg1Call),
+ cg1Call, t1, g1, cMClassBType, true, false, false)
+
+ checkCallsite(callGraph.callsites(df1Call),
+ df1Call, t2, df1, dClassBType, false, true, false)
+ checkCallsite(callGraph.callsites(df2Call),
+ df2Call, t2, cf2, cClassBType, true, false, false)
+ checkCallsite(callGraph.callsites(df3Call),
+ df3Call, t2, df3, dClassBType, true, false, false)
+ checkCallsite(callGraph.callsites(df4Call),
+ df4Call, t2, cf4, cClassBType, true, true, false)
+ checkCallsite(callGraph.callsites(df5Call),
+ df5Call, t2, cf5, cClassBType, false, false, true)
+ checkCallsite(callGraph.callsites(df6Call),
+ df6Call, t2, cf6, cClassBType, true, false, true)
+ checkCallsite(callGraph.callsites(df7Call),
+ df7Call, t2, cf7, cClassBType, false, true, true)
+ checkCallsite(callGraph.callsites(dg1Call),
+ dg1Call, t2, g1, cMClassBType, true, false, false)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala
new file mode 100644
index 0000000000..76492cfa23
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala
@@ -0,0 +1,80 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+
+@RunWith(classOf[JUnit4])
+class CompactLocalVariablesTest {
+
+ // recurse-unreachable-jumps is required for eliminating catch blocks: in the first dce round they
+ // are still live. Only after eliminating the empty handler do the catch blocks become unreachable.
+ val methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code,compact-locals")
+ val noCompactVarsCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code")
+
+ @Test
+ def compactUnused(): Unit = {
+ val code =
+ """def f: Double = {
+ | try { }
+ | catch {
+ | case _: Throwable =>
+ | // eliminated by dce
+ | val i = 1
+ | val d = 1d
+ | val f = 1f
+ | val l = 1l
+ | }
+ |
+ | val i = 1 // variable index 1 (it's an instance method, so at index 0 we have `this`)
+ | val d = 1d // 2,3
+ | val f = 1f // 4
+ | val l = 1l // 5,6
+ |
+ | try { }
+ | catch {
+ | case _: Throwable =>
+ | // eliminated by dce
+ | val i = 1
+ | val d = 1d
+ | val f = 1f
+ | val l = 1l
+ | }
+ |
+ | val ii = 1 // 7
+ | val dd = 1d // 8,9
+ | val ff = 1f // 10
+ | val ll = 1l // 11,12
+ |
+ | i + ii + d + dd + f + ff + l + ll
+ |}
+ |""".stripMargin
+
+ val List(noCompact) = compileMethods(noCompactVarsCompiler)(code)
+ val List(withCompact) = compileMethods(methodOptCompiler)(code)
+
+ // code is the same, except for local var indices
+ assertTrue(noCompact.instructions.size == withCompact.instructions.size)
+
+ val varOpSlots = convertMethod(withCompact).instructions collect {
+ case VarOp(_, v) => v
+ }
+ assertTrue(varOpSlots.toString, varOpSlots == List(1, 2, 4, 5, 7, 8, 10, 11, // stores
+ 1, 7, 2, 8, 4, 10, 5, 11)) // loads
+
+ // the local variables descriptor table is cleaned up to remove stale entries after dce,
+ // also when the slots are not compacted
+ assertTrue(noCompact.localVariables.size == withCompact.localVariables.size)
+
+ assertTrue(noCompact.maxLocals == 25)
+ assertTrue(withCompact.maxLocals == 13)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
new file mode 100644
index 0000000000..cb01f3d164
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala
@@ -0,0 +1,99 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import scala.tools.testing.ClearAfterClass
+
+object EmptyExceptionHandlersTest extends ClearAfterClass.Clearable {
+ var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
+ var dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code")
+ def clear(): Unit = {
+ noOptCompiler = null
+ dceCompiler = null
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class EmptyExceptionHandlersTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = EmptyExceptionHandlersTest
+
+ val noOptCompiler = EmptyExceptionHandlersTest.noOptCompiler
+ val dceCompiler = EmptyExceptionHandlersTest.dceCompiler
+
+ val exceptionDescriptor = "java/lang/Exception"
+
+ @Test
+ def eliminateEmpty(): Unit = {
+ val handlers = List(ExceptionHandler(Label(1), Label(2), Label(2), Some(exceptionDescriptor)))
+ val asmMethod = genMethod(handlers = handlers)(
+ Label(1),
+ Label(2),
+ Op(RETURN)
+ )
+ assertTrue(convertMethod(asmMethod).handlers.length == 1)
+ LocalOptImpls.removeEmptyExceptionHandlers(asmMethod)
+ assertTrue(convertMethod(asmMethod).handlers.isEmpty)
+ }
+
+ @Test
+ def eliminateHandlersGuardingNops(): Unit = {
+ val handlers = List(ExceptionHandler(Label(1), Label(2), Label(2), Some(exceptionDescriptor)))
+ val asmMethod = genMethod(handlers = handlers)(
+ Label(1), // nops only
+ Jump(GOTO, Label(3)),
+ Label(3),
+ Jump(GOTO, Label(4)),
+
+ Label(2), // handler
+ Op(ACONST_NULL),
+ Op(ATHROW),
+
+ Label(4), // return
+ Op(RETURN)
+ )
+ assertTrue(convertMethod(asmMethod).handlers.length == 1)
+ LocalOptImpls.removeEmptyExceptionHandlers(asmMethod)
+ assertTrue(convertMethod(asmMethod).handlers.isEmpty)
+ }
+
+ @Test
+ def eliminateUnreachableHandler(): Unit = {
+ val code = "def f: Unit = try { } catch { case _: Exception => println(0) }; println(1)"
+
+ assertTrue(singleMethod(noOptCompiler)(code).handlers.length == 1)
+ val optMethod = singleMethod(dceCompiler)(code)
+ assertTrue(optMethod.handlers.isEmpty)
+
+ val code2 =
+ """def f: Unit = {
+ | println(0)
+ | return
+ | try { throw new Exception("") } // removed by dce, so handler will be removed as well
+ | catch { case _: Exception => println(1) }
+ | println(2)
+ |}""".stripMargin
+
+ assertTrue(singleMethod(dceCompiler)(code2).handlers.isEmpty)
+ }
+
+ @Test
+ def keepAliveHandlers(): Unit = {
+ val code =
+ """def f: Int = {
+ | println(0)
+ | try { 1 }
+ | catch { case _: Exception => 2 }
+ |}""".stripMargin
+
+ assertTrue(singleMethod(dceCompiler)(code).handlers.length == 1)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala
new file mode 100644
index 0000000000..7283e20745
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala
@@ -0,0 +1,99 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+
+@RunWith(classOf[JUnit4])
+class EmptyLabelsAndLineNumbersTest {
+ @Test
+ def removeEmptyLineNumbers(): Unit = {
+ val ops = List[(Instruction, Boolean)](
+ Label(1),
+ LineNumber(1, Label(1)),
+ Label(2),
+ Label(3),
+ Op(RETURN),
+
+ Label(4),
+ LineNumber(4, Label(4)).dead,
+ LineNumber(5, Label(4)),
+ Op(RETURN),
+
+ Label(5),
+ LineNumber(6, Label(5)).dead,
+ Label(6),
+ Label(7),
+ LineNumber(7, Label(7)),
+ Op(RETURN),
+
+ Label(9),
+ LineNumber(8, Label(9)).dead,
+ Label(10)
+ )
+
+ val method = genMethod()(ops.map(_._1): _*)
+ assertTrue(LocalOptImpls.removeEmptyLineNumbers(method))
+ assertSameCode(instructionsFromMethod(method), ops.filter(_._2).map(_._1))
+ }
+
+ @Test
+ def badlyLocatedLineNumbers(): Unit = {
+ def t(ops: Instruction*) =
+ assertThrows[AssertionError](LocalOptImpls.removeEmptyLineNumbers(genMethod()(ops: _*)))
+
+ // line numbers have to be right after their referenced label node
+ t(LineNumber(0, Label(1)), Label(1))
+ t(Label(0), Label(1), LineNumber(0, Label(0)))
+ }
+
+ @Test
+ def removeEmptyLabels(): Unit = {
+ val handler = List(ExceptionHandler(Label(4), Label(5), Label(6), Some("java/lang/Throwable")))
+ def ops(target1: Int, target2: Int, target3: Int, target4: Int, target5: Int, target6: Int) = List[(Instruction, Boolean)](
+ Label(1),
+ Label(2).dead,
+ Label(3).dead,
+ LineNumber(3, Label(target1)),
+ VarOp(ILOAD, 1),
+ Jump(IFGE, Label(target2)),
+
+ Label(4),
+ Label(5).dead,
+ Label(6).dead,
+ VarOp(ILOAD, 2),
+ Jump(IFGE, Label(target3)),
+
+ Label(7),
+ Label(8).dead,
+ Label(9).dead,
+ Op(RETURN),
+
+ LookupSwitch(LOOKUPSWITCH, Label(target4), List(1,2), List(Label(target4), Label(target5))),
+ TableSwitch(TABLESWITCH, 1, 2, Label(target4), List(Label(target4), Label(target5))),
+
+ Label(10),
+ LineNumber(10, Label(10)),
+ Label(11).dead,
+ LineNumber(12, Label(target6))
+ )
+
+ val method = genMethod(handlers = handler)(ops(2, 3, 8, 8, 9, 11).map(_._1): _*)
+ assertTrue(LocalOptImpls.removeEmptyLabelNodes(method))
+ val m = convertMethod(method)
+ assertSameCode(m.instructions, ops(1, 1, 7, 7, 7, 10).filter(_._2).map(_._1))
+ assertTrue(m.handlers match {
+ case List(ExceptionHandler(Label(4), Label(4), Label(4), _)) => true
+ case _ => false
+ })
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
new file mode 100644
index 0000000000..57088bdd2f
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala
@@ -0,0 +1,67 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.generic.Clearable
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import AsmUtils._
+import scala.tools.testing.ClearAfterClass
+
+import BackendReporting._
+
+import scala.collection.convert.decorateAsScala._
+
+object InlineInfoTest extends ClearAfterClass.Clearable {
+ var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:classpath")
+ def clear(): Unit = { compiler = null }
+
+ def notPerRun: List[Clearable] = List(compiler.genBCode.bTypes.classBTypeFromInternalName, compiler.genBCode.bTypes.byteCodeRepository.classes)
+ notPerRun foreach compiler.perRunCaches.unrecordCache
+}
+
+@RunWith(classOf[JUnit4])
+class InlineInfoTest {
+ val compiler = InlineInfoTest.compiler
+
+ def compile(code: String) = {
+ InlineInfoTest.notPerRun.foreach(_.clear())
+ compileClasses(compiler)(code)
+ }
+
+ @Test
+ def inlineInfosFromSymbolAndAttribute(): Unit = {
+ val code =
+ """trait T {
+ | @inline def f: Int
+ | @noinline final def g = 0
+ |}
+ |trait U { self: T =>
+ | @inline def f = 0
+ | final def h = 0
+ | final class K {
+ | @inline def i = 0
+ | }
+ |}
+ |sealed trait V {
+ | @inline def j = 0
+ |}
+ |class C extends T with U
+ """.stripMargin
+ val classes = compile(code)
+ val fromSyms = classes.map(c => compiler.genBCode.bTypes.classBTypeFromInternalName(c.name).info.get.inlineInfo)
+
+ val fromAttrs = classes.map(c => {
+ assert(c.attrs.asScala.exists(_.isInstanceOf[InlineInfoAttribute]), c.attrs)
+ compiler.genBCode.bTypes.inlineInfoFromClassfile(c)
+ })
+
+ assert(fromSyms == fromAttrs)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
new file mode 100644
index 0000000000..029caa995c
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala
@@ -0,0 +1,194 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.generic.Clearable
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.BatchSourceFile
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis._
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
+import scala.tools.nsc.io._
+import scala.tools.nsc.reporters.StoreReporter
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import AsmUtils._
+
+import BackendReporting._
+
+import scala.collection.convert.decorateAsScala._
+import scala.tools.testing.ClearAfterClass
+
+object InlineWarningTest extends ClearAfterClass.Clearable {
+ val argsNoWarn = "-Ybackend:GenBCode -Yopt:l:classpath"
+ val args = argsNoWarn + " -Yopt-warnings"
+ var compiler = newCompiler(extraArgs = args)
+ def clear(): Unit = { compiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class InlineWarningTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = InlineWarningTest
+
+ val compiler = InlineWarningTest.compiler
+
+ def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
+ compileClasses(compiler)(scalaCode, javaCode, allowMessage)
+ }
+
+ @Test
+ def nonFinal(): Unit = {
+ val code =
+ """class C {
+ | @inline def m1 = 1
+ |}
+ |trait T {
+ | @inline def m2 = 1
+ |}
+ |class D extends C with T
+ |
+ |class Test {
+ | def t1(c: C, t: T, d: D) = c.m1 + t.m2 + d.m1 + d.m2
+ |}
+ """.stripMargin
+ var count = 0
+ val warns = Set(
+ "C::m1()I is annotated @inline but cannot be inlined: the method is not final and may be overridden",
+ "T::m2()I is annotated @inline but cannot be inlined: the method is not final and may be overridden",
+ "D::m2()I is annotated @inline but cannot be inlined: the method is not final and may be overridden")
+ compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)})
+ assert(count == 4, count)
+ }
+
+ @Test
+ def traitMissingImplClass(): Unit = {
+ val codeA = "trait T { @inline final def f = 1 }"
+ val codeB = "class C { def t1(t: T) = t.f }"
+
+ val removeImpl = (outDir: AbstractFile) => {
+ val f = outDir.lookupName("T$class.class", directory = false)
+ if (f != null) f.delete()
+ }
+
+ val warn =
+ """T::f()I is annotated @inline but cannot be inlined: the trait method call could not be rewritten to the static implementation method. Possible reason:
+ |The method f(LT;)I could not be found in the class T$class or any of its parents.
+ |Note that the following parent classes could not be found on the classpath: T$class""".stripMargin
+
+ var c = 0
+ compileSeparately(List(codeA, codeB), extraArgs = InlineWarningTest.args, afterEach = removeImpl, allowMessage = i => {c += 1; i.msg contains warn})
+ assert(c == 1, c)
+
+ // only summary here
+ compileSeparately(List(codeA, codeB), extraArgs = InlineWarningTest.argsNoWarn, afterEach = removeImpl, allowMessage = _.msg contains "there was one inliner warning")
+ }
+
+ @Test
+ def handlerNonEmptyStack(): Unit = {
+ val code =
+ """class C {
+ | @noinline def q = 0
+ | @inline final def foo = try { q } catch { case e: Exception => 2 }
+ | def t1 = println(foo) // inline warning here: foo cannot be inlined on top of a non-empty stack
+ |}
+ """.stripMargin
+
+ var c = 0
+ compile(code, allowMessage = i => {c += 1; i.msg contains "operand stack at the callsite in C::t1()V contains more values"})
+ assert(c == 1, c)
+ }
+
+ @Test
+ def mixedWarnings(): Unit = {
+ val javaCode =
+ """public class A {
+ | public static final int bar() { return 100; }
+ |}
+ """.stripMargin
+
+ val scalaCode =
+ """class B {
+ | @inline final def flop = A.bar
+ | def g = flop
+ |}
+ """.stripMargin
+
+ val warns = List(
+ """failed to determine if bar should be inlined:
+ |The method bar()I could not be found in the class A or any of its parents.
+ |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin,
+
+ """B::flop()I is annotated @inline but could not be inlined:
+ |Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
+ |The method bar()I could not be found in the class A or any of its parents.
+ |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin)
+
+ var c = 0
+ val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)})
+ assert(c == 1, c)
+
+ // no warnings here
+ compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java")))
+
+ c = 0
+ compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)})
+ assert(c == 2, c)
+ }
+
+ @Test
+ def cannotInlinePrivateCallIntoDifferentClass(): Unit = {
+ val code =
+ """class M {
+ | @inline final def f = {
+ | @noinline def nested = 0
+ | nested
+ | }
+ |
+ | def t = f // ok
+ |}
+ |
+ |class N {
+ | def t(a: M) = a.f // not possible
+ |}
+ """.stripMargin
+
+ val warn =
+ """M::f()I is annotated @inline but could not be inlined:
+ |The callee M::f()I contains the instruction INVOKESPECIAL M.nested$1 ()I
+ |that would cause an IllegalAccessError when inlined into class N""".stripMargin
+
+ var c = 0
+ compile(code, allowMessage = i => { c += 1; i.msg contains warn })
+ assert(c == 1, c)
+ }
+
+ @Test
+ def cannotMixStrictfp(): Unit = {
+ val code =
+ """import annotation.strictfp
+ |class C {
+ | @strictfp @inline final def f = 0
+ | @strictfp def t1 = f
+ | def t2 = f
+ |}
+ """.stripMargin
+
+ val warn =
+ """C::f()I is annotated @inline but could not be inlined:
+ |The callsite method C::t2()I
+ |does not have the same strictfp mode as the callee C::f()I.""".stripMargin
+
+ var c = 0
+ compile(code, allowMessage = i => { c += 1; i.msg contains warn })
+ assert(c == 1, c)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
new file mode 100644
index 0000000000..b4839dcec8
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala
@@ -0,0 +1,198 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.asm.tree._
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import AsmUtils._
+
+import scala.collection.convert.decorateAsScala._
+import scala.tools.testing.ClearAfterClass
+
+object InlinerIllegalAccessTest extends ClearAfterClass.Clearable {
+ var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
+ def clear(): Unit = { compiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class InlinerIllegalAccessTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = InlinerIllegalAccessTest
+
+ val compiler = InlinerIllegalAccessTest.compiler
+ import compiler.genBCode.bTypes._
+
+ def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile)
+ def assertEmpty(ins: Option[AbstractInsnNode]) = for (i <- ins) throw new AssertionError(textify(i))
+
+ @Test
+ def typeAccessible(): Unit = {
+ val code =
+ """package a {
+ | private class C { // the Scala compiler makes all classes public
+ | def f1 = new C // NEW a/C
+ | def f2 = new Array[C](0) // ANEWARRAY a/C
+ | def f3 = new Array[Array[C]](0) // ANEWARRAY [La/C;
+ | }
+ | class D
+ |}
+ |package b {
+ | class E
+ |}
+ """.stripMargin
+
+ val allClasses = compileClasses(compiler)(code)
+ val List(cClass, dClass, eClass) = allClasses
+ assert(cClass.name == "a/C" && dClass.name == "a/D" && eClass.name == "b/E", s"${cClass.name}, ${dClass.name}, ${eClass.name}")
+ addToRepo(allClasses) // they are not on the compiler's classpath, so we add them manually to the code repo
+
+ val methods = cClass.methods.asScala.filter(_.name(0) == 'f').toList
+
+ def check(classNode: ClassNode, test: Option[AbstractInsnNode] => Unit) = {
+ for (m <- methods)
+ test(inliner.findIllegalAccess(m.instructions, classBTypeFromParsedClassfile(cClass.name), classBTypeFromParsedClassfile(classNode.name)).map(_._1))
+ }
+
+ check(cClass, assertEmpty)
+ check(dClass, assertEmpty)
+ check(eClass, assertEmpty) // C is public, so accessible in E
+
+ byteCodeRepository.classes.clear()
+ classBTypeFromInternalName.clear()
+
+    cClass.access &= ~ACC_PUBLIC // make C package-private (drop the ACC_PUBLIC flag)
+ addToRepo(allClasses)
+
+ // private classes can be accessed from the same package
+ check(cClass, assertEmpty)
+ check(dClass, assertEmpty) // accessing a private class in the same package is OK
+ check(eClass, {
+ case Some(ti: TypeInsnNode) if Set("a/C", "[La/C;")(ti.desc) => ()
+ // MatchError otherwise
+ })
+ }
+
+ @Test
+ def memberAccessible(): Unit = {
+ val code =
+ """package a {
+ | class C {
+ | /*public*/ def a = 0
+ | /*default*/ def b = 0
+ | protected def c = 0
+ | private def d = 0
+ |
+ | /*public static*/ def e = 0
+ | /*default static*/ def f = 0
+ | protected /*static*/ def g = 0
+ | private /*static*/ def h = 0
+ |
+ | def raC = a
+ | def rbC = b
+ | def rcC = c
+ | def rdC = d
+ | def reC = e
+ | def rfC = f
+ | def rgC = g
+ | def rhC = h
+ | }
+ |
+ | class D extends C {
+ | def rbD = b // 1: default access b, accessed in D, declared in C. can be inlined into any class in the same package as C.
+ | def rcD = c // 2: protected c, accessed in D. can be inlined into C, D or E, but not into F (F and D are unrelated).
+ |
+ | def rfD = f // 1
+ | def rgD = g // 2
+ | }
+ | class E extends D
+ |
+ | class F extends C
+ |
+ | class G
+ |}
+ |
+ |package b {
+ | class H extends a.C
+ | class I
+ |}
+ """.stripMargin
+
+ val allClasses = compileClasses(compiler)(code)
+ val List(cCl, dCl, eCl, fCl, gCl, hCl, iCl) = allClasses
+ addToRepo(allClasses)
+
+    // set flags that Scala doesn't have (default access, static) - a hacky way to test all access modes.
+ val names = ('a' to 'h').map(_.toString).toSet
+ val List(a, b, c, d, e, f, g, h) = cCl.methods.asScala.toList.filter(m => names(m.name))
+
+ def checkAccess(a: MethodNode, expected: Int): Unit = {
+ assert((a.access & (ACC_STATIC | ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE)) == expected, s"${a.name}, ${a.access}")
+ }
+
+ checkAccess(a, ACC_PUBLIC)
+ b.access &= ~ACC_PUBLIC; checkAccess(b, 0) // make it default access
+ c.access &= ~ACC_PUBLIC; c.access |= ACC_PROTECTED; checkAccess(c, ACC_PROTECTED) // make it protected - scalac actually never emits PROTECTED in bytecode, see javaFlags in BTypesFromSymbols
+ checkAccess(d, ACC_PRIVATE)
+
+ e.access |= ACC_STATIC; checkAccess(e, ACC_STATIC | ACC_PUBLIC)
+ f.access &= ~ACC_PUBLIC; f.access |= ACC_STATIC; checkAccess(f, ACC_STATIC)
+ g.access &= ~ACC_PUBLIC; g.access |= (ACC_STATIC | ACC_PROTECTED); checkAccess(g, ACC_STATIC | ACC_PROTECTED)
+ h.access |= ACC_STATIC; checkAccess(h, ACC_STATIC | ACC_PRIVATE)
+
+ val List(raC, rbC, rcC, rdC, reC, rfC, rgC, rhC) = cCl.methods.asScala.toList.filter(_.name(0) == 'r').sortBy(_.name)
+
+ val List(rbD, rcD, rfD, rgD) = dCl.methods.asScala.toList.filter(_.name(0) == 'r').sortBy(_.name)
+
+ def check(method: MethodNode, decl: ClassNode, dest: ClassNode, test: Option[AbstractInsnNode] => Unit): Unit = {
+ test(inliner.findIllegalAccess(method.instructions, classBTypeFromParsedClassfile(decl.name), classBTypeFromParsedClassfile(dest.name)).map(_._1))
+ }
+
+ val cOrDOwner = (_: Option[AbstractInsnNode] @unchecked) match {
+ case Some(mi: MethodInsnNode) if Set("a/C", "a/D")(mi.owner) => ()
+ // MatchError otherwise
+ }
+
+ // PUBLIC
+
+ // public methods allowed everywhere
+ for (m <- Set(raC, reC); c <- allClasses) check(m, cCl, c, assertEmpty)
+
+ // DEFAULT ACCESS
+
+ // default access OK in same package
+ for ((m, declCls) <- Set((rbC, cCl), (rfC, cCl), (rbD, dCl), (rfD, dCl)); c <- allClasses) {
+ if (c.name startsWith "a/") check(m, declCls, c, assertEmpty)
+ else check(m, declCls, c, cOrDOwner)
+ }
+
+ // PROTECTED
+
+ // protected accessed in same class, or protected static accessed in subclass(rgD).
+ // can be inlined to subclasses, and classes in the same package (gCl)
+ for ((m, declCls) <- Set((rcC, cCl), (rgC, cCl), (rgD, dCl)); c <- Set(cCl, dCl, eCl, fCl, gCl, hCl)) check(m, declCls, c, assertEmpty)
+
+ // protected in non-subclass and different package
+ for (m <- Set(rcC, rgC)) check(m, cCl, iCl, cOrDOwner)
+
+ // non-static protected accessed in subclass (rcD). can be inlined to related class, or classes in the same package
+ for (c <- Set(cCl, dCl, eCl, fCl, gCl)) check(rcD, dCl, c, assertEmpty)
+
+ // rcD cannot be inlined into non-related classes, if the declaration and destination are not in the same package
+ for (c <- Set(hCl, iCl)) check(rcD, dCl, c, cOrDOwner)
+
+ // PRIVATE
+
+    // private method accesses can only be inlined in the same class
+ for (m <- Set(rdC, rhC)) check(m, cCl, cCl, assertEmpty)
+ for (m <- Set(rdC, rhC); c <- allClasses.tail) check(m, cCl, c, cOrDOwner)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala
new file mode 100644
index 0000000000..5c9bd1c188
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala
@@ -0,0 +1,115 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import AsmUtils._
+
+import scala.collection.convert.decorateAsScala._
+
+object InlinerSeparateCompilationTest {
+ val args = "-Ybackend:GenBCode -Yopt:l:classpath"
+}
+
+@RunWith(classOf[JUnit4])
+class InlinerSeparateCompilationTest {
+ import InlinerSeparateCompilationTest._
+ import InlinerTest.{listStringLines, assertInvoke, assertNoInvoke}
+
+ @Test
+  def inlineMixedinMember(): Unit = {
+ val codeA =
+ """trait T {
+ | @inline def f = 0
+ |}
+ |object O extends T {
+ | @inline def g = 1
+ |}
+ """.stripMargin
+
+ val codeB =
+ """class C {
+ | def t1(t: T) = t.f
+ | def t2 = O.f
+ | def t3 = O.g
+ |}
+ """.stripMargin
+
+ val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ val List(c, o, oMod, t, tCls) = compileClassesSeparately(List(codeA, codeB), args + " -Yopt-warnings", _.msg contains warn)
+ assertInvoke(getSingleMethod(c, "t1"), "T", "f")
+ assertNoInvoke(getSingleMethod(c, "t2"))
+ assertNoInvoke(getSingleMethod(c, "t3"))
+ }
+
+ @Test
+ def inlineSealedMember(): Unit = {
+ val codeA =
+ """sealed trait T {
+ | @inline def f = 1
+ |}
+ """.stripMargin
+
+ val codeB =
+ """class C {
+ | def t1(t: T) = t.f
+ |}
+ """.stripMargin
+
+ val List(c, t, tCls) = compileClassesSeparately(List(codeA, codeB), args)
+ assertNoInvoke(getSingleMethod(c, "t1"))
+ }
+
+ @Test
+ def inlineInheritedMember(): Unit = {
+ val codeA =
+ """trait T {
+ | @inline final def f = 1
+ |}
+ |trait U extends T {
+ | @inline final def g = f
+ |}
+ """.stripMargin
+
+ val codeB =
+ """class C extends U {
+ | def t1 = this.f
+ | def t2 = this.g
+ | def t3(t: T) = t.f
+ |}
+ """.stripMargin
+
+ val List(c, t, tCls, u, uCls) = compileClassesSeparately(List(codeA, codeB), args)
+ for (m <- List("t1", "t2", "t3")) assertNoInvoke(getSingleMethod(c, m))
+ }
+
+ @Test
+ def inlineWithSelfType(): Unit = {
+ val assembly =
+ """trait Assembly extends T {
+ | @inline final def g = 1
+ | @inline final def n = m
+ |}
+ """.stripMargin
+
+ val codeA =
+ s"""trait T { self: Assembly =>
+ | @inline final def f = g
+ | @inline final def m = 1
+ |}
+ |$assembly
+ """.stripMargin
+
+ val List(a, aCls, t, tCls) = compileClassesSeparately(List(codeA, assembly), args)
+ assertNoInvoke(getSingleMethod(tCls, "f"))
+ assertNoInvoke(getSingleMethod(aCls, "n"))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
new file mode 100644
index 0000000000..0fc3601603
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala
@@ -0,0 +1,978 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.collection.generic.Clearable
+import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.BatchSourceFile
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.asm.tree._
+import scala.tools.asm.tree.analysis._
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer
+import scala.tools.nsc.io._
+import scala.tools.nsc.reporters.StoreReporter
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import AsmUtils._
+
+import BackendReporting._
+
+import scala.collection.convert.decorateAsScala._
+import scala.tools.testing.ClearAfterClass
+
+object InlinerTest extends ClearAfterClass.Clearable {
+ val args = "-Ybackend:GenBCode -Yopt:l:classpath -Yopt-warnings"
+ var compiler = newCompiler(extraArgs = args)
+
+ // allows inspecting the caches after a compilation run
+ def notPerRun: List[Clearable] = List(compiler.genBCode.bTypes.classBTypeFromInternalName, compiler.genBCode.bTypes.byteCodeRepository.classes, compiler.genBCode.bTypes.callGraph.callsites)
+ notPerRun foreach compiler.perRunCaches.unrecordCache
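+  // unrecordCache keeps perRunCaches from clearing these maps between compiler runs; `compile` in the
+  // test class clears them explicitly instead, so each compilation starts fresh while the results of
+  // the last run remain inspectable.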
+
+ def clear(): Unit = { compiler = null }
+
+ implicit class listStringLines[T](val l: List[T]) extends AnyVal {
+ def stringLines = l.mkString("\n")
+ }
+
+ def assertNoInvoke(m: Method): Unit = assertNoInvoke(m.instructions)
+ def assertNoInvoke(ins: List[Instruction]): Unit = {
+ assert(!ins.exists(_.isInstanceOf[Invoke]), ins.stringLines)
+ }
+
+ def assertInvoke(m: Method, receiver: String, method: String): Unit = assertInvoke(m.instructions, receiver, method)
+ def assertInvoke(l: List[Instruction], receiver: String, method: String): Unit = {
+ assert(l.exists {
+ case Invoke(_, `receiver`, `method`, _, _) => true
+ case _ => false
+ }, l.stringLines)
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class InlinerTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = InlinerTest
+
+ import InlinerTest.{listStringLines, assertInvoke, assertNoInvoke}
+
+ val compiler = InlinerTest.compiler
+ import compiler.genBCode.bTypes._
+
+ def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = {
+ InlinerTest.notPerRun.foreach(_.clear())
+ compileClasses(compiler)(scalaCode, javaCode, allowMessage)
+ }
+
+ def checkCallsite(callsite: callGraph.Callsite, callee: MethodNode) = {
+ assert(callsite.callsiteMethod.instructions.contains(callsite.callsiteInstruction), instructionsFromMethod(callsite.callsiteMethod))
+
+ val callsiteClassNode = byteCodeRepository.classNode(callsite.callsiteClass.internalName).get
+ assert(callsiteClassNode.methods.contains(callsite.callsiteMethod), callsiteClassNode.methods.asScala.map(_.name).toList)
+
+ assert(callsite.callee.get.callee == callee, callsite.callee.get.callee.name)
+ }
+
+ // inline first invocation of f into g in class C
+ def inlineTest(code: String, mod: ClassNode => Unit = _ => ()): (MethodNode, Option[CannotInlineWarning]) = {
+ val List(cls) = compile(code)
+ mod(cls)
+ val clsBType = classBTypeFromParsedClassfile(cls.name)
+
+ val List(f, g) = cls.methods.asScala.filter(m => Set("f", "g")(m.name)).toList.sortBy(_.name)
+ val fCall = g.instructions.iterator.asScala.collect({ case i: MethodInsnNode if i.name == "f" => i }).next()
+
+ val analyzer = new AsmAnalyzer(g, clsBType.internalName)
+
+ val r = inliner.inline(
+ fCall,
+ analyzer.frameAt(fCall).getStackSize,
+ g,
+ clsBType,
+ f,
+ clsBType,
+ receiverKnownNotNull = true,
+ keepLineNumbers = true)
+ (g, r)
+ }
+
+ @Test
+ def simpleInlineOK(): Unit = {
+ val code =
+ """class C {
+ | def f = 1
+ | def g = f + f
+ |}
+ """.stripMargin
+
+ val (g, _) = inlineTest(code)
+
+ val gConv = convertMethod(g)
+ assertSameCode(gConv.instructions.dropNonOp,
+ List(
+ VarOp(ALOAD, 0), VarOp(ASTORE, 1), // store this
+ Op(ICONST_1), VarOp(ISTORE, 2), Jump(GOTO, Label(10)), // store return value
+ Label(10), VarOp(ILOAD, 2), // load return value
+ VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "f", "()I", false), Op(IADD), Op(IRETURN)))
+
+ // line numbers are kept, so there's a line 2 (from the inlined f)
+ assert(gConv.instructions exists {
+ case LineNumber(2, _) => true
+ case _ => false
+ }, gConv.instructions.filter(_.isInstanceOf[LineNumber]))
+
+ assert(gConv.localVars.map(_.name).sorted == List("f_this", "this"), gConv.localVars)
+ assert(g.maxStack == 2 && g.maxLocals == 3, s"${g.maxLocals} - ${g.maxStack}")
+ }
+
+ @Test
+ def nothingTypedOK(): Unit = {
+ val code =
+ """class C {
+ | def f: Nothing = ???
+ | def g: Int = { f; 1 }
+ |}
+ """.stripMargin
+
+ // On the bytecode level, methods of type Nothing have return type Nothing$.
+ // This can be treated like any other result object.
+
+ // See also discussion around ATHROW in BCodeBodyBuilder
+
+ val (g, _) = inlineTest(code)
+ val expectedInlined = List(
+ VarOp(ALOAD, 0), VarOp(ASTORE, 1), // store this
+ Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"), Invoke(INVOKEVIRTUAL, "scala/Predef$", "$qmark$qmark$qmark", "()Lscala/runtime/Nothing$;", false)) // inlined call to ???
+
+ assertSameCode(convertMethod(g).instructions.dropNonOp.take(4), expectedInlined)
+
+ compiler.genBCode.bTypes.localOpt.methodOptimizations(g, "C")
+ assertSameCode(convertMethod(g).instructions.dropNonOp,
+ expectedInlined ++ List(VarOp(ASTORE, 2), VarOp(ALOAD, 2), Op(ATHROW)))
+ }
+
+ @Test
+ def synchronizedNoInline(): Unit = {
+ val code =
+ """class C {
+ | def f: Int = 0
+ | def g: Int = f
+ |}
+ """.stripMargin
+
+ val (_, can) = inlineTest(code, cls => {
+ val f = cls.methods.asScala.find(_.name == "f").get
+ f.access |= ACC_SYNCHRONIZED
+ })
+ assert(can.get.isInstanceOf[SynchronizedMethod], can)
+ }
+
+ @Test
+ def tryCatchOK(): Unit = {
+ val code =
+ """class C {
+ | def f: Int = try { 1 } catch { case _: Exception => 2 }
+ | def g = f + 1
+ |}
+ """.stripMargin
+ val (_, r) = inlineTest(code)
+ assert(r.isEmpty, r)
+ }
+
+ @Test
+ def tryCatchNoInline(): Unit = {
+ // cannot inline f: there's a value on g's stack. if f throws and enters the handler, all values
+    // on the stack are removed, including the one on g's stack that we still need.
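+    // For illustration only (assumed bytecode shape, not asserted by this test), `g = println(f)`
+    // compiles to roughly:
+    //   GETSTATIC scala/Predef$.MODULE$          // stays on the stack across the call to f
+    //   ALOAD 0; INVOKEVIRTUAL C.f ()I           // an inlined f would introduce a handler here
+    //   INVOKEVIRTUAL scala/Predef$.println (I)V
+    // If that handler fired, the JVM would clear the operand stack (keeping only the exception),
+    // losing the Predef reference that println still needs.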
+ val code =
+ """class C {
+ | def f: Int = try { 1 } catch { case _: Exception => 2 }
+ | def g = println(f)
+ |}
+ """.stripMargin
+ val (_, r) = inlineTest(code)
+ assert(r.get.isInstanceOf[MethodWithHandlerCalledOnNonEmptyStack], r)
+ }
+
+ @Test
+ def illegalAccessNoInline(): Unit = {
+ val code =
+ """package a {
+ | class C {
+ | private def f: Int = 0
+ | def g: Int = f
+ | }
+ |}
+ |package b {
+ | class D {
+ | def h(c: a.C): Int = c.g + 1
+ | }
+ |}
+ """.stripMargin
+
+ val List(c, d) = compile(code)
+
+ val cTp = classBTypeFromParsedClassfile(c.name)
+ val dTp = classBTypeFromParsedClassfile(d.name)
+
+ val g = c.methods.asScala.find(_.name == "g").get
+ val h = d.methods.asScala.find(_.name == "h").get
+ val gCall = h.instructions.iterator.asScala.collect({
+ case m: MethodInsnNode if m.name == "g" => m
+ }).next()
+
+ val analyzer = new AsmAnalyzer(h, dTp.internalName)
+
+ val r = inliner.inline(
+ gCall,
+ analyzer.frameAt(gCall).getStackSize,
+ h,
+ dTp,
+ g,
+ cTp,
+ receiverKnownNotNull = true,
+ keepLineNumbers = true)
+
+ assert(r.get.isInstanceOf[IllegalAccessInstruction], r)
+ }
+
+ @Test
+ def inlineSimpleAtInline(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f = 0
+ | final def g = 1
+ |
+ | def test = f + g
+ |}
+ """.stripMargin
+ val List(cCls) = compile(code)
+ val instructions = getSingleMethod(cCls, "test").instructions
+ assert(instructions.contains(Op(ICONST_0)), instructions.stringLines)
+ assert(!instructions.contains(Op(ICONST_1)), instructions)
+ }
+
+ @Test
+ def cyclicInline(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f: Int = g
+ | @inline final def g: Int = f
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ val methods @ List(_, g) = c.methods.asScala.filter(_.name.length == 1).toList
+ val List(fIns, gIns) = methods.map(instructionsFromMethod(_).dropNonOp)
+ val invokeG = Invoke(INVOKEVIRTUAL, "C", "g", "()I", false)
+ assert(fIns contains invokeG, fIns) // no inlining into f, that request is elided
+ assert(gIns contains invokeG, gIns) // f is inlined into g, g invokes itself recursively
+
+ assert(callGraph.callsites.size == 3, callGraph.callsites)
+ for (callsite <- callGraph.callsites.values if methods.contains(callsite.callsiteMethod)) {
+ checkCallsite(callsite, g)
+ }
+ }
+
+ @Test
+ def cyclicInline2(): Unit = {
+ val code =
+ """class C {
+ | @inline final def h: Int = f
+ | @inline final def f: Int = g + g
+ | @inline final def g: Int = h
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ val methods @ List(f, g, h) = c.methods.asScala.filter(_.name.length == 1).sortBy(_.name).toList
+ val List(fIns, gIns, hIns) = methods.map(instructionsFromMethod(_).dropNonOp)
+ val invokeG = Invoke(INVOKEVIRTUAL, "C", "g", "()I", false)
+ assert(fIns.count(_ == invokeG) == 2, fIns) // no inlining into f, these requests are elided
+ assert(gIns.count(_ == invokeG) == 2, gIns)
+ assert(hIns.count(_ == invokeG) == 2, hIns)
+
+ assert(callGraph.callsites.size == 7, callGraph.callsites)
+ for (callsite <- callGraph.callsites.values if methods.contains(callsite.callsiteMethod)) {
+ checkCallsite(callsite, g)
+ }
+ }
+
+ @Test
+ def arraycopy(): Unit = {
+ // also tests inlining of a void-returning method (no return value on the stack)
+ val code =
+ """// can't use the `compat.Platform.arraycopy` from the std lib for now, because the classfile doesn't have a ScalaInlineInfo attribute
+ |object Platform {
+ | @inline def arraycopy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int) {
+ | System.arraycopy(src, srcPos, dest, destPos, length)
+ | }
+ |}
+ |class C {
+ | def f(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = {
+ | Platform.arraycopy(src, srcPos, dest, destPos, length)
+ | }
+ |}
+ """.stripMargin
+ val List(c, _, _) = compile(code)
+ val ins = getSingleMethod(c, "f").instructions
+ val invokeSysArraycopy = Invoke(INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false)
+ assert(ins contains invokeSysArraycopy, ins.stringLines)
+ }
+
+ @Test
+ def arrayMemberMethod(): Unit = {
+ // This used to crash when building the call graph. The `owner` field of the MethodInsnNode
+ // for the invocation of `clone` is not an internal name, but a full array descriptor
+    // [Ljava/lang/Object; - the documentation in the ASM library didn't mention that possibility.
+ val code =
+ """class C {
+ | def f(a: Array[Object]) = {
+ | a.clone()
+ | }
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ assert(callGraph.callsites.values exists (_.callsiteInstruction.name == "clone"))
+ }
+
+ @Test
+ def atInlineInTrait(): Unit = {
+ val code =
+ """trait T {
+ | @inline final def f = 0
+ |}
+ |class C {
+ | def g(t: T) = t.f
+ |}
+ """.stripMargin
+ val List(c, t, tClass) = compile(code)
+ assertNoInvoke(getSingleMethod(c, "g"))
+ }
+
+ @Test
+ def inlinePrivateMethodWithHandler(): Unit = {
+ val code =
+ """class C {
+ | @inline private def f = try { 0 } catch { case _: Throwable => 1 }
+ | def g = f
+ |}
+ """.stripMargin
+ val List(c) = compile(code)
+ // no more invoke, f is inlined
+ assertNoInvoke(getSingleMethod(c, "g"))
+ }
+
+ @Test
+ def inlineStaticCall(): Unit = {
+ val code =
+ """class C {
+ | def f = Integer.lowestOneBit(103)
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ val f = c.methods.asScala.find(_.name == "f").get
+ val callsiteIns = f.instructions.iterator().asScala.collect({ case c: MethodInsnNode => c }).next()
+ val clsBType = classBTypeFromParsedClassfile(c.name)
+ val analyzer = new AsmAnalyzer(f, clsBType.internalName)
+
+ val integerClassBType = classBTypeFromInternalName("java/lang/Integer")
+ val lowestOneBitMethod = byteCodeRepository.methodNode(integerClassBType.internalName, "lowestOneBit", "(I)I").get._1
+
+ val r = inliner.inline(
+ callsiteIns,
+ analyzer.frameAt(callsiteIns).getStackSize,
+ f,
+ clsBType,
+ lowestOneBitMethod,
+ integerClassBType,
+ receiverKnownNotNull = false,
+ keepLineNumbers = false)
+
+ assert(r.isEmpty, r)
+ val ins = instructionsFromMethod(f)
+
+ // no invocations, lowestOneBit is inlined
+ assertNoInvoke(ins)
+
+ // no null check when inlining a static method
+ ins foreach {
+ case Jump(IFNONNULL, _) => assert(false, ins.stringLines)
+ case _ =>
+ }
+ }
+
+ @Test
+ def maxLocalsMaxStackAfterInline(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f1(x: Int): Int = {
+ | val a = x + 1
+ | math.max(a, math.min(10, a - 1))
+ | }
+ |
+ | @inline final def f2(x: Int): Unit = {
+ | val a = x + 1
+ | println(math.max(a, 10))
+ | }
+ |
+ | def g1 = println(f1(32))
+ | def g2 = println(f2(32))
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ val ms @ List(f1, f2, g1, g2) = c.methods.asScala.filter(_.name.length == 2).toList
+
+ // stack height at callsite of f1 is 1, so max of g1 after inlining is max of f1 + 1
+ assert(g1.maxStack == 7 && f1.maxStack == 6, s"${g1.maxStack} - ${f1.maxStack}")
+
+ // locals in f1: this, x, a
+ // locals in g1 after inlining: this, this-of-f1, x, a, return value
+ assert(g1.maxLocals == 5 && f1.maxLocals == 3, s"${g1.maxLocals} - ${f1.maxLocals}")
+
+ // like maxStack in g1 / f1
+ assert(g2.maxStack == 5 && f2.maxStack == 4, s"${g2.maxStack} - ${f2.maxStack}")
+
+ // like maxLocals for g1 / f1, but no return value
+ assert(g2.maxLocals == 4 && f2.maxLocals == 3, s"${g2.maxLocals} - ${f2.maxLocals}")
+ }
+
+ @Test
+ def mixedCompilationNoInline(): Unit = {
+ // The inliner checks if the invocation `A.bar` can be safely inlined. For that it needs to have
+ // the bytecode of the invoked method. In mixed compilation, there's no classfile available for
+    // A, so `flop` cannot be inlined: we cannot check whether it's safe.
+
+ val javaCode =
+ """public class A {
+ | public static final int bar() { return 100; }
+ |}
+ """.stripMargin
+
+ val scalaCode =
+ """class B {
+ | @inline final def flop = A.bar
+ | def g = flop
+ |}
+ """.stripMargin
+
+ val warn =
+ """B::flop()I is annotated @inline but could not be inlined:
+ |Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed:
+ |The method bar()I could not be found in the class A or any of its parents.
+ |Note that the following parent classes are defined in Java sources (mixed compilation), no bytecode is available: A""".stripMargin
+
+ var c = 0
+ val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn})
+ assert(c == 1, c)
+ val ins = getSingleMethod(b, "g").instructions
+ val invokeFlop = Invoke(INVOKEVIRTUAL, "B", "flop", "()I", false)
+ assert(ins contains invokeFlop, ins.stringLines)
+ }
+
+ @Test
+ def inlineFromTraits(): Unit = {
+ val code =
+ """trait T {
+ | @inline final def f = g
+ | @inline final def g = 1
+ |}
+ |
+ |class C extends T {
+ | def t1(t: T) = t.f
+ | def t2(c: C) = c.f
+ |}
+ """.stripMargin
+ val List(c, t, tClass) = compile(code)
+ // both are just `return 1`, no more calls
+ assertNoInvoke(getSingleMethod(c, "t1"))
+ assertNoInvoke(getSingleMethod(c, "t2"))
+ }
+
+ @Test
+ def inlineMixinMethods(): Unit = {
+ val code =
+ """trait T {
+ | @inline final def f = 1
+ |}
+ |class C extends T
+ """.stripMargin
+ val List(c, t, tClass) = compile(code)
+    // the static implementation method is inlined into the mixin forwarder, so there's no invocation left in C.f
+ assertNoInvoke(getSingleMethod(c, "f"))
+ }
+
+ @Test
+ def inlineTraitInherited(): Unit = {
+ val code =
+ """trait T {
+ | @inline final def f = 1
+ |}
+ |trait U extends T {
+ | @inline final def g = f
+ |}
+ |class C extends U {
+ | def t1 = f
+ | def t2 = g
+ |}
+ """.stripMargin
+ val List(c, t, tClass, u, uClass) = compile(code)
+ assertNoInvoke(getSingleMethod(c, "t1"))
+ assertNoInvoke(getSingleMethod(c, "t2"))
+ }
+
+ @Test
+ def virtualTraitNoInline(): Unit = {
+ val code =
+ """trait T {
+ | @inline def f = 1
+ |}
+ |class C extends T {
+ | def t1(t: T) = t.f
+ | def t2 = this.f
+ |}
+ """.stripMargin
+ val warns = Set(
+ "C::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden",
+ "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden")
+ var count = 0
+ val List(c, t, tClass) = compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)})
+ assert(count == 2, count)
+ assertInvoke(getSingleMethod(c, "t1"), "T", "f")
+ assertInvoke(getSingleMethod(c, "t2"), "C", "f")
+ }
+
+ @Test
+ def sealedTraitInline(): Unit = {
+ val code =
+ """sealed trait T {
+ | @inline def f = 1
+ |}
+ |class C {
+ | def t1(t: T) = t.f
+ |}
+ """.stripMargin
+ val List(c, t, tClass) = compile(code)
+ assertNoInvoke(getSingleMethod(c, "t1"))
+ }
+
+ @Test
+ def inlineFromObject(): Unit = {
+ val code =
+ """trait T {
+ | @inline def f = 0
+ |}
+ |object O extends T {
+ | @inline def g = 1
+ | // mixin generates `def f = T$class.f(this)`, which is inlined here (we get ICONST_0)
+ |}
+ |class C {
+ | def t1 = O.f // the mixin method of O is inlined, so we directly get the ICONST_0
+ | def t2 = O.g // object members are inlined
+ | def t3(t: T) = t.f // no inlining here
+ |}
+ """.stripMargin
+ val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ var count = 0
+ val List(c, oMirror, oModule, t, tClass) = compile(code, allowMessage = i => {count += 1; i.msg contains warn})
+ assert(count == 1, count)
+
+ assertNoInvoke(getSingleMethod(oModule, "f"))
+
+ assertNoInvoke(getSingleMethod(c, "t1"))
+ assertNoInvoke(getSingleMethod(c, "t2"))
+ assertInvoke(getSingleMethod(c, "t3"), "T", "f")
+ }
+
+ @Test
+ def selfTypeInline(): Unit = {
+ val code =
+ """trait T { self: Assembly =>
+ | @inline final def f = g
+ | @inline final def m = 1
+ |}
+ |trait Assembly extends T {
+ | @inline final def g = 1
+ | @inline final def n = m // inlined. (*)
+ | // (*) the declaration class of m is T. the signature of T$class.m is m(LAssembly;)I. so we need the self type to build the
+ | // signature. then we can look up the MethodNode of T$class.m and then rewrite the INVOKEINTERFACE to INVOKESTATIC.
+ |}
+ |class C {
+ | def t1(a: Assembly) = a.f // like above, decl class is T, need self-type of T to rewrite the interface call to static.
+ | def t2(a: Assembly) = a.n
+ |}
+ """.stripMargin
+
+ val List(assembly, assemblyClass, c, t, tClass) = compile(code)
+
+ assertNoInvoke(getSingleMethod(tClass, "f"))
+
+ assertNoInvoke(getSingleMethod(assemblyClass, "n"))
+
+ assertNoInvoke(getSingleMethod(c, "t1"))
+ assertNoInvoke(getSingleMethod(c, "t2"))
+ }
+
+ @Test
+ def selfTypeInline2(): Unit = {
+ // There are some interesting things going on here with the self types. Here's a short version:
+ //
+ // trait T1 { def f = 1 }
+ // trait T2a { self: T1 with T2a => // self type in the backend: T1
+ // def f = 2
+ // def g = f // resolved to T2a.f
+ // }
+ // trait T2b { self: T2b with T1 => // self type in the backend: T2b
+ // def f = 2
+ // def g = f // resolved to T1.f
+ // }
+ //
+ // scala> val t = typeOf[T2a]; exitingMixin(t.typeOfThis.typeSymbol) // self type of T2a is T1
+ // res28: $r.intp.global.Symbol = trait T1
+ //
+ // scala> typeOf[T2a].typeOfThis.member(newTermName("f")).owner // f in T2a is resolved as T2a.f
+ // res29: $r.intp.global.Symbol = trait T2a
+ //
+    // scala> val t = typeOf[T2b]; exitingMixin(t.typeOfThis.typeSymbol) // self type of T2b is T2b
+ // res30: $r.intp.global.Symbol = trait T2b
+ //
+ // scala> typeOf[T2b].typeOfThis.member(newTermName("f")).owner // f in T2b is resolved as T1.f
+ // res31: $r.intp.global.Symbol = trait T1
+
+ val code =
+ """trait T1 {
+ | @inline def f: Int = 0
+ | @inline def g1 = f // not inlined: f not final, so T1$class.g1 has an interface call T1.f
+ |}
+ |
+ |// erased self-type (used in impl class for `self` parameter): T1
+ |trait T2a { self: T1 with T2a =>
+ | @inline override final def f = 1
+ | @inline def g2a = f // inlined: resolved as T2a.f, which is re-written to T2a$class.f, so T2a$class.g2a has ICONST_1
+ |}
+ |
+ |final class Ca extends T1 with T2a {
+ | // mixin generates accessors like `def g1 = T1$class.g1`, the impl class method call is inlined into the accessor.
+ |
+ | def m1a = g1 // call to accessor, inlined, we get the interface call T1.f
+ | def m2a = g2a // call to accessor, inlined, we get ICONST_1
+ | def m3a = f // call to accessor, inlined, we get ICONST_1
+ |
+ | def m4a(t: T1) = t.f // T1.f is not final, so not inlined, interface call to T1.f
+ | def m5a(t: T2a) = t.f // re-written to T2a$class.f, inlined, ICONST_1
+ |}
+ |
+ |// erased self-type: T2b
+ |trait T2b { self: T2b with T1 =>
+ | @inline override final def f = 1
+ | @inline def g2b = f // not inlined: resolved as T1.f, so T2b$class.g2b has an interface call T1.f
+ |}
+ |
+ |final class Cb extends T1 with T2b {
+ | def m1b = g1 // inlined, we get the interface call to T1.f
+ | def m2b = g2b // inlined, we get the interface call to T1.f
+ | def m3b = f // inlined, we get ICONST_1
+ |
+ | def m4b(t: T1) = t.f // T1.f is not final, so not inlined, interface call to T1.f
+ | def m5b(t: T2b) = t.f // re-written to T2b$class.f, inlined, ICONST_1
+ |}
+ """.stripMargin
+
+ val warning = "T1::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ var count = 0
+ val List(ca, cb, t1, t1C, t2a, t2aC, t2b, t2bC) = compile(code, allowMessage = i => {count += 1; i.msg contains warning})
+ assert(count == 4, count) // see comments, f is not inlined 4 times
+
+ val t2aCfDesc = t2aC.methods.asScala.find(_.name == "f").get.desc
+ assert(t2aCfDesc == "(LT1;)I", t2aCfDesc) // self-type of T2a is T1
+
+ val t2bCfDesc = t2bC.methods.asScala.find(_.name == "f").get.desc
+ assert(t2bCfDesc == "(LT2b;)I", t2bCfDesc) // self-type of T2b is T2b
+
+ assertNoInvoke(getSingleMethod(t2aC, "g2a"))
+ assertInvoke(getSingleMethod(t2bC, "g2b"), "T1", "f")
+
+ assertInvoke(getSingleMethod(ca, "m1a"), "T1", "f")
+ assertNoInvoke(getSingleMethod(ca, "m2a")) // no invoke, see comment on def g2a
+ assertNoInvoke(getSingleMethod(ca, "m3a"))
+ assertInvoke(getSingleMethod(ca, "m4a"), "T1", "f")
+ assertNoInvoke(getSingleMethod(ca, "m5a"))
+
+ assertInvoke(getSingleMethod(cb, "m1b"), "T1", "f")
+ assertInvoke(getSingleMethod(cb, "m2b"), "T1", "f") // invoke, see comment on def g2b
+ assertNoInvoke(getSingleMethod(cb, "m3b"))
+ assertInvoke(getSingleMethod(cb, "m4b"), "T1", "f")
+ assertNoInvoke(getSingleMethod(cb, "m5b"))
+ }
+
+ @Test
+ def finalSubclassInline(): Unit = {
+ val code =
+ """class C {
+ | @inline def f = 0
+ | @inline final def g = 1
+ |}
+ |final class D extends C
+ |object E extends C
+ |class T {
+ | def t1(d: D) = d.f + d.g + E.f + E.g // d.f can be inlined because the receiver type is D, which is final.
+ |} // so d.f can be resolved statically. same for E.f
+ """.stripMargin
+ val List(c, d, e, eModule, t) = compile(code)
+ assertNoInvoke(getSingleMethod(t, "t1"))
+ }
+
+ @Test
+ def inlineFromNestedClasses(): Unit = {
+ val code =
+ """class C {
+ | trait T { @inline final def f = 1 }
+        |  class D extends T {
+ | def m(t: T) = t.f
+ | }
+ |
+ | def m(d: D) = d.f
+ |}
+ """.stripMargin
+ val List(c, d, t, tC) = compile(code)
+ assertNoInvoke(getSingleMethod(d, "m"))
+ assertNoInvoke(getSingleMethod(c, "m"))
+ }
+
+ @Test
+ def inlineTraitCastReceiverToSelf(): Unit = {
+ val code =
+ """class C { def foo(x: Int) = x }
+ |trait T { self: C =>
+ | @inline final def f(x: Int) = foo(x)
+ | def t1 = f(1)
+ | def t2(t: T) = t.f(2)
+ |}
+ """.stripMargin
+ val List(c, t, tc) = compile(code)
+ val t1 = getSingleMethod(tc, "t1")
+ val t2 = getSingleMethod(tc, "t2")
+ val cast = TypeOp(CHECKCAST, "C")
+ Set(t1, t2).foreach(m => assert(m.instructions.contains(cast), m.instructions))
+ }
+
+ @Test
+ def abstractMethodWarning(): Unit = {
+ val code =
+ """abstract class C {
+ | @inline def foo: Int
+ |}
+ |class T {
+ | def t1(c: C) = c.foo
+ |}
+ """.stripMargin
+ val warn = "C::foo()I is annotated @inline but cannot be inlined: the method is not final and may be overridden"
+ var c = 0
+ compile(code, allowMessage = i => {c += 1; i.msg contains warn})
+ assert(c == 1, c)
+ }
+
+ @Test
+ def abstractFinalMethodError(): Unit = {
+ val code =
+ """abstract class C {
+ | @inline final def foo: Int
+ |}
+ |trait T {
+ | @inline final def bar: Int
+ |}
+ """.stripMargin
+ val err = "abstract member may not have final modifier"
+ var i = 0
+ compile(code, allowMessage = info => {i += 1; info.msg contains err})
+ assert(i == 2, i)
+ }
+
+ @Test
+ def noInlineTraitFieldAccessors(): Unit = {
+ val code =
+ """sealed trait T {
+ | lazy val a = 0
+ | val b = 1
+ | final lazy val c = 2
+ | final val d = 3
+ | final val d1: Int = 3
+ |
+ | @noinline def f = 5 // re-written to T$class
+ | @noinline final def g = 6 // re-written
+ |
+ | @noinline def h: Int
+ | @inline def i: Int
+ |}
+ |
+ |trait U { // not sealed
+ | lazy val a = 0
+ | val b = 1
+ | final lazy val c = 2
+ | final val d = 3
+ | final val d1: Int = 3
+ |
+ | @noinline def f = 5 // not re-written (not final)
+ | @noinline final def g = 6 // re-written
+ |
+ | @noinline def h: Int
+ | @inline def i: Int
+ |}
+ |
+ |class C {
+ | def m1(t: T) = t.a + t.b + t.c + t.d1
+ | def m2(t: T) = t.d // inlined by the type-checker's constant folding
+ | def m3(t: T) = t.f + t.g + t.h + t.i
+ |
+ | def m4(u: U) = u.a + u.b + u.c + u.d1
+ | def m5(u: U) = u.d
+ | def m6(u: U) = u.f + u.g + u.h + u.i
+ |}
+ """.stripMargin
+
+ val List(c, t, tClass, u, uClass) = compile(code, allowMessage = _.msg contains "i()I is annotated @inline but cannot be inlined")
+ val m1 = getSingleMethod(c, "m1")
+ assertInvoke(m1, "T", "a")
+ assertInvoke(m1, "T", "b")
+ assertInvoke(m1, "T", "c")
+
+ assertNoInvoke(getSingleMethod(c, "m2"))
+
+ val m3 = getSingleMethod(c, "m3")
+ assertInvoke(m3, "T$class", "f")
+ assertInvoke(m3, "T$class", "g")
+ assertInvoke(m3, "T", "h")
+ assertInvoke(m3, "T", "i")
+
+ val m4 = getSingleMethod(c, "m4")
+ assertInvoke(m4, "U", "a")
+ assertInvoke(m4, "U", "b")
+ assertInvoke(m4, "U", "c")
+
+ assertNoInvoke(getSingleMethod(c, "m5"))
+
+ val m6 = getSingleMethod(c, "m6")
+ assertInvoke(m6, "U", "f")
+ assertInvoke(m6, "U$class", "g")
+ assertInvoke(m6, "U", "h")
+ assertInvoke(m6, "U", "i")
+ }
+
+ @Test
+ def mixedNoCrashSI9111(): Unit = {
+ val javaCode =
+ """public final class A {
+ | public static final class T { }
+ | public static final class Inner {
+ | public static final class T { }
+ | public T newT() { return null; }
+ | }
+ |}
+ """.stripMargin
+
+ val scalaCode =
+ """class C {
+ | val i = new A.Inner()
+ |}
+ """.stripMargin
+
+ // We don't get to see the warning about SI-9111, because it is associated with the MethodInlineInfo
+ // of method newT, which is not actually used.
+ // The problem is: if we reference `newT` in the scalaCode, the scala code does not compile,
+ // because then SI-9111 triggers during type-checking class C, in the compiler frontend, and
+ // we don't even get to the backend.
+ // Nevertheless, the workaround for SI-9111 in BcodeAsmCommon.buildInlineInfoFromClassSymbol
+ // is still necessary, otherwise this test crashes.
+ // The warning below is the typical warning we get in mixed compilation.
+ val warn =
+ """failed to determine if <init> should be inlined:
+ |The method <init>()V could not be found in the class A$Inner or any of its parents.
+ |Note that the following parent classes could not be found on the classpath: A$Inner""".stripMargin
+
+ var c = 0
+
+ compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-warnings:_"))(
+ scalaCode,
+ List((javaCode, "A.java")),
+ allowMessage = i => {c += 1; i.msg contains warn})
+ assert(c == 1, c)
+ }
+
+ @Test
+ def inlineInvokeSpecial(): Unit = {
+ val code =
+ """class Aa {
+ | def f1 = 0
+ |}
+ |class B extends Aa {
+ | @inline final override def f1 = 1 + super.f1 // invokespecial Aa.f1
+ |
+ | private def f2m = 0 // public B$$f2m in bytecode
+ | @inline final def f2 = f2m // invokevirtual B.B$$f2m
+ |
+ | private def this(x: Int) = this() // public in bytecode
+ | @inline final def f3 = new B() // invokespecial B.<init>()
+ | @inline final def f4 = new B(1) // invokespecial B.<init>(I)
+ |
+ | def t1 = f1 // inlined
+ | def t2 = f2 // inlined
+ | def t3 = f3 // inlined
+ | def t4 = f4 // inlined
+ |}
+ |class T {
+ | def t1(b: B) = b.f1 // cannot inline: contains a super call
+ | def t2(b: B) = b.f2 // inlined
+ | def t3(b: B) = b.f3 // inlined
+ | def t4(b: B) = b.f4 // inlined
+ |}
+ """.stripMargin
+
+ val warn =
+ """B::f1()I is annotated @inline but could not be inlined:
+ |The callee B::f1()I contains the instruction INVOKESPECIAL Aa.f1 ()I
+ |that would cause an IllegalAccessError when inlined into class T.""".stripMargin
+ var c = 0
+ val List(a, b, t) = compile(code, allowMessage = i => {c += 1; i.msg contains warn})
+ assert(c == 1, c)
+
+ assertInvoke(getSingleMethod(b, "t1"), "Aa", "f1")
+ assertInvoke(getSingleMethod(b, "t2"), "B", "B$$f2m")
+ assertInvoke(getSingleMethod(b, "t3"), "B", "<init>")
+ assertInvoke(getSingleMethod(b, "t4"), "B", "<init>")
+
+ assertInvoke(getSingleMethod(t, "t1"), "B", "f1")
+ assertInvoke(getSingleMethod(t, "t2"), "B", "B$$f2m")
+ assertInvoke(getSingleMethod(t, "t3"), "B", "<init>")
+ assertInvoke(getSingleMethod(t, "t4"), "B", "<init>")
+ }
+
+ @Test
+ def dontInlineNative(): Unit = {
+ val code =
+ """class C {
+ | def t = System.arraycopy(null, 0, null, 0, 0)
+ |}
+ """.stripMargin
+ val List(c) = compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-inline-heuristics:everything"))(code)
+ assertInvoke(getSingleMethod(c, "t"), "java/lang/System", "arraycopy")
+ }
+
+ @Test
+ def inlineMayRenderCodeDead(): Unit = {
+ val code =
+ """class C {
+ | @inline final def f: String = throw new Error("")
+ | @inline final def g: String = "a" + f + "b" // after inlining f, need to run DCE, because the rest of g becomes dead.
+ | def t = g // the inliner requires no dead code when inlining g (uses an Analyzer).
+ |}
+ """.stripMargin
+
+ val List(c) = compile(code)
+ assertInvoke(getSingleMethod(c, "t"), "java/lang/Error", "<init>")
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala
new file mode 100644
index 0000000000..1ce1b88ff2
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala
@@ -0,0 +1,92 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import scala.tools.testing.ClearAfterClass
+
+object MethodLevelOpts extends ClearAfterClass.Clearable {
+ var methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method")
+ def clear(): Unit = { methodOptCompiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class MethodLevelOpts extends ClearAfterClass {
+ ClearAfterClass.stateToClear = MethodLevelOpts
+
+ val methodOptCompiler = MethodLevelOpts.methodOptCompiler
+
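+  // wrapInDefault mirrors the label / line-number bookkeeping the code generator emits around a
+  // single-line method body: a start label with its line number, plus a closing label used by the
+  // local variable table. For example, wrapInDefault(Op(RETURN)) yields
+  // List(Label(0), LineNumber(1, Label(0)), Op(RETURN), Label(1)).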
+ def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1))
+
+ @Test
+ def eliminateEmptyTry(): Unit = {
+ val code = "def f = { try {} catch { case _: Throwable => 0; () }; 1 }"
+ val warn = "a pure expression does nothing in statement position"
+ assertSameCode(singleMethodInstructions(methodOptCompiler)(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN)))
+ }
+
+ @Test
+ def cannotEliminateLoadBoxedUnit(): Unit = {
+    // the compiler inserts a boxed unit (BoxedUnit.UNIT) into the try block. it's therefore non-empty (and live) and not eliminated.
+ val code = "def f = { try {} catch { case _: Throwable => 0 }; 1 }"
+ val m = singleMethod(methodOptCompiler)(code)
+ assertTrue(m.handlers.length == 1)
+ assertSameCode(m.instructions.take(3), List(Label(0), LineNumber(1, Label(0)), Field(GETSTATIC, "scala/runtime/BoxedUnit", "UNIT", "Lscala/runtime/BoxedUnit;")))
+ }
+
+ @Test
+ def inlineThrowInCatchNotTry(): Unit = {
+ // the try block does not contain the `ATHROW` instruction, but in the catch block, `ATHROW` is inlined
+ val code = "def f(e: Exception) = throw { try e catch { case _: Throwable => e } }"
+ val m = singleMethod(methodOptCompiler)(code)
+ assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5)
+ assertSameCode(m.instructions,
+ wrapInDefault(VarOp(ALOAD, 1), Label(3), Op(ATHROW), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), VarOp(ALOAD, 1), Op(ATHROW))
+ )
+ }
+
+ @Test
+  def inlineReturnInCatchNotTry(): Unit = {
+ val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }"
+    // cannot inline the IRETURN into the try block (because a return instruction may throw IllegalMonitorStateException)
+ val m = singleMethod(methodOptCompiler)(code)
+ assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5)
+ assertSameCode(m.instructions,
+ wrapInDefault(Op(ICONST_1), Label(3), Op(IRETURN), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), Op(ICONST_2), Op(IRETURN)))
+ }
+
+ @Test
+ def simplifyJumpsInTryCatchFinally(): Unit = {
+ val code =
+ """def f: Int =
+ | try {
+ | return 1
+ | } catch {
+ | case _: Throwable =>
+ | return 2
+ | } finally {
+ | return 2
+ | // dead
+ | val x = try 10 catch { case _: Throwable => 11 }
+ | println(x)
+ | }
+ """.stripMargin
+ val m = singleMethod(methodOptCompiler)(code)
+ assertTrue(m.handlers.length == 2)
+ assertSameCode(m.instructions.dropNonOp, // drop line numbers and labels that are only used by line numbers
+
+ // one single label left :-)
+ List(Op(ICONST_1), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), Op(POP), Op(ICONST_2), VarOp(ISTORE, 2), Jump(GOTO, Label(20)), VarOp(ASTORE, 3), Op(ICONST_2), Op(IRETURN), Label(20), Op(ICONST_2), Op(IRETURN))
+ )
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
new file mode 100644
index 0000000000..f8e887426b
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala
@@ -0,0 +1,85 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InlineInfo}
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import scala.collection.convert.decorateAsScala._
+
+object ScalaInlineInfoTest {
+ var compiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none")
+ def clear(): Unit = { compiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class ScalaInlineInfoTest {
+ val compiler = newCompiler()
+
+ @Test
+ def traitMembersInlineInfo(): Unit = {
+ val code =
+ """trait T {
+ | def f1 = 1 // concrete method
+ | private def f2 = 1 // implOnly method (does not end up in the interface)
+ | def f3 = {
+ | def nest = 0 // nested method (does not end up in the interface)
+ | nest
+ | }
+ |
+ | @inline
+ | def f4 = super.toString // super accessor
+ |
+ | object O // module accessor (method is generated)
+ | def f5 = {
+ | object L { val x = 0 } // nested module (just flattened out)
+ | L.x
+ | }
+ |
+ | @noinline
+ | def f6: Int // abstract method (not in impl class)
+ |
+ | // fields
+ |
+ | val x1 = 0
+ | var y2 = 0
+ | var x3: Int
+ | lazy val x4 = 0
+ |
+ | final val x5 = 0
+ |}
+ """.stripMargin
+
+ val cs @ List(t, tl, to, tCls) = compileClasses(compiler)(code)
+ val List(info) = t.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).toList
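+    // The four booleans of each MethodInlineInfo are assumed to be, in order: effectivelyFinal,
+    // traitMethodWithStaticImplementation, annotatedInline, annotatedNoInline (cf. BTypes.MethodInlineInfo).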
+ val expect = InlineInfo(
+ None, // self type
+ false, // final class
+ Map(
+ ("O()LT$O$;", MethodInlineInfo(true, false,false,false)),
+ ("T$$super$toString()Ljava/lang/String;",MethodInlineInfo(false,false,false,false)),
+ ("T$_setter_$x1_$eq(I)V", MethodInlineInfo(false,false,false,false)),
+ ("f1()I", MethodInlineInfo(false,true, false,false)),
+ ("f3()I", MethodInlineInfo(false,true, false,false)),
+ ("f4()Ljava/lang/String;", MethodInlineInfo(false,true, true, false)),
+ ("f5()I", MethodInlineInfo(false,true, false,false)),
+ ("f6()I", MethodInlineInfo(false,false,false,true )),
+ ("x1()I", MethodInlineInfo(false,false,false,false)),
+ ("x3()I", MethodInlineInfo(false,false,false,false)),
+ ("x3_$eq(I)V", MethodInlineInfo(false,false,false,false)),
+ ("x4()I", MethodInlineInfo(false,false,false,false)),
+ ("x5()I", MethodInlineInfo(true, false,false,false)),
+ ("y2()I", MethodInlineInfo(false,false,false,false)),
+ ("y2_$eq(I)V", MethodInlineInfo(false,false,false,false))),
+ None // warning
+ )
+ assert(info == expect, info)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala
new file mode 100644
index 0000000000..a685ae7dd5
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala
@@ -0,0 +1,221 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+
+@RunWith(classOf[JUnit4])
+class SimplifyJumpsTest {
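+  // LocalOptImpls.simplifyJumps rewrites the method's instruction list in place and returns true
+  // iff it changed anything; the assertTrue / assertFalse calls below rely on that return value.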
+ @Test
+ def simpleGotoReturn(): Unit = {
+ val ops = List(
+ Jump(GOTO, Label(2)), // replaced by RETURN
+ Op(ICONST_1), // need some code, otherwise removeJumpToSuccessor kicks in
+ Op(POP),
+ Label(1), // multiple labels OK
+ Label(2),
+ Label(3),
+ Op(RETURN)
+ )
+ val method = genMethod()(ops: _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), Op(RETURN) :: ops.tail)
+ }
+
+ @Test
+ def simpleGotoThrow(): Unit = {
+ val rest = List(
+ Op(ICONST_1), // need some code, otherwise removeJumpToSuccessor kicks in
+ Op(POP),
+ Label(1),
+ Label(2),
+ Label(3),
+ Op(ATHROW)
+ )
+ val method = genMethod()(
+ Op(ACONST_NULL) ::
+ Jump(GOTO, Label(2)) :: // replaced by ATHROW
+ rest: _*
+ )
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), Op(ACONST_NULL) :: Op(ATHROW) :: rest)
+ }
+
+ @Test
+ def gotoThrowInTry(): Unit = {
+ val handler = List(ExceptionHandler(Label(1), Label(2), Label(4), Some("java/lang/Throwable")))
+ val initialInstrs = List(
+ Label(1),
+ Op(ACONST_NULL),
+ Jump(GOTO, Label(3)), // not by ATHROW (would move the ATHROW into a try block)
+ Label(2),
+ Op(ICONST_1), // need some code, otherwise removeJumpToSuccessor kicks in
+ Op(POP),
+ Label(3),
+ Op(ATHROW),
+ Label(4),
+ Op(POP),
+ Op(RETURN)
+ )
+ val method = genMethod(handlers = handler)(initialInstrs: _*)
+ assertFalse(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), initialInstrs)
+
+ val optMethod = genMethod()(initialInstrs: _*) // no handler
+ assertTrue(LocalOptImpls.simplifyJumps(optMethod))
+ assertSameCode(instructionsFromMethod(optMethod).take(3), List(Label(1), Op(ACONST_NULL), Op(ATHROW)))
+ }
+
+ @Test
+ def simplifyBranchOverGoto(): Unit = {
+ val begin = List(
+ VarOp(ILOAD, 1),
+ Jump(IFGE, Label(2))
+ )
+ val rest = List(
+ Jump(GOTO, Label(3)),
+ Label(11), // other labels here are allowed
+ Label(2),
+ VarOp(ILOAD, 1),
+ Op(RETURN),
+ Label(3),
+ VarOp(ILOAD, 1),
+ Op(IRETURN)
+ )
+ val method = genMethod()(begin ::: rest: _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(
+ instructionsFromMethod(method),
+ List(VarOp(ILOAD, 1), Jump(IFLT, Label(3))) ::: rest.tail )
+
+    // no label allowed between begin and rest. if there's another label, there could be a
+    // branch to that label, and eliminating the GOTO would change the behavior.
+ val nonOptMethod = genMethod()(begin ::: Label(22) :: rest: _*)
+ assertFalse(LocalOptImpls.simplifyJumps(nonOptMethod))
+ }
+
+ @Test
+ def ensureGotoRemoved(): Unit = {
+ def code(jumps: Instruction*) = List(
+ VarOp(ILOAD, 1)) ::: jumps.toList ::: List(
+ Label(2),
+
+ Op(RETURN),
+ Label(3),
+ Op(RETURN)
+ )
+
+    // ensures that the goto is safely removed. ASM supports removing the current element while
+    // iterating, but not the element that follows it. Here, the current is the IFGE, the next is the GOTO.
+ val method = genMethod()(code(Jump(IFGE, Label(2)), Jump(GOTO, Label(3))): _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), code(Jump(IFLT, Label(3))))
+ }
+
+ @Test
+ def removeJumpToSuccessor(): Unit = {
+ val ops = List(
+ Jump(GOTO, Label(1)),
+ Label(11),
+ Label(1),
+ Label(2),
+ VarOp(ILOAD, 1),
+ Op(IRETURN)
+ )
+ val method = genMethod()(ops: _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), ops.tail)
+ }
+
+ @Test
+ def collapseJumpChains(): Unit = {
+ def ops(target1: Int, target2: Int, target3: Int) = List(
+ VarOp(ILOAD, 1),
+ Jump(IFGE, Label(target1)), // initially 1, then 3
+ VarOp(ILOAD, 1),
+ Op(IRETURN),
+
+ Label(2),
+ Jump(GOTO, Label(target3)),
+
+ Label(1),
+ Jump(GOTO, Label(target2)), // initially 2, then 3
+
+ VarOp(ILOAD, 1), // some code to prevent jumpToSuccessor optimization (once target2 is replaced by 3)
+ Op(RETURN),
+
+ Label(3),
+ VarOp(ILOAD, 1),
+ Op(IRETURN)
+ )
+ val method = genMethod()(ops(1, 2, 3): _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), ops(3, 3, 3))
+ }
+
+ @Test
+ def collapseJumpChainLoop(): Unit = {
+ def ops(target: Int) = List(
+ VarOp(ILOAD, 1),
+ Jump(IFGE, Label(target)),
+
+ Label(4),
+ Jump(GOTO, Label(3)),
+
+ VarOp(ILOAD, 1), // some code to prevent jumpToSuccessor (label 3)
+ Op(IRETURN),
+
+ Label(3),
+ Jump(GOTO, Label(4)),
+
+ Label(2),
+ Jump(GOTO, Label(3))
+ )
+
+ val method = genMethod()(ops(2): _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), ops(3))
+ }
+
+ @Test
+ def simplifyThenElseSameTarget(): Unit = {
+ def ops(jumpOp: Instruction) = List(
+ VarOp(ILOAD, 1),
+ jumpOp,
+ Label(2),
+ Jump(GOTO, Label(1)),
+
+ VarOp(ILOAD, 1), // some code to prevent jumpToSuccessor (label 1)
+ Op(IRETURN),
+
+ Label(1),
+ VarOp(ILOAD, 1),
+ Op(IRETURN)
+ )
+
+ val method = genMethod()(ops(Jump(IFGE, Label(1))): _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), ops(Op(POP)))
+ }
+
+ @Test
+ def thenElseSameTargetLoop(): Unit = {
+ def ops(br: List[Instruction]) = List(
+ VarOp(ILOAD, 1),
+ VarOp(ILOAD, 2)) ::: br ::: List(
+ Label(1),
+ Jump(GOTO, Label(1))
+ )
+ val method = genMethod()(ops(List(Jump(IF_ICMPGE, Label(1)))): _*)
+ assertTrue(LocalOptImpls.simplifyJumps(method))
+ assertSameCode(instructionsFromMethod(method), ops(List(Op(POP), Op(POP))))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
new file mode 100644
index 0000000000..902af7b7fa
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala
@@ -0,0 +1,228 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+
+import scala.tools.testing.AssertUtil._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import scala.tools.testing.ClearAfterClass
+
+object UnreachableCodeTest extends ClearAfterClass.Clearable {
+ // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks,
+ // see comment in BCodeBodyBuilder
+ var methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method")
+ var dceCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code")
+ var noOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none")
+
+ // jvm-1.5 disables computing stack map frames, and it emits dead code as-is. note that this flag triggers a deprecation warning
+ var noOptNoFramesCompiler = newCompiler(extraArgs = "-target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none -deprecation")
+
+ def clear(): Unit = {
+ methodOptCompiler = null
+ dceCompiler = null
+ noOptCompiler = null
+ noOptNoFramesCompiler = null
+ }
+}
+
+@RunWith(classOf[JUnit4])
+class UnreachableCodeTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = UnreachableCodeTest
+
+ val methodOptCompiler = UnreachableCodeTest.methodOptCompiler
+ val dceCompiler = UnreachableCodeTest.dceCompiler
+ val noOptCompiler = UnreachableCodeTest.noOptCompiler
+ val noOptNoFramesCompiler = UnreachableCodeTest.noOptNoFramesCompiler
+
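+  // Each (instruction, keep) pair states whether the instruction is expected to survive dead code
+  // elimination. Bare instructions convert to (instruction, true); the `.dead` marker (an implicit
+  // helper assumed to come from the ASMConverters test utilities) yields (instruction, false).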
+ def assertEliminateDead(code: (Instruction, Boolean)*): Unit = {
+ val method = genMethod()(code.map(_._1): _*)
+ LocalOptImpls.removeUnreachableCodeImpl(method, "C")
+ val nonEliminated = instructionsFromMethod(method)
+ val expectedLive = code.filter(_._2).map(_._1).toList
+ assertSameCode(nonEliminated, expectedLive)
+ }
+
+ @Test
+ def basicElimination(): Unit = {
+ assertEliminateDead(
+ Op(ACONST_NULL),
+ Op(ATHROW),
+ Op(RETURN).dead
+ )
+
+ assertEliminateDead(
+ Op(RETURN)
+ )
+
+ assertEliminateDead(
+ Op(RETURN),
+ Op(ACONST_NULL).dead,
+ Op(ATHROW).dead
+ )
+ }
+
+ @Test
+ def eliminateNop(): Unit = {
+ assertEliminateDead(
+ // reachable, but removed anyway.
+ Op(NOP).dead,
+ Op(RETURN),
+ Op(NOP).dead
+ )
+ }
+
+ @Test
+ def eliminateBranchOver(): Unit = {
+ assertEliminateDead(
+ Jump(GOTO, Label(1)),
+ Op(ACONST_NULL).dead,
+ Op(ATHROW).dead,
+ Label(1),
+ Op(RETURN)
+ )
+
+ assertEliminateDead(
+ Jump(GOTO, Label(1)),
+ Label(1),
+ Op(RETURN)
+ )
+ }
+
+ @Test
+ def deadLabelsRemain(): Unit = {
+ assertEliminateDead(
+ Op(RETURN),
+ Jump(GOTO, Label(1)).dead,
+ // not dead - labels may be referenced from other places in a classfile (eg exceptions table).
+ // will need a different opt to get rid of them
+ Label(1)
+ )
+ }
+
+ @Test
+ def pushPopNotEliminated(): Unit = {
+ assertEliminateDead(
+ // not dead, visited by data flow analysis.
+ Op(ACONST_NULL),
+ Op(POP),
+ Op(RETURN)
+ )
+ }
+
+ @Test
+ def nullnessNotConsidered(): Unit = {
+ assertEliminateDead(
+ Op(ACONST_NULL),
+ Jump(IFNULL, Label(1)),
+ Op(RETURN), // not dead
+ Label(1),
+ Op(RETURN)
+ )
+ }
+
+ @Test
+ def basicEliminationCompiler(): Unit = {
+ val code = "def f: Int = { return 1; 2 }"
+ val withDce = singleMethodInstructions(dceCompiler)(code)
+ assertSameCode(withDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN)))
+
+ val noDce = singleMethodInstructions(noOptCompiler)(code)
+
+ // The emitted code is ICONST_1, IRETURN, ICONST_2, IRETURN. The latter two are dead.
+ //
+ // GenBCode puts the last IRETURN into a new basic block: it emits a label before the second
+    // IRETURN. This is an implementation detail and may change; it affects the outcome of this test.
+ //
+    // During classfile writing with COMPUTE_FRAMES (-target:jvm-1.6 or larger), ASM's ClassWriter
+ // puts the ICONST_2 into a new basic block, because the preceding operation (IRETURN) ends
+ // the current block. We get something like
+ //
+ // L1: ICONST_1; IRETURN
+ // L2: ICONST_2 << dead
+ // L3: IRETURN << dead
+ //
+ // Finally, instructions in the dead basic blocks are replaced by ATHROW, as explained in
+ // a comment in BCodeBodyBuilder.
+ assertSameCode(noDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ATHROW), Op(ATHROW)))
+
+ // when NOT computing stack map frames, ASM's ClassWriter does not replace dead code by NOP/ATHROW
+ val warn = "target:jvm-1.5 is deprecated"
+ val noDceNoFrames = singleMethodInstructions(noOptNoFramesCompiler)(code, allowMessage = _.msg contains warn)
+ assertSameCode(noDceNoFrames.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ICONST_2), Op(IRETURN)))
+ }
+
+ @Test
+ def eliminateDeadCatchBlocks(): Unit = {
+ // the Label(1) is live: it's used in the local variable descriptor table (local variable "this" has a range from 0 to 1).
+ def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1))
+
+ val code = "def f: Int = { return 0; try { 1 } catch { case _: Exception => 2 } }"
+ val m = singleMethod(dceCompiler)(code)
+ assertTrue(m.handlers.isEmpty) // redundant (if code is gone, handler is gone), but done once here for extra safety
+ assertSameCode(m.instructions,
+ wrapInDefault(Op(ICONST_0), Op(IRETURN)))
+
+ val code2 = "def f: Unit = { try { } catch { case _: Exception => () }; () }"
+ // requires fixpoint optimization of methodOptCompiler (dce alone is not enough): first the handler is eliminated, then its (now dead) catch block.
+ assertSameCode(singleMethodInstructions(methodOptCompiler)(code2), wrapInDefault(Op(RETURN)))
+
+ val code3 = "def f: Unit = { try { } catch { case _: Exception => try { } catch { case _: Exception => () } }; () }"
+ assertSameCode(singleMethodInstructions(methodOptCompiler)(code3), wrapInDefault(Op(RETURN)))
+
+ // this example requires two iterations to get rid of the outer handler:
+ // the first iteration of DCE cannot remove the inner handler. then the inner (empty) handler is removed.
+ // the second iteration of DCE then removes the inner catch block, and finally the outer handler is removed.
+ val code4 = "def f: Unit = { try { try { } catch { case _: Exception => () } } catch { case _: Exception => () }; () }"
+ assertSameCode(singleMethodInstructions(methodOptCompiler)(code4), wrapInDefault(Op(RETURN)))
+ }
+
+ @Test // test the dce-testing tools
+ def metaTest(): Unit = {
+ assertThrows[AssertionError](
+ assertEliminateDead(Op(RETURN).dead),
+ _.contains("Expected: List()\nActual : List(Op(RETURN))")
+ )
+
+ assertThrows[AssertionError](
+ assertEliminateDead(Op(RETURN), Op(RETURN)),
+ _.contains("Expected: List(Op(RETURN), Op(RETURN))\nActual : List(Op(RETURN))")
+ )
+ }
+
+ @Test
+ def bytecodeEquivalence: Unit = {
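+ // As exercised below, `===` compares instruction lists up to a consistent renaming
+ // (a bijection) of local variable indices and of labels.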
+ assertTrue(List(VarOp(ILOAD, 1)) ===
+ List(VarOp(ILOAD, 2)))
+ assertTrue(List(VarOp(ILOAD, 1), VarOp(ISTORE, 1)) ===
+ List(VarOp(ILOAD, 2), VarOp(ISTORE, 2)))
+
+ // the first VarOp associates 1 -> 2; the second then needs 2 -> 2, but 2 is already taken as the image of 1, so the comparison fails
+ assertFalse(List(VarOp(ILOAD, 1), VarOp(ISTORE, 2)) ===
+ List(VarOp(ILOAD, 2), VarOp(ISTORE, 2)))
+
+ // will associate 1->2 and 2->1, which is OK
+ assertTrue(List(VarOp(ILOAD, 1), VarOp(ISTORE, 2)) ===
+ List(VarOp(ILOAD, 2), VarOp(ISTORE, 1)))
+
+ assertTrue(List(Label(1), Label(2), Label(1)) ===
+ List(Label(2), Label(4), Label(2)))
+ assertTrue(List(LineNumber(1, Label(1)), Label(1)) ===
+ List(LineNumber(1, Label(3)), Label(3)))
+ assertFalse(List(LineNumber(1, Label(1)), Label(1)) ===
+ List(LineNumber(1, Label(3)), Label(1)))
+
+ assertTrue(List(TableSwitch(TABLESWITCH, 1, 3, Label(4), List(Label(5), Label(6))), Label(4), Label(5), Label(6)) ===
+ List(TableSwitch(TABLESWITCH, 1, 3, Label(9), List(Label(3), Label(4))), Label(9), Label(3), Label(4)))
+
+ assertTrue(List(FrameEntry(F_FULL, List(INTEGER, DOUBLE, Label(3)), List("java/lang/Object", Label(4))), Label(3), Label(4)) ===
+ List(FrameEntry(F_FULL, List(INTEGER, DOUBLE, Label(1)), List("java/lang/Object", Label(3))), Label(1), Label(3)))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
new file mode 100644
index 0000000000..769736669b
--- /dev/null
+++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala
@@ -0,0 +1,95 @@
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import org.junit.Test
+import scala.tools.asm.Opcodes._
+import org.junit.Assert._
+import scala.collection.JavaConverters._
+
+import CodeGenTools._
+import scala.tools.partest.ASMConverters
+import ASMConverters._
+import scala.tools.testing.ClearAfterClass
+
+object UnusedLocalVariablesTest extends ClearAfterClass.Clearable {
+ var dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code")
+ def clear(): Unit = { dceCompiler = null }
+}
+
+@RunWith(classOf[JUnit4])
+class UnusedLocalVariablesTest extends ClearAfterClass {
+ ClearAfterClass.stateToClear = UnusedLocalVariablesTest
+
+ val dceCompiler = UnusedLocalVariablesTest.dceCompiler
+
+ @Test
+ def removeUnusedVar(): Unit = {
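+ // in `code` below, everything after the unconditional return is unreachable, so the entries
+ // for x and y should be dropped from the local variable table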
+ val code = """def f(a: Long, b: String, c: Double): Unit = { return; var x = a; var y = x + 10 }"""
+ assertLocalVarCount(code, 4) // `this, a, b, c`
+
+ val code2 = """def f(): Unit = { var x = if (true) return else () }"""
+ assertLocalVarCount(code2, 1) // x is eliminated; constant folding in scalac removes the if
+
+ val code3 = """def f: Unit = return""" // paramless method
+ assertLocalVarCount(code3, 1) // this
+ }
+
+ @Test
+ def keepUsedVar(): Unit = {
+ val code = """def f(a: Long, b: String, c: Double): Unit = { val x = 10 + a; val y = x + 10 }"""
+ assertLocalVarCount(code, 6)
+
+ val code2 = """def f(a: Long): Unit = { var x = if (a == 0l) return else () }"""
+ assertLocalVarCount(code2, 3) // this, a, x - x is used, so it remains
+ }
+
+ @Test
+ def constructorLocals(): Unit = {
+ val code = """class C {
+ | def this(a: Int) = {
+ | this()
+ | throw new Exception("")
+ | val y = 0
+ | }
+ |}
+ |""".stripMargin
+ val cls = compileClasses(dceCompiler)(code).head
+ val m = convertMethod(cls.methods.asScala.toList.find(_.desc == "(I)V").get)
+ assertTrue(m.localVars.length == 2) // this, a, but not y
+
+
+ val code2 =
+ """class C {
+ | {
+ | throw new Exception("")
+ | val a = 0
+ | }
+ |}
+ |
+ |object C {
+ | {
+ | throw new Exception("")
+ | val b = 1
+ | }
+ |}
+ """.stripMargin
+
+ val clss2 = compileClasses(dceCompiler)(code2)
+ val cls2 = clss2.find(_.name == "C").get
+ val companion2 = clss2.find(_.name == "C$").get
+
+ val clsConstr = convertMethod(cls2.methods.asScala.toList.find(_.name == "<init>").get)
+ val companionConstr = convertMethod(companion2.methods.asScala.toList.find(_.name == "<init>").get)
+
+ assertTrue(clsConstr.localVars.length == 1) // this
+ assertTrue(companionConstr.localVars.length == 1) // this
+ }
+
+ def assertLocalVarCount(code: String, numVars: Int): Unit = {
+ assertTrue(singleMethod(dceCompiler)(code).localVars.length == numVars)
+ }
+
+}
diff --git a/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala
new file mode 100644
index 0000000000..9a004d5e0e
--- /dev/null
+++ b/test/junit/scala/tools/nsc/classpath/AggregateFlatClassPathTest.scala
@@ -0,0 +1,208 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.net.URL
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.reflect.io.VirtualFile
+import scala.tools.nsc.io.AbstractFile
+
+/**
+ * Tests whether AggregateFlatClassPath returns the correct entries from the
+ * classpath instances used to create it and whether it preserves their ordering
+ * (when an entry for a class or a source is repeated, it returns the first one).
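+ * For example, if two of the aggregated classpaths both contain an entry for pkg1.A,
+ * the entry coming from the classpath that was passed in first is the one returned.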
+ */
+@RunWith(classOf[JUnit4])
+class AggregateFlatClassPathTest {
+
+ private class TestFlatClassPath extends FlatClassPath {
+ override def packages(inPackage: String): Seq[PackageEntry] = unsupported
+ override def sources(inPackage: String): Seq[SourceFileEntry] = unsupported
+ override def classes(inPackage: String): Seq[ClassFileEntry] = unsupported
+
+ override def list(inPackage: String): FlatClassPathEntries = unsupported
+ override def findClassFile(name: String): Option[AbstractFile] = unsupported
+
+ override def asClassPathStrings: Seq[String] = unsupported
+ override def asSourcePathString: String = unsupported
+ override def asURLs: Seq[URL] = unsupported
+ }
+
+ private case class TestClassPath(virtualPath: String, classesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+
+ override def classes(inPackage: String): Seq[ClassFileEntry] =
+ for {
+ entriesWrapper <- classesInPackage if entriesWrapper.inPackage == inPackage
+ name <- entriesWrapper.names
+ } yield classFileEntry(virtualPath, inPackage, name)
+
+ override def sources(inPackage: String): Seq[SourceFileEntry] = Nil
+
+ // we'll ignore packages
+ override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, classes(inPackage))
+ }
+
+ private case class TestSourcePath(virtualPath: String, sourcesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+
+ override def sources(inPackage: String): Seq[SourceFileEntry] =
+ for {
+ entriesWrapper <- sourcesInPackage if entriesWrapper.inPackage == inPackage
+ name <- entriesWrapper.names
+ } yield sourceFileEntry(virtualPath, inPackage, name)
+
+ override def classes(inPackage: String): Seq[ClassFileEntry] = Nil
+
+ // we'll ignore packages
+ override def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(Nil, sources(inPackage))
+ }
+
+ private case class EntryNamesInPackage(inPackage: String)(val names: String*)
+
+ private val dir1 = "./dir1"
+ private val dir2 = "./dir2"
+ private val dir3 = "./dir3"
+ private val dir4 = ""
+
+ private val pkg1 = "pkg1"
+ private val pkg2 = "pkg2"
+ private val pkg3 = "pkg1.nested"
+ private val nonexistingPkg = "nonexisting"
+
+ private def unsupported = throw new UnsupportedOperationException
+
+ private def classFileEntry(pathPrefix: String, inPackage: String, fileName: String) =
+ ClassFileEntryImpl(classFile(pathPrefix, inPackage, fileName))
+
+ private def sourceFileEntry(pathPrefix: String, inPackage: String, fileName: String) =
+ SourceFileEntryImpl(sourceFile(pathPrefix, inPackage, fileName))
+
+ private def classFile(pathPrefix: String, inPackage: String, fileName: String) =
+ virtualFile(pathPrefix, inPackage, fileName, ".class")
+
+ private def sourceFile(pathPrefix: String, inPackage: String, fileName: String) =
+ virtualFile(pathPrefix, inPackage, fileName, ".scala")
+
+ private def virtualFile(pathPrefix: String, inPackage: String, fileName: String, extension: String) = {
+ val packageDirs =
+ if (inPackage == FlatClassPath.RootPackage) ""
+ else inPackage.split('.').mkString("/", "/", "")
+ new VirtualFile(fileName + extension, s"$pathPrefix$packageDirs/$fileName$extension")
+ }
+
+ private def createDefaultTestClasspath() = {
+ val partialClassPaths = Seq(TestSourcePath(dir1, EntryNamesInPackage(pkg1)("F", "A", "G")),
+ TestClassPath(dir2, EntryNamesInPackage(pkg1)("C", "B", "A"), EntryNamesInPackage(pkg2)("D", "A", "E")),
+ TestClassPath(dir3, EntryNamesInPackage(pkg1)("A", "D", "F")),
+ TestSourcePath(dir4, EntryNamesInPackage(pkg2)("A", "H", "I"), EntryNamesInPackage(pkg1)("A")),
+ TestSourcePath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L"))
+ )
+
+ AggregateFlatClassPath(partialClassPaths)
+ }
+
+ @Test
+ def testGettingPackages: Unit = {
+ case class ClassPathWithPackages(packagesInPackage: EntryNamesInPackage*) extends TestFlatClassPath {
+ override def packages(inPackage: String): Seq[PackageEntry] =
+ packagesInPackage.find(_.inPackage == inPackage).map(_.names).getOrElse(Nil) map PackageEntryImpl
+ }
+
+ val partialClassPaths = Seq(ClassPathWithPackages(EntryNamesInPackage(pkg1)("pkg1.a", "pkg1.d", "pkg1.f")),
+ ClassPathWithPackages(EntryNamesInPackage(pkg1)("pkg1.c", "pkg1.b", "pkg1.a"),
+ EntryNamesInPackage(pkg2)("pkg2.d", "pkg2.a", "pkg2.e"))
+ )
+ val cp = AggregateFlatClassPath(partialClassPaths)
+
+ val packagesInPkg1 = Seq("pkg1.a", "pkg1.d", "pkg1.f", "pkg1.c", "pkg1.b")
+ assertEquals(packagesInPkg1, cp.packages(pkg1).map(_.name))
+
+ val packagesInPkg2 = Seq("pkg2.d", "pkg2.a", "pkg2.e")
+ assertEquals(packagesInPkg2, cp.packages(pkg2).map(_.name))
+
+ assertEquals(Seq.empty, cp.packages(nonexistingPkg))
+ }
+
+ @Test
+ def testGettingClasses: Unit = {
+ val cp = createDefaultTestClasspath()
+
+ val classesInPkg1 = Seq(classFileEntry(dir2, pkg1, "C"),
+ classFileEntry(dir2, pkg1, "B"),
+ classFileEntry(dir2, pkg1, "A"),
+ classFileEntry(dir3, pkg1, "D"),
+ classFileEntry(dir3, pkg1, "F")
+ )
+ assertEquals(classesInPkg1, cp.classes(pkg1))
+
+ val classesInPkg2 = Seq(classFileEntry(dir2, pkg2, "D"),
+ classFileEntry(dir2, pkg2, "A"),
+ classFileEntry(dir2, pkg2, "E")
+ )
+ assertEquals(classesInPkg2, cp.classes(pkg2))
+
+ assertEquals(Seq.empty, cp.classes(pkg3))
+ assertEquals(Seq.empty, cp.classes(nonexistingPkg))
+ }
+
+ @Test
+ def testGettingSources: Unit = {
+ val partialClassPaths = Seq(TestClassPath(dir1, EntryNamesInPackage(pkg1)("F", "A", "G")),
+ TestSourcePath(dir2, EntryNamesInPackage(pkg1)("C", "B", "A"), EntryNamesInPackage(pkg2)("D", "A", "E")),
+ TestSourcePath(dir3, EntryNamesInPackage(pkg1)("A", "D", "F")),
+ TestClassPath(dir4, EntryNamesInPackage(pkg2)("A", "H", "I")),
+ TestClassPath(dir2, EntryNamesInPackage(pkg3)("J", "K", "L"))
+ )
+ val cp = AggregateFlatClassPath(partialClassPaths)
+
+ val sourcesInPkg1 = Seq(sourceFileEntry(dir2, pkg1, "C"),
+ sourceFileEntry(dir2, pkg1, "B"),
+ sourceFileEntry(dir2, pkg1, "A"),
+ sourceFileEntry(dir3, pkg1, "D"),
+ sourceFileEntry(dir3, pkg1, "F")
+ )
+ assertEquals(sourcesInPkg1, cp.sources(pkg1))
+
+ val sourcesInPkg2 = Seq(sourceFileEntry(dir2, pkg2, "D"),
+ sourceFileEntry(dir2, pkg2, "A"),
+ sourceFileEntry(dir2, pkg2, "E")
+ )
+ assertEquals(sourcesInPkg2, cp.sources(pkg2))
+
+ assertEquals(Seq.empty, cp.sources(pkg3))
+ assertEquals(Seq.empty, cp.sources(nonexistingPkg))
+ }
+
+ @Test
+ def testList: Unit = {
+ val cp = createDefaultTestClasspath()
+
+ val classesAndSourcesInPkg1 = Seq(
+ ClassAndSourceFilesEntry(classFile(dir3, pkg1, "F"), sourceFile(dir1, pkg1, "F")),
+ ClassAndSourceFilesEntry(classFile(dir2, pkg1, "A"), sourceFile(dir1, pkg1, "A")),
+ sourceFileEntry(dir1, pkg1, "G"),
+ classFileEntry(dir2, pkg1, "C"),
+ classFileEntry(dir2, pkg1, "B"),
+ classFileEntry(dir3, pkg1, "D")
+ )
+ assertEquals(classesAndSourcesInPkg1, cp.list(pkg1).classesAndSources)
+
+ assertEquals(FlatClassPathEntries(Nil, Nil), cp.list(nonexistingPkg))
+ }
+
+ @Test
+ def testFindClass: Unit = {
+ val cp = createDefaultTestClasspath()
+
+ assertEquals(
+ Some(ClassAndSourceFilesEntry(classFile(dir2, pkg1, "A"), sourceFile(dir1, pkg1, "A"))),
+ cp.findClass(s"$pkg1.A")
+ )
+ assertEquals(Some(classFileEntry(dir3, pkg1, "D")), cp.findClass(s"$pkg1.D"))
+ assertEquals(Some(sourceFileEntry(dir2, pkg3, "L")), cp.findClass(s"$pkg3.L"))
+ assertEquals(None, cp.findClass("Nonexisting"))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala b/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala
new file mode 100644
index 0000000000..a37ba31b31
--- /dev/null
+++ b/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala
@@ -0,0 +1,159 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import org.junit.Assert._
+import org.junit._
+import org.junit.rules.TemporaryFolder
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.annotation.tailrec
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import scala.tools.nsc.Settings
+import scala.tools.util.FlatClassPathResolver
+import scala.tools.util.PathResolver
+
+@RunWith(classOf[JUnit4])
+class FlatClassPathResolverTest {
+
+ val tempDir = new TemporaryFolder()
+
+ private val packagesToTest = List(FlatClassPath.RootPackage, "scala", "scala.reflect", "scala.reflect.io")
+ private val classFilesToFind = List("scala.tools.util.FlatClassPathResolver",
+ "scala.reflect.io.AbstractFile",
+ "scala.collection.immutable.List",
+ "scala.Option",
+ "scala.collection.immutable.Vector",
+ "scala.util.hashing.MurmurHash3",
+ "java.lang.Object",
+ "java.util.Date")
+
+ private val classesToFind = classFilesToFind ++ List("TestSourceInRootPackage",
+ "scala.reflect.io.TestScalaSource",
+ "scala.reflect.io.TestJavaSource")
+
+ private val settings = new Settings
+
+ @Before
+ def initTempDirAndSourcePath: Unit = {
+ // In Java, JUnit's TemporaryFolder is managed automatically using @Rule.
+ // That would also work in Scala after adding and extending a class like
+ // TestWithTempFolder.java containing it. But in that case it doesn't work when running tests
+ // from the command line: the Java class is not compiled, for some mysterious reason.
+ // That's why these dirs are created and deleted manually here.
+ tempDir.create()
+ tempDir.newFile("TestSourceInRootPackage.scala")
+ val ioDir = tempDir.newFolder("scala", "reflect", "io")
+ new File(ioDir, "AbstractFile.scala").createNewFile()
+ new File(ioDir, "ZipArchive.java").createNewFile()
+ new File(ioDir, "TestScalaSource.scala").createNewFile()
+ new File(ioDir, "TestJavaSource.java").createNewFile()
+
+ settings.usejavacp.value = true
+ settings.sourcepath.value = tempDir.getRoot.getAbsolutePath
+ }
+
+ @After
+ def deleteTempDir: Unit = tempDir.delete()
+
+ private def createFlatClassPath(settings: Settings) =
+ new FlatClassPathResolver(settings).result
+
+ @Test
+ def testEntriesFromListOperationAgainstSeparateMethods: Unit = {
+ val classPath = createFlatClassPath(settings)
+
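+ // for each package, `list` should return exactly the union of what `packages`, `classes` and
+ // `sources` return, with a class file and a source file of the same name merged into one entry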
+ def compareEntriesInPackage(inPackage: String): Unit = {
+ val packages = classPath.packages(inPackage)
+ val classes = classPath.classes(inPackage)
+ val sources = classPath.sources(inPackage)
+ val FlatClassPathEntries(packagesFromList, classesAndSourcesFromList) = classPath.list(inPackage)
+
+ val packageNames = packages.map(_.name).sorted
+ val packageNamesFromList = packagesFromList.map(_.name).sorted
+ assertEquals(s"Methods list and packages for package '$inPackage' should return the same packages",
+ packageNames, packageNamesFromList)
+
+ val classFileNames = classes.map(_.name).sorted
+ val classFileNamesFromList = classesAndSourcesFromList.filter(_.binary.isDefined).map(_.name).sorted
+ assertEquals(s"Methods list and classes for package '$inPackage' should return entries for the same class files",
+ classFileNames, classFileNamesFromList)
+
+ val sourceFileNames = sources.map(_.name).sorted
+ val sourceFileNamesFromList = classesAndSourcesFromList.filter(_.source.isDefined).map(_.name).sorted
+ assertEquals(s"Methods list and sources for package '$inPackage' should return entries for the same source files",
+ sourceFileNames, sourceFileNamesFromList)
+
+ val uniqueNamesOfClassAndSourceFiles = (classFileNames ++ sourceFileNames).toSet
+ assertEquals(s"Class and source entries with the same name obtained via list for package '$inPackage' should be merged into one containing both files",
+ uniqueNamesOfClassAndSourceFiles.size, classesAndSourcesFromList.length)
+ }
+
+ packagesToTest foreach compareEntriesInPackage
+ }
+
+ @Test
+ def testCreatedEntriesAgainstRecursiveClassPath: Unit = {
+ val flatClassPath = createFlatClassPath(settings)
+ val recursiveClassPath = new PathResolver(settings).result
+
+ def compareEntriesInPackage(inPackage: String): Unit = {
+
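+ // walk down the recursive classpath to the node for `inPackage`,
+ // e.g. for "scala.reflect.io" we traverse the nested entries scala -> reflect -> io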
+ @tailrec
+ def traverseToPackage(packageNameParts: Seq[String], cp: ClassPath[AbstractFile]): ClassPath[AbstractFile] = {
+ packageNameParts match {
+ case Nil => cp
+ case h :: t =>
+ cp.packages.find(_.name == h) match {
+ case Some(nestedCp) => traverseToPackage(t, nestedCp)
+ case _ => throw new Exception(s"There's no package $inPackage in recursive classpath - error when searching for '$h'")
+ }
+ }
+ }
+
+ val packageNameParts = if (inPackage == FlatClassPath.RootPackage) Nil else inPackage.split('.').toList
+ val recursiveClassPathInPackage = traverseToPackage(packageNameParts, recursiveClassPath)
+
+ val flatCpPackages = flatClassPath.packages(inPackage).map(_.name)
+ val pkgPrefix = PackageNameUtils.packagePrefix(inPackage)
+ val recursiveCpPackages = recursiveClassPathInPackage.packages.map(pkgPrefix + _.name)
+ assertEquals(s"Packages in package '$inPackage' on flat cp should be the same as on the recursive cp",
+ recursiveCpPackages, flatCpPackages)
+
+ val flatCpSources = flatClassPath.sources(inPackage).map(_.name).sorted
+ val recursiveCpSources = recursiveClassPathInPackage.classes
+ .filter(_.source.nonEmpty)
+ .map(_.name).sorted
+ assertEquals(s"Source entries in package '$inPackage' on flat cp should be the same as on the recursive cp",
+ recursiveCpSources, flatCpSources)
+
+ val flatCpClasses = flatClassPath.classes(inPackage).map(_.name).sorted
+ val recursiveCpClasses = recursiveClassPathInPackage.classes
+ .filter(_.binary.nonEmpty)
+ .map(_.name).sorted
+ assertEquals(s"Class entries in package '$inPackage' on flat cp should be the same as on the recursive cp",
+ recursiveCpClasses, flatCpClasses)
+ }
+
+ packagesToTest foreach compareEntriesInPackage
+ }
+
+ @Test
+ def testFindClassFile: Unit = {
+ val classPath = createFlatClassPath(settings)
+ classFilesToFind foreach { className =>
+ assertTrue(s"File for $className should be found", classPath.findClassFile(className).isDefined)
+ }
+ }
+
+ @Test
+ def testFindClass: Unit = {
+ val classPath = createFlatClassPath(settings)
+ classesToFind foreach { className =>
+ assertTrue(s"File for $className should be found", classPath.findClass(className).isDefined)
+ }
+ }
+}
diff --git a/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala b/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala
new file mode 100644
index 0000000000..13a955b55d
--- /dev/null
+++ b/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala
@@ -0,0 +1,22 @@
+package scala.tools.nsc.doc.html
+
+import org.junit.Test
+import org.junit.Assert._
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil._
+
+@RunWith(classOf[JUnit4])
+class HtmlDocletTest {
+ @Test
+ def testSyntaxHighlightingUnicode() {
+ val in = "unicode: …"
+
+ val out = SyntaxHigh(in).toString
+
+ // SI-9038, this failed with
+ // "unicode: …" != "unicode: ¬タᆭ"
+ assertEquals(in, out)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala b/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala
index 21e338eac0..263265026a 100644
--- a/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala
+++ b/test/junit/scala/tools/nsc/interpreter/TabulatorTest.scala
@@ -82,4 +82,24 @@ class TabulatorTest {
assert(rows(0).size == 1)
assert(rows(0)(0).size == "efg".length + sut.marginSize) // 6
}
+ @Test def badFit() = {
+ val sut = VTabby(isAcross = true)
+ val items = ('a' until 'z').map(_.toString).toList
+ val rows = sut tabulate items
+ assert(rows.size == 2)
+ assert(rows(0).size == 20) // 20 * 4 = 80
+ assert(rows(1)(0).dropRight(sut.marginSize) == "u")
+ }
+ @Test def badFitter() = {
+ val sut = VTabby(isAcross = true)
+ val items = List (
+ "%", "&", "*", "+", "-", "/", ">", ">=", ">>", ">>>", "^",
+ "asInstanceOf", "isInstanceOf", "toByte", "toChar", "toDouble", "toFloat",
+ "toInt", "toLong", "toShort", "toString", "unary_+", "unary_-", "unary_~", "|"
+ )
+ val rows = sut tabulate items
+ assert(rows.size == 4)
+ assert(rows(3).size == 4) // 7 cols
+ assert(rows(3)(0).dropRight(sut.marginSize) == "unary_+")
+ }
}
diff --git a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
new file mode 100644
index 0000000000..77a2da828e
--- /dev/null
+++ b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala
@@ -0,0 +1,18 @@
+package scala.tools.nsc
+package settings
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.tools.testing.AssertUtil.assertThrows
+
+@RunWith(classOf[JUnit4])
+class ScalaVersionTest {
+ // SI-8711
+ @Test def versionUnparse() {
+ val v = "2.11.3"
+
+ assertEquals(ScalaVersion(v).unparse, v)
+ }
+}
diff --git a/test/junit/scala/tools/nsc/settings/SettingsTest.scala b/test/junit/scala/tools/nsc/settings/SettingsTest.scala
index e4b5ecc7c3..96f83c4c2f 100644
--- a/test/junit/scala/tools/nsc/settings/SettingsTest.scala
+++ b/test/junit/scala/tools/nsc/settings/SettingsTest.scala
@@ -26,7 +26,7 @@ class SettingsTest {
assertThrows[IllegalArgumentException](check("-Ytest-setting:rubbish"))
}
- @Test def userSettingsHavePredecenceOverOptimize() {
+ @Test def userSettingsHavePrecedenceOverOptimize() {
def check(args: String*): MutableSettings#BooleanSetting = {
val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
val (ok, residual) = s.processArguments(args.toList, processAll = true)
@@ -38,15 +38,146 @@ class SettingsTest {
assertFalse(check("-Yinline:false", "-optimise").value)
}
- @Test def userSettingsHavePredecenceOverLint() {
- def check(args: String*): MutableSettings#BooleanSetting = {
+ // for the given args, select the desired setting
+ private def check(args: String*)(b: MutableSettings => Boolean): Boolean = {
+ val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
+ val (ok, residual) = s.processArguments(args.toList, processAll = true)
+ assert(residual.isEmpty)
+ b(s)
+ }
+ @Test def userSettingsHavePrecedenceOverLint() {
+ assertTrue(check("-Xlint")(_.warnAdaptedArgs))
+ assertFalse(check("-Xlint", "-Ywarn-adapted-args:false")(_.warnAdaptedArgs))
+ assertFalse(check("-Ywarn-adapted-args:false", "-Xlint")(_.warnAdaptedArgs))
+ }
+
+ @Test def anonymousLintersCanBeNamed() {
+ assertTrue(check("-Xlint")(_.warnMissingInterpolator)) // among Xlint
+ assertFalse(check("-Xlint:-missing-interpolator")(_.warnMissingInterpolator))
+
+ // positive overrides negative, but not the other way around
+ assertTrue(check("-Xlint:-missing-interpolator,missing-interpolator")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint:-missing-interpolator", "-Xlint:missing-interpolator")(_.warnMissingInterpolator))
+
+ assertTrue(check("-Xlint:missing-interpolator,-missing-interpolator")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint:missing-interpolator", "-Xlint:-missing-interpolator")(_.warnMissingInterpolator))
+
+ // -Xlint:_ adds all possible choices, but explicit negative settings will override
+ assertFalse(check("-Xlint:-missing-interpolator,_")(_.warnMissingInterpolator))
+ assertFalse(check("-Xlint:-missing-interpolator", "-Xlint:_")(_.warnMissingInterpolator))
+ assertFalse(check("-Xlint:_", "-Xlint:-missing-interpolator")(_.warnMissingInterpolator))
+ assertFalse(check("-Xlint:_,-missing-interpolator")(_.warnMissingInterpolator))
+
+ // -Xlint is the same as -Xlint:_
+ assertFalse(check("-Xlint:-missing-interpolator", "-Xlint")(_.warnMissingInterpolator))
+ assertFalse(check("-Xlint", "-Xlint:-missing-interpolator")(_.warnMissingInterpolator))
+
+ // combination of positive, negative and _
+ assertTrue(check("-Xlint:_,-missing-interpolator,missing-interpolator")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint:-missing-interpolator,_,missing-interpolator")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint:-missing-interpolator,missing-interpolator,_")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint:missing-interpolator,-missing-interpolator,_")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint:missing-interpolator,_,-missing-interpolator")(_.warnMissingInterpolator))
+ }
+
+ @Test def xLintInvalidChoices(): Unit = {
+ assertThrows[IllegalArgumentException](check("-Xlint:-_")(_.warnAdaptedArgs))
+ assertThrows[IllegalArgumentException](check("-Xlint:-warn-adapted-args")(_.warnAdaptedArgs)) // "warn-" should not be there
+ }
+
+ @Test def xLintNonColonated(): Unit = {
+ assertTrue(check("-Xlint", "adapted-args", "-deprecation")(_.warnAdaptedArgs))
+ assertFalse(check("-Xlint", "adapted-args", "-deprecation")(_.warnMissingInterpolator))
+ assertTrue(check("-Xlint", "adapted-args", "missing-interpolator", "-deprecation")(s => s.warnMissingInterpolator && s.warnAdaptedArgs))
+ assertThrows[IllegalArgumentException](check("-Xlint", "adapted-args", "-missing-interpolator")(_.warnAdaptedArgs)) // non-colonated: cannot provide negative args
+ }
+
+ @Test def xLintContainsValues(): Unit = {
+ // make sure that lint.contains and lint.value.contains are consistent
+ def t(s: MutableSettings, v: String) = {
+ val r = s.lint.contains(v)
+ assertSame(r, s.lint.value.contains((s.LintWarnings withName v).asInstanceOf[s.lint.domain.Value]))
+ r
+ }
+
+ assertTrue(check("-Xlint")(t(_, "adapted-args")))
+ assertTrue(check("-Xlint:_")(t(_, "adapted-args")))
+ assertFalse(check("-Xlint:_,-adapted-args")(t(_, "adapted-args")))
+ assertFalse(check("-Xlint:-adapted-args,_")(t(_, "adapted-args")))
+ assertTrue(check("-Xlint:-adapted-args,_,adapted-args")(t(_, "adapted-args")))
+ }
+
+ @Test def xLintDeprecatedAlias(): Unit = {
+ assertTrue(check("-Ywarn-adapted-args")(_.warnAdaptedArgs))
+ assertTrue(check("-Xlint:_,-adapted-args", "-Ywarn-adapted-args")(_.warnAdaptedArgs))
+ assertTrue(check("-Xlint:-adapted-args", "-Ywarn-adapted-args")(_.warnAdaptedArgs))
+ assertTrue(check("-Ywarn-adapted-args", "-Xlint:-adapted-args,_")(_.warnAdaptedArgs))
+
+ assertFalse(check("-Ywarn-adapted-args:false")(_.warnAdaptedArgs))
+ assertFalse(check("-Ywarn-adapted-args:false", "-Xlint:_")(_.warnAdaptedArgs))
+ assertFalse(check("-Ywarn-adapted-args:false", "-Xlint:_,-adapted-args")(_.warnAdaptedArgs))
+ assertTrue(check("-Ywarn-adapted-args:false", "-Xlint:_,adapted-args")(_.warnAdaptedArgs))
+ }
+
+ @Test def expandingMultichoice(): Unit = {
+ val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
+ object mChoices extends s.MultiChoiceEnumeration {
+ val a = Choice("a")
+ val b = Choice("b")
+ val c = Choice("c")
+ val d = Choice("d")
+
+ val ab = Choice("ab", expandsTo = List(a, b))
+ val ac = Choice("ac", expandsTo = List(a, c))
+ val uber = Choice("uber", expandsTo = List(ab, d))
+ }
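+ // expanded recursively: "ab" enables a and b, "ac" enables a and c, and "uber" enables a, b and d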
+ val m = s.MultiChoiceSetting("-m", "args", "magic sauce", mChoices, Some(List("ac")))
+
+ def check(args: String*)(t: s.MultiChoiceSetting[mChoices.type] => Boolean): Boolean = {
+ m.clear()
+ val (ok, rest) = s.processArguments(args.toList, processAll = true)
+ assert(rest.isEmpty)
+ t(m)
+ }
+
+ import mChoices._
+
+ assertTrue(check("-m")(_.value == Set(a,c)))
+ assertTrue(check("-m:a,-b,c")(_.value == Set(a,c)))
+
+ // expanding options don't end up in the value set, only the terminal ones
+ assertTrue(check("-m:ab,ac")(_.value == Set(a,b,c)))
+ assertTrue(check("-m:_")(_.value == Set(a,b,c,d)))
+ assertTrue(check("-m:uber,ac")(_.value == Set(a,b,c,d))) // recursive expansion of uber
+
+ // explicit nays
+ assertTrue(check("-m:_,-b")(_.value == Set(a,c,d)))
+ assertTrue(check("-m:b,_,-b")(_.value == Set(a,b,c,d)))
+ assertTrue(check("-m:ac,-c")(_.value == Set(a)))
+ assertTrue(check("-m:ac,-a,-c")(_.value == Set()))
+ assertTrue(check("-m:-d,ac")(_.value == Set(a,c)))
+ assertTrue(check("-m:-b,ac,uber")(_.value == Set(a,c,d)))
+
+ assertFalse(check("-m:uber")(_.contains("i-m-not-an-option")))
+
+ assertThrows[IllegalArgumentException](check("-m:-_")(_ => true), _ contains "'-_' is not a valid choice")
+ assertThrows[IllegalArgumentException](check("-m:a,b,-ab")(_ => true), _ contains "'ab' cannot be negated")
+ assertThrows[IllegalArgumentException](check("-m:a,ac,-uber,uber")(_ => true), _ contains "'uber' cannot be negated")
+ }
+
+ @Test def xSourceTest(): Unit = {
+ def check(expected: String, args: String*): Unit = {
val s = new MutableSettings(msg => throw new IllegalArgumentException(msg))
- val (ok, residual) = s.processArguments(args.toList, processAll = true)
+ val (_, residual) = s.processArguments(args.toList, processAll = true)
assert(residual.isEmpty)
- s.warnAdaptedArgs // among Xlint
+ assertTrue(s.source.value == ScalaVersion(expected))
}
- assertTrue(check("-Xlint").value)
- assertFalse(check("-Xlint", "-Ywarn-adapted-args:false").value)
- assertFalse(check("-Ywarn-adapted-args:false", "-Xlint").value)
+ check(expected = "2.11.0") // default
+ check(expected = "2.11.0", "-Xsource:2.11")
+ check(expected = "2.10", "-Xsource:2.10.0")
+ check(expected = "2.12", "-Xsource:2.12")
+ assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource"), _ == "-Xsource requires an argument, the syntax is -Xsource:<version>")
+ assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource", "2.11"), _ == "-Xsource requires an argument, the syntax is -Xsource:<version>")
+ assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource:2.invalid"), _ contains "There was a problem parsing 2.invalid")
}
}
diff --git a/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
index 355771bf04..69931c9e24 100644
--- a/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
@@ -47,7 +47,7 @@ class CannotHaveAttrsTest {
assertEquals(t.tpe, NoType)
}
- @Test
+ @Test @org.junit.Ignore // SI-8816
def nonDefaultPosAssignmentFails = {
val pos = new OffsetPosition(null, 0)
attrlessTrees.foreach { t =>
@@ -56,7 +56,7 @@ class CannotHaveAttrsTest {
}
}
- @Test
+ @Test @org.junit.Ignore // SI-8816
def nonDefaultTpeAssignmentFails = {
val tpe = typeOf[Int]
attrlessTrees.foreach { t =>
@@ -64,4 +64,16 @@ class CannotHaveAttrsTest {
assertThrows[IllegalArgumentException] { t.setType(tpe) }
}
}
+
+ class Attach
+ @Test
+ def attachmentsAreIgnored = {
+ attrlessTrees.foreach { t =>
+ t.setAttachments(NoPosition.update(new Attach))
+ assert(t.attachments == NoPosition)
+ t.updateAttachment(new Attach)
+ assert(t.attachments == NoPosition)
+ t.removeAttachment[Attach] // no exception
+ }
+ }
}
diff --git a/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala b/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala
index cf09abdfff..7796345351 100644
--- a/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/FreshNameExtractorTest.scala
@@ -32,16 +32,16 @@ class FreshNameExtractorTest {
val Creator = new FreshNameCreator(prefixes.head)
val Extractor = new FreshNameExtractor(prefixes.tail.head)
assertThrows[MatchError] {
- val Extractor(_) = TermName(Creator.newName("foo"))
+ TermName(Creator.newName("foo")) match { case Extractor(_) => }
}
}
@Test
- def extractionsFailsIfNameDoesntEndWithNumber = {
- val Creator = new FreshNameCreator(prefixes.head)
+ def `no numeric suffix? no problem!` = {
+ val Creator = new FreshNameCreator(prefixes.head)
val Extractor = new FreshNameExtractor(prefixes.head)
- assertThrows[MatchError] {
- val Extractor(_) = TermName(Creator.newName("foo") + "bar")
+ TermName(Creator.newName("foo") + "bar") match {
+ case Extractor(_) =>
}
}
-} \ No newline at end of file
+}
diff --git a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
index 4a39cf9d48..91f94e09b6 100644
--- a/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/StdNamesTest.scala
@@ -15,12 +15,16 @@ class StdNamesTest {
@Test
def testNewTermNameInvalid(): Unit = {
- assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, 0, -1))
- assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, 0, 0))
assertThrows[IllegalArgumentException](newTermName("foo".toCharArray, -1, 1))
}
@Test
+ def testNewTermNameNegativeLength(): Unit = {
+ assertEquals(nme.EMPTY, newTermName("foo".toCharArray, 0, -1))
+ assertEquals(nme.EMPTY, newTermName("foo".toCharArray, 0, 0))
+ }
+
+ @Test
def testUnspecializedName(): Unit = {
def test(expected: Name, nme: Name) {
assertEquals(expected, unspecializedName(nme))
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index 25d8c4667f..f0f20acf07 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -3,6 +3,9 @@ package symtab
import scala.reflect.ClassTag
import scala.reflect.internal.{Phase, NoPhase, SomePhase}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
import util.ClassPath
import io.AbstractFile
@@ -26,14 +29,28 @@ class SymbolTableForUnitTesting extends SymbolTable {
class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier
override def isCompilerUniverse: Boolean = true
- def classPath = new PathResolver(settings).result
+
+ def classPath = platform.classPath
+ def flatClassPath: FlatClassPath = platform.flatClassPath
object platform extends backend.Platform {
val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
lazy val loaders: SymbolTableForUnitTesting.this.loaders.type = SymbolTableForUnitTesting.this.loaders
+
def platformPhases: List[SubComponent] = Nil
- val classPath: ClassPath[AbstractFile] = new PathResolver(settings).result
- def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
+
+ lazy val classPath: ClassPath[AbstractFile] = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
+ "It's not possible to use the recursive classpath representation, when it's not the chosen classpath scanning method")
+ new PathResolver(settings).result
+ }
+
+ private[nsc] lazy val flatClassPath: FlatClassPath = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
+ "It's not possible to use the flat classpath representation, when it's not the chosen classpath scanning method")
+ new FlatClassPathResolver(settings).result
+ }
+
def isMaybeBoxed(sym: Symbol): Boolean = ???
def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
def externalEquals: Symbol = ???
@@ -51,7 +68,12 @@ class SymbolTableForUnitTesting extends SymbolTable {
class GlobalMirror extends Roots(NoSymbol) {
val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
- def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+
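+ // pick the package loader matching the configured classpath representation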
+ def rootLoader: LazyType = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
+ case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(classPath)
+ }
+
override def toString = "compiler mirror"
}
@@ -61,7 +83,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
rm.asInstanceOf[Mirror]
}
- def settings: Settings = {
+ lazy val settings: Settings = {
val s = new Settings
// initialize classpath using java classpath
s.usejavacp.value = true
@@ -72,6 +94,18 @@ class SymbolTableForUnitTesting extends SymbolTable {
def picklerPhase: scala.reflect.internal.Phase = SomePhase
def erasurePhase: scala.reflect.internal.Phase = SomePhase
+ // Members declared in scala.reflect.internal.Reporting
+ def reporter = new scala.reflect.internal.ReporterImpl {
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = println(msg)
+ }
+
+ // minimal Run to get Reporting wired
+ def currentRun = new RunReporting {}
+ class PerRunReporting extends PerRunReportingBase {
+ def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg)
+ }
+ protected def PerRunReporting = new PerRunReporting
+
// Members declared in scala.reflect.internal.SymbolTable
def currentRunId: Int = 1
def log(msg: => AnyRef): Unit = println(msg)
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
index 11e955a4bb..5a921a5eda 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
@@ -33,10 +33,10 @@ class SymbolTableTest {
import symbolTable._
symbolTable.definitions.init()
val rootClass = symbolTable.rootMirror.RootClass
- val fooSymbol = rootClass.newClassSymbol("Foo": TypeName, NoPosition, 0)
+ val fooSymbol = rootClass.newClassSymbol(TypeName("Foo"), NoPosition, 0)
val fooType = new ClassInfoType(Nil, EmptyScope, fooSymbol)
fooSymbol.info = fooType
- val barSymbol = rootClass.newClassSymbol("Bar": TypeName, NoPosition, 0)
+ val barSymbol = rootClass.newClassSymbol(TypeName("Bar"), NoPosition, 0)
val fooTypeRef = TypeRef(fooSymbol.owner.tpe, fooSymbol, Nil)
val barType = new ClassInfoType(List(fooTypeRef), EmptyScope, barSymbol)
barSymbol.info = barType
@@ -44,4 +44,9 @@ class SymbolTableTest {
assertFalse("Foo should be a superclass of Foo", fooSymbol.tpe <:< barSymbol.tpe)
}
+ @Test
+ def noSymbolOuterClass_t9133: Unit = {
+ import symbolTable._
+ assert(NoSymbol.outerClass == NoSymbol)
+ }
}
diff --git a/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala
new file mode 100644
index 0000000000..7bcb90a2ee
--- /dev/null
+++ b/test/junit/scala/tools/nsc/transform/patmat/SolvingTest.scala
@@ -0,0 +1,610 @@
+package scala.tools.nsc.transform.patmat
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.collection.mutable
+import scala.reflect.internal.util.Position
+import scala.tools.nsc.{Global, Settings}
+
+object TestSolver extends Logic with Solving {
+
+ val global: Global = new Global(new Settings())
+
+ // disable max recursion depth in order to get all solutions
+ global.settings.YpatmatExhaustdepth.tryToSet("off" :: Nil)
+
+ object TestSolver extends Solver {
+
+ class Const {
+ override def toString: String = "Const"
+ }
+
+ val NullConst = new Const
+ type Type = Int
+
+ case class TypeConst(i: Int) extends Const
+
+ object TypeConst extends TypeConstExtractor
+
+ case class ValueConst(i: Int) extends Const
+
+ object ValueConst extends ValueConstExtractor {
+ def apply(t: Tree): Const = ???
+ }
+
+ case class Tree(name: String)
+
+ class Var(val x: Tree) extends AbsVar {
+
+ override def equals(other: scala.Any): Boolean = other match {
+ case that: Var => this.x == that.x
+ case _ => false
+ }
+
+ override def hashCode(): Int = x.hashCode()
+
+ override def toString: String = {
+ s"Var($x)"
+ }
+
+ def domainSyms = None
+
+ def groupedDomains: List[Set[TestSolver.Sym]] = Nil
+
+ def implications = Nil
+
+ def mayBeNull = false
+
+ def propForEqualsTo(c: Const): Prop = ???
+
+ def registerEquality(c: Const) = ()
+
+ def registerNull() = ()
+
+ def symForStaticTp = None
+ }
+
+ object Var extends VarExtractor {
+ def apply(x: Tree): Var = new Var(x)
+
+ def unapply(v: Var): Some[Tree] = Some(v.x)
+ }
+
+ def prepareNewAnalysis() = {}
+
+ def uncheckedWarning(pos: Position, msg: String) = sys.error(msg)
+
+ def reportWarning(msg: String) = sys.error(msg)
+
+ /**
+ * The DPLL procedure only returns a minimal mapping from literal to value
+ * such that the CNF formula is satisfied.
+ * E.g. for:
+ * `(a \/ b)`
+ * The DPLL procedure will find either {a = true} or {b = true}
+ * as solution.
+ *
+ * The expansion step will amend both solutions with the unassigned variable
+ * i.e., {a = true} will be expanded to {a = true, b = true} and
+ * {a = true, b = false}.
+ */
+ def expandUnassigned(solution: Solution): List[Model] = {
+ import solution._
+
+ // the number of solutions is doubled for every unassigned variable
+ val expandedModels = 1 << unassigned.size
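+ // e.g. two unassigned variables give 1 << 2 = 4 expanded models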
+ var current = mutable.ArrayBuffer[Model]()
+ var next = mutable.ArrayBuffer[Model]()
+ current.sizeHint(expandedModels)
+ next.sizeHint(expandedModels)
+
+ current += model
+
+ // we use double buffering:
+ // read from `current` and create two models in `next` for each model read
+ for {
+ s <- unassigned
+ } {
+ for {
+ model <- current
+ } {
+ def force(s: Sym, pol: Boolean) = model + (s -> pol)
+
+ next += force(s, pol = true)
+ next += force(s, pol = false)
+ }
+
+ val tmp = current
+ current = next
+ next = tmp
+
+ next.clear()
+ }
+
+ current.toList
+ }
+
+ /**
+ * Old CNF conversion code, used for reference:
+ * - convert formula into NNF
+ * (i.e., no negated terms, only negated variables)
+ * - use distributive laws to convert into CNF
+ */
+ def eqFreePropToSolvableViaDistribution(p: Prop) = {
+ val symbolMapping = new SymbolMapping(gatherSymbols(p))
+
+ type Formula = Array[TestSolver.Clause]
+
+ def formula(c: Clause*): Formula = c.toArray
+
+ def merge(a: Clause, b: Clause) = a ++ b
+
+ def negationNormalFormNot(p: Prop): Prop = p match {
+ case And(ps) => Or(ps map negationNormalFormNot)
+ case Or(ps) => And(ps map negationNormalFormNot)
+ case Not(p) => negationNormalForm(p)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop): Prop = p match {
+ case Or(ps) => Or(ps map negationNormalForm)
+ case And(ps) => And(ps map negationNormalForm)
+ case Not(negated) => negationNormalFormNot(negated)
+ case True
+ | False
+ | (_: Sym) => p
+ }
+
+ val TrueF: Formula = Array()
+ val FalseF = Array(clause())
+ def lit(sym: Sym) = Array(clause(symbolMapping.lit(sym)))
+ def negLit(sym: Sym) = Array(clause(-symbolMapping.lit(sym)))
+
+ def conjunctiveNormalForm(p: Prop): Formula = {
+ def distribute(a: Formula, b: Formula): Formula =
+ (a, b) match {
+ // true \/ _ = true
+ // _ \/ true = true
+ case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
+ // lit \/ lit
+ case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
+ // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
+ // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
+ case (cs, ds) =>
+ val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
+ big flatMap (c => distribute(formula(c), small))
+ }
+
+ p match {
+ case True => TrueF
+ case False => FalseF
+ case s: Sym => lit(s)
+ case Not(s: Sym) => negLit(s)
+ case And(ps) =>
+ ps.toArray flatMap conjunctiveNormalForm
+ case Or(ps) =>
+ ps map conjunctiveNormalForm reduceLeft { (a, b) =>
+ distribute(a, b)
+ }
+ }
+ }
+ val cnf = conjunctiveNormalForm(negationNormalForm(p))
+ Solvable(cnf, symbolMapping)
+ }
+
+ }
+
+}
+
+/**
+ * Testing CNF conversion via Tseitin vs NNF & expansion.
+ */
+@RunWith(classOf[JUnit4])
+class SolvingTest {
+
+ import scala.tools.nsc.transform.patmat.TestSolver.TestSolver._
+
+ object SymName {
+ def unapply(s: Sym): Option[String] = {
+ val Var(Tree(name)) = s.variable
+ Some(name)
+ }
+ }
+
+ implicit val ModelOrd: Ordering[TestSolver.TestSolver.Model] = Ordering.by {
+ _.toSeq.sortWith {
+ case ((sym1, v1), (sym2, v2)) =>
+ val SymName(name1) = sym1
+ val SymName(name2) = sym2
+ if (name1 < name2)
+ true
+ else if (name1 > name2)
+ false
+ else
+ v1 < v2
+ }.toIterable
+ }
+
+ implicit val SolutionOrd: Ordering[TestSolver.TestSolver.Solution] =
+ Ordering.by(_.model)
+
+ def formatSolution(solution: Solution): String = {
+ formatModel(solution.model)
+ }
+
+ def formatModel(model: Model): String = {
+ (for {
+ (SymName(name), value) <- model
+ } yield {
+ val v = if (value) "T" else "F"
+ s"$name -> $v"
+ }).mkString(", ")
+ }
+
+ def sym(name: String) = Sym(Var(Tree(name)), NullConst)
+
+ @Test
+ def testSymCreation() {
+ val s1 = sym("hello")
+ val s2 = sym("hello")
+ assertEquals(s1, s2)
+ }
+
+ /**
+ * Simplest possible test: solve a formula and check the solution(s)
+ */
+ @Test
+ def testUnassigned() {
+ val pSym = sym("p")
+ val solvable = propToSolvable(Or(pSym, Not(pSym)))
+ val solutions = TestSolver.TestSolver.findAllModelsFor(solvable)
+ val expected = List(Solution(Map(), List(pSym)))
+ assertEquals(expected, solutions)
+ }
+
+ /**
+ * Unassigned variables must be expanded
+ * for stable results
+ */
+ @Test
+ def testNoUnassigned() {
+ val pSym = sym("p")
+ val qSym = sym("q")
+ val solvable = propToSolvable(Or(pSym, Not(qSym)))
+ val solutions = findAllModelsFor(solvable)
+ val expanded = solutions.flatMap(expandUnassigned).sorted
+ val expected = Seq(
+ Map(pSym -> false, qSym -> false),
+ Map(pSym -> true, qSym -> false),
+ Map(pSym -> true, qSym -> true)
+ ).sorted
+
+ assertEquals(expected, expanded)
+ }
+
+ @Test
+ def testTseitinVsExpansionFrom_t7020() {
+ val formulas = Seq(
+ And(And(And(Not(sym("V1=null")),
+ sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")),
+ And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")))),
+ And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))),
+
+ And(And(And(Not(sym("V1=null")),
+ sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")),
+ And(sym("V2=7"), sym("V3=Nil")))),
+ And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))), And(And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil")))))))),
+
+ And(And(Not(sym("V1=null")),
+ sym("V1=scala.collection.immutable.::[?]")), And(Not(sym("V1=null")),
+ And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")))),
+
+ And(And(Not(sym("V1=null")), sym("V1=scala.collection.immutable.::[?]")),
+ And(Not(sym("V1=null")), And(sym("V2=7"), sym("V3=Nil")))),
+
+ And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))), And(And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))))),
+
+ And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))),
+
+ And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))), And(sym("V1=Nil"), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))))),
+
+ And(And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))), And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil"))))),
+
+ And(And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil")))))),
+
+ And(And(Or(sym("V3=scala.collection.immutable.::[?]"), sym("V3=Nil")),
+ Or(sym("V1=scala.collection.immutable.::[?]"), sym("V1=Nil"))),
+ And(And(Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))),
+ Or(False, Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))), And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))), And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil")))))),
+
+ And(Not(sym("V1=null")), And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))),
+ sym("V3=Nil"))),
+
+ And(Not(sym("V1=null")), And(sym("V2=7"), sym("V3=Nil"))),
+
+ And(Not(sym("V1=null")), sym("V1=scala.collection.immutable.::[?]")),
+
+ And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+
+ And(Not(sym("V2=5")), Not(sym("V2=6"))),
+
+ And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))),
+
+ And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))),
+
+ And(Or(Not(sym("V3=Nil")), Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))),
+
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null"))))),
+
+ And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))), And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil")))),
+
+ And(Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))),
+
+ And(Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))), Not(sym("V1=Nil"))),
+
+ And(Or(Or(sym("V1=null"), Not(sym("V1=scala.collection.immutable.::[?]"))),
+ Or(sym("V1=null"), Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")),
+ Not(sym("V2=6")))), Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil"))))))),
+
+ And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))),
+
+ And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=7")), Not(sym("V3=Nil"))))),
+ And(And(Or(Not(sym("V1=scala.collection.immutable.::[?]")),
+ Not(sym("V1=null"))), And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ Or(sym("V3=Nil"), sym("V3=null"))), And(Or(Not(sym("V3=Nil")),
+ Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null")))))))), And(sym("V1=Nil"), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))))),
+
+ And(Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))), sym("V3=Nil")),
+
+ And(Or(sym("V3=scala.collection.immutable.::[?]"), Or(sym("V3=Nil"),
+ sym("V3=null"))), And(Or(Not(sym("V3=Nil")), Not(sym("V3=null"))),
+ And(Or(Not(sym("V3=scala.collection.immutable.::[?]")),
+ Not(sym("V3=null"))), And(Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+ Or(sym("V1=scala.collection.immutable.::[?]"), Or(sym("V1=Nil"),
+ sym("V1=null"))))))),
+
+ And(Or(sym("V3=scala.collection.immutable.::[?]"),
+ sym("V3=Nil")), Or(sym("V1=scala.collection.immutable.::[?]"),
+ sym("V1=Nil"))),
+
+ And(sym("V1=Nil"), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))), And(Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))), Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=2")), Not(sym("V3=Nil")))))))),
+
+ And(sym("V2=7"), sym("V3=Nil")),
+
+ False,
+
+ Not(sym("V1=Nil")),
+
+ Or(And(Not(sym("V2=4")),
+ And(Not(sym("V2=5")), Not(sym("V2=6")))), Not(sym("V3=Nil"))),
+
+ Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))),
+
+ Or(False,
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil")))),
+
+ Or(False, Or(Not(sym("V2=1")), Not(sym("V3=Nil")))),
+
+ Or(Not(sym("V1=Nil")), Not(sym("V1=null"))),
+
+ Or(Not(sym("V3=scala.collection.immutable.::[?]")), Not(sym("V3=null"))),
+
+ Or(Or(False, Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))),
+
+ Or(Or(False,
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(False,
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))),
+
+ Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil"))))),
+
+ Or(Or(sym("V1=null"),
+ Not(sym("V1=scala.collection.immutable.::[?]"))), Or(sym("V1=null"),
+ Or(Not(sym("V2=1")), Not(sym("V3=Nil"))))),
+
+ Or(sym("V1=null"), Not(sym("V1=scala.collection.immutable.::[?]"))),
+
+ Or(sym("V1=null"),
+ Or(And(Not(sym("V2=4")), And(Not(sym("V2=5")), Not(sym("V2=6")))),
+ Not(sym("V3=Nil")))),
+
+ Or(sym("V1=null"), Or(Not(sym("V2=1")), Not(sym("V3=Nil")))),
+
+ Or(sym("V1=scala.collection.immutable.::[?]"),
+ Or(sym("V1=Nil"), sym("V1=null"))),
+
+ Or(sym("V1=scala.collection.immutable.::[?]"), sym("V1=Nil")),
+
+ Or(sym("V2=4"), Or(sym("V2=5"), sym("V2=6"))),
+
+ sym("V3=scala.collection.immutable.::[?]")
+ )
+
+ formulas foreach {
+ f =>
+ // build CNF
+ val tseitinCnf = propToSolvable(f)
+ val expansionCnf = eqFreePropToSolvableViaDistribution(f)
+
+ // ALL-SAT
+ val tseitinSolutions = findAllModelsFor(tseitinCnf)
+ val expansionSolutions = findAllModelsFor(expansionCnf)
+
+ // expand unassigned variables
+ // (otherwise solutions cannot be compared)
+ val tseitinNoUnassigned = tseitinSolutions.flatMap(expandUnassigned).sorted
+ val expansionNoUnassigned = expansionSolutions.flatMap(expandUnassigned).sorted
+ assertEquals(tseitinNoUnassigned, expansionNoUnassigned)
+ }
+ }
+
+ def pairWiseEncoding(ops: List[Sym]) = {
+ And(ops.combinations(2).collect {
+ case a :: b :: Nil => Or(Not(a), Not(b))
+ }.toSet[TestSolver.TestSolver.Prop])
+ }
+
+ @Test
+ def testAtMostOne() {
+ val dummySym = sym("dummy")
+ val syms = "pqrstu".map(c => sym(c.toString)).toList
+ // expand unassigned variables
+ // (otherwise solutions cannot be compared)
+ val expected = TestSolver.TestSolver.findAllModelsFor(propToSolvable(And(dummySym, pairWiseEncoding(syms)))).flatMap(expandUnassigned)
+ val actual = TestSolver.TestSolver.findAllModelsFor(propToSolvable(And(dummySym, AtMostOne(syms)))).flatMap(expandUnassigned)
+ assertEquals(expected.toSet, actual.toSet)
+ }
+}
+
+
diff --git a/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala b/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala
new file mode 100644
index 0000000000..f2926e3e17
--- /dev/null
+++ b/test/junit/scala/tools/nsc/util/ClassPathImplComparator.scala
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.util
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.Settings
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.util.PathResolverFactory
+
+/**
+ * Simple application to compare the efficiency of the recursive and the flat classpath representations
+ */
+object ClassPathImplComparator {
+
+ private class TestSettings extends Settings {
+ val checkClasses = PathSetting("-checkClasses", "Specify names of classes which should be found, separated with ;", "")
+ val requiredIterations = IntSetting("-requiredIterations",
+ "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
+ val cpCreationRepetitions = IntSetting("-cpCreationRepetitions",
+ "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
+ val cpLookupRepetitions = IntSetting("-cpLookupRepetitions",
+ "Repeat tests specified number of times (to check e.g. impact of caches)", 1, Some((1, Int.MaxValue)), (_: String) => None)
+ }
+
+ private class DurationStats(name: String) {
+ private var sum = 0L
+ private var iterations = 0
+
+ def noteMeasuredTime(millis: Long): Unit = {
+ sum += millis
+ iterations += 1
+ }
+
+ def printResults(): Unit = {
+ val avg = if (iterations == 0) 0 else sum.toDouble / iterations
+ println(s"$name - total duration: $sum ms; iterations: $iterations; avg: $avg ms")
+ }
+ }
+
+ private lazy val defaultClassesToFind = List(
+ "scala.collection.immutable.List",
+ "scala.Option",
+ "scala.Int",
+ "scala.collection.immutable.Vector",
+ "scala.util.hashing.MurmurHash3"
+ )
+
+ private val oldCpCreationStats = new DurationStats("Old classpath - create")
+ private val oldCpSearchingStats = new DurationStats("Old classpath - search")
+
+ private val flatCpCreationStats = new DurationStats("Flat classpath - create")
+ private val flatCpSearchingStats = new DurationStats("Flat classpath - search")
+
+ def main(args: Array[String]): Unit = {
+
+ if (args contains "-help")
+ usage()
+ else {
+ val oldCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Recursive)
+ val flatCpSettings = loadSettings(args.toList, ClassPathRepresentationType.Flat)
+
+ val classesToCheck = oldCpSettings.checkClasses.value
+ val classesToFind =
+ if (classesToCheck.isEmpty) defaultClassesToFind
+ else classesToCheck.split(";").toList
+
+ def doTest(classPath: => ClassFileLookup[AbstractFile], cpCreationStats: DurationStats, cpSearchingStats: DurationStats,
+ cpCreationRepetitions: Int, cpLookupRepetitions: Int) = {
+
+ def createClassPaths() = (1 to cpCreationRepetitions).map(_ => classPath).last
+ def testClassLookup(cp: ClassFileLookup[AbstractFile]): Boolean = (1 to cpLookupRepetitions).foldLeft(true) {
+ case (a, _) => a && checkExistenceOfClasses(classesToFind)(cp)
+ }
+
+ val cp = withMeasuredTime("Creating classpath", createClassPaths(), cpCreationStats)
+ val result = withMeasuredTime("Searching for specified classes", testClassLookup(cp), cpSearchingStats)
+ println(s"The end of the test case. All expected classes found = $result \n")
+ }
+
+ (1 to oldCpSettings.requiredIterations.value) foreach { iteration =>
+ if (oldCpSettings.requiredIterations.value > 1)
+ println(s"Iteration no $iteration")
+
+ println("Recursive (old) classpath representation:")
+ doTest(PathResolverFactory.create(oldCpSettings).result, oldCpCreationStats, oldCpSearchingStats,
+ oldCpSettings.cpCreationRepetitions.value, oldCpSettings.cpLookupRepetitions.value)
+
+ println("Flat classpath representation:")
+ doTest(PathResolverFactory.create(flatCpSettings).result, flatCpCreationStats, flatCpSearchingStats,
+ flatCpSettings.cpCreationRepetitions.value, flatCpSettings.cpLookupRepetitions.value)
+ }
+
+ if (oldCpSettings.requiredIterations.value > 1) {
+ println("\nOld classpath - summary")
+ oldCpCreationStats.printResults()
+ oldCpSearchingStats.printResults()
+
+ println("\nFlat classpath - summary")
+ flatCpCreationStats.printResults()
+ flatCpSearchingStats.printResults()
+ }
+ }
+ }
+
+ /**
+ * Prints usage information
+ */
+ private def usage(): Unit =
+ println("""Use classpath and sourcepath options like in the case of e.g. 'scala' command.
+ | There are also two additional options:
+ | -checkClasses <semicolon separated class names> Specify names of classes which should be found
+ | -requiredIterations <int value> Repeat tests specified number of times (to check e.g. impact of caches)
+ | Note: Option -YclasspathImpl will be set automatically for each case.
+ """.stripMargin.trim)
+
+ private def loadSettings(args: List[String], implType: String) = {
+ val settings = new TestSettings()
+ settings.processArguments(args, processAll = true)
+ settings.YclasspathImpl.value = implType
+ if (settings.classpath.isDefault)
+ settings.classpath.value = sys.props("java.class.path")
+ settings
+ }
+
+ private def withMeasuredTime[T](operationName: String, f: => T, durationStats: DurationStats): T = {
+ val startTime = System.currentTimeMillis()
+ val res = f
+ val elapsed = System.currentTimeMillis() - startTime
+ durationStats.noteMeasuredTime(elapsed)
+ println(s"$operationName - elapsed $elapsed ms")
+ res
+ }
+
+ private def checkExistenceOfClasses(classesToCheck: Seq[String])(classPath: ClassFileLookup[AbstractFile]): Boolean =
+ classesToCheck.foldLeft(true) {
+ case (res, classToCheck) =>
+ val found = classPath.findClass(classToCheck).isDefined
+ if (!found)
+ println(s"Class $classToCheck not found") // of course in this case the measured time will be affected by IO operation
+ found
+ }
+}
diff --git a/test/junit/scala/tools/testing/AssertThrowsTest.scala b/test/junit/scala/tools/testing/AssertThrowsTest.scala
index a70519e63c..76758f51d2 100644
--- a/test/junit/scala/tools/testing/AssertThrowsTest.scala
+++ b/test/junit/scala/tools/testing/AssertThrowsTest.scala
@@ -31,4 +31,13 @@ class AssertThrowsTest {
}
})
-} \ No newline at end of file
+ @Test
+ def errorIfNoThrow: Unit = {
+ try {
+ assertThrows[Foo] { () }
+ } catch {
+ case e: AssertionError => return
+ }
+ fail("assertThrows should error if the tested expression does not throw anything")
+ }
+}
diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala
index 9efac64a97..d798f2e53e 100644
--- a/test/junit/scala/tools/testing/AssertUtil.scala
+++ b/test/junit/scala/tools/testing/AssertUtil.scala
@@ -1,19 +1,92 @@
package scala.tools
package testing
+import org.junit.Assert
+import Assert._
+import scala.runtime.ScalaRunTime.stringOf
+import scala.collection.{ GenIterable, IterableLike }
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+import java.lang.ref._
+import java.lang.reflect._
+import java.util.IdentityHashMap
+
/** This module contains additional higher-level assert statements
* that are ultimately based on junit.Assert primitives.
*/
object AssertUtil {
- /** Check if exception T (or a subclass) was thrown during evaluation of f.
- * If any other exception or throwable is found instead it will be re-thrown.
+ private final val timeout = 60 * 1000L // wait a minute
+
+ private implicit class `ref helper`[A](val r: Reference[A]) extends AnyVal {
+ def isEmpty: Boolean = r.get == null
+ def nonEmpty: Boolean = !isEmpty
+ }
+ private implicit class `class helper`(val clazz: Class[_]) extends AnyVal {
+ def allFields: List[Field] = {
+ def loop(k: Class[_]): List[Field] =
+ if (k == null) Nil
+ else k.getDeclaredFields.toList ::: loop(k.getSuperclass)
+ loop(clazz)
+ }
+ }
+ private implicit class `field helper`(val f: Field) extends AnyVal {
+ def follow(o: AnyRef): AnyRef = {
+ f setAccessible true
+ f get o
+ }
+ }
+
+ /** Check that throwable T (or a subclass) was thrown during evaluation of `body`,
+ * and that its message satisfies the `checkMessage` predicate.
+ * Any other exception is propagated.
*/
- def assertThrows[T <: Exception](f: => Any)(implicit manifest: Manifest[T]): Unit =
- try f
- catch {
- case e: Exception =>
- val clazz = manifest.erasure.asInstanceOf[Class[T]]
- if (!clazz.isAssignableFrom(e.getClass))
- throw e
+ def assertThrows[T <: Throwable](body: => Any,
+ checkMessage: String => Boolean = s => true)
+ (implicit manifest: Manifest[T]): Unit = {
+ try {
+ body
+ fail("Expression did not throw!")
+ } catch {
+ case e: Throwable if (manifest.runtimeClass isAssignableFrom e.getClass) &&
+ checkMessage(e.getMessage) =>
+ }
+ }
+
+ /** JUnit-style assertion for `IterableLike.sameElements`.
+ */
+ def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: GenIterable[B], message: String = ""): Unit =
+ if (!(expected sameElements actual))
+ fail(
+ f"${ if (message.nonEmpty) s"$message " else "" }expected:<${ stringOf(expected) }> but was:<${ stringOf(actual) }>"
+ )
+
+ /** Convenient for testing iterators.
+ */
+ def assertSameElements[A, B >: A](expected: IterableLike[A, _], actual: Iterator[B]): Unit =
+ assertSameElements(expected, actual.toList, "")
+
+ /** Value is not strongly reachable from roots after body is evaluated.
+ */
+ def assertNotReachable[A <: AnyRef](a: => A, roots: AnyRef*)(body: => Unit): Unit = {
+ val wkref = new WeakReference(a)
+ def refs(root: AnyRef): mutable.Set[AnyRef] = {
+ val seen = new IdentityHashMap[AnyRef, Unit]
+ def loop(o: AnyRef): Unit =
+ if (wkref.nonEmpty && o != null && !seen.containsKey(o)) {
+ seen.put(o, ())
+ for {
+ f <- o.getClass.allFields
+ if !Modifier.isStatic(f.getModifiers)
+ if !f.getType.isPrimitive
+ if !classOf[Reference[_]].isAssignableFrom(f.getType)
+ } loop(f follow o)
+ }
+ loop(root)
+ seen.keySet.asScala
+ }
+ body
+ for (r <- roots if wkref.nonEmpty) {
+ assertFalse(s"Root $r held reference", refs(r) contains wkref.get)
}
-} \ No newline at end of file
+ }
+}
diff --git a/test/junit/scala/tools/testing/AssertUtilTest.scala b/test/junit/scala/tools/testing/AssertUtilTest.scala
new file mode 100644
index 0000000000..03d8815ab2
--- /dev/null
+++ b/test/junit/scala/tools/testing/AssertUtilTest.scala
@@ -0,0 +1,21 @@
+package scala.tools
+package testing
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import AssertUtil._
+
+import java.lang.ref._
+
+@RunWith(classOf[JUnit4])
+class AssertUtilTest {
+
+ @Test def reachableIgnoresReferences(): Unit = {
+ class Holder[A](val ref: SoftReference[A])
+ val o = new Object
+ val r = new SoftReference(o)
+ assertNotReachable(o, new Holder(r)) { }
+ }
+}
diff --git a/test/junit/scala/tools/testing/ClearAfterClass.java b/test/junit/scala/tools/testing/ClearAfterClass.java
new file mode 100644
index 0000000000..232d459c4e
--- /dev/null
+++ b/test/junit/scala/tools/testing/ClearAfterClass.java
@@ -0,0 +1,20 @@
+package scala.tools.testing;
+
+import org.junit.AfterClass;
+
+/**
+ * Extend this class to use JUnit's @AfterClass. This annotation only works on static methods,
+ * which cannot be written in Scala.
+ *
+ * Example: {@link scala.tools.nsc.backend.jvm.opt.InlinerTest}
+ */
+public class ClearAfterClass {
+ public static interface Clearable {
+ void clear();
+ }
+
+ public static Clearable stateToClear;
+
+ @AfterClass
+ public static void clearState() { stateToClear.clear(); }
+}
diff --git a/test/junit/scala/tools/testing/TempDir.scala b/test/junit/scala/tools/testing/TempDir.scala
new file mode 100644
index 0000000000..475de8c4a2
--- /dev/null
+++ b/test/junit/scala/tools/testing/TempDir.scala
@@ -0,0 +1,18 @@
+package scala.tools.testing
+
+import java.io.{IOException, File}
+
+object TempDir {
+ final val TEMP_DIR_ATTEMPTS = 10000
+ def createTempDir(): File = {
+ val baseDir = new File(System.getProperty("java.io.tmpdir"))
+ val baseName = System.currentTimeMillis() + "-"
+ var c = 0
+ while (c < TEMP_DIR_ATTEMPTS) {
+ val tempDir = new File(baseDir, baseName + c)
+ if (tempDir.mkdir()) return tempDir
+ c += 1
+ }
+ throw new IOException("Failed to create directory")
+ }
+}
diff --git a/test/junit/scala/util/RandomTest.scala b/test/junit/scala/util/RandomTest.scala
new file mode 100644
index 0000000000..32959675ee
--- /dev/null
+++ b/test/junit/scala/util/RandomTest.scala
@@ -0,0 +1,15 @@
+package scala.util
+
+import org.junit.{ Assert, Test }
+
+class RandomTest {
+ // Test for SI-9059
+ @Test def testAlphanumeric: Unit = {
+ def isAlphaNum(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')
+
+ val items = Random.alphanumeric.take(100000)
+ for (c <- items) {
+ Assert.assertTrue(s"$c should be alphanumeric", isAlphaNum(c))
+ }
+ }
+}
diff --git a/test/junit/scala/util/t7265.scala b/test/junit/scala/util/SpecVersionTest.scala
index 71f085d21d..e3e7a978f2 100644
--- a/test/junit/scala/util/t7265.scala
+++ b/test/junit/scala/util/SpecVersionTest.scala
@@ -1,14 +1,11 @@
package scala.util
-package test
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
-import scala.util.PropertiesTrait
-
/** The java version property uses the spec version
* and must work for all "major.minor" and fail otherwise.
*/
@@ -24,6 +21,7 @@ class SpecVersionTest {
override lazy val scalaProps = new java.util.Properties
}
+ // SI-7265
@Test
def comparesCorrectly(): Unit = {
assert(sut isJavaAtLeast "1.5")
diff --git a/test/junit/scala/util/matching/regextract-char.scala b/test/junit/scala/util/matching/CharRegexTest.scala
index 50fdcd9d46..50fdcd9d46 100644
--- a/test/junit/scala/util/matching/regextract-char.scala
+++ b/test/junit/scala/util/matching/CharRegexTest.scala
diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala
index d25842cc57..5b13397d6a 100644
--- a/test/junit/scala/util/matching/RegexTest.scala
+++ b/test/junit/scala/util/matching/RegexTest.scala
@@ -27,4 +27,21 @@ class RegexTest {
assertEquals("1", x)
assertEquals("1", y)
}
+
+ @Test def t8787nullMatch() = {
+ val r = """\d+""".r
+ val s: String = null
+ val x = s match { case r() => 1 ; case _ => 2 }
+ assertEquals(2, x)
+ }
+
+ @Test def t8787nullMatcher() = {
+ val r = """(\d+):(\d+)""".r
+ val s = "1:2 3:4 5:6"
+ val z = ((r findAllMatchIn s).toList :+ null) flatMap {
+ case r(x, y) => Some((x.toInt, y.toInt))
+ case _ => None
+ }
+ assertEquals(List((1,2),(3,4),(5,6)), z)
+ }
}
diff --git a/test/osgi/src/BasicLibrary.scala b/test/osgi/src/BasicLibrary.scala
index 6618f02102..ee8b7634ff 100644
--- a/test/osgi/src/BasicLibrary.scala
+++ b/test/osgi/src/BasicLibrary.scala
@@ -7,19 +7,16 @@ import org.ops4j.pax.exam.CoreOptions._
import org.junit.Test
import org.junit.runner.RunWith
import org.ops4j.pax.exam
-import org.ops4j.pax.exam.junit.{
- Configuration,
- ExamReactorStrategy,
- JUnit4TestRunner
-}
-import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
-import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.ops4j.pax.exam.Configuration
+import org.ops4j.pax.exam.junit.PaxExam
+import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod }
+import org.ops4j.pax.swissbox.tracker.ServiceLookup
import org.osgi.framework.BundleContext
-@RunWith(classOf[JUnit4TestRunner])
-@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+@RunWith(classOf[PaxExam])
+@ExamReactorStrategy(Array(classOf[PerMethod]))
class BasicLibraryTest extends ScalaOsgiHelper {
@Configuration
def config(): Array[exam.Option] =
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
index d601f04f89..53ab7e5345 100644
--- a/test/osgi/src/BasicReflection.scala
+++ b/test/osgi/src/BasicReflection.scala
@@ -10,13 +10,10 @@ import org.ops4j.pax.exam.CoreOptions._
import org.junit.Test
import org.junit.runner.RunWith
import org.ops4j.pax.exam
-import org.ops4j.pax.exam.junit.{
- Configuration,
- ExamReactorStrategy,
- JUnit4TestRunner
-}
-import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
-import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.ops4j.pax.exam.Configuration
+import org.ops4j.pax.exam.junit.PaxExam
+import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod }
+import org.ops4j.pax.swissbox.tracker.ServiceLookup
import org.osgi.framework.BundleContext
@@ -38,8 +35,8 @@ class C {
object M
-@RunWith(classOf[JUnit4TestRunner])
-@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+@RunWith(classOf[PaxExam])
+@ExamReactorStrategy(Array(classOf[PerMethod]))
class BasicReflectionTest extends ScalaOsgiHelper {
@Configuration
diff --git a/test/osgi/src/BasicTest.scala b/test/osgi/src/BasicTest.scala
index 109b7b911a..5adf87ecc1 100644
--- a/test/osgi/src/BasicTest.scala
+++ b/test/osgi/src/BasicTest.scala
@@ -6,21 +6,18 @@ import org.ops4j.pax.exam.CoreOptions._
import org.junit.Test
import org.junit.runner.RunWith
import org.ops4j.pax.exam
-import org.ops4j.pax.exam.junit.{
- Configuration,
- ExamReactorStrategy,
- JUnit4TestRunner
-}
-import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
-import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.ops4j.pax.exam.Configuration
+import org.ops4j.pax.exam.junit.PaxExam
+import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod }
+import org.ops4j.pax.swissbox.tracker.ServiceLookup
import org.osgi.framework.BundleContext
-@RunWith(classOf[JUnit4TestRunner])
-@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+@RunWith(classOf[PaxExam])
+@ExamReactorStrategy(Array(classOf[PerMethod]))
class BasicTest extends ScalaOsgiHelper {
@Configuration
def config(): Array[exam.Option] = {
diff --git a/test/osgi/src/ReflectionToolboxTest.scala b/test/osgi/src/ReflectionToolboxTest.scala
index bb48078e95..a23de18d07 100644
--- a/test/osgi/src/ReflectionToolboxTest.scala
+++ b/test/osgi/src/ReflectionToolboxTest.scala
@@ -8,13 +8,10 @@ import org.ops4j.pax.exam.CoreOptions._
import org.junit.Test
import org.junit.runner.RunWith
import org.ops4j.pax.exam
-import org.ops4j.pax.exam.junit.{
- Configuration,
- ExamReactorStrategy,
- JUnit4TestRunner
-}
-import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
-import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.ops4j.pax.exam.Configuration
+import org.ops4j.pax.exam.junit.PaxExam
+import org.ops4j.pax.exam.spi.reactors.{ ExamReactorStrategy, PerMethod }
+import org.ops4j.pax.swissbox.tracker.ServiceLookup
import org.osgi.framework.BundleContext
@@ -22,8 +19,8 @@ class C {
val f1 = 2
}
-@RunWith(classOf[JUnit4TestRunner])
-@ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+@RunWith(classOf[PaxExam])
+@ExamReactorStrategy(Array(classOf[PerMethod]))
class ReflectionToolBoxTest extends ScalaOsgiHelper {
@Configuration
diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala
index 084afe8643..7ba8883bb8 100644
--- a/test/osgi/src/ScalaOsgiHelper.scala
+++ b/test/osgi/src/ScalaOsgiHelper.scala
@@ -20,19 +20,19 @@ trait ScalaOsgiHelper {
def standardOptions: Array[exam.Option] = {
val bundles = (allBundleFiles map makeBundle)
- bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ bundles ++ Array[exam.Option](junitBundles())
// to change the local repo used (for some operations, but not all -- which is why I didn't bother):
// systemProperty("org.ops4j.pax.url.mvn.localRepository").value(sys.props("maven.repo.local")))
}
def justReflectionOptions: Array[exam.Option] = {
val bundles = filteredBundleFiles("scala-library", "scala-reflect")
- bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ bundles ++ Array[exam.Option](junitBundles())
}
def justCoreLibraryOptions: Array[exam.Option] = {
val bundles = filteredBundleFiles("scala-library")
- bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ bundles ++ Array[exam.Option](junitBundles())
}
}
diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala
index f4964b63b1..2d08f22d8b 100644
--- a/test/pending/jvm/cf-attributes.scala
+++ b/test/pending/jvm/cf-attributes.scala
@@ -62,7 +62,7 @@ object anonymousClasses {
//InnerClass:
// public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$
val x = new Foo() {
- override def foo() { println("foo (overriden)"); }
+ override def foo() { println("foo (overridden)"); }
def dummy = 0
}
}
diff --git a/test/pending/jvm/javasigs.scala b/test/pending/jvm/javasigs.scala
index 8da59ab0a0..d18a4e6fb5 100644
--- a/test/pending/jvm/javasigs.scala
+++ b/test/pending/jvm/javasigs.scala
@@ -32,7 +32,7 @@ object Scalatest {
}
- /** Execute cmd, wait for the process to end and pipe it's output to stdout */
+ /** Execute cmd, wait for the process to end and pipe its output to stdout */
def exec(cmd: String) {
val proc = Runtime.getRuntime().exec(cmd)
val inp = new BufferedReader(new InputStreamReader(proc.getInputStream))
diff --git a/test/pending/jvm/timeout.scala b/test/pending/jvm/timeout.scala
index 22b3647dce..8f29f8ddbe 100644
--- a/test/pending/jvm/timeout.scala
+++ b/test/pending/jvm/timeout.scala
@@ -1,4 +1,4 @@
-// Test is in pending because although it suceeds locally,
+// Test is in pending because although it succeeds locally,
// it takes too long on the machine which runs nightly tests.
//
// [partest] EXPECTED: 100 < x < 900
diff --git a/test/pending/pos/t3439.scala b/test/pending/pos/t3439.scala
deleted file mode 100644
index 425f1aeeb5..0000000000
--- a/test/pending/pos/t3439.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-abstract class ParametricMessage[M: Manifest](msg: M) { def message = msg }
-case class ParametricMessage1[M: Manifest](msg: M, p1: Class[_]) extends ParametricMessage(msg)
diff --git a/test/pending/run/delambdafy-lambdametafactory.scala b/test/pending/run/delambdafy-lambdametafactory.scala
new file mode 100644
index 0000000000..daea8a39fe
--- /dev/null
+++ b/test/pending/run/delambdafy-lambdametafactory.scala
@@ -0,0 +1,50 @@
+//
+// Tests that the static accessor method for lambda bodies
+// (generated under -Ydelambdafy:method) are compatible with
+// Java 8's LambdaMetafactory.
+//
+import java.lang.invoke._
+
+class C {
+ def test1: Unit = {
+ (x: String) => x.reverse
+ }
+ def test2: Unit = {
+ val capture1 = "capture1"
+ (x: String) => capture1 + " " + x.reverse
+ }
+ def test3: Unit = {
+ (x: String) => C.this + " " + x.reverse
+ }
+}
+trait T {
+ def test4: Unit = {
+ (x: String) => x.reverse
+ }
+}
+
+// A functional interface. Function1 contains abstract methods that are filled in by mixin
+trait Function1ish[A, B] {
+ def apply(a: A): B
+}
+
+object Test {
+ def lambdaFactory[A, B](hostClass: Class[_], instantiatedParam: Class[A], instantiatedRet: Class[B], accessorName: String,
+ capturedParams: Array[(Class[_], AnyRef)] = Array()) = {
+ val caller = MethodHandles.lookup
+ val methodType = MethodType.methodType(classOf[AnyRef], Array[Class[_]](classOf[AnyRef]))
+ val instantiatedMethodType = MethodType.methodType(instantiatedRet, Array[Class[_]](instantiatedParam))
+ val (capturedParamTypes, captured) = capturedParams.unzip
+ val targetMethodType = MethodType.methodType(instantiatedRet, capturedParamTypes :+ instantiatedParam)
+ val invokedType = MethodType.methodType(classOf[Function1ish[_, _]], capturedParamTypes)
+ val target = caller.findStatic(hostClass, accessorName, targetMethodType)
+ val site = LambdaMetafactory.metafactory(caller, "apply", invokedType, methodType, target, instantiatedMethodType)
+ site.getTarget.invokeWithArguments(captured: _*).asInstanceOf[Function1ish[A, B]]
+ }
+ def main(args: Array[String]) {
+ println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$1").apply("abc"))
+ println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$2", Array(classOf[String] -> "capture1")).apply("abc"))
+ println(lambdaFactory(classOf[C], classOf[String], classOf[String], "accessor$3", Array(classOf[C] -> new C)).apply("abc"))
+ println(lambdaFactory(Class.forName("T$class"), classOf[String], classOf[String], "accessor$4", Array(classOf[T] -> new T{})).apply("abc"))
+ }
+}
diff --git a/test/scaladoc/filters b/test/scaladoc/filters
new file mode 100644
index 0000000000..51a7507848
--- /dev/null
+++ b/test/scaladoc/filters
@@ -0,0 +1,8 @@
+#
+#Java HotSpot(TM) 64-Bit Server VM warning: Failed to reserve shared memory (errno = 28).
+Java HotSpot\(TM\) .* warning:
+# Hotspot receiving VM options through the $_JAVA_OPTIONS
+# env variable outputs them on stderr
+Picked up _JAVA_OPTIONS:
+# Filter out a message caused by this bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=8021205
+objc\[\d+\]: Class JavaLaunchHelper is implemented in both .* and .*\. One of the two will be used\. Which one is undefined\.
diff --git a/test/scaladoc/resources/SI-3314-diagrams.scala b/test/scaladoc/resources/SI-3314-diagrams.scala
index b80a97b522..7d2cc9447c 100644
--- a/test/scaladoc/resources/SI-3314-diagrams.scala
+++ b/test/scaladoc/resources/SI-3314-diagrams.scala
@@ -7,7 +7,7 @@ package scala.test.scaladoc {
* / / / | \ \ \
* Mon Tue Wed Thu Fri Sat Sun
*
- * - each member should receive an inhertiance diagram:
+ * - each member should receive an inheritance diagram:
* Value
* |
* |
diff --git a/test/scaladoc/resources/SI-4476.scala b/test/scaladoc/resources/SI-4476.scala
new file mode 100644
index 0000000000..eb35ef45e7
--- /dev/null
+++ b/test/scaladoc/resources/SI-4476.scala
@@ -0,0 +1,9 @@
+package foo
+
+@deprecated("","")
+class A
+
+class B {
+ @deprecated("","")
+ def bar = 1
+}
diff --git a/test/scaladoc/resources/SI-8144.scala b/test/scaladoc/resources/SI-8144.scala
new file mode 100644
index 0000000000..7b225acb32
--- /dev/null
+++ b/test/scaladoc/resources/SI-8144.scala
@@ -0,0 +1,17 @@
+package some.pack
+
+class SomeType(arg: String) {
+
+ type TypeAlias = String
+
+ def >@<(): TypeAlias = "Tricky method name"
+
+ def >#<(): Int = 1
+
+}
+
+object SomeType {
+
+ val someVal = "Some arbitrary companion object value"
+
+}
diff --git a/test/scaladoc/resources/SI-8514.scala b/test/scaladoc/resources/SI-8514.scala
new file mode 100644
index 0000000000..4c5476604b
--- /dev/null
+++ b/test/scaladoc/resources/SI-8514.scala
@@ -0,0 +1,10 @@
+package a {
+ class DeveloperApi extends scala.annotation.StaticAnnotation
+
+ /** Some doc here */
+ @DeveloperApi
+ class A
+
+ @DeveloperApi
+ class B
+}
diff --git a/test/scaladoc/resources/Trac4420.scala b/test/scaladoc/resources/Trac4420.scala
index dbe053f3da..d8e207876b 100644
--- a/test/scaladoc/resources/Trac4420.scala
+++ b/test/scaladoc/resources/Trac4420.scala
@@ -1,7 +1,7 @@
import java.io.File
/**
- * @define PP This class is an instance of XXX so it's members are not called directly.
+ * @define PP This class is an instance of XXX so its members are not called directly.
* Instead these classes are instantiated via a driver's ''process''. See YYY for more details. */
abstract class test
diff --git a/test/scaladoc/resources/code-indent.scala b/test/scaladoc/resources/code-indent.scala
index 88946ffc7f..2eee3352b4 100644
--- a/test/scaladoc/resources/code-indent.scala
+++ b/test/scaladoc/resources/code-indent.scala
@@ -20,6 +20,12 @@
* an alternative
* the e l s e branch
* }}}
+ * {{{
+ * Trait example {
+ * Val x = a
+ * Val y = b
+ * }
+ * }}}
* NB: Trailing spaces are necessary for this test!
* {{{
* l1
diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala
index 5d692f59ad..b59d2f410d 100644
--- a/test/scaladoc/resources/implicit-inheritance-override.scala
+++ b/test/scaladoc/resources/implicit-inheritance-override.scala
@@ -35,7 +35,7 @@ class DerivedC extends Base {
class DerivedD extends Base {
/**
- * @tparam T The overriden type parameter comment
+ * @tparam T The overridden type parameter comment
*/
override def function[T](arg1: T, arg2: String): Double = 3.0d
} \ No newline at end of file
diff --git a/test/scaladoc/resources/implicits-ambiguating-res.scala b/test/scaladoc/resources/implicits-ambiguating-res.scala
index 6ed51366cb..90e43ac2ed 100644
--- a/test/scaladoc/resources/implicits-ambiguating-res.scala
+++ b/test/scaladoc/resources/implicits-ambiguating-res.scala
@@ -1,5 +1,5 @@
/**
- * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the
+ * Test scaladoc implicits distinguishing -- suppress all members by implicit conversion that are shadowed by the
* class' own members
*
* {{{
diff --git a/test/scaladoc/resources/implicits-shadowing-res.scala b/test/scaladoc/resources/implicits-shadowing-res.scala
index c5e9493bf3..b7f3ceb895 100644
--- a/test/scaladoc/resources/implicits-shadowing-res.scala
+++ b/test/scaladoc/resources/implicits-shadowing-res.scala
@@ -1,5 +1,5 @@
/**
- * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the
+ * Test scaladoc implicits distinguishing -- suppress all members by implicit conversion that are shadowed by the
* class' own members
*
* {{{
diff --git a/test/scaladoc/run/SI-8479.check b/test/scaladoc/run/SI-8479.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-8479.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-8479.scala b/test/scaladoc/run/SI-8479.scala
new file mode 100755
index 0000000000..3c91395025
--- /dev/null
+++ b/test/scaladoc/run/SI-8479.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.partest.ScaladocModelTest
+import java.net.{URI, URL}
+import java.io.File
+
+object Test extends ScaladocModelTest {
+
+ override def code =
+ """
+ |object Test {
+ | val x = new SparkContext(master = "")
+ |}
+ |
+ |class SparkContext(config: Any) {
+ |
+ | /** Scaladoc comment */
+ | def this(
+ | master: String,
+ | appName: String = "") = this(null)
+ |}
+ |
+ |
+ """.stripMargin
+
+ override def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) {
+ // it didn't crash
+ }
+}
diff --git a/test/scaladoc/run/t5730.check b/test/scaladoc/run/t5730.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/t5730.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t5730.scala b/test/scaladoc/run/t5730.scala
new file mode 100644
index 0000000000..cc4c2444b1
--- /dev/null
+++ b/test/scaladoc/run/t5730.scala
@@ -0,0 +1,36 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.T5730
+
+ /**
+ * A link:
+ *
+ * [[scala.Option$ object Option]].
+ */
+ sealed abstract class A
+
+ case object B extends A
+
+ abstract final class C
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val p = rootPackage._package("scala")._package("test")._package("scaladoc")._package("T5730")
+
+ val a = p._class("A")
+ val c = p._class("C")
+
+ assert(a.constructors.isEmpty, s"there should be no constructors, found: ${a.constructors}")
+ assert(c.constructors.isEmpty, s"there should be no constructors, found: ${c.constructors}")
+ }
+}
diff --git a/test/scaladoc/run/t5795.check b/test/scaladoc/run/t5795.check
new file mode 100644
index 0000000000..d08ab619ed
--- /dev/null
+++ b/test/scaladoc/run/t5795.check
@@ -0,0 +1,4 @@
+newSource:16: warning: Could not find any member to link for "Exception".
+ /**
+ ^
+Done.
diff --git a/test/scaladoc/run/t5795.scala b/test/scaladoc/run/t5795.scala
new file mode 100644
index 0000000000..767e4f1a72
--- /dev/null
+++ b/test/scaladoc/run/t5795.scala
@@ -0,0 +1,63 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+/**
+ * Only the 'deprecated' tag should stay.
+ *
+ * @author
+ * @since
+ * @todo
+ * @note
+ * @see
+ * @version
+ * @deprecated
+ * @example
+ * @constructor
+ */
+object Test {
+ /**
+ * Only the 'throws' tag should stay.
+ * @param foo
+ * @param bar
+ * @param baz
+ * @return
+ * @throws Exception
+ * @tparam T
+ */
+ def foo[T](foo: Any, bar: Any, baz: Any): Int = 1
+}
+ """
+
+ def scaladocSettings = ""
+
+ def test(b: Boolean, text: => String): Unit = if (!b) println(text)
+
+ def testModel(root: Package) = {
+ import access._
+ val obj = root._object("Test")
+ val c = obj.comment.get
+
+ test(c.authors.isEmpty, s"expected no authors, found: ${c.authors}")
+ test(!c.since.isDefined, s"expected no since tag, found: ${c.since}")
+ test(c.todo.isEmpty, s"expected no todos, found: ${c.todo}")
+ test(c.note.isEmpty, s"expected no note, found: ${c.note}")
+ test(c.see.isEmpty, s"expected no see, found: ${c.see}")
+ test(!c.version.isDefined, s"expected no version tag, found: ${c.version}")
+ // deprecated stays
+ test(c.deprecated.isDefined, s"expected deprecated tag, found none")
+ test(c.example.isEmpty, s"expected no example, found: ${c.example}")
+ test(!c.constructor.isDefined, s"expected no constructor tag, found: ${c.constructor}")
+
+ val method = obj._method("foo")
+ val mc = method.comment.get
+
+ test(mc.valueParams.isEmpty, s"expected empty value params, found: ${mc.valueParams}")
+ test(mc.typeParams.isEmpty, s"expected empty type params, found: ${mc.typeParams}")
+ test(!mc.result.isDefined, s"expected no result tag, found: ${mc.result}")
+ // throws stay
+ test(!mc.throws.isEmpty, s"expected an exception tag, found: ${mc.throws}")
+ }
+}
diff --git a/test/scaladoc/run/t6626.check b/test/scaladoc/run/t6626.check
new file mode 100644
index 0000000000..de3a6c5c0b
--- /dev/null
+++ b/test/scaladoc/run/t6626.check
@@ -0,0 +1,7 @@
+newSource:10: warning: Could not find any member to link for "SomeUnknownException".
+ /**
+ ^
+newSource:10: warning: Could not find any member to link for "IOException".
+ /**
+ ^
+Done.
diff --git a/test/scaladoc/run/t6626.scala b/test/scaladoc/run/t6626.scala
new file mode 100644
index 0000000000..6c61c605d6
--- /dev/null
+++ b/test/scaladoc/run/t6626.scala
@@ -0,0 +1,42 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+
+package org.foo
+
+class MyException extends Exception
+
+class MyOtherException extends Exception
+
+object Foo {
+ /**
+ * Test exception linking
+ *
+ * @throws org.foo.MyException linked with a fully-qualified name
+ * @throws MyOtherException linked with a relative name
+ * @throws SomeUnknownException not linked at all (but with some text)
+ * @throws IOException
+ */
+ def test(): Unit = ???
+}
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val a = rootPackage._package("org")._package("foo")._object("Foo")._method("test")
+ val throws = a.comment.get.throws
+ val allbodies = Body(throws.values.flatMap(_.blocks).toSeq)
+
+ val links = countLinksInBody(allbodies, _.link.isInstanceOf[LinkToTpl[_]])
+ assert(links == 2, links + " == 2 (links to MyException and MyOtherException)")
+ }
+}
diff --git a/test/scaladoc/run/t8113.check b/test/scaladoc/run/t8113.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/t8113.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t8113.scala b/test/scaladoc/run/t8113.scala
new file mode 100644
index 0000000000..f006213ef2
--- /dev/null
+++ b/test/scaladoc/run/t8113.scala
@@ -0,0 +1,36 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ /**
+ * Check out [[http://www.scala-lang.org
+ * this great website]]!
+ */
+ class Test
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ val test = rootPackage._class("Test")
+
+ // find Link
+ def find(body: Any): Option[Link] = body match {
+ case l: Link => Some(l)
+ case s: Seq[_] => s.toList.map(find(_)).flatten.headOption
+ case p: Product => p.productIterator.toList.map(find(_)).flatten.headOption
+ case _ => None
+ }
+
+ val link = find(test.comment.get.body).collect { case Link(ta, Text(ti)) => (ta, ti) }
+ assert(link.isDefined)
+ val expected = ("http://www.scala-lang.org", "this great website")
+ link.foreach {l => assert(l == expected, s"$l != $expected")}
+ }
+}
diff --git a/test/scaladoc/run/t8314.check b/test/scaladoc/run/t8314.check
new file mode 100644
index 0000000000..aa04c12c8f
--- /dev/null
+++ b/test/scaladoc/run/t8314.check
@@ -0,0 +1,3 @@
+Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This should be ), Monospace(Text(monospaced))))))))))
+
+Done.
diff --git a/test/scaladoc/run/t8314.scala b/test/scaladoc/run/t8314.scala
new file mode 100644
index 0000000000..7f6d6fdb00
--- /dev/null
+++ b/test/scaladoc/run/t8314.scala
@@ -0,0 +1,16 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ /** This should be `monospaced` */
+ class A
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ root._class("A").comment foreach println
+ }
+}
diff --git a/test/scaladoc/run/t8557.check b/test/scaladoc/run/t8557.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/t8557.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t8557.scala b/test/scaladoc/run/t8557.scala
new file mode 100644
index 0000000000..451f004d7d
--- /dev/null
+++ b/test/scaladoc/run/t8557.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.T8857
+
+ /**
+ * A link:
+ *
+ * [[scala.Option$ object Option]].
+ */
+ class A
+ """
+
+ // a non-canonical path to scala-library.jar should still work
+ // this is a bit fragile (depends on the current directory being the root of the repo;
+ // ant & partest seem to do that properly)
+ def scaladocSettings = "-doc-external-doc build/pack/bin/../lib/scala-library.jar#http://www.scala-lang.org/api/current/"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val a = rootPackage._package("scala")._package("test")._package("scaladoc")._package("T8857")._class("A")
+
+ val links = countLinks(a.comment.get, _.link.isInstanceOf[LinkToExternal])
+ assert(links == 1, links + " == 1 (the links to external in class A)")
+ }
+}
diff --git a/test/scaladoc/run/t8672.check b/test/scaladoc/run/t8672.check
new file mode 100644
index 0000000000..d7194c73bf
--- /dev/null
+++ b/test/scaladoc/run/t8672.check
@@ -0,0 +1,4 @@
+Some(Chain(List(Text(New in release 1.2.3.4, it works), Text(.))))
+Some(Text(Sentence no period))
+Some(Chain(List(Text(Sentence period at end), Text(.))))
+Done.
diff --git a/test/scaladoc/run/t8672.scala b/test/scaladoc/run/t8672.scala
new file mode 100644
index 0000000000..8a9b5086bd
--- /dev/null
+++ b/test/scaladoc/run/t8672.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ class C {
+
+ /**
+ * New in release 1.2.3.4, it works. Next sentence.
+ * Next Line.
+ */
+ def method1 = 0
+
+ /** Sentence no period */
+ def method2 = 0
+
+ /** Sentence period at end.*/
+ def method3 = 0
+ }
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val ms = List("method1", "method2", "method3")
+ for (m <- ms) {
+ val method = root._class("C")._method(m)
+ println(method.comment.get.body.summary)
+ }
+ }
+}
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index ff64a25602..d30b78087c 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -24,8 +24,11 @@ class Factory(val g: Global, val s: doc.Settings)
}
}
+ def getComment(s: String): Comment =
+ parse(s, "", scala.tools.nsc.util.NoPosition, null)
+
def parseComment(s: String): Option[Inline] =
- strip(parse(s, "", scala.tools.nsc.util.NoPosition, null))
+ strip(getComment(s))
def createBody(s: String) =
parse(s, "", scala.tools.nsc.util.NoPosition, null).body
@@ -166,4 +169,19 @@ object Test extends Properties("CommentFactory") {
}
}
+ property("Empty parameter text should be empty") = {
+ // used to fail with
+ // body == Body(List(Paragraph(Chain(List(Summary(Text('\n')))))))
+ factory.getComment(
+ """
+/**
+ * @deprecated
+ */
+ """).deprecated match {
+ case Some(Body(l)) if l.isEmpty => true
+ case other =>
+ println(other)
+ false
+ }
+ }
}
diff --git a/test/scaladoc/scalacheck/DeprecatedIndexTest.scala b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
new file mode 100644
index 0000000000..4a5a2001d4
--- /dev/null
+++ b/test/scaladoc/scalacheck/DeprecatedIndexTest.scala
@@ -0,0 +1,50 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.DeprecatedIndex
+import java.net.{URLClassLoader, URLDecoder}
+
+object Test extends Properties("IndexScript") {
+
+ def getClasspath = {
+ // these things can be tricky
+ // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
+ // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+ // this test _will_ fail again some time in the future.
+ // Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader
+ val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader]
+ val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath))
+ paths mkString java.io.File.pathSeparator
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+ settings.scaladocQuietRun = true
+ settings.nowarn.value = true
+ settings.classpath.value = getClasspath
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createDeprecatedScript(path: String) =
+ docFactory.makeUniverse(Left(List(path))) match {
+ case Some(universe) => {
+ val index = new DeprecatedIndex(universe, indexModelFactory.makeIndex(universe))
+ Some(index)
+ }
+ case _ =>
+ None
+ }
+
+ property("deprecated-list page lists deprecated members") = {
+ createDeprecatedScript("test/scaladoc/resources/SI-4476.scala") match {
+ case Some(p) =>
+ p.deprecatedEntries.find(_._1 == "A").isDefined &&
+ p.deprecatedEntries.find(_._1 == "bar").isDefined
+ case None => false
+ }
+ }
+}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 56328ea875..6a6b1f8901 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -149,7 +149,6 @@ object Test extends Properties("HtmlFactory") {
result
}
-
def shortComments(root: scala.xml.Node) =
XMLUtil.stripGroup(root).descendant.flatMap {
case e: scala.xml.Elem => {
@@ -417,7 +416,7 @@ object Test extends Properties("HtmlFactory") {
checkText("SI_5054_q1.scala")(
(None,"""def test(): Int""", true)
//Disabled because the full signature is now displayed
- //(None,"""def test(implicit lost: Int): Int""", false)
+ //(None, """def test(implicit lost: Int): Int""", false)
)
property("SI-5054: Use cases should keep their flags - final should not be lost") =
@@ -486,7 +485,7 @@ object Test extends Properties("HtmlFactory") {
""", true),
(Some("DerivedD"),
"""def function[T](arg1: T, arg2: String): Double
- T The overriden type parameter comment
+ T The overridden type parameter comment
arg1 The T term comment
arg2 The string comment
returns The return comment
@@ -564,7 +563,7 @@ object Test extends Properties("HtmlFactory") {
property("Comment inheritance: Correct explicit inheritance for override") =
checkText("explicit-inheritance-override.scala")(
(Some("InheritDocDerived"),
- """def function[T](arg1: T, arg2: String): Double
+ """def function[T](arg1: T, arg2: String): Double
Starting line
Starting line
The base comment. And another sentence...
@@ -591,7 +590,7 @@ object Test extends Properties("HtmlFactory") {
property("Comment inheritance: Correct explicit inheritance for usecase") =
checkText("explicit-inheritance-usecase.scala")(
(Some("UseCaseInheritDoc"),
- """def function[T](arg1: T, arg2: String): Double
+ """def function[T](arg1: T, arg2: String): Double
[use case] Starting line
[use case] Starting line
The base comment. And another sentence...
@@ -660,6 +659,7 @@ object Test extends Properties("HtmlFactory") {
s.contains("<pre>two lines, one useful</pre>") &&
s.contains("<pre>line1\nline2\nline3\nline4</pre>") &&
s.contains("<pre>a ragged example\na (condition)\n the t h e n branch\nan alternative\n the e l s e branch</pre>") &&
+ s.contains("<pre>Trait example {\n Val x = a\n Val y = b\n}</pre>") &&
s.contains("<pre>l1\n\nl2\n\nl3\n\nl4\n\nl5</pre>")
}
case _ => false
@@ -684,8 +684,8 @@ object Test extends Properties("HtmlFactory") {
oneAuthor match {
case node: scala.xml.Node => {
val s = node.toString
- s.contains("<h6>Author:</h6>")
- s.contains("<p>The Only Author\n</p>")
+ s.contains("<h6>Author:</h6>") &&
+ s.contains("<p>The Only Author</p>")
}
case _ => false
}
@@ -697,9 +697,9 @@ object Test extends Properties("HtmlFactory") {
twoAuthors match {
case node: scala.xml.Node => {
val s = node.toString
- s.contains("<h6>Authors:</h6>")
- s.contains("<p>The First Author\n</p>")
- s.contains("<p>The Second Author\n</p>")
+ s.contains("<h6>Authors:</h6>") &&
+ s.contains("<p>The First Author</p>") &&
+ s.contains("<p>The Second Author</p>")
}
case _ => false
}
@@ -740,5 +740,78 @@ object Test extends Properties("HtmlFactory") {
case node: scala.xml.Node => true
case _ => false
}
+
+ property("SI-8514: No inconsistencies") =
+ checkText("SI-8514.scala")(
+ (Some("a/package"),
+ """class A extends AnyRef
+ Some doc here
+ Some doc here
+ Annotations @DeveloperApi()
+ """, true),
+ (Some("a/package"),
+ """class B extends AnyRef
+ Annotations @DeveloperApi()
+ """, true)
+ )
+ }
+
+ // SI-8144
+ {
+ implicit class AttributesAwareNode(val node: NodeSeq) {
+
+ def \@(attrName: String): String =
+ node \ ("@" + attrName) text
+
+ def \@(attrName: String, attrValue: String): NodeSeq =
+ node filter { _ \ ("@" + attrName) exists (_.text == attrValue) }
+ }
+
+ implicit class AssertionAwareNode(node: scala.xml.NodeSeq) {
+
+ def assertTypeLink(expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("id", "definition") \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
+ }
+
+ def assertMemberLink(group: String)(memberName: String, expectedUrl: String): Boolean = {
+ val linkElement: NodeSeq = node \\ "div" \@ ("id", group) \\ "li" \@ ("name", memberName) \\ "span" \@ ("class", "permalink") \ "a"
+ linkElement \@ "href" == expectedUrl && linkElement \@ "target" == "_top"
+ }
+
+ }
+
+ val files = createTemplates("SI-8144.scala")
+
+ def check(pagePath: String)(f: NodeSeq => org.scalacheck.Prop): org.scalacheck.Prop =
+ files(pagePath) match {
+ case node: scala.xml.Node => f(XMLUtil.stripGroup(node))
+ case _ => false
+ }
+
+ property("SI-8144: Members' permalink - package") = check("some/package.html") { node =>
+ ("type link" |: node.assertTypeLink("../index.html#some.package")) &&
+ ("member: some.pack" |: node.assertMemberLink("values")("some.pack", "../index.html#some.package@pack"))
+ }
+
+ property("SI-8144: Members' permalink - inner package") = check("some/pack/package.html") { node =>
+ ("type link" |: node.assertTypeLink("../../index.html#some.pack.package")) &&
+ ("member: SomeType (object)" |: node.assertMemberLink("values")("some.pack.SomeType", "../../index.html#some.pack.package@SomeType")) &&
+ ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../index.html#some.pack.package@SomeTypeextendsAnyRef"))
+ }
+
+ property("SI-8144: Members' permalink - companion object") = check("some/pack/SomeType$.html") { node =>
+ ("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType$")) &&
+ ("member: someVal" |: node.assertMemberLink("allMembers")("some.pack.SomeType#someVal", "../../index.html#some.pack.SomeType$@someVal:String"))
+ }
+
+ property("SI-8144: Members' permalink - class") = check("some/pack/SomeType.html") { node =>
+ ("type link" |: node.assertTypeLink("../../index.html#some.pack.SomeType")) &&
+ ("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../index.html#some.pack.SomeType@<init>(arg:String):some.pack.SomeType")) &&
+ ( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../index.html#some.pack.SomeType@TypeAlias=String")) &&
+ ( "member: def >#<():Int " |: node.assertMemberLink("values")("some.pack.SomeType#>#<", "../../index.html#some.pack.SomeType@>#<():Int")) &&
+ ( "member: def >@<():TypeAlias " |: node.assertMemberLink("values")("some.pack.SomeType#>@<", "../../index.html#some.pack.SomeType@>@<():SomeType.this.TypeAlias"))
+ }
+
}
}
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index abc0e5da01..7dbd2103a6 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -71,7 +71,7 @@ object Test extends Properties("Index") {
case None => false
}
}
- property("browser contants a script element") = {
+ property("browser contains a script element") = {
createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
(index.browser \ "script").size == 1
@@ -86,4 +86,10 @@ object Test extends Properties("Index") {
case None => false
}
}
+ property("index should report if there are deprecated members") = {
+ createIndex("test/scaladoc/resources/SI-4476.scala") match {
+ case Some(indexPage) => indexPage.index.hasDeprecatedMembers
+ case None => false
+ }
+ }
}
diff --git a/test/script-tests/README b/test/script-tests/README
index 3f5c2ce19c..7b3291c407 100755
--- a/test/script-tests/README
+++ b/test/script-tests/README
@@ -5,4 +5,9 @@ putting self-contained script tests in here to run some way that doesn't
depend on all the platform stars aligning all the time. Feel free to
join me.
--- extempore, Nov 21 2011
\ No newline at end of file
+-- extempore, Nov 21 2011
+
+But there's a problem: since hardly anyone runs these tests, they become outdated quite quickly.
+After a while they no longer work (or even compile) - like the one that exists here currently.
+
+-- mpociecha, Oct 9 2014
\ No newline at end of file
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 654ba21547..84245ca54e 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -2,15 +2,16 @@
#
# Library to push and pull binary artifacts from a remote repository using CURL.
-
remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
remote_urlpush="http://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
libraryJar="$(pwd)/lib/scala-library.jar"
desired_ext=".desired.sha1"
push_jar="$(pwd)/tools/push.jar"
+
if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi
# Cache dir has .sbt in it to line up with SBT build.
-cache_dir="${HOME}/.sbt/cache/scala"
+SCALA_BUILD_REPOS_HOME=${SCALA_BUILD_REPOS_HOME:=$HOME}
+cache_dir="${SCALA_BUILD_REPOS_HOME}/.sbt/cache/scala"
# Checks whether or not curl is installed and issues a warning on failure.
checkCurl() {
@@ -55,7 +56,7 @@ curlDownload() {
if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then
jar=$(cygpath -m $1)
fi
- http_code=$(curl --write-out '%{http_code}' --silent --fail --output "$jar" "$url")
+ http_code=$(curl --write-out '%{http_code}' --silent --fail -L --output "$jar" "$url")
if (( $? != 0 )); then
echo "Error downloading $jar: response code: $http_code"
echo "$url"
diff --git a/tools/scaladoc-compare b/tools/scaladoc-compare
index 74fbfd1dd4..46e1b75a19 100755
--- a/tools/scaladoc-compare
+++ b/tools/scaladoc-compare
@@ -7,7 +7,7 @@ if [ $# -ne 2 ]
then
echo
echo "scaladoc-compare will compare the scaladoc-generated pages in two different locations and output the diff"
- echo "it's main purpose is to track changes to scaladoc and prevent updates that break things."
+ echo "its main purpose is to track changes to scaladoc and prevent updates that break things."
echo
echo "This script is meant to be used with the scaladoc -raw-output option, as it compares .html.raw files "
echo "instead of markup-heavy .html files."
diff --git a/versions.properties b/versions.properties
index 4e0485fdd0..2ecfd23800 100644
--- a/versions.properties
+++ b/versions.properties
@@ -1,33 +1,34 @@
-#Fri, 04 Apr 2014 23:11:56 +0200
+#Wed, 23 Jul 2014 08:37:26 +0200
# NOTE: this file determines the content of the scala-distribution
# via scala-dist-pom.xml and scala-library-all-pom.xml
# when adding new properties that influence a release,
# also add them to the update.versions mechanism in build.xml,
# which is used by scala-release-2.11.x in scala/jenkins-scripts
-starr.version=2.11.0-RC4
+starr.version=2.11.6
starr.use.released=1
# These are the versions of the modules that go with this release.
# These properties are used during PR validation and in dbuild builds.
# e.g. 2.11.0-RC1, 2.11
-scala.binary.version=2.11.0-RC4
+scala.binary.version=2.11
# e.g. 2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1
# this defines the dependency on scala-continuations-plugin in scala-dist's pom
-scala.full.version=2.11.0-RC4
+scala.full.version=2.11.6
# external modules shipped with distribution, as specified by scala-library-all's pom
-scala-xml.version.number=1.0.1
-scala-parser-combinators.version.number=1.0.1
-scala-continuations-plugin.version.number=1.0.1
-scala-continuations-library.version.number=1.0.1
+scala-xml.version.number=1.0.3
+scala-parser-combinators.version.number=1.0.3
+scala-continuations-plugin.version.number=1.0.2
+scala-continuations-library.version.number=1.0.2
scala-swing.version.number=1.0.1
-akka-actor.version.number=2.3.2
+akka-actor.version.number=2.3.4
actors-migration.version.number=1.1.0
+jline.version=2.12.1
# external modules, used internally (not shipped)
-partest.version.number=1.0.0
-scalacheck.version.number=1.11.3
+partest.version.number=1.0.6
+scalacheck.version.number=1.11.4
# TODO: modularize the compiler
#scala-compiler-doc.version.number=1.0.0-RC1