Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/Actor.scala | 2
-rw-r--r--  src/actors/scala/actors/LinkedQueue.java | 2
-rw-r--r--  src/actors/scala/actors/remote/Proxy.scala | 2
-rw-r--r--  src/actors/scala/actors/threadpool/AbstractCollection.java | 2
-rw-r--r--  src/actors/scala/actors/threadpool/ExecutorCompletionService.java | 2
-rw-r--r--  src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java | 4
-rw-r--r--  src/asm/scala/tools/asm/Label.java | 2
-rw-r--r--  src/asm/scala/tools/asm/tree/analysis/Analyzer.java | 4
-rw-r--r--  src/asm/scala/tools/asm/tree/analysis/Interpreter.java | 14
-rw-r--r--  src/asm/scala/tools/asm/util/Printer.java | 18
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Errors.scala | 1
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Resolvers.scala | 6
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Validators.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Infrastructure.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/util/Helpers.scala | 6
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Reifiers.scala | 1
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala | 1
-rw-r--r--  src/compiler/scala/reflect/reify/Taggers.scala | 3
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 4
-rw-r--r--  src/compiler/scala/tools/ant/Scalac.scala | 4
-rwxr-xr-x  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 12
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala | 77
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/CompileClient.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/CompileServer.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/GenericRunnerSettings.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 207
-rw-r--r--  src/compiler/scala/tools/nsc/ObjectRunner.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Parsing.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/PhaseAssembly.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Properties.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/Reporting.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/ScriptRunner.scala | 9
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 192
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/backend/Platform.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ICodes.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Primitives.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala | 69
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala | 76
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 51
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala | 198
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 177
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 142
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala | 83
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala | 112
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 184
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala | 516
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala | 125
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala | 162
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FileUtils.scala | 68
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala | 101
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 180
-rw-r--r--  src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala | 67
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugins.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/AbsSettings.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 51
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/settings/Warnings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 52
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Flatten.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Statics.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 253
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 261
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala | 38
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Solving.scala | 499
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 48
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Tags.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 185
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassFileLookup.scala | 57
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 138
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/ReflectMain.scala | 8
-rw-r--r--  src/compiler/scala/tools/util/Javap.scala | 32
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 102
-rw-r--r--  src/compiler/scala/tools/util/SocketServer.scala | 4
-rw-r--r--  src/intellij-14/README | 12
-rw-r--r--  src/intellij-14/actors.iml.SAMPLE | 14
-rw-r--r--  src/intellij-14/asm.iml.SAMPLE | 12
-rw-r--r--  src/intellij-14/compiler.iml.SAMPLE | 16
-rwxr-xr-x  src/intellij-14/diff.sh | 8
-rw-r--r--  src/intellij-14/forkjoin.iml.SAMPLE | 11
-rw-r--r--  src/intellij-14/interactive.iml.SAMPLE | 16
-rw-r--r--  src/intellij-14/library.iml.SAMPLE | 13
-rw-r--r--  src/intellij-14/manual.iml.SAMPLE | 15
-rw-r--r--  src/intellij-14/partest-extras.iml.SAMPLE | 18
-rw-r--r--  src/intellij-14/partest-javaagent.iml.SAMPLE | 13
-rw-r--r--  src/intellij-14/reflect.iml.SAMPLE | 13
-rw-r--r--  src/intellij-14/repl.iml.SAMPLE | 17
-rw-r--r--  src/intellij-14/scala.iml.SAMPLE | 11
-rw-r--r--  src/intellij-14/scala.ipr.SAMPLE | 261
-rw-r--r--  src/intellij-14/scaladoc.iml.SAMPLE | 17
-rw-r--r--  src/intellij-14/scalap.iml.SAMPLE | 15
-rwxr-xr-x  src/intellij-14/setup.sh | 14
-rw-r--r--  src/intellij-14/test-junit.iml.SAMPLE | 22
-rw-r--r--  src/intellij-14/test.iml.SAMPLE | 22
-rwxr-xr-x  src/intellij-14/update.sh | 22
-rw-r--r--  src/intellij/scala-lang.ipr.SAMPLE | 1
-rw-r--r--  src/intellij/test-osgi.iml.SAMPLE | 23
-rw-r--r--  src/intellij/test/files/neg/virtpatmat_exhaust_big.check | 7
-rw-r--r--  src/intellij/test/files/neg/virtpatmat_exhaust_big.flags | 1
-rw-r--r--  src/intellij/test/files/neg/virtpatmat_exhaust_big.scala | 32
-rw-r--r--  src/intellij/test/files/pos/virtpatmat_exhaust_big.scala | 34
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 27
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Pickler.scala | 6
-rw-r--r--  src/library/scala/Enumeration.scala | 4
-rw-r--r--  src/library/scala/Option.scala | 9
-rw-r--r--  src/library/scala/Predef.scala | 2
-rw-r--r--  src/library/scala/Product.scala | 2
-rw-r--r--  src/library/scala/StringContext.scala | 14
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 4
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 6
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 2
-rwxr-xr-x  src/library/scala/collection/IndexedSeqOptimized.scala | 4
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 9
-rw-r--r--  src/library/scala/collection/Iterator.scala | 18
-rwxr-xr-x  src/library/scala/collection/JavaConverters.scala | 4
-rw-r--r--  src/library/scala/collection/LinearSeq.scala | 7
-rw-r--r--  src/library/scala/collection/LinearSeqLike.scala | 30
-rwxr-xr-x  src/library/scala/collection/LinearSeqOptimized.scala | 41
-rw-r--r--  src/library/scala/collection/MapLike.scala | 2
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 10
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 25
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 4
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 22
-rw-r--r--  src/library/scala/collection/concurrent/Map.scala | 13
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 56
-rw-r--r--  src/library/scala/collection/convert/WrapAsScala.scala | 35
-rw-r--r--  src/library/scala/collection/generic/GenericTraversableTemplate.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 19
-rw-r--r--  src/library/scala/collection/immutable/ListSet.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Map.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 8
-rw-r--r--  src/library/scala/collection/immutable/Stack.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 87
-rw-r--r--  src/library/scala/collection/immutable/StreamViewLike.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 30
-rw-r--r--  src/library/scala/collection/immutable/TreeMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/AnyRefMap.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 21
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/ArraySeq.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/BitSet.scala | 8
-rw-r--r--  src/library/scala/collection/mutable/DoubleLinkedList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/DoubleLinkedListLike.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/IndexedSeqView.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/LinkedListLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/LongMap.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/MultiMap.scala | 7
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 14
-rw-r--r--  src/library/scala/collection/mutable/Queue.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/SetLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/Stack.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/StringBuilder.scala | 2
-rw-r--r--  src/library/scala/collection/package.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParIterable.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSet.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParSetLike.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 2
-rw-r--r--  src/library/scala/compat/Platform.scala | 14
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 18
-rw-r--r--  src/library/scala/concurrent/Future.scala | 2
-rw-r--r--  src/library/scala/concurrent/SyncVar.scala | 4
-rw-r--r--  src/library/scala/concurrent/package.scala | 4
-rw-r--r--  src/library/scala/io/Source.scala | 17
-rw-r--r--  src/library/scala/io/StdIn.scala | 2
-rw-r--r--  src/library/scala/language.scala | 10
-rw-r--r--  src/library/scala/languageFeature.scala | 10
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 6
-rw-r--r--  src/library/scala/math/Ordering.scala | 5
-rw-r--r--  src/library/scala/reflect/ClassTag.scala | 3
-rw-r--r--  src/library/scala/runtime/BoxesRunTime.java | 2
-rw-r--r--  src/library/scala/runtime/MethodCache.scala | 2
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 2
-rw-r--r--  src/library/scala/runtime/Tuple2Zipped.scala | 4
-rw-r--r--  src/library/scala/runtime/Tuple3Zipped.scala | 7
-rw-r--r--  src/library/scala/sys/SystemProperties.scala | 2
-rw-r--r--  src/library/scala/sys/process/package.scala | 2
-rw-r--r--  src/library/scala/util/Either.scala | 8
-rw-r--r--  src/manual/scala/man1/Command.scala | 2
-rw-r--r--  src/manual/scala/man1/scalac.scala | 2
-rw-r--r--  src/partest-extras/scala/tools/partest/BytecodeTest.scala | 8
-rw-r--r--  src/partest-extras/scala/tools/partest/instrumented/Profiler.java | 2
-rw-r--r--  src/reflect/scala/reflect/api/Constants.scala | 4
-rw-r--r--  src/reflect/scala/reflect/api/Exprs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/FlagSets.scala | 6
-rw-r--r--  src/reflect/scala/reflect/api/Liftables.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Mirror.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Mirrors.scala | 6
-rw-r--r--  src/reflect/scala/reflect/api/Names.scala | 12
-rw-r--r--  src/reflect/scala/reflect/api/Printers.scala | 30
-rw-r--r--  src/reflect/scala/reflect/api/StandardDefinitions.scala | 6
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 4
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 10
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 28
-rw-r--r--  src/reflect/scala/reflect/internal/Depth.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/Internals.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/Mirrors.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/Positions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Printers.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/ReificationSupport.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/StdAttachments.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolPairs.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 107
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 9
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/Variances.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 33
-rw-r--r--  src/reflect/scala/reflect/internal/settings/MutableSettings.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/FindMembers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/GlbLubs.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 16
-rw-r--r--  src/reflect/scala/reflect/internal/transform/PostErasure.scala | 1
-rw-r--r--  src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala | 66
-rw-r--r--  src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/util/WeakHashSet.scala | 8
-rw-r--r--  src/reflect/scala/reflect/io/AbstractFile.scala | 16
-rw-r--r--  src/reflect/scala/reflect/io/VirtualFile.scala | 4
-rw-r--r--  src/reflect/scala/reflect/io/ZipArchive.scala | 34
-rw-r--r--  src/reflect/scala/reflect/macros/Enclosures.scala | 2
-rw-r--r--  src/reflect/scala/reflect/macros/Parsers.scala | 2
-rw-r--r--  src/reflect/scala/reflect/macros/Typers.scala | 8
-rw-r--r--  src/reflect/scala/reflect/macros/Universe.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaMirrors.scala | 4
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 4
-rw-r--r--  src/reflect/scala/reflect/runtime/SymbolTable.scala | 2
-rw-r--r--  src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala | 11
-rw-r--r--  src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala | 8
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 288
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 76
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JavapClass.scala | 382
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/LoopCommands.scala | 6
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala | 23
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Power.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplProps.scala | 3
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplReporter.scala | 18
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ReplStrings.scala | 5
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/SimpleReader.scala | 29
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/package.scala | 11
-rw-r--r--  src/scaladoc/scala/tools/ant/Scaladoc.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 20
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/DocParser.scala | 3
-rwxr-xr-x  src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala | 18
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala | 20
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 6
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css | 12
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/Entity.scala | 2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 8
-rw-r--r--  src/scaladoc/scala/tools/partest/ScaladocModelTest.scala | 8
-rw-r--r--  src/scalap/scala/tools/scalap/Arguments.scala | 22
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 197
337 files changed, 6289 insertions, 2438 deletions
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 75160fa18f..293335f720 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -205,7 +205,7 @@ object Actor extends Combinators {
* Actions in `f` have to contain the rest of the computation of `self`,
* as this method will never return.
*
- * A common method of continuting the computation is to send a message
+ * A common method of continuing the computation is to send a message
* to another actor:
* {{{
* react {
diff --git a/src/actors/scala/actors/LinkedQueue.java b/src/actors/scala/actors/LinkedQueue.java
index 796f428cf5..3f7b93c386 100644
--- a/src/actors/scala/actors/LinkedQueue.java
+++ b/src/actors/scala/actors/LinkedQueue.java
@@ -22,7 +22,7 @@ package scala.actors;
* and takes when the queue is not empty.
* Normally a put and a take can proceed simultaneously.
* (Although it does not allow multiple concurrent puts or takes.)
- * This class tends to perform more efficently than
+ * This class tends to perform more efficiently than
* other Channel implementations in producer/consumer
* applications.
* <p>[<a href="http://gee.cs.oswego.edu/dl/classes/EDU/oswego/cs/dl/util/concurrent/intro.html"> Introduction to this package. </a>]
diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala
index 9949b36181..2cb03544f2 100644
--- a/src/actors/scala/actors/remote/Proxy.scala
+++ b/src/actors/scala/actors/remote/Proxy.scala
@@ -84,7 +84,7 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net
}
// Proxy is private[remote], but these classes are public and use it in a public
-// method signature. That makes the only method they have non-overriddable.
+// method signature. That makes the only method they have non-overridable.
// So I made them final, which seems appropriate anyway.
final class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
diff --git a/src/actors/scala/actors/threadpool/AbstractCollection.java b/src/actors/scala/actors/threadpool/AbstractCollection.java
index f3dc1e1292..195a0064ab 100644
--- a/src/actors/scala/actors/threadpool/AbstractCollection.java
+++ b/src/actors/scala/actors/threadpool/AbstractCollection.java
@@ -1,6 +1,6 @@
/*
* Written by Dawid Kurzyniec, based on public domain code written by Doug Lea
- * and publictly available documentation, and released to the public domain, as
+ * and publicly available documentation, and released to the public domain, as
* explained at http://creativecommons.org/licenses/publicdomain
*/
diff --git a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
index 9a4a4fb71c..02e9bbe297 100644
--- a/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
+++ b/src/actors/scala/actors/threadpool/ExecutorCompletionService.java
@@ -135,7 +135,7 @@ public class ExecutorCompletionService implements CompletionService {
* @param completionQueue the queue to use as the completion queue
* normally one dedicated for use by this service. This queue is
* treated as unbounded -- failed attempted <tt>Queue.add</tt>
- * operations for completed taskes cause them not to be
+ * operations for completed tasks cause them not to be
* retrievable.
* @throws NullPointerException if executor or completionQueue are <tt>null</tt>
*/
diff --git a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
index 437af77c7a..914d242100 100644
--- a/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
+++ b/src/actors/scala/actors/threadpool/locks/ReentrantReadWriteLock.java
@@ -20,13 +20,13 @@ import scala.actors.threadpool.helpers.*;
*
* <p>The order of entry
* to the read and write lock is unspecified, subject to reentrancy
- * constraints. A nonfair lock that is continously contended may
+ * constraints. A nonfair lock that is continuously contended may
* indefinitely postpone one or more reader or writer threads, but
* will normally have higher throughput than a fair lock.
* <p>
*
* DEPARTURE FROM java.util.concurrent: this implementation impose
- * a writer-preferrence and thus its acquisition order may be different
+ * a writer-preference and thus its acquisition order may be different
* than in java.util.concurrent.
*
* <li><b>Reentrancy</b>
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
index 5d5529ce74..c094eba408 100644
--- a/src/asm/scala/tools/asm/Label.java
+++ b/src/asm/scala/tools/asm/Label.java
@@ -545,7 +545,7 @@ public class Label {
}
// ------------------------------------------------------------------------
- // Overriden Object methods
+ // Overridden Object methods
// ------------------------------------------------------------------------
/**
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
index 0134555f10..ff840aabde 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -375,7 +375,7 @@ public class Analyzer<V extends Value> implements Opcodes {
* instruction of the method. The size of the returned array is
* equal to the number of instructions (and labels) of the method. A
* given frame is <tt>null</tt> if the corresponding instruction
- * cannot be reached, or if an error occured during the analysis of
+ * cannot be reached, or if an error occurred during the analysis of
* the method.
*/
public Frame<V>[] getFrames() {
@@ -435,7 +435,7 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Creates a control flow graph edge. The default implementation of this
- * method does nothing. It can be overriden in order to construct the
+ * method does nothing. It can be overridden in order to construct the
* control flow graph of a method (this method is called by the
* {@link #analyze analyze} method during its visit of the method's code).
*
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
index 56f4bedc00..00fe6c8bff 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -82,7 +82,7 @@ public abstract class Interpreter<V extends Value> {
* the bytecode instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V newOperation(AbstractInsnNode insn)
throws AnalyzerException;
@@ -101,7 +101,7 @@ public abstract class Interpreter<V extends Value> {
* @return the result of the interpretation of the given instruction. The
* returned value must be <tt>equal</tt> to the given value.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V copyOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -122,7 +122,7 @@ public abstract class Interpreter<V extends Value> {
* the argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V unaryOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -146,7 +146,7 @@ public abstract class Interpreter<V extends Value> {
* the second argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
throws AnalyzerException;
@@ -167,7 +167,7 @@ public abstract class Interpreter<V extends Value> {
* the third argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V ternaryOperation(AbstractInsnNode insn, V value1,
V value2, V value3) throws AnalyzerException;
@@ -185,7 +185,7 @@ public abstract class Interpreter<V extends Value> {
* the arguments of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract V naryOperation(AbstractInsnNode insn,
List<? extends V> values) throws AnalyzerException;
@@ -203,7 +203,7 @@ public abstract class Interpreter<V extends Value> {
* @param expected
* the expected return type of the analyzed method.
* @throws AnalyzerException
- * if an error occured during the interpretation.
+ * if an error occurred during the interpretation.
*/
public abstract void returnOperation(AbstractInsnNode insn, V value,
V expected) throws AnalyzerException;
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
index 4135672c6b..773f129ad9 100644
--- a/src/asm/scala/tools/asm/util/Printer.java
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -181,7 +181,7 @@ public abstract class Printer {
*/
public Printer visitClassTypeAnnotation(final int typeRef,
final TypePath typePath, final String desc, final boolean visible) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -264,7 +264,7 @@ public abstract class Printer {
*/
public Printer visitFieldTypeAnnotation(final int typeRef,
final TypePath typePath, final String desc, final boolean visible) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -287,7 +287,7 @@ public abstract class Printer {
* {@link scala.tools.asm.MethodVisitor#visitParameter(String, int)}.
*/
public void visitParameter(String name, int access) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -309,7 +309,7 @@ public abstract class Printer {
*/
public Printer visitMethodTypeAnnotation(final int typeRef,
final TypePath typePath, final String desc, final boolean visible) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -380,7 +380,7 @@ public abstract class Printer {
visitMethodInsn(opcode, owner, name, desc, itf);
return;
}
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -397,7 +397,7 @@ public abstract class Printer {
visitMethodInsn(opcode, owner, name, desc);
return;
}
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -457,7 +457,7 @@ public abstract class Printer {
*/
public Printer visitInsnAnnotation(final int typeRef,
final TypePath typePath, final String desc, final boolean visible) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -473,7 +473,7 @@ public abstract class Printer {
*/
public Printer visitTryCatchAnnotation(final int typeRef,
final TypePath typePath, final String desc, final boolean visible) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
@@ -491,7 +491,7 @@ public abstract class Printer {
public Printer visitLocalVariableAnnotation(final int typeRef,
final TypePath typePath, final Label[] start, final Label[] end,
final int[] index, final String desc, final boolean visible) {
- throw new RuntimeException("Must be overriden");
+ throw new RuntimeException("Must be overridden");
}
/**
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
index a13a778b2f..b8384851da 100644
--- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -12,7 +12,7 @@ abstract class DefaultMacroCompiler extends Resolvers
import treeInfo._
import definitions._
val runDefinitions = currentRun.runDefinitions
- import runDefinitions.{Predef_???, _}
+ import runDefinitions.Predef_???
val typer: global.analyzer.Typer
val context = typer.context
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
index cc4508e696..98fd091e9c 100644
--- a/src/compiler/scala/reflect/macros/compiler/Errors.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -11,7 +11,6 @@ trait Errors extends Traces {
import analyzer._
import definitions._
import treeInfo._
- import typer.TyperErrorGen._
import typer.infer.InferErrorGen._
import runDefinitions._
def globalSettings = global.settings
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
index 4484c234aa..d3f49390ea 100644
--- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -1,18 +1,12 @@
package scala.reflect.macros
package compiler
-import scala.reflect.internal.Flags._
-import scala.reflect.macros.TypecheckException
-
trait Resolvers {
self: DefaultMacroCompiler =>
import global._
import analyzer._
- import definitions._
import treeInfo._
- import gen._
- import runDefinitions._
trait Resolver {
self: MacroImplRefCompiler =>
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
index a146818ae3..fc932f2b18 100644
--- a/src/compiler/scala/reflect/macros/compiler/Validators.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -9,7 +9,7 @@ trait Validators {
import global._
import analyzer._
import definitions._
- import runDefinitions.{Predef_???, _}
+ import runDefinitions.Predef_???
trait Validator {
self: MacroImplRefCompiler =>
diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
index df7aa4d2be..7088058145 100644
--- a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -12,5 +12,5 @@ trait Infrastructure {
def compilerSettings: List[String] = universe.settings.recreateArgs
- def classPath: List[java.net.URL] = global.classPath.asURLs
+ def classPath: List[java.net.URL] = global.classPath.asURLs.toList
}
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index bddc42d1f9..961c41dab5 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -54,14 +54,10 @@ trait Helpers {
*
* @see Metalevels.scala for more information and examples about metalevels
*/
- def increaseMetalevel(pre: Type, tp: Type): Type = {
- val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
-
+ def increaseMetalevel(pre: Type, tp: Type): Type =
transparentShallowTransform(RepeatedParamClass, tp) {
case tp => typeRef(pre, MacroContextExprClass, List(tp))
}
- }
/** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged.
*/
diff --git a/src/compiler/scala/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
index 007bac27da..97ec7dbfc3 100644
--- a/src/compiler/scala/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
@@ -90,7 +90,7 @@ trait Parsers { self: Quasiquotes =>
case _ => super.makePatDef(mods, pat, rhs)
}
}
- import treeBuilder.{global => _, unit => _, _}
+ import treeBuilder.{global => _, unit => _}
// q"def foo($x)"
override def param(owner: Name, implicitmod: Int, caseParam: Boolean): ValDef =
diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
index 07becdc3c6..cc98717c4e 100644
--- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala
@@ -8,7 +8,6 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
import global.build._
- import global.treeInfo._
import global.definitions._
import Rank._
import universeTypes._
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index b1cc797389..a3e0f02dcc 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -21,7 +21,6 @@ abstract class Reifier extends States
import global._
import definitions._
private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
val typer: global.analyzer.Typer
val universe: Tree
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index 093c2bee22..0863ee38f9 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -79,8 +79,7 @@ abstract class Taggers {
try materializer
catch {
case ReificationException(pos, msg) =>
- c.error(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
- EmptyTree
+ c.abort(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
case UnexpectedReificationException(pos, err, cause) if cause != null =>
throw cause
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 4512b2cb6f..de9fec0df5 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,10 +5,6 @@ trait GenUtils {
self: Reifier =>
import global._
- import treeInfo._
- import definitions._
- private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
def reifyList(xs: List[Any]): Tree =
mkList(xs map reify)
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 1747405f03..13bf0ef4c6 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -97,7 +97,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
@@ -479,7 +479,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Tests if a file exists and prints a warning in case it doesn't. Always
* returns the file, even if it doesn't exist.
- * @param file A file to test for existance.
+ * @param file A file to test for existence.
* @return The same file. */
protected def existing(file: File): File = {
if (!file.exists)
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f58223a39e..7acb3632d2 100755
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -86,10 +86,14 @@ fi
TOOL_CLASSPATH="@classpath@"
if [[ -z "$TOOL_CLASSPATH" ]]; then
for ext in "$SCALA_HOME"/lib/* ; do
- if [[ -z "$TOOL_CLASSPATH" ]]; then
- TOOL_CLASSPATH="$ext"
- else
- TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ file_extension="${ext##*.}"
+ # SI-8967 Only consider directories and files named '*.jar'
+ if [[ -d "$ext" || $file_extension == "jar" ]]; then
+ if [[ -z "$TOOL_CLASSPATH" ]]; then
+ TOOL_CLASSPATH="$ext"
+ else
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
+ fi
fi
done
fi
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index cf0e003f10..50e44fb669 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -128,7 +128,7 @@ if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for %%f in ("!_SCALA_HOME!\lib\*.jar") do call :add_cpath "%%f"
for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
)
diff --git a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala
new file mode 100644
index 0000000000..2faf6c6272
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc
+
+import scala.io.StdIn.readLine
+
+/**
+ * Simple application to check out amount of memory used by chosen classpath representation.
+ * It allows us to create many scalac-like calls based on specified parameters, where each main retains Global.
+ * And we need additional tool (e.g. profiler) to measure memory consumption itself.
+ */
+object ClassPathMemoryConsumptionTester {
+
+ private class TestSettings extends Settings {
+ val requiredInstances = IntSetting("-requiredInstances",
+ "Determine how many times classpath should be loaded", 10, Some((1, 10000)), (_: String) => None)
+ }
+
+ private class MainRetainsGlobal extends scala.tools.nsc.MainClass {
+ var retainedGlobal: Global = _
+ override def doCompile(compiler: Global) {
+ retainedGlobal = compiler
+ super.doCompile(compiler)
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ if (args contains "-help") usage()
+ else doTest(args)
+ }
+
+ private def doTest(args: Array[String]) = {
+ val settings = loadSettings(args.toList)
+
+ val mains = (1 to settings.requiredInstances.value) map (_ => new MainRetainsGlobal)
+
+ // we need original settings without additional params to be able to use them later
+ val baseArgs = argsWithoutRequiredInstances(args)
+
+ println(s"Loading classpath ${settings.requiredInstances.value} times")
+ val startTime = System.currentTimeMillis()
+
+ mains map (_.process(baseArgs))
+
+ val elapsed = System.currentTimeMillis() - startTime
+ println(s"Operation finished - elapsed $elapsed ms")
+ println("Memory consumption can be now measured")
+
+ var textFromStdIn = ""
+ while (textFromStdIn.toLowerCase != "exit")
+ textFromStdIn = readLine("Type 'exit' to close application: ")
+ }
+
+ /**
+ * Prints usage information
+ */
+ private def usage(): Unit =
+ println( """Use classpath and sourcepath options like in the case of e.g. 'scala' command.
+ | There's also one additional option:
+ | -requiredInstances <int value> Determine how many times classpath should be loaded
+ """.stripMargin.trim)
+
+ private def loadSettings(args: List[String]) = {
+ val settings = new TestSettings()
+ settings.processArguments(args, processAll = true)
+ if (settings.classpath.isDefault)
+ settings.classpath.value = sys.props("java.class.path")
+ settings
+ }
+
+ private def argsWithoutRequiredInstances(args: Array[String]) = {
+ val instancesIndex = args.indexOf("-requiredInstances")
+ if (instancesIndex == -1) args
+ else args.dropRight(args.length - instancesIndex) ++ args.drop(instancesIndex + 2)
+ }
+}
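
The new file above is a standalone diagnostic: it loads the configured classpath representation a chosen number of times, each run retaining its Global, so that an external profiler can compare the memory footprint of the classpath implementations. A minimal way to drive it is sketched below; this sketch is not part of the patch, and the classpath value and source file name are placeholders. Only the object name, the -requiredInstances option, and the final 'exit' prompt come from the code above.

    // Hypothetical driver for the new tester (sketch, not from the patch).
    object ClassPathMemoryTestDrive {
      def main(args: Array[String]): Unit = {
        scala.tools.nsc.ClassPathMemoryConsumptionTester.main(Array(
          "-requiredInstances", "50",           // load the classpath 50 times
          "-cp", sys.props("java.class.path"),  // ordinary scalac options may follow
          "SampleSource.scala"                  // placeholder source file to compile
        ))
        // The tester then blocks on stdin; type 'exit' once memory has been measured.
      }
    }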
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 0a356ed7b6..1a6843a249 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
-import scala.tools.nsc.reporters.Reporter
trait CompilationUnits { global: Global =>
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 3017d8c9cc..f259504473 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -43,8 +43,8 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
info(vmArgs.mkString("[VM arguments: ", " ", "]"))
val socket =
- if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown)
- else Some(compileSocket.getSocket(settings.server.value))
+ if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value)
+ else compileSocket.getSocket(settings.server.value)
socket match {
case Some(sock) => compileOnServer(sock, fscArgs)
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 029e1c4629..aa02957a6c 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc
import java.io.PrintStream
+import io.Directory
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.reflect.internal.util.{FakePos, Position}
import scala.tools.util.SocketServer
@@ -19,7 +20,7 @@ import settings.FscSettings
* @author Martin Odersky
* @version 1.0
*/
-class StandardCompileServer extends SocketServer {
+class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) {
lazy val compileSocket: CompileSocket = CompileSocket
private var compiler: Global = null
@@ -166,12 +167,12 @@ class StandardCompileServer extends SocketServer {
}
-object CompileServer extends StandardCompileServer {
+object CompileServer {
/** A directory holding redirected output */
- private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
+ //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def createRedirect(filename: String) =
- new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+ private def createRedirect(dir: Directory, filename: String) =
+ new PrintStream((dir / filename).createFile().bufferedOutput())
def main(args: Array[String]) =
execute(() => (), args)
@@ -187,21 +188,33 @@ object CompileServer extends StandardCompileServer {
*/
def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
+ var port = 0
+ val i = args.indexOf("-p")
+ if (i >= 0 && args.length > i + 1) {
+ scala.util.control.Exception.ignoring(classOf[NumberFormatException]) {
+ port = args(i + 1).toInt
+ }
+ }
+
+ // Create instance rather than extend to pass a port parameter.
+ val server = new StandardCompileServer(port)
+ val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory()
+
if (debug) {
- echo("Starting CompileServer on port " + port)
- echo("Redirect dir is " + redirectDir)
+ server.echo("Starting CompileServer on port " + server.port)
+ server.echo("Redirect dir is " + redirectDir)
}
- Console.withErr(createRedirect("scala-compile-server-err.log")) {
- Console.withOut(createRedirect("scala-compile-server-out.log")) {
- Console.err.println("...starting server on socket "+port+"...")
+ Console.withErr(createRedirect(redirectDir, "scala-compile-server-err.log")) {
+ Console.withOut(createRedirect(redirectDir, "scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+server.port+"...")
Console.err.flush()
- compileSocket setPort port
+ server.compileSocket setPort server.port
startupCallback()
- run()
+ server.run()
- compileSocket deletePort port
+ server.compileSocket deletePort server.port
}
}
}
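
Since CompileServer is now a plain object that instantiates StandardCompileServer, a fixed port can be requested through the new -p argument parsed above. The small sketch below shows one way to launch the server programmatically on a chosen port; it is not part of the patch, and the port number is arbitrary.

    // Sketch: start the compile server verbosely on a fixed port via the new "-p" flag.
    object StartCompileServerOnFixedPort {
      def main(args: Array[String]): Unit =
        scala.tools.nsc.CompileServer.execute(() => (), Array("-v", "-p", "3200"))
    }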
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index c693fbe8e2..27a14141fa 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -46,6 +46,9 @@ trait HasCompileSocket {
class CompileSocket extends CompileOutputCommon {
protected lazy val compileClient: StandardCompileClient = CompileClient
def verbose = compileClient.verbose
+
+ /* Fixes the port where to start the server, 0 yields some free port */
+ var fixPort = 0
/** The prefix of the port identification file, which is followed
* by the port number.
@@ -64,7 +67,7 @@ class CompileSocket extends CompileOutputCommon {
/** The class name of the scala compile server */
protected val serverClass = "scala.tools.nsc.CompileServer"
- protected def serverClassArgs = if (verbose) List("-v") else Nil // debug
+ protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil)
/** A temporary directory to use */
val tmpDir = {
@@ -104,9 +107,14 @@ class CompileSocket extends CompileOutputCommon {
def portFile(port: Int) = portsDir / File(port.toString)
/** Poll for a server port number; return -1 if none exists yet */
- private def pollPort(): Int = portsDir.list.toList match {
+ private def pollPort(): Int = if (fixPort > 0) {
+ if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1
+ } else portsDir.list.toList match {
case Nil => -1
- case x :: xs => try x.name.toInt finally xs foreach (_.delete())
+ case x :: xs => try x.name.toInt catch {
+ case e: Exception => x.delete()
+ throw e
+ }
}
/** Get the port number to which a scala compile server is connected;
@@ -152,7 +160,8 @@ class CompileSocket extends CompileOutputCommon {
* create a new daemon if necessary. Returns None if the connection
* cannot be established.
*/
- def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
+ def getOrCreateSocket(vmArgs: String, create: Boolean = true, fixedPort: Int = 0): Option[Socket] = {
+ fixPort = fixedPort
val maxMillis = 10L * 1000 // try for 10 seconds
val retryDelay = 50L
val maxAttempts = (maxMillis / retryDelay).toInt
@@ -186,14 +195,17 @@ class CompileSocket extends CompileOutputCommon {
try { Some(x.toInt) }
catch { case _: NumberFormatException => None }
- def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
+ def getSocket(serverAdr: String): Option[Socket] = (
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
- def getSocket(hostName: String, port: Int): Socket =
- Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
-
+ def getSocket(hostName: String, port: Int): Option[Socket] = {
+ val sock = Socket(hostName, port).opt
+ if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port))
+ sock
+ }
+
def getPassword(port: Int): String = {
val ff = portFile(port)
val f = ff.bufferedReader()
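
On the client side, CompileSocket now threads a fixed port through getOrCreateSocket and returns Option[Socket] from both lookup paths, so callers decide how to handle a failed connection instead of getSocket aborting the process. A rough sketch of the resulting flow follows; it is not taken from the patch, and the wrapper object and argument values are illustrative.

    import scala.tools.nsc.CompileSocket

    // Sketch: prefer an explicit "host:port" address; otherwise get or start a
    // daemon on the requested fixed port (0 keeps the old "any free port" behaviour).
    object ConnectToCompileServer {
      def connect(vmArgs: String, serverAddr: String, fixedPort: Int) =
        if (serverAddr.nonEmpty) CompileSocket.getSocket(serverAddr)  // yields None if the connection fails
        else CompileSocket.getOrCreateSocket(vmArgs, create = true, fixedPort = fixedPort)
    }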
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index ad75d02bff..1289d55c37 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -5,10 +5,11 @@
package scala.tools.nsc
-import scala.tools.util.PathResolver
+import java.net.URL
+import scala.tools.util.PathResolverFactory
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- def classpathURLs = new PathResolver(this).asURLs
+ def classpathURLs: Seq[URL] = PathResolverFactory.create(this).resultAsURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 452081cff1..1c9dbad4dd 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,18 +8,17 @@ package tools
package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.net.URL
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import java.util.UUID._
import scala.compat.Platform.currentTime
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString }
+import util.{ ClassFileLookup, ClassPath, MergedClassPath, StatisticsInfo, returning }
import scala.reflect.ClassTag
-import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
-import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import scala.reflect.io.VirtualFile
-import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.pickling.PickleBuffer
+import symtab.{ Flags, SymbolTable, SymbolTrackers }
import symtab.classfile.Pickler
import plugins.Plugins
import ast._
@@ -28,13 +27,15 @@ import typechecker._
import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.{ ScalaPrimitives, JavaPlatform }
import backend.jvm.GenBCode
import backend.jvm.GenASM
import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
import scala.language.postfixOps
import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -58,7 +59,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+ def rootLoader: LazyType = {
+ settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new loaders.PackageLoaderUsingFlatClassPath(FlatClassPath.RootPackage, flatClassPath)
+ case ClassPathRepresentationType.Recursive => new loaders.PackageLoader(recursiveClassPath)
+ }
+ }
override def toString = "compiler mirror"
}
implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[GlobalMirror])
@@ -104,7 +110,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type PlatformClassPath = ClassPath[AbstractFile]
type OptClassPath = Option[PlatformClassPath]
- def classPath: PlatformClassPath = platform.classPath
+ def classPath: ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => flatClassPath
+ case ClassPathRepresentationType.Recursive => recursiveClassPath
+ }
+
+ private def recursiveClassPath: ClassPath[AbstractFile] = platform.classPath
+
+ private def flatClassPath: FlatClassPath = platform.flatClassPath
// sub-components --------------------------------------------------
@@ -221,7 +234,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Called by ScalaDocAnalyzer when a doc comment has been parsed. */
def signalParsedDocComment(comment: String, pos: Position) = {
- // TODO: this is all very borken (only works for scaladoc comments, not regular ones)
+ // TODO: this is all very broken (only works for scaladoc comments, not regular ones)
// --> add hooks to parser and refactor Interactive global to handle comments directly
// in any case don't use reporter for parser hooks
reporter.comment(pos, comment)
@@ -319,7 +332,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
None
}
- val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
+ val charset = settings.encoding.valueSetByUser flatMap loadCharset getOrElse {
settings.encoding.value = defaultEncoding // A mandatory charset
Charset.forName(defaultEncoding)
}
@@ -334,16 +347,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
+ settings.sourceReader.valueSetByUser flatMap loadReader getOrElse {
new SourceReader(charset.newDecoder(), reporter)
}
}
- if (settings.verbose || settings.Ylogcp) {
+ if (settings.verbose || settings.Ylogcp)
reporter.echo(
- s"[search path for source files: ${classPath.sourcepaths.mkString(",")}]\n"+
- s"[search path for class files: ${classPath.asClasspathString}")
- }
+ s"[search path for source files: ${classPath.asSourcePathString}]\n" +
+ s"[search path for class files: ${classPath.asClassPathString}]"
+ )
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
// clunky. It is often difficult to have a setting influence something without having
@@ -842,6 +855,156 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
} reverse
}
+ // ------------ REPL utilities ---------------------------------
+
+ /** Extend classpath of `platform` and rescan updated packages. */
+ def extendCompilerClassPath(urls: URL*): Unit = {
+ if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
+ throw new UnsupportedOperationException("Flat classpath doesn't support extending the compiler classpath")
+
+ val newClassPath = platform.classPath.mergeUrlsIntoClassPath(urls: _*)
+ platform.currentClassPath = Some(newClassPath)
+ // Reload all specified jars into this compiler instance
+ invalidateClassPathEntries(urls.map(_.getPath): _*)
+ }
+
+ // ------------ Invalidations ---------------------------------
+
+ /** Is given package class a system package class that cannot be invalidated?
+ */
+ private def isSystemPackageClass(pkg: Symbol) =
+ pkg == RootClass || (pkg.hasTransOwner(definitions.ScalaPackageClass) && !pkg.hasTransOwner(this.rootMirror.staticPackage("scala.tools").moduleClass.asClass))
+
+ /** Invalidates packages that contain classes defined in a classpath entry, and
+ * rescans that entry.
+ *
+ * First, the classpath entry referred to by one of the `paths` is rescanned,
+ * so that any new files or changes in subpackages are picked up.
+ * Second, any packages for which one of the following conditions is met are invalidated:
+ * - the classpath entry contained, during the last compilation run, classfiles
+ * that represent a member in the package;
+ * - the classpath entry now contains classfiles that represent a member in the package;
+ * - the set of subpackages has changed.
+ *
+ * The invalidated packages are reset in their entirety; all member classes and member packages
+ * are re-accessed using the new classpath.
+ *
+ * System packages that the compiler needs to access as part of standard definitions
+ * are not invalidated. A system package is:
+ * Any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ *
+ * @param paths Fully-qualified names that refer to directories or jar files that are
+ * entries on the classpath.
+ */
+ def invalidateClassPathEntries(paths: String*): Unit = {
+ if (settings.YclasspathImpl.value == ClassPathRepresentationType.Flat)
+ throw new UnsupportedOperationException("Flat classpath doesn't support the classpath invalidation")
+
+ implicit object ClassPathOrdering extends Ordering[PlatformClassPath] {
+ def compare(a:PlatformClassPath, b:PlatformClassPath) = a.asClassPathString compare b.asClassPathString
+ }
+ val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
+ classPath match {
+ case cp: MergedClassPath[_] =>
+ def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
+ val dir = AbstractFile.getDirectory(path)
+ val canonical = dir.canonicalPath
+ def matchesCanonical(e: ClassPath[_]) = e.origin match {
+ case Some(opath) =>
+ AbstractFile.getDirectory(opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ cp.entries find matchesCanonical match {
+ case Some(oldEntry) =>
+ List(oldEntry -> cp.context.newClassPath(dir))
+ case None =>
+ error(s"Error adding entry to classpath. During invalidation, no entry named $path in classpath $classPath")
+ List()
+ }
+ }
+ val subst = immutable.TreeMap(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
+ if (elems.size == 1) elems.head
+ else new MergedClassPath(elems, recursiveClassPath.context)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ mergeNewEntries(newEntries, RootClass, Some(recursiveClassPath), Some(oldEntries), invalidated, failed)
+ }
+ }
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
+ if (syms.nonEmpty)
+ informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
+ show("invalidated packages", invalidated)
+ show("could not invalidate system packages", failed)
+ }
+
+ /** Merges new classpath entries into the symbol table
+ *
+ * @param newEntries The new classpath entries
+ * @param root The root symbol to be resynced (a package class)
+ * @param allEntries Optionally, the corresponding package in the complete current classpath
+ * @param oldEntries Optionally, the corresponding package in the old classpath entries
+ * @param invalidated A listbuffer collecting the invalidated package classes
+ * @param failed A listbuffer collecting system package classes which could not be invalidated
+ *
+ * The merging strategy is determined by the absence or presence of classes and packages.
+ *
+ * If either oldEntries or newEntries contains classes, root is invalidated provided that a corresponding package
+ * exists in allEntries. Otherwise it is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
+ *
+ * old sym action
+ * + + recurse into all child packages of newEntries
+ * - + invalidate root
+ * - - create and enter root
+ *
+ * Here, old means classpath, and sym means symbol table. + is presence of an entry in its column, - is absence.
+ */
+ private def mergeNewEntries(newEntries: PlatformClassPath, root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
+ ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
+
+ val getName: ClassPath[AbstractFile] => String = (_.name)
+ def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
+ def invalidateOrRemove(root: ClassSymbol) = {
+ allEntries match {
+ case Some(cp) => root setInfo new loaders.PackageLoader(cp)
+ case None => root.owner.info.decls unlink root.sourceModule
+ }
+ invalidated += root
+ }
+ def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
+ cp.packages find (cp1 => getName(cp1) == name)
+
+ val classesFound = hasClasses(oldEntries) || newEntries.classes.nonEmpty
+ if (classesFound && !isSystemPackageClass(root)) {
+ invalidateOrRemove(root)
+ } else {
+ if (classesFound) {
+ if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
+ else failed += root
+ }
+ if (!oldEntries.isDefined) invalidateOrRemove(root)
+ else
+ for (pstr <- newEntries.packages.map(getName)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package does not exist in symbol table, create symbol to track it
+ assert(!subPackage(oldEntries.get, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
+ }
+ mergeNewEntries(subPackage(newEntries, pstr).get, pkg.moduleClass.asClass,
+ subPackage(allEntries.get, pstr), subPackage(oldEntries.get, pstr),
+ invalidated, failed)
+ }
+ }
+ }
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
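The mergeNewEntries documentation above boils down to a small decision matrix over "old" (package present in the old classpath entries) and "sym" (package present in the symbol table). A standalone sketch of just that matrix, with invented names, for illustration only; the classes-present case is handled before the matrix, as described above:

sealed trait MergeAction
case object RecurseIntoSubpackages extends MergeAction
case object InvalidateRoot extends MergeAction
case object CreateAndEnterRoot extends MergeAction

def mergeAction(old: Boolean, sym: Boolean): MergeAction = (old, sym) match {
  case (true, true)   => RecurseIntoSubpackages
  case (false, true)  => InvalidateRoot
  case (false, false) => CreateAndEnterRoot
  case (true, false)  => sys.error("row not listed in the matrix above")
}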
@@ -1232,13 +1395,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** does this run compile given class, module, or case factory? */
// NOTE: Early initialized members temporarily typechecked before the enclosing class, see typedPrimaryConstrBody!
- // Here we work around that wrinkle by claiming that a top-level, early-initialized member is compiled in
+ // Here we work around that wrinkle by claiming that an early-initialized member is compiled in
// *every* run. This approximation works because this method is exclusively called with `this` == `currentRun`.
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (sym.isTopLevel && sym.isEarlyInitialized) true
- else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
+ else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClassOrDummy)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1299,7 +1461,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
- /** Caching member symbols that are def-s in Defintions because they might change from Run to Run. */
+ /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. */
val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions
/** Compile list of source files,
@@ -1447,10 +1609,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Reset package class to state at typer (not sure what this
- * is needed for?)
+ /** Reset package class to state at typer (not sure what this is needed for?)
*/
- private def resetPackageClass(pclazz: Symbol) {
+ private def resetPackageClass(pclazz: Symbol): Unit = if (typerPhase != NoPhase) {
enteringPhase(firstPhase) {
pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
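Several hunks in this file (rootLoader, classPath) and in later files (ScriptRunner, JavaPlatform) share one shape: dispatch on the -YclasspathImpl choice between the recursive and the flat classpath representation, both usable through a common lookup interface. A stripped-down sketch of that shape with invented stand-in types (the real ones are ClassPath[AbstractFile], FlatClassPath and ClassFileLookup[AbstractFile]):

sealed trait Lookup { def findClassFile(name: String): Option[String] }
final class RecursiveCp extends Lookup { def findClassFile(name: String): Option[String] = None }
final class FlatCp extends Lookup { def findClassFile(name: String): Option[String] = None }

object Representation extends Enumeration { val Flat, Recursive = Value }

// mirrors Global.classPath: pick the representation once, hand out the common interface
def classPathFor(impl: Representation.Value, flat: => FlatCp, recursive: => RecursiveCp): Lookup =
  impl match {
    case Representation.Flat      => flat
    case Representation.Recursive => recursive
  }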
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index 95264aeda6..7c14f4943f 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -18,14 +18,14 @@ trait CommonRunner {
* @throws NoSuchMethodException
* @throws InvocationTargetException
*/
- def run(urls: List[URL], objectName: String, arguments: Seq[String]) {
+ def run(urls: Seq[URL], objectName: String, arguments: Seq[String]) {
(ScalaClassLoader fromURLs urls).run(objectName, arguments)
}
/** Catches exceptions enumerated by run (in the case of InvocationTargetException,
* unwrapping it) and returns any exception thrown in Left(x).
*/
- def runAndCatch(urls: List[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
+ def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
try { run(urls, objectName, arguments) ; Right(true) }
catch { case e: Throwable => Left(unwrap(e)) }
}
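ScalaClassLoader fromURLs builds a URL class loader over the given entries, and run invokes the named object's main method. A rough JDK-only equivalent, ignoring the context-classloader handling and error unwrapping the real runner does (names here are invented):

import java.net.{URL, URLClassLoader}

def runMainSketch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = {
  val loader = new URLClassLoader(urls.toArray, ClassLoader.getSystemClassLoader)
  val clazz = Class.forName(objectName, true, loader)
  // a top-level `object` with a main method gets a static forwarder in its mirror class
  val main = clazz.getMethod("main", classOf[Array[String]])
  main.invoke(null, Array[AnyRef](arguments.toArray[String]): _*)
}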
diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala
index 4dd3c3f378..9e5999ce4f 100644
--- a/src/compiler/scala/tools/nsc/Parsing.scala
+++ b/src/compiler/scala/tools/nsc/Parsing.scala
@@ -7,7 +7,6 @@ package scala
package tools.nsc
import scala.reflect.internal.Positions
-import scala.tools.nsc.reporters.Reporter
/** Similar to Reporting: gather global functionality specific to parsing.
*/
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cfb4cd23a1..e1cfa63960 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -18,7 +18,7 @@ trait PhaseAssembly {
/**
* Aux datastructure for solving the constraint system
- * The depency graph container with helper methods for node and edge creation
+ * The dependency graph container with helper methods for node and edge creation
*/
private class DependencyGraph {
@@ -199,7 +199,7 @@ trait PhaseAssembly {
// Add all phases in the set to the graph
val graph = phasesSetToDepGraph(phasesSet)
- val dot = if (settings.genPhaseGraph.isSetByUser) Some(settings.genPhaseGraph.value) else None
+ val dot = settings.genPhaseGraph.valueSetByUser
// Output the phase dependency graph at this stage
def dump(stage: Int) = dot foreach (n => graphToDotFile(graph, s"$n-$stage.dot"))
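valueSetByUser replaces the repeated if (x.isSetByUser) Some(x.value) else None pattern used here and in Global above. A minimal model of a setting showing how such an accessor can be defined (the real type lives in scala.tools.nsc.settings.MutableSettings; this sketch is not its implementation):

class SettingSketch[T](default: T) {
  private var byUser: Option[T] = None
  def set(v: T): Unit = byUser = Some(v)
  def value: T = byUser.getOrElse(default)
  def isSetByUser: Boolean = byUser.isDefined
  def valueSetByUser: Option[T] = if (isSetByUser) Some(value) else None
}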
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index bec686ec05..9f160e2485 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -14,7 +14,9 @@ object Properties extends scala.util.PropertiesTrait {
// settings based on jar properties, falling back to System prefixed by "scala."
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- def shellInterruptedString = scalaPropOrElse("shell.interrupted", ":quit\n")
+ // message to display at EOF (which by default ends with
+ // a newline so as not to break the user's terminal)
+ def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator")
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
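The new default ends with the platform line separator so that quitting at EOF leaves the terminal on a fresh line. A simplified model of the lookup (the real scalaPropOrElse also consults the properties bundled with the compiler jar, not only "scala."-prefixed system properties):

def shellInterruptedStringSketch: String =
  sys.props.getOrElse("scala.shell.interrupted", ":quit" + System.lineSeparator)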
diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala
index c9782de7c8..4d7e9e753f 100644
--- a/src/compiler/scala/tools/nsc/Reporting.scala
+++ b/src/compiler/scala/tools/nsc/Reporting.scala
@@ -7,7 +7,6 @@ package scala
package tools
package nsc
-import reporters.{ Reporter, ConsoleReporter }
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.countElementsAsString
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 7d5c6f6fff..6d24b31531 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -8,7 +8,10 @@ package tools.nsc
import io.{ AbstractFile, Directory, File, Path }
import java.io.IOException
+import scala.tools.nsc.classpath.DirectoryFlatClassPath
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import util.Exceptional.unwrap
/** An object that runs Scala code in script files.
@@ -112,8 +115,10 @@ class ScriptRunner extends HasCompileSocket {
}
def hasClassToRun(d: Directory): Boolean = {
- import util.ClassPath.{ DefaultJavaContext => ctx }
- val cp = ctx.newClassPath(AbstractFile.getDirectory(d))
+ val cp = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Recursive => DefaultJavaContext.newClassPath(AbstractFile.getDirectory(d))
+ case ClassPathRepresentationType.Flat => DirectoryFlatClassPath(d.jfile)
+ }
cp.findClass(mainClass).isDefined
}
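hasClassToRun asks the chosen classpath implementation whether the freshly compiled directory contains the script's main class. Conceptually, for a single directory entry, that is just a file-existence check; a rough standalone sketch (ignoring jar entries and packages split across entries):

import java.nio.file.{Files, Path}

def hasClassToRunSketch(dir: Path, mainClass: String): Boolean =
  Files.isRegularFile(dir.resolve(mainClass.replace('.', '/') + ".class"))

// hasClassToRunSketch(java.nio.file.Paths.get("out"), "Main")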
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index d3f495f280..96939e616c 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -346,12 +346,11 @@ trait MarkupParsers {
// parse more XML ?
if (charComingAfter(xSpaceOpt()) == '<') {
- xSpaceOpt()
- while (ch == '<') {
+ do {
+ xSpaceOpt()
nextch()
ts append element
- xSpaceOpt()
- }
+ } while (charComingAfter(xSpaceOpt()) == '<')
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
else {
@@ -426,11 +425,10 @@ trait MarkupParsers {
if (ch != '/') ts append xPattern // child
else return false // terminate
- case '{' => // embedded Scala patterns
- while (ch == '{') {
- nextch()
+ case '{' if xCheckEmbeddedBlock => // embedded Scala patterns, if not double brace
+ do {
ts ++= xScalaPatterns
- }
+ } while (xCheckEmbeddedBlock)
assert(!xEmbeddedBlock, "problem with embedded block")
case SU =>
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 15d6a4d1b4..4663810003 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1559,7 +1559,7 @@ self =>
}
/** {{{
- * PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
+ * PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
* }}}
*/
def prefixExpr(): Tree = {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 9ebc94b5fc..92833d647b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -453,18 +453,15 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
case '0' =>
- def fetchZero() = {
- putChar(ch)
+ def fetchLeadingZero(): Unit = {
nextChar()
- if (ch == 'x' || ch == 'X') {
- nextChar()
- base = 16
- } else {
- base = 8
+ ch match {
+ case 'x' | 'X' => base = 16 ; nextChar()
+ case _ => base = 8 // single decimal zero, perhaps
}
- getNumber()
}
- fetchZero()
+ fetchLeadingZero()
+ getNumber()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
@@ -902,62 +899,61 @@ trait Scanners extends ScannersCommon {
*/
def charVal: Char = if (strVal.length > 0) strVal.charAt(0) else 0
- /** Convert current strVal, base to long value
+ /** Convert current strVal, base to long value.
* This is tricky because of max negative value.
+ *
+ * Conversions in base 10 and 16 are supported. As a permanent migration
+ * path, attempts to write base 8 literals except `0` emit a verbose error.
*/
def intVal(negated: Boolean): Long = {
- if (token == CHARLIT && !negated) {
- charVal.toLong
- } else {
- var value: Long = 0
- val divider = if (base == 10) 1 else 2
- val limit: Long =
- if (token == LONGLIT) Long.MaxValue else Int.MaxValue
- var i = 0
+ def malformed: Long = {
+ if (base == 8) syntaxError("Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)")
+ else syntaxError("malformed integer number")
+ 0
+ }
+ def tooBig: Long = {
+ syntaxError("integer number too large")
+ 0
+ }
+ def intConvert: Long = {
val len = strVal.length
- while (i < len) {
- val d = digit2int(strVal charAt i, base)
- if (d < 0) {
- syntaxError("malformed integer number")
- return 0
- }
- if (value < 0 ||
- limit / (base / divider) < value ||
- limit - (d / divider) < value * (base / divider) &&
- !(negated && limit == value * base - 1 + d)) {
- syntaxError("integer number too large")
- return 0
- }
- value = value * base + d
- i += 1
+ if (len == 0) {
+ if (base != 8) syntaxError("missing integer number") // e.g., 0x;
+ 0
+ } else {
+ val divider = if (base == 10) 1 else 2
+ val limit: Long = if (token == LONGLIT) Long.MaxValue else Int.MaxValue
+ @tailrec def convert(value: Long, i: Int): Long =
+ if (i >= len) value
+ else {
+ val d = digit2int(strVal charAt i, base)
+ if (d < 0)
+ malformed
+ else if (value < 0 ||
+ limit / (base / divider) < value ||
+ limit - (d / divider) < value * (base / divider) &&
+ !(negated && limit == value * base - 1 + d))
+ tooBig
+ else
+ convert(value * base + d, i + 1)
+ }
+ val result = convert(0, 0)
+ if (base == 8) malformed else if (negated) -result else result
}
- if (negated) -value else value
}
+ if (token == CHARLIT && !negated) charVal.toLong else intConvert
}
def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
- */
+ */
def floatVal(negated: Boolean): Double = {
-
- val limit: Double =
- if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
+ val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
- def isDeprecatedForm = {
- val idx = strVal indexOf '.'
- (idx == strVal.length - 1) || (
- (idx >= 0)
- && (idx + 1 < strVal.length)
- && (!Character.isDigit(strVal charAt (idx + 1)))
- )
- }
if (value > limit)
syntaxError("floating point number too large")
- if (isDeprecatedForm)
- syntaxError("floating point number is missing digit after dot")
-
if (negated) -value else value
} catch {
case _: NumberFormatException =>
@@ -968,86 +964,44 @@ trait Scanners extends ScannersCommon {
def floatVal: Double = floatVal(negated = false)
- def checkNoLetter(): Unit = {
+ def checkNoLetter(): Unit = {
if (isIdentifierPart(ch) && ch >= ' ')
syntaxError("Invalid literal number")
}
- /** Read a number into strVal and set base */
- protected def getNumber(): Unit = {
- val base1 = if (base < 10) 10 else base
- // Read 8,9's even if format is octal, produce a malformed number error afterwards.
- // At this point, we have already read the first digit, so to tell an innocent 0 apart
- // from an octal literal 0123... (which we want to disallow), we check whether there
- // are any additional digits coming after the first one we have already read.
- var notSingleZero = false
- while (digit2int(ch, base1) >= 0) {
- putChar(ch)
- nextChar()
- notSingleZero = true
- }
- token = INTLIT
-
- /* When we know for certain it's a number after using a touch of lookahead */
- def restOfNumber() = {
- putChar(ch)
- nextChar()
+ /** Read a number into strVal.
+ *
+ * The `base` can be 8, 10 or 16, where base 8 flags a leading zero.
+ * For ints, base 8 is legal only for the case of exactly one zero.
+ */
+ protected def getNumber(): Unit = {
+ // consume digits of a radix
+ def consumeDigits(radix: Int): Unit =
+ while (digit2int(ch, radix) >= 0) {
+ putChar(ch)
+ nextChar()
+ }
+ // adding decimal point is always OK because `Double valueOf "0."` is OK
+ def restOfNonIntegralNumber(): Unit = {
+ putChar('.')
+ if (ch == '.') nextChar()
getFraction()
}
- def restOfUncertainToken() = {
- def isEfd = ch match { case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => true ; case _ => false }
- def isL = ch match { case 'l' | 'L' => true ; case _ => false }
-
- if (base <= 10 && isEfd)
- getFraction()
- else {
- // Checking for base == 8 is not enough, because base = 8 is set
- // as soon as a 0 is read in `case '0'` of method fetchToken.
- if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.")
- setStrVal()
- if (isL) {
- nextChar()
- token = LONGLIT
- }
- else checkNoLetter()
+ // after int: 5e7f, 42L, 42.toDouble but not 42b. Repair 0d.
+ def restOfNumber(): Unit = {
+ ch match {
+ case 'e' | 'E' | 'f' | 'F' |
+ 'd' | 'D' => if (cbuf.isEmpty) putChar('0'); restOfNonIntegralNumber()
+ case 'l' | 'L' => token = LONGLIT ; setStrVal() ; nextChar()
+ case _ => token = INTLIT ; setStrVal() ; checkNoLetter()
}
}
- if (base > 10 || ch != '.')
- restOfUncertainToken()
- else {
- val lookahead = lookaheadReader
- val c = lookahead.getc()
-
- /* Prohibit 1. */
- if (!isDigit(c))
- return setStrVal()
-
- val isDefinitelyNumber = (c: @switch) match {
- /** Another digit is a giveaway. */
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- true
+ // consume leading digits, provisionally an Int
+ consumeDigits(if (base == 16) 16 else 10)
- /* Backquoted idents like 22.`foo`. */
- case '`' =>
- return setStrVal() /** Note the early return */
-
- /* These letters may be part of a literal, or a method invocation on an Int.
- */
- case 'd' | 'D' | 'f' | 'F' =>
- !isIdentifierPart(lookahead.getc())
-
- /* A little more special handling for e.g. 5e7 */
- case 'e' | 'E' =>
- val ch = lookahead.getc()
- !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
-
- case x =>
- !isIdentifierStart(x)
- }
- if (isDefinitelyNumber) restOfNumber()
- else restOfUncertainToken()
- }
+ val detectedFloat: Boolean = base != 16 && ch == '.' && isDigit(lookaheadReader.getc)
+ if (detectedFloat) restOfNonIntegralNumber() else restOfNumber()
}
/** Parse character literal if current character is followed by \',
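The rewritten scanner separates reading digits (getNumber, which records the base) from converting them (intVal, which checks for overflow and rejects obsolete octal literals other than a bare 0). A self-contained sketch of the conversion step for Int literals; the names and the Either error channel are invented, and the real code also handles Long and character literals:

def convertIntLiteral(digits: String, base: Int, negated: Boolean): Either[String, Int] = {
  // Int.MinValue's magnitude exceeds Int.MaxValue's by one
  val limit: Long = if (negated) Int.MaxValue.toLong + 1 else Int.MaxValue.toLong
  if (base == 8 && digits != "0")
    Left("Decimal integer literals may not have a leading zero. (Octal syntax is obsolete.)")
  else {
    @annotation.tailrec
    def loop(i: Int, acc: Long): Either[String, Int] =
      if (i >= digits.length) Right(if (negated) (-acc).toInt else acc.toInt)
      else {
        val d = Character.digit(digits.charAt(i), base)
        if (d < 0) Left("malformed integer number")
        else {
          val next = acc * base + d
          if (next > limit) Left("integer number too large")
          else loop(i + 1, next)
        }
      }
    loop(0, 0L)
  }
}

// convertIntLiteral("7fffffff", 16, negated = false) == Right(Int.MaxValue)
// convertIntLiteral("80000000", 16, negated = true)  == Right(Int.MinValue)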
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 1abc0c860c..8cd915bf22 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -184,7 +184,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
)
val uri1 = attrMap(z) match {
- case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri)
+ case Apply(Select(New(Select(Select(Select(Ident(nme.ROOTPKG), nme.scala_), nme.xml), tpnme.Text)), nme.CONSTRUCTOR), List(uri @ Literal(Constant(_)))) =>
+ mkAssign(uri)
case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626
case x => mkAssign(x)
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 7236bf70d5..6bd123c51f 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,7 +7,10 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.{ClassPath,MergedClassPath,DeltaClassPath}
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.{ ClassPath, DeltaClassPath, MergedClassPath }
+import scala.tools.util.FlatClassPathResolver
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
@@ -16,13 +19,23 @@ trait JavaPlatform extends Platform {
import global._
import definitions._
- private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
+ private[nsc] var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
def classPath: ClassPath[AbstractFile] = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Recursive,
+ "To use recursive classpath representation you must enable it with -YclasspathImpl:recursive compiler option.")
+
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
+ private[nsc] lazy val flatClassPath: FlatClassPath = {
+ assert(settings.YclasspathImpl.value == ClassPathRepresentationType.Flat,
+ "To use flat classpath representation you must enable it with -YclasspathImpl:flat compiler option.")
+
+ new FlatClassPathResolver(settings).result
+ }
+
/** Update classpath with a substituted subentry */
def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index 439cc1efb8..c3bc213be1 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -8,6 +8,7 @@ package backend
import util.ClassPath
import io.AbstractFile
+import scala.tools.nsc.classpath.FlatClassPath
/** The platform dependent pieces of Global.
*/
@@ -15,9 +16,12 @@ trait Platform {
val symbolTable: symtab.SymbolTable
import symbolTable._
- /** The compiler classpath. */
+ /** The old, recursive implementation of compiler classpath. */
def classPath: ClassPath[AbstractFile]
+ /** The new implementation of compiler classpath. */
+ private[nsc] def flatClassPath: FlatClassPath
+
/** Update classpath with a substitution that maps entries to entries */
def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index f9551697d2..ad1975ef23 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -300,14 +300,16 @@ trait BasicBlocks {
if (!closed)
instructionList = instructionList map (x => map.getOrElse(x, x))
else
- instrs.zipWithIndex collect {
- case (oldInstr, i) if map contains oldInstr =>
- // SI-6288 clone important here because `replaceInstruction` assigns
- // a position to `newInstr`. Without this, a single instruction can
- // be added twice, and the position last position assigned clobbers
- // all previous positions in other usages.
- val newInstr = map(oldInstr).clone()
- code.touched |= replaceInstruction(i, newInstr)
+ instrs.iterator.zipWithIndex foreach {
+ case (oldInstr, i) =>
+ if (map contains oldInstr) {
+ // SI-6288 clone important here because `replaceInstruction` assigns
+ // a position to `newInstr`. Without this, a single instruction can
+ // be added twice, and the position last position assigned clobbers
+ // all previous positions in other usages.
+ val newInstr = map(oldInstr).clone()
+ code.touched |= replaceInstruction(i, newInstr)
+ }
}
////////////////////// Emit //////////////////////
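The hunk above replaces a collect whose partial function was run only for its side effects with an explicit iterator plus foreach, avoiding the intermediate collection and making the intent plain. The same refactoring in isolation, on invented data:

val replacements = Map("b" -> "B")
val out = new StringBuilder

// before: builds (and discards) an intermediate collection
Vector("a", "b", "c").zipWithIndex collect {
  case (s, i) if replacements contains s => out ++= s"$i->${replacements(s)} "
}

// after: side effects only, no intermediate collection
Vector("a", "b", "c").iterator.zipWithIndex foreach {
  case (s, i) => if (replacements contains s) out ++= s"$i->${replacements(s)} "
}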
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index a1cec2ee0b..72aa44d8d9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1077,7 +1077,7 @@ abstract class GenICode extends SubComponent {
()
case (_, UNIT) =>
ctx.bb.emit(DROP(from), pos)
- // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem
+ // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
coerce(from, to)
case _ =>
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index bc35a9e7de..10f0c6ee00 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -113,7 +113,8 @@ abstract class ICodes extends AnyRef
global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
lazy val symbolTable: global.type = global
lazy val loaders: global.loaders.type = global.loaders
- def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath
+
+ def classFileLookup: util.ClassFileLookup[AbstractFile] = global.classPath
}
/** A phase which works on icode. */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index f81c42d836..27bf836484 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -60,7 +60,7 @@ trait Primitives { self: ICodes =>
// type : (buf,el) => buf
// range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
- // jvm : It should call the appropiate 'append' method on StringBuffer
+ // jvm : It should call the appropriate 'append' method on StringBuffer
case class StringConcat(el: TypeKind) extends Primitive
/** Signals the beginning of a series of concatenations.
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 676ee12683..b0ad5bdaf9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -332,13 +332,13 @@ abstract class TypeFlowAnalysis {
`remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time.
Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow isn't needed anyway (as explained next).
- A basic block lacking a callsite in `remainingCALLs`, when visisted by the standard algorithm, won't cause any inlining.
+ A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining.
But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks.
In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite.
Those basic blocks not in that subgraph can be skipped altogether. That's why:
- `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs`
- same check is performed before adding a block to the worklist, and as part of choosing successors.
- The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overridding most methods of the dataflow-analysis.
+ The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis.
The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`.
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
index 2af2037fec..75aa0fc984 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
@@ -5,10 +5,11 @@
package scala.tools.nsc.backend.jvm
-import scala.tools.asm.tree.{ClassNode, MethodNode}
-import java.io.PrintWriter
+import scala.tools.asm.tree.{InsnList, AbstractInsnNode, ClassNode, MethodNode}
+import java.io.{StringWriter, PrintWriter}
import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier}
import scala.tools.asm.ClassReader
+import scala.collection.convert.decorateAsScala._
object AsmUtils {
@@ -36,19 +37,12 @@ object AsmUtils {
def traceMethod(mnode: MethodNode): Unit = {
println(s"Bytecode for method ${mnode.name}")
- val p = new Textifier
- val tracer = new TraceMethodVisitor(p)
- mnode.accept(tracer)
- val w = new PrintWriter(System.out)
- p.print(w)
- w.flush()
+ println(textify(mnode))
}
def traceClass(cnode: ClassNode): Unit = {
println(s"Bytecode for class ${cnode.name}")
- val w = new PrintWriter(System.out)
- cnode.accept(new TraceClassVisitor(w))
- w.flush()
+ println(textify(cnode))
}
def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes))
@@ -58,4 +52,57 @@ object AsmUtils {
new ClassReader(bytes).accept(node, 0)
node
}
+
+ /**
+ * Returns a human-readable representation of the cnode ClassNode.
+ */
+ def textify(cnode: ClassNode): String = {
+ val trace = new TraceClassVisitor(new PrintWriter(new StringWriter))
+ cnode.accept(trace)
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString
+ }
+
+ /**
+ * Returns a human-readable representation of the code in the mnode MethodNode.
+ */
+ def textify(mnode: MethodNode): String = {
+ val trace = new TraceClassVisitor(new PrintWriter(new StringWriter))
+ mnode.accept(trace)
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString
+ }
+
+ /**
+ * Returns a human-readable representation of the given instruction.
+ */
+ def textify(insn: AbstractInsnNode): String = {
+ val trace = new TraceMethodVisitor(new Textifier)
+ insn.accept(trace)
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString.trim
+ }
+
+ /**
+ * Returns a human-readable representation of the given instruction sequence.
+ */
+ def textify(insns: Iterator[AbstractInsnNode]): String = {
+ val trace = new TraceMethodVisitor(new Textifier)
+ insns.foreach(_.accept(trace))
+ val sw: StringWriter = new StringWriter
+ val pw: PrintWriter = new PrintWriter(sw)
+ trace.p.print(pw)
+ sw.toString.trim
+ }
+
+ /**
+ * Returns a human-readable representation of the given instruction sequence.
+ */
+ def textify(insns: InsnList): String = textify(insns.iterator().asScala)
}
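All of the new textify overloads funnel through ASM's Textifier / trace visitors and collect the result in a StringWriter. The same mechanism can be pointed at any classfile reachable from the classpath; a small hypothetical driver (not part of this patch) using the bundled scala.tools.asm:

import java.io.{PrintWriter, StringWriter}
import scala.tools.asm.ClassReader
import scala.tools.asm.util.TraceClassVisitor

// e.g. dumpClass("java.lang.Object") returns a readable listing of the classfile
def dumpClass(className: String): String = {
  val sw = new StringWriter
  new ClassReader(className).accept(new TraceClassVisitor(new PrintWriter(sw)), 0)
  sw.toString
}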
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
index 0c0d726630..a5f33aa786 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc.backend.jvm
import scala.tools.nsc.Global
-import PartialFunction._
/**
* This trait contains code shared between GenBCode and GenASM that depends on types defined in
@@ -14,6 +13,13 @@ import PartialFunction._
*/
final class BCodeAsmCommon[G <: Global](val global: G) {
import global._
+ import definitions._
+
+ val ExcludedForwarderFlags = {
+ import scala.tools.nsc.symtab.Flags._
+ // Should include DEFERRED but this breaks findMember.
+ SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO
+ }
/**
* True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a
@@ -23,10 +29,10 @@ final class BCodeAsmCommon[G <: Global](val global: G) {
*/
def isAnonymousOrLocalClass(classSym: Symbol): Boolean = {
assert(classSym.isClass, s"not a class: $classSym")
- val res = (classSym.isAnonymousClass || !classSym.originalOwner.isClass)
- // lambda classes are always top-level classes.
- if (res) assert(!classSym.isDelambdafyFunction)
- res
+ // Here used to be an `assert(!classSym.isDelambdafyFunction)`: delambdafy lambda classes are
+ // always top-level. However, SI-8900 shows an example where the weak name-based implementation
+ // of isDelambdafyFunction failed (for a function declared in a package named "lambda").
+ classSym.isAnonymousClass || !classSym.originalOwner.isClass
}
/**
@@ -124,4 +130,64 @@ final class BCodeAsmCommon[G <: Global](val global: G) {
assert(r != NoSymbol, sym.fullLocationString)
r
})(collection.breakOut)
+
+ lazy val AnnotationRetentionPolicyModule = AnnotationRetentionPolicyAttr.companionModule
+ lazy val AnnotationRetentionPolicySourceValue = AnnotationRetentionPolicyModule.tpe.member(TermName("SOURCE"))
+ lazy val AnnotationRetentionPolicyClassValue = AnnotationRetentionPolicyModule.tpe.member(TermName("CLASS"))
+ lazy val AnnotationRetentionPolicyRuntimeValue = AnnotationRetentionPolicyModule.tpe.member(TermName("RUNTIME"))
+
+ /** Whether an annotation should be emitted as a Java annotation
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
+ */
+ def shouldEmitAnnotation(annot: AnnotationInfo) = {
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(ClassfileAnnotationClass) &&
+ retentionPolicyOf(annot) != AnnotationRetentionPolicySourceValue &&
+ annot.args.isEmpty
+ }
+
+ def isRuntimeVisible(annot: AnnotationInfo): Boolean = {
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr) match {
+ case Some(retentionAnnot) =>
+ retentionAnnot.assocs.contains(nme.value -> LiteralAnnotArg(Constant(AnnotationRetentionPolicyRuntimeValue)))
+ case _ =>
+ // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the
+ // annotation is emitted with visibility `RUNTIME`
+ true
+ }
+ }
+
+ private def retentionPolicyOf(annot: AnnotationInfo): Symbol =
+ annot.atp.typeSymbol.getAnnotation(AnnotationRetentionAttr).map(_.assocs).map(assoc =>
+ assoc.collectFirst {
+ case (`nme`.value, LiteralAnnotArg(Constant(value: Symbol))) => value
+ }).flatten.getOrElse(AnnotationRetentionPolicyClassValue)
+
+ def implementedInterfaces(classSym: Symbol): List[Symbol] = {
+ // Additional interface parents based on annotations and other cues
+ def newParentForAnnotation(ann: AnnotationInfo): Option[Type] = ann.symbol match {
+ case RemoteAttr => Some(RemoteInterfaceClass.tpe)
+ case _ => None
+ }
+
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ val allParents = classSym.info.parents ++ classSym.annotations.flatMap(newParentForAnnotation)
+
+ // We keep the superClass when computing minimizeParents to eliminate more interfaces.
+ // Example: T can be eliminated from D
+ // trait T
+ // class C extends T
+ // class D extends C with T
+ val interfaces = erasure.minimizeParents(allParents) match {
+ case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) =>
+ ifs
+ case ifs =>
+ // minimizeParents removes the superclass if it's redundant, for example:
+ // trait A
+ // class C extends Object with A // minimizeParents removes Object
+ ifs
+ }
+ interfaces.map(_.typeSymbol)
+ }
}
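Roughly, shouldEmitAnnotation and isRuntimeVisible implement the classfile-level retention rule for (Java-defined, argument-free) classfile annotations: SOURCE-retained annotations are not written at all, CLASS-retained ones go to RuntimeInvisibleAnnotations, and RUNTIME-retained ones, or ones with no explicit @Retention (SI-8926), go to RuntimeVisibleAnnotations. The rule in isolation, using the JDK's RetentionPolicy purely for illustration:

import java.lang.annotation.RetentionPolicy
import java.lang.annotation.RetentionPolicy.{SOURCE, CLASS, RUNTIME}

// None          => annotation is not emitted to the classfile
// Some(visible) => emitted; `visible` selects Runtime(In)VisibleAnnotations
def emission(retention: Option[RetentionPolicy]): Option[Boolean] =
  retention.getOrElse(RUNTIME) match { // no @Retention => treated as RUNTIME
    case SOURCE  => None
    case CLASS   => Some(false)
    case RUNTIME => Some(true)
  }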
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
index 14bffd67ab..8d1c37532e 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -469,6 +469,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
trait BCAnnotGen extends BCInnerClassGen {
import genASM.{ubytesToCharArray, arrEncode}
+ import bCodeAsmCommon.{shouldEmitAnnotation, isRuntimeVisible}
/*
* can-multi-thread
@@ -533,17 +534,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
}
}
- /* Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- *
- * must-single-thread
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(definitions.ClassfileAnnotationClass) &&
- annot.args.isEmpty &&
- !annot.matches(definitions.DeprecatedAttr)
-
/*
* In general,
* must-single-thread
@@ -563,7 +553,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = cw.visitAnnotation(descriptor(typ), true)
+ val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -575,7 +565,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = mw.visitAnnotation(descriptor(typ), true)
+ val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -587,7 +577,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = fw.visitAnnotation(descriptor(typ), true)
+ val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -602,7 +592,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
annot <- annots) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot))
emitAssocs(pannVisitor, assocs)
}
}
@@ -625,13 +615,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen {
- // -----------------------------------------------------------------------------------------
- // Static forwarders (related to mirror classes but also present in
- // a plain class lacking companion module, for details see `isCandidateForForwarders`).
- // -----------------------------------------------------------------------------------------
-
- val ExcludedForwarderFlags = genASM.ExcludedForwarderFlags
-
/* Adds a @remote annotation, actual use unknown.
*
* Invoked from genMethod() and addForwarder().
@@ -727,7 +710,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
}
debuglog(s"Potentially conflicting names for forwarders: $conflictingNames")
- for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, symtab.Flags.METHOD)) {
+ for (m <- moduleClass.info.membersBasedOnFlags(bCodeAsmCommon.ExcludedForwarderFlags, symtab.Flags.METHOD)) {
if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor)
debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
else if (conflictingNames(m.name))
@@ -808,32 +791,28 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
assert(moduleClass.companionClass == NoSymbol, moduleClass)
innerClassBufferASM.clear()
this.cunit = cunit
- val moduleName = internalName(moduleClass) // + "$"
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+ val bType = mirrorClassClassBType(moduleClass)
val mirrorClass = new asm.tree.ClassNode
mirrorClass.visit(
classfileVersion,
- flags,
- mirrorName,
+ bType.info.flags,
+ bType.internalName,
null /* no java-generic-signature */,
ObjectReference.internalName,
EMPTY_STRING_ARRAY
)
- if (emitSource) {
- mirrorClass.visitSource("" + cunit.source,
- null /* SourceDebugExtension */)
- }
+ if (emitSource)
+ mirrorClass.visitSource("" + cunit.source, null /* SourceDebugExtension */)
- val ssa = getAnnotPickle(mirrorName, moduleClass.companionSymbol)
+ val ssa = getAnnotPickle(bType.internalName, moduleClass.companionSymbol)
mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa)
- addForwarders(isRemote(moduleClass), mirrorClass, mirrorName, moduleClass)
+ addForwarders(isRemote(moduleClass), mirrorClass, bType.internalName, moduleClass)
- innerClassBufferASM ++= classBTypeFromSymbol(moduleClass).info.memberClasses
+ innerClassBufferASM ++= bType.info.nestedClasses
addInnerClassesASM(mirrorClass, innerClassBufferASM.toList)
mirrorClass.visitEnd()
@@ -949,7 +928,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
constructor.visitEnd()
- innerClassBufferASM ++= classBTypeFromSymbol(cls).info.memberClasses
+ innerClassBufferASM ++= classBTypeFromSymbol(cls).info.nestedClasses
addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList)
beanInfoClass.visitEnd()
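The removed lines above computed the mirror class name by hand, by dropping the trailing '$' from the module class's internal name; mirrorClassClassBType now packages that together with the flags and inner-class information. The naming rule itself, as a tiny hypothetical helper:

// internal name of the mirror class for a module class, e.g. "foo/Bar$" -> "foo/Bar"
def mirrorInternalName(moduleInternalName: String): String = {
  require(moduleInternalName.endsWith("$"), s"not a module class name: $moduleInternalName")
  moduleInternalName.dropRight(1)
}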
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
index d58368b19d..c3db28151b 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -271,7 +271,7 @@ abstract class BCodeIdiomatic extends SubComponent {
assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
// We're done with BOOL already
- from match {
+ (from: @unchecked) match {
// using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
@@ -361,7 +361,7 @@ abstract class BCodeIdiomatic extends SubComponent {
assert(elem.isNonVoidPrimitiveType)
val rand = {
// using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
- elem match {
+ (elem: @unchecked) match {
case BOOL => Opcodes.T_BOOLEAN
case BYTE => Opcodes.T_BYTE
case SHORT => Opcodes.T_SHORT
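The two @unchecked ascriptions silence the exhaustivity warning for matches that, by the preceding assertions, can never see the excluded cases (BOOL, or a non-primitive kind). The same idea in miniature, with invented values:

sealed trait Kind
case object BOOL extends Kind
case object BYTE extends Kind
case object SHORT extends Kind

// the caller guarantees k != BOOL (cf. the assert above), so the missing case
// is suppressed instead of being "handled" with dead code
def operand(k: Kind): Int = (k: @unchecked) match {
  case BYTE  => 8 // illustrative values, not the real Opcodes.T_* operands
  case SHORT => 9
}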
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
index 03bc32061b..142c901c21 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -118,7 +118,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
addClassFields()
- innerClassBufferASM ++= classBTypeFromSymbol(claszSymbol).info.memberClasses
+ innerClassBufferASM ++= classBTypeFromSymbol(claszSymbol).info.nestedClasses
gen(cd.impl)
addInnerClassesASM(cnode, innerClassBufferASM.toList)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
index 7c95b7fc3b..b94208c1a5 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -284,7 +284,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
* ------
*/
- // a note on terminology: this is not "postHandlers", despite appearences.
+ // a note on terminology: this is not "postHandlers", despite appearances.
// "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts.
if (hasFinally) {
nopIfNeeded(startTryBody)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
index 53ac5bfdc7..a9bce82acd 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala
@@ -8,16 +8,35 @@ package backend.jvm
import scala.tools.asm
import asm.Opcodes
+import scala.tools.asm.tree.{InnerClassNode, ClassNode}
+import opt.ByteCodeRepository
+import scala.collection.convert.decorateAsScala._
/**
- * The BTypes component defines The BType class hierarchy. BTypes encapsulates all type information
- * that is required after building the ASM nodes. This includes optimizations, geneartion of
+ * The BTypes component defines The BType class hierarchy. BTypes encapsulate all type information
+ * that is required after building the ASM nodes. This includes optimizations, generation of
* InnerClass attributes and generation of stack map frames.
*
* This representation is immutable and independent of the compiler data structures, hence it can
* be queried by concurrent threads.
*/
abstract class BTypes {
+ import BTypes.InternalName
+
+ // Some core BTypes are required here, in class BType, where no Global instance is available.
+ // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual
+ // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol.
+ val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type]
+ import coreBTypes._
+
+ /**
+ * Tools for parsing classfiles, used by the inliner.
+ */
+ val byteCodeRepository: ByteCodeRepository
+
+ // Allows defining per-run caches here and in the CallGraph component, which don't have a global
+ def recordPerRunCache[T <: collection.generic.Clearable](cache: T): T
+
/**
* A map from internal names to ClassBTypes. Every ClassBType is added to this map on its
* construction.
@@ -29,30 +48,83 @@ abstract class BTypes {
* Concurrent because stack map frames are computed when in the class writer, which might run
* on multiple classes concurrently.
*/
- protected val classBTypeFromInternalNameMap: collection.concurrent.Map[String, ClassBType]
+ val classBTypeFromInternalName: collection.concurrent.Map[InternalName, ClassBType] = recordPerRunCache(collection.concurrent.TrieMap.empty[InternalName, ClassBType])
/**
- * The string represented by the `offset` / `length` values of a ClassBType, see comment of that
- * class.
+ * Parse the classfile for `internalName` and construct the [[ClassBType]].
*/
- protected def internalNameString(offset: Int, lenght: Int): String
+ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = {
+ classBTypeFromClassNode(byteCodeRepository.classNode(internalName))
+ }
/**
- * Obtain a previously constructed ClassBType for a given internal name.
+ * Construct the [[ClassBType]] for a parsed classfile.
*/
- def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName)
+ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = {
+ classBTypeFromInternalName.getOrElse(classNode.name, {
+ setClassInfo(classNode, ClassBType(classNode.name))
+ })
+ }
- // Some core BTypes are required here, in class BType, where no Global instance is available.
- // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual
- // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol.
- val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type]
- import coreBTypes._
+ private def setClassInfo(classNode: ClassNode, classBType: ClassBType): ClassBType = {
+ val superClass = classNode.superName match {
+ case null =>
+ assert(classNode.name == ObjectReference.internalName, s"class with missing super type: ${classNode.name}")
+ None
+ case superName =>
+ Some(classBTypeFromParsedClassfile(superName))
+ }
+
+ val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut)
+
+ val flags = classNode.access
+
+ /**
+ * Find all nested classes of classNode. The innerClasses attribute contains all nested classes
+ * that are declared inside classNode or used in the bytecode of classNode. So some of them are
+ * nested in some other class than classNode, and we need to filter them.
+ *
+ * For member classes, innerClassNode.outerName is defined, so we compare that to classNode.name.
+ *
+ * For local and anonymous classes, innerClassNode.outerName is null. Such classes are required
+ * to have an EnclosingMethod attribute declaring the outer class. So we keep those local and
+ * anonymous classes whose outerClass is classNode.name.
+ *
+ */
+ def nestedInCurrentClass(innerClassNode: InnerClassNode): Boolean = {
+ (innerClassNode.outerName != null && innerClassNode.outerName == classNode.name) ||
+ (innerClassNode.outerName == null && byteCodeRepository.classNode(innerClassNode.name).outerClass == classNode.name)
+ }
+
+ val nestedClasses: List[ClassBType] = classNode.innerClasses.asScala.collect({
+ case i if nestedInCurrentClass(i) => classBTypeFromParsedClassfile(i.name)
+ })(collection.breakOut)
+
+ // if classNode is a nested class, it has an innerClass attribute for itself. in this
+ // case we build the NestedInfo.
+ val nestedInfo = classNode.innerClasses.asScala.find(_.name == classNode.name) map {
+ case innerEntry =>
+ val enclosingClass =
+ if (innerEntry.outerName != null) {
+ // if classNode is a member class, the outerName is non-null
+ classBTypeFromParsedClassfile(innerEntry.outerName)
+ } else {
+ // for anonymous or local classes, the outerName is null, but the enclosing class is
+ // stored in the EnclosingMethod attribute (which ASM encodes in classNode.outerClass).
+ classBTypeFromParsedClassfile(classNode.outerClass)
+ }
+ val staticFlag = (innerEntry.access & Opcodes.ACC_STATIC) != 0
+ NestedInfo(enclosingClass, Option(innerEntry.outerName), Option(innerEntry.innerName), staticFlag)
+ }
+ classBType.info = ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo)
+ classBType
+ }
/**
- * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType
+ * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType
* referring to BTypes.
*/
- /*sealed*/ trait BType { // Not sealed for now due to SI-8546
+ sealed trait BType {
final override def toString: String = this match {
case UNIT => "V"
case BOOL => "Z"
@@ -171,6 +243,9 @@ abstract class BTypes {
assert(other.isRef, s"Cannot compute maxType: $this, $other")
// Approximate `lub`. The common type of two references is always ObjectReference.
ObjectReference
+
+ case _: MethodBType =>
+ throw new AssertionError(s"unexpected method type when computing maxType: $this")
}
/**
@@ -369,7 +444,7 @@ abstract class BTypes {
*
* - Initializer block (JLS 8.6 / 8.7): block of statements in a java class
* - static initializer: executed before constructor body
- * - instance initializer: exectued when class is initialized (instance creation, static
+ * - instance initializer: executed when class is initialized (instance creation, static
* field access, ...)
*
* - A static nested class can be defined as
@@ -540,7 +615,7 @@ abstract class BTypes {
*
* class A {
* void f() { class B {} }
- * static void g() { calss C {} }
+ * static void g() { class C {} }
* }
*
* B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table.
@@ -568,28 +643,14 @@ abstract class BTypes {
/**
* A ClassBType represents a class or interface type. The necessary information to build a
* ClassBType is extracted from compiler symbols and types, see BTypesFromSymbols.
- *
- * The `offset` and `length` fields are used to represent the internal name of the class. They
- * are indices into some character array. The internal name can be obtained through the method
- * `internalNameString`, which is abstract in this component. Name creation is assumed to be
- * hash-consed, so if two ClassBTypes have the same internal name, they NEED to have the same
- * `offset` and `length`.
- *
- * The actual implementation in subclass BTypesFromSymbols uses the global `chrs` array from the
- * name table. This representation is efficient because the JVM class name is obtained through
- * `classSymbol.javaBinaryName`. This already adds the necessary string to the `chrs` array,
- * so it makes sense to reuse the same name table in the backend.
- *
- * ClassBType is not a case class because we want a custom equals method, and because the
- * extractor extracts the internalName, which is what you typically need.
*/
- final class ClassBType(val offset: Int, val length: Int) extends RefBType {
+ final case class ClassBType(internalName: InternalName) extends RefBType {
/**
* Write-once variable allows initializing a cyclic graph of infos. This is required for
* nested classes. Example: for the definition `class A { class B }` we have
*
* B.info.nestedInfo.outerClass == A
- * A.info.memberClasses contains B
+ * A.info.nestedClasses contains B
*/
private var _info: ClassInfo = null
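With ClassBType now a final case class over the internal name, the extractor, equals and hashCode that used to be hand-written (see the removed code further down in this file) come for free. A minimal sketch of the extractor in use; the helper itself is hypothetical:

    // hypothetical helper: the case-class extractor yields the internal name directly
    def describe(t: ClassBType): String = t match {
      case ClassBType(internalName) => s"class or interface type: $internalName"
    }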
@@ -604,7 +665,7 @@ abstract class BTypes {
checkInfoConsistency()
}
- classBTypeFromInternalNameMap(internalName) = this
+ classBTypeFromInternalName(internalName) = this
private def checkInfoConsistency(): Unit = {
// we assert some properties. however, some of the linked ClassBType (members, superClass,
@@ -612,7 +673,7 @@ abstract class BTypes {
// best-effort verification.
def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || p(c)
- def isJLO(t: ClassBType) = t.internalName == "java/lang/Object"
+ def isJLO(t: ClassBType) = t.internalName == ObjectReference.internalName
assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this")
@@ -627,16 +688,10 @@ abstract class BTypes {
s"Invalid interfaces in $this: ${info.interfaces}"
)
- assert(info.memberClasses.forall(c => ifInit(c)(_.isNestedClass)), info.memberClasses)
+ assert(info.nestedClasses.forall(c => ifInit(c)(_.isNestedClass)), info.nestedClasses)
}
/**
- * The internal name of a class is the string returned by java.lang.Class.getName, with all '.'
- * replaced by '/'. For example "java/lang/String".
- */
- def internalName: String = internalNameString(offset, length)
-
- /**
* @return The class name without the package prefix
*/
def simpleName: String = internalName.split("/").last
@@ -661,8 +716,9 @@ abstract class BTypes {
outerName.orNull,
innerName.orNull,
GenBCode.mkFlags(
- info.flags,
- if (isStaticNestedClass) asm.Opcodes.ACC_STATIC else 0
+ // the static flag in the InnerClass table has a special meaning, see InnerClass comment
+ info.flags & ~Opcodes.ACC_STATIC,
+ if (isStaticNestedClass) Opcodes.ACC_STATIC else 0
) & ClassBType.INNER_CLASSES_FLAGS
)
}
@@ -736,33 +792,10 @@ abstract class BTypes {
} while (fcs == null)
fcs
}
-
- /**
- * Custom equals / hashCode: we only compare the name (offset / length)
- */
- override def equals(o: Any): Boolean = (this eq o.asInstanceOf[Object]) || (o match {
- case c: ClassBType => c.offset == this.offset && c.length == this.length
- case _ => false
- })
-
- override def hashCode: Int = {
- import scala.runtime.Statics
- var acc: Int = -889275714
- acc = Statics.mix(acc, offset)
- acc = Statics.mix(acc, length)
- Statics.finalizeHash(acc, 2)
- }
}
object ClassBType {
/**
- * Pattern matching on a ClassBType extracts the `internalName` of the class.
- */
- def unapply(c: ClassBType): Option[String] =
- if (c == null) None
- else Some(c.internalName)
-
- /**
* Valid flags for InnerClass attribute entry.
* See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6
*/
@@ -801,12 +834,12 @@ abstract class BTypes {
* through the superclass.
* @param flags The java flags, obtained through `javaFlags`. Used also to derive
* the flags for InnerClass entries.
- * @param memberClasses Classes nested in this class. Those need to be added to the
+ * @param nestedClasses Classes nested in this class. Those need to be added to the
* InnerClass table, see the InnerClass spec summary above.
* @param nestedInfo If this describes a nested class, information for the InnerClass table.
*/
- case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int,
- memberClasses: List[ClassBType], nestedInfo: Option[NestedInfo])
+ final case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int,
+ nestedClasses: List[ClassBType], nestedInfo: Option[NestedInfo])
/**
* Information required to add a class to an InnerClass table.
@@ -820,13 +853,13 @@ abstract class BTypes {
*
* (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not
* correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes
- * a source-level propety: if the class is in a static context (does not have an outer pointer).
+ * a source-level property: if the class is in a static context (does not have an outer pointer).
* This is checked when building the NestedInfo.
*/
- case class NestedInfo(enclosingClass: ClassBType,
- outerName: Option[String],
- innerName: Option[String],
- isStaticNestedClass: Boolean)
+ final case class NestedInfo(enclosingClass: ClassBType,
+ outerName: Option[String],
+ innerName: Option[String],
+ isStaticNestedClass: Boolean)
/**
* This class holds the data for an entry in the InnerClass table. See the InnerClass summary
@@ -839,9 +872,9 @@ abstract class BTypes {
* @param innerName The simple name of the inner class, may be null.
* @param flags The flags for this class in the InnerClass entry.
*/
- case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int)
+ final case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int)
- case class ArrayBType(componentType: BType) extends RefBType {
+ final case class ArrayBType(componentType: BType) extends RefBType {
def dimension: Int = componentType match {
case a: ArrayBType => 1 + a.dimension
case _ => 1
@@ -853,7 +886,7 @@ abstract class BTypes {
}
}
- case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType
+ final case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType
/* Some definitions that are required for the implementation of BTypes. They are abstract because
* initializing them requires information from types / symbols, which is not accessible here in
@@ -873,3 +906,12 @@ abstract class BTypes {
*/
def isCompilingPrimitive: Boolean
}
+
+object BTypes {
+ /**
+ * A marker for strings that represent class internal names.
+ * Ideally the type would be incompatible with String, for example by making it a value class.
+ *   But that would create boxing overhead in a Collection[InternalName].
+ */
+ type InternalName = String
+} \ No newline at end of file
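The value-class alternative rejected in the comment above would roughly look like the sketch below (an assumption, not part of the patch). It avoids allocation for a bare InternalName, but instances still box whenever they are stored in a generic collection such as List[InternalName] or used as a Map key, which is exactly the overhead the plain type alias avoids.

    final class InternalName(val value: String) extends AnyVal {
      // convenience accessor, mirroring ClassBType.simpleName
      def simpleName: String = value.split("/").last
    }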
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
index 0e2f938602..94f9b585d9 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
@@ -7,10 +7,14 @@ package scala.tools.nsc
package backend.jvm
import scala.tools.asm
+import opt.ByteCodeRepository
+import scala.tools.asm.tree.ClassNode
+import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository.Source
+import BTypes.InternalName
/**
* This class mainly contains the method classBTypeFromSymbol, which extracts the necessary
- * information from a symbol and its type to create the correpsonding ClassBType. It requires
+ * information from a symbol and its type to create the corresponding ClassBType. It requires
* access to the compiler (global parameter).
*
* The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes
@@ -32,20 +36,13 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
val coreBTypes = new CoreBTypesProxy[this.type](this)
import coreBTypes._
- final def intializeCoreBTypes(): Unit = {
+ val byteCodeRepository = new ByteCodeRepository(global.classPath, recordPerRunCache(collection.concurrent.TrieMap.empty[InternalName, (ClassNode, Source)]))
+
+ final def initializeCoreBTypes(): Unit = {
coreBTypes.setBTypes(new CoreBTypes[this.type](this))
}
- def internalNameString(offset: Int, length: Int) = new String(global.chrs, offset, length)
-
- protected val classBTypeFromInternalNameMap = {
- global.perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType])
- }
-
- /**
- * Cache for the method classBTypeFromSymbol.
- */
- private val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]()
+ def recordPerRunCache[T <: collection.generic.Clearable](cache: T): T = perRunCaches.recordCache(cache)
// helpers that need access to global.
// TODO @lry create a separate component, they don't belong to BTypesFromSymbols
@@ -89,13 +86,11 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
(classSym != NothingClass && classSym != NullClass),
s"Cannot create ClassBType for special class symbol ${classSym.fullName}")
- convertedClasses.getOrElse(classSym, {
- val internalName = classSym.javaBinaryName.toTypeName
- // We first create and add the ClassBType to the hash map before computing its info. This
- // allows initializing cylic dependencies, see the comment on variable ClassBType._info.
- val classBType = new ClassBType(internalName.start, internalName.length)
- convertedClasses(classSym) = classBType
- setClassInfo(classSym, classBType)
+ val internalName = classSym.javaBinaryName.toString
+ classBTypeFromInternalName.getOrElse(internalName, {
+ // The new ClassBType is added to the map in its constructor, before we set its info. This
+ // allows initializing cyclic dependencies, see the comment on variable ClassBType._info.
+ setClassInfo(classSym, ClassBType(internalName))
})
}
@@ -114,7 +109,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
val superClass = if (superClassSym == NoSymbol) None
else Some(classBTypeFromSymbol(superClassSym))
- val interfaces = getSuperInterfaces(classSym).map(classBTypeFromSymbol)
+ val interfaces = implementedInterfaces(classSym).map(classBTypeFromSymbol)
val flags = javaFlags(classSym)
@@ -126,25 +121,35 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
* code generation, but those duplicates will be eliminated when emitting the InnerClass
* attribute.
*
- * Why doe we need to collect classes into innerClassBufferASM at all? To collect references to
+ * Why do we need to collect classes into innerClassBufferASM at all? To collect references to
* nested classes, but NOT nested in C, that are used within C.
*/
val nestedClassSymbols = {
// The lambdalift phase lifts all nested classes to the enclosing class, so if we collect
// member classes right after lambdalift, we obtain all nested classes, including local and
// anonymous ones.
- val nestedClasses = exitingPhase(currentRun.lambdaliftPhase)(memberClassesOf(classSym))
+ val nestedClasses = {
+ val nested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesOf(classSym))
+ if (isTopLevelModuleClass(classSym)) {
+ // For Java compatibility, member classes of top-level objects are treated as members of
+ // the top-level companion class, see comment below.
+ val members = exitingPickler(memberClassesOf(classSym))
+ nested diff members
+ } else {
+ nested
+ }
+ }
- // If this is a top-level class, and it has a companion object, the member classes of the
- // companion are added as members of the class. For example:
+ // If this is a top-level class, the member classes of the companion object are added as
+ // members of the class. For example:
// class C { }
// object C {
// class D
// def f = { class E }
// }
- // The class D is added as a member of class C. The reason is that the InnerClass attribute
- // for D will containt class "C" and NOT the module class "C$" as the outer class of D.
- // This is done by buildNestedInfo, the reason is Java compatibility, see comment in BTypes.
+ // The class D is added as a member of class C. The reason is: for Java compatibility, the
+ // InnerClass attribute for D has "C" (NOT the module class "C$") as the outer class of D
+ // (done by buildNestedInfo). See comment in BTypes.
// For consistency, the InnerClass entry for D needs to be present in C - to Java it looks
// like D is a member of C, not C$.
val linkedClass = exitingPickler(classSym.linkedClassOfClass) // linkedCoC does not work properly in late phases
@@ -174,94 +179,51 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
} else true
})
- val memberClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol)
+ val nestedClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol)
val nestedInfo = buildNestedInfo(classSym)
- classBType.info = ClassInfo(superClass, interfaces, flags, memberClasses, nestedInfo)
+ classBType.info = ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo)
classBType
}
- /**
- * All interfaces implemented by a class, except for those inherited through the superclass.
- *
- * TODO @lry share code with GenASM
- */
- private def getSuperInterfaces(classSym: Symbol): List[Symbol] = {
-
- // Additional interface parents based on annotations and other cues
- def newParentForAnnotation(ann: AnnotationInfo): Symbol = ann.symbol match {
- case RemoteAttr => RemoteInterfaceClass
- case _ => NoSymbol
- }
-
- /**
- * Drop redundant interfaces (which are implemented by some other parent) from the immediate
- * parents. In other words, no two interfaces in the result are related by subtyping.
- */
- def dropRedundantInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
- var rest = lstIfaces
- var leaves = List.empty[Symbol]
- while (!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if (!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
- val superInterfaces0: List[Symbol] = classSym.mixinClasses
- val superInterfaces = existingSymbols(superInterfaces0 ++ classSym.annotations.map(newParentForAnnotation)).distinct
-
- assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString(", ")}")
- assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString(", ")}")
-
- dropRedundantInterfaces(superInterfaces)
- }
-
private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = {
assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym")
- val isNested = !innerClassSym.rawowner.isPackageClass
- if (!isNested) None
+ val isTopLevel = innerClassSym.rawowner.isPackageClass
+ if (isTopLevel) None
else {
// See comment in BTypes, when is a class marked static in the InnerClass table.
val isStaticNestedClass = isOriginallyStaticOwner(innerClassSym.originalOwner)
// After lambdalift (which is where we are), the rawowner field contains the enclosing class.
- val enclosingClassSym = {
- if (innerClassSym.isJavaDefined && innerClassSym.rawowner.isModuleClass) {
- // Example java source: class C { static class D { } }
- // The Scala compiler creates a class and a module symbol for C. Because D is a static
- // nested class, the symbol for D is nested in the module class C (not in the class C).
- // For the InnerClass attribute, we use the class symbol C, which represents the situation
- // in the source code.
-
- // Cannot use innerClassSym.isStatic: this method looks at the owner, which is a package
- // at this pahse (after lambdalift, flatten).
- assert(isOriginallyStaticOwner(innerClassSym.originalOwner), innerClassSym.originalOwner)
-
+ val enclosingClass = {
+ // (1) Example java source: class C { static class D { } }
+ // The Scala compiler creates a class and a module symbol for C. Because D is a static
+ // nested class, the symbol for D is nested in the module class C (not in the class C).
+ // For the InnerClass attribute, we use the class symbol C, which represents the situation
+ // in the source code.
+
+ // (2) Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec.
+ if ((innerClassSym.isJavaDefined && innerClassSym.rawowner.isModuleClass) || // (1)
+ (!isAnonymousOrLocalClass(innerClassSym) && isTopLevelModuleClass(innerClassSym.rawowner))) { // (2)
// phase travel for linkedCoC - does not always work in late phases
- exitingPickler(innerClassSym.rawowner.linkedClassOfClass)
+ exitingPickler(innerClassSym.rawowner.linkedClassOfClass) match {
+ case NoSymbol =>
+ // For top-level modules without a companion class, see doc of mirrorClassClassBType.
+ mirrorClassClassBType(exitingPickler(innerClassSym.rawowner))
+
+ case companionClass =>
+ classBTypeFromSymbol(companionClass)
+ }
+ } else {
+ classBTypeFromSymbol(innerClassSym.rawowner)
}
- else innerClassSym.rawowner
}
- val enclosingClass: ClassBType = classBTypeFromSymbol(enclosingClassSym)
val outerName: Option[String] = {
- if (isAnonymousOrLocalClass(innerClassSym)) {
- None
- } else {
- val outerName = innerClassSym.rawowner.javaBinaryName
- // Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec.
- val outerNameModule = if (isTopLevelModuleClass(innerClassSym.rawowner)) outerName.dropModule
- else outerName
- Some(outerNameModule.toString)
- }
+ if (isAnonymousOrLocalClass(innerClassSym)) None
+ else Some(enclosingClass.internalName)
}
val innerName: Option[String] = {
@@ -274,6 +236,29 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
}
/**
+   * For top-level objects without a companion class, the compiler generates a mirror class with
+ * static forwarders (Java compat). There's no symbol for the mirror class, but we still need a
+ * ClassBType (its info.nestedClasses will hold the InnerClass entries, see comment in BTypes).
+ */
+ def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = {
+ assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym")
+ val internalName = moduleClassSym.javaBinaryName.dropModule.toString
+ classBTypeFromInternalName.getOrElse(internalName, {
+ val c = ClassBType(internalName)
+ // class info consistent with BCodeHelpers.genMirrorClass
+ val nested = exitingPickler(memberClassesOf(moduleClassSym)) map classBTypeFromSymbol
+ c.info = ClassInfo(
+ superClass = Some(ObjectReference),
+ interfaces = Nil,
+ flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL,
+ nestedClasses = nested,
+ nestedInfo = None
+ )
+ c
+ })
+ }
+
+ /**
* True for module classes of package level objects. The backend will generate a mirror class for
* such objects.
*/
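For context on mirrorClassClassBType above: a top-level object without a companion class is compiled to a module class plus a mirror class holding static forwarders, which can be observed with javap (illustrative; exact output depends on the compiler version):

    // compile this object, then run `javap O O$`
    object O { def f: Int = 1 }
    // javap O   ->  public static int f()                            (static forwarder in the mirror class O)
    // javap O$  ->  public int f(); public static final O$ MODULE$   (the module class)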
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
index 4b9383c67c..03306f30aa 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala
@@ -14,7 +14,7 @@ object BackendStats {
val bcodeInitTimer = newSubTimer("bcode initialization", bcodeTimer)
val bcodeGenStat = newSubTimer("code generation", bcodeTimer)
- val bcodeDceTimer = newSubTimer("dead code elimination", bcodeTimer)
+ val methodOptTimer = newSubTimer("intra-method optimizations", bcodeTimer)
val bcodeWriteTimer = newSubTimer("classfile writing", bcodeTimer)
def timed[T](timer: Statistics.Timer)(body: => T): T = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
index fac3c93be2..246235f395 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala
@@ -4,7 +4,7 @@ package backend.jvm
import scala.annotation.switch
/**
- * Core BTypes and some other definitions. The initialization of these definitions requies access
+ * Core BTypes and some other definitions. The initialization of these definitions requires access
* to symbols / types (global).
*
* The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To
@@ -18,11 +18,11 @@ import scala.annotation.switch
*
* The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When
* creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the
- * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned
+ * constructor will actually go through the proxy. The lazy vals make sure the instance is assigned
* in the proxy before the fields are initialized.
*
* Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap
- * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are
+ * could not be a perRunCache anymore: the classes defined here need to be in that map, they are
* added when the ClassBTypes are created. The per run cache removes them, so they would be missing
* in the second run.
*/
@@ -192,7 +192,7 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) {
}
/**
+ * This trait makes some core BTypes available that don't depend on a Global instance. Some core
+ * This trait make some core BTypes available that don't depend on a Global instance. Some core
* BTypes are required to be accessible in the BTypes trait, which does not have access to Global.
*
* BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 2593903b9d..abe3bc512c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -20,7 +20,7 @@ import scala.annotation.tailrec
*
* Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf
*/
-abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { self =>
+abstract class GenASM extends SubComponent with BytecodeWriters { self =>
import global._
import icodes._
import icodes.opcodes._
@@ -99,17 +99,76 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
+ private def isJavaEntryPoint(icls: IClass) = {
+ val sym = icls.symbol
+ def fail(msg: String, pos: Position = sym.pos) = {
+ reporter.warning(sym.pos,
+ sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
+ " Reason: " + msg
+ // TODO: make this next claim true, if possible
+ // by generating valid main methods as static in module classes
+ // not sure what the jvm allows here
+ // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+ )
+ false
+ }
+ def failNoForwarder(msg: String) = {
+ fail(msg + ", which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ enteringErasure {
+ val companion = sym.linkedClassOfClass
+
+ if (hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+          // Now either succeed, or issue some additional warnings for things which look like
+          // attempts to be Java main methods.
+ else (possibles exists isJavaMainMethod) || {
+ possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail("don't know what this is: " + tp, m.pos)
+ }
+ }
+ }
+ }
+ }
+ }
+
override def run() {
if (settings.debug)
inform("[running phase " + name + " on icode]")
- if (settings.Xdce)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ if (settings.Xdce) {
+ val classes = icodes.classes.keys.toList // copy to avoid mutating the map while iterating
+ for (sym <- classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
log(s"Optimizer eliminated ${sym.fullNameString}")
deadCode.elidedClosures += sym
icodes.classes -= sym
}
+ }
// For predictably ordered error messages.
var sortedClasses = classes.values.toList sortBy (_.symbol.fullName)
@@ -618,7 +677,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
- def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor, isMirror: Boolean = false) {
/* The outer name for this inner class. Note that it returns null
* when the inner class should not get an index in the constant pool.
* That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
@@ -639,11 +698,19 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else
innerSym.rawname + innerSym.moduleSuffix
- // This collects all inner classes of csym, including local and anonymous: lambdalift makes
- // them members of their enclosing class.
- innerClassBuffer ++= exitingPhase(currentRun.lambdaliftPhase)(memberClassesOf(csym))
+ innerClassBuffer ++= {
+ val members = exitingPickler(memberClassesOf(csym))
+ // lambdalift makes all classes (also local, anonymous) members of their enclosing class
+ val allNested = exitingPhase(currentRun.lambdaliftPhase)(memberClassesOf(csym))
+
+ // for the mirror class, we take the members of the companion module class (Java compat,
+ // see doc in BTypes.scala). for module classes, we filter out those members.
+ if (isMirror) members
+ else if (isTopLevelModule(csym)) allNested diff members
+ else allNested
+ }
- // Add members of the companion object (if top-level). why, see comment in BTypes.scala.
+ // If this is a top-level class, add members of the companion object.
val linkedClass = exitingPickler(csym.linkedClassOfClass) // linkedCoC does not work properly in late phases
if (isTopLevelModule(linkedClass)) {
// phase travel to exitingPickler: this makes sure that memberClassesOf only sees member classes,
@@ -805,15 +872,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (ThrownException(exc) <- excs.distinct)
yield javaName(exc)
- /** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(ClassfileAnnotationClass) &&
- annot.args.isEmpty &&
- !annot.matches(DeprecatedAttr)
-
def getCurrentCUnit(): CompilationUnit
def getGenericSignature(sym: Symbol, owner: Symbol) = self.getGenericSignature(sym, owner, getCurrentCUnit())
@@ -875,7 +933,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = cw.visitAnnotation(descriptor(typ), true)
+ val av = cw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -884,7 +942,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = mw.visitAnnotation(descriptor(typ), true)
+ val av = mw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -893,7 +951,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(annot <- annotations; if shouldEmitAnnotation(annot)) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val av = fw.visitAnnotation(descriptor(typ), true)
+ val av = fw.visitAnnotation(descriptor(typ), isRuntimeVisible(annot))
emitAssocs(av, assocs)
}
}
@@ -905,7 +963,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
annot <- annots) {
val AnnotationInfo(typ, args, assocs) = annot
assert(args.isEmpty, args)
- val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), isRuntimeVisible(annot))
emitAssocs(pannVisitor, assocs)
}
}
@@ -1156,40 +1214,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def serialVUID: Option[Long] = genBCode.serialVUID(clasz.symbol)
- private def getSuperInterfaces(c: IClass): Array[String] = {
-
- // Additional interface parents based on annotations and other cues
- def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
- case RemoteAttr => RemoteInterfaceClass
- case _ => NoSymbol
- }
-
- /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
- */
- def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
- var rest = lstIfaces
- var leaves = List.empty[Symbol]
- while(!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if(!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
- val ps = c.symbol.info.parents
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
- val superInterfaces = existingSymbols(superInterfaces0 ++ c.symbol.annotations.map(newParentForAttr)).distinct
-
- if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
- else mkArray(minimizeInterfaces(superInterfaces) map javaName)
- }
-
var clasz: IClass = _ // this var must be assigned only by genClass()
var jclass: asm.ClassWriter = _ // the classfile being emitted
var thisName: String = _ // the internal name of jclass
@@ -1210,7 +1234,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val ps = c.symbol.info.parents
val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
- val ifaces = getSuperInterfaces(c)
+ val ifaces: Array[String] = implementedInterfaces(c.symbol).map(javaName)(collection.breakOut)
val thisSignature = getGenericSignature(c.symbol, c.symbol.owner)
val flags = mkFlags(
@@ -2780,7 +2804,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
- addInnerClasses(modsym, mirrorClass)
+ addInnerClasses(modsym, mirrorClass, isMirror = true)
mirrorClass.visitEnd()
writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
}
@@ -2915,7 +2939,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
} // end of class JBeanInfoBuilder
/** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for.
- * In particualr, IMethod.normalize() doesn't collapseJumpChains().
+ * In particular, IMethod.normalize() doesn't collapseJumpChains().
*
* TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
*/
@@ -3130,7 +3154,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- // remove the unusued exception handler references
+ // remove the unused exception handler references
if (settings.debug)
for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
m.exh = m.exh filterNot unusedExceptionHandlers
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index ba94a9c44c..d5e95c47cf 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -9,12 +9,12 @@ package tools.nsc
package backend
package jvm
-import scala.collection.{ mutable, immutable }
-import scala.annotation.switch
+import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.tools.asm
import scala.tools.asm.tree.ClassNode
+import scala.tools.nsc.backend.jvm.opt.LocalOpt
/*
* Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
@@ -215,13 +215,10 @@ abstract class GenBCode extends BCodeSyncAndTry {
* - converting the plain ClassNode to byte array and placing it on queue-3
*/
class Worker2 {
- def localOptimizations(classNode: ClassNode): Unit = {
- def dce(): Boolean = BackendStats.timed(BackendStats.bcodeDceTimer) {
- if (settings.YoptUnreachableCode) opt.LocalOpt.removeUnreachableCode(classNode)
- else false
- }
+ lazy val localOpt = new LocalOpt(settings)
- dce()
+ def localOptimizations(classNode: ClassNode): Unit = {
+ BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode))
}
def run() {
@@ -289,7 +286,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer)
arrivalPos = 0 // just in case
scalaPrimitives.init()
- bTypes.intializeCoreBTypes()
+ bTypes.initializeCoreBTypes()
Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart)
// initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated.
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
deleted file mode 100644
index 2bcde7f7b9..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Jason Zaugg
- */
-
-package scala.tools.nsc
-package backend.jvm
-import scala.tools.nsc.symtab._
-
-/** Code shared between the erstwhile legacy backend (aka GenJVM)
- * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
- * more here, but for now I'm starting with the refactorings that are either
- * straightforward to review or necessary for maintenance.
- */
-trait GenJVMASM {
- val global: Global
- import global._
- import icodes._
- import definitions._
-
- val ExcludedForwarderFlags = {
- import Flags._
- // Should include DEFERRED but this breaks findMember.
- ( SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
- }
-
- protected def isJavaEntryPoint(icls: IClass) = {
- val sym = icls.symbol
- def fail(msg: String, pos: Position = sym.pos) = {
- reporter.warning(sym.pos,
- sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
- " Reason: " + msg
- // TODO: make this next claim true, if possible
- // by generating valid main methods as static in module classes
- // not sure what the jvm allows here
- // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
- )
- false
- }
- def failNoForwarder(msg: String) = {
- fail(msg + ", which means no static forwarder can be generated.\n")
- }
- val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
- val hasApproximate = possibles exists { m =>
- m.info match {
- case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
- case _ => false
- }
- }
- // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
- hasApproximate && {
- // Before erasure so we can identify generic mains.
- enteringErasure {
- val companion = sym.linkedClassOfClass
-
- if (hasJavaMainMethod(companion))
- failNoForwarder("companion contains its own main method")
- else if (companion.tpe.member(nme.main) != NoSymbol)
- // this is only because forwarders aren't smart enough yet
- failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
- else if (companion.isTrait)
- failNoForwarder("companion is a trait")
- // Now either succeeed, or issue some additional warnings for things which look like
- // attempts to be java main methods.
- else (possibles exists isJavaMainMethod) || {
- possibles exists { m =>
- m.info match {
- case PolyType(_, _) =>
- fail("main methods cannot be generic.")
- case MethodType(params, res) =>
- if (res.typeSymbol :: params exists (_.isAbstractType))
- fail("main methods cannot refer to type parameters or abstract types.", m.pos)
- else
- isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
- case tp =>
- fail("don't know what this is: " + tp, m.pos)
- }
- }
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
new file mode 100644
index 0000000000..7b424d2107
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala
@@ -0,0 +1,112 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.tools.asm
+import asm.tree._
+import scala.collection.convert.decorateAsScala._
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassFileLookup
+import OptimizerReporting._
+import ByteCodeRepository._
+import BTypes.InternalName
+
+/**
+ * The ByteCodeRepository provides utilities to read the bytecode of classfiles from the compilation
+ * classpath. Parsed classes are cached in the `classes` map.
+ *
+ * @param classPath The compiler classpath where classfiles are searched and read from.
+ * @param classes Cache for parsed ClassNodes. Also stores the source of the bytecode:
+ * [[Classfile]] if read from `classPath`, [[CompilationUnit]] if the bytecode
+ * corresponds to a class being compiled.
+ */
+class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val classes: collection.concurrent.Map[InternalName, (ClassNode, Source)]) {
+ /**
+ * The class node and source for an internal name. If the class node is not yet available, it is
+ * parsed from the classfile on the compile classpath.
+ */
+ def classNodeAndSource(internalName: InternalName): (ClassNode, Source) = {
+ classes.getOrElseUpdate(internalName, (parseClass(internalName), Classfile))
+ }
+
+ /**
+ * The class node for an internal name. If the class node is not yet available, it is parsed from
+ * the classfile on the compile classpath.
+ */
+ def classNode(internalName: InternalName) = classNodeAndSource(internalName)._1
+
+ /**
+ * The field node for a field matching `name` and `descriptor`, accessed in class `classInternalName`.
+ * The declaration of the field may be in one of the superclasses.
+ *
+ * @return The [[FieldNode]] of the requested field and the [[InternalName]] of its declaring class.
+ */
+ def fieldNode(classInternalName: InternalName, name: String, descriptor: String): Option[(FieldNode, InternalName)] = {
+ val c = classNode(classInternalName)
+ c.fields.asScala.find(f => f.name == name && f.desc == descriptor).map((_, classInternalName)) orElse {
+ Option(c.superName).flatMap(n => fieldNode(n, name, descriptor))
+ }
+ }
+
+ /**
+ * The method node for a method matching `name` and `descriptor`, accessed in class `classInternalName`.
+ * The declaration of the method may be in one of the parents.
+ *
+ * @return The [[MethodNode]] of the requested method and the [[InternalName]] of its declaring class.
+ */
+ def methodNode(classInternalName: InternalName, name: String, descriptor: String): Option[(MethodNode, InternalName)] = {
+ val c = classNode(classInternalName)
+ c.methods.asScala.find(m => m.name == name && m.desc == descriptor).map((_, classInternalName)) orElse {
+ val parents = Option(c.superName) ++ c.interfaces.asScala
+ // `view` to stop at the first result
+ parents.view.flatMap(methodNode(_, name, descriptor)).headOption
+ }
+ }
+
+ private def parseClass(internalName: InternalName): ClassNode = {
+ val fullName = internalName.replace('/', '.')
+ classPath.findClassFile(fullName) map { classFile =>
+ val classNode = new asm.tree.ClassNode()
+ val classReader = new asm.ClassReader(classFile.toByteArray)
+ // We don't need frames when inlining, but we want to keep the local variable table, so we
+ // don't use SKIP_DEBUG.
+ classReader.accept(classNode, asm.ClassReader.SKIP_FRAMES)
+ // SKIP_FRAMES leaves line number nodes. Remove them because they are not correct after
+ // inlining.
+      // TODO: we also need to remove them for classes that are not parsed from classfiles. Why not simplify and do it once when inlining?
+      // OR: instead of skipping line numbers for inlined code, write a SourceDebugExtension
+ // attribute that contains JSR-45 data that encodes debugging info.
+ // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.11
+ // https://jcp.org/aboutJava/communityprocess/final/jsr045/index.html
+ removeLineNumberNodes(classNode)
+ classNode
+ } getOrElse {
+ inlineFailure(s"Class file for class $fullName not found.")
+ }
+ }
+
+ private def removeLineNumberNodes(classNode: ClassNode): Unit = {
+ for (method <- classNode.methods.asScala) {
+ val iter = method.instructions.iterator()
+ while (iter.hasNext) iter.next() match {
+ case _: LineNumberNode => iter.remove()
+ case _ =>
+ }
+ }
+ }
+}
+
+object ByteCodeRepository {
+ /**
+ * The source of a ClassNode in the ByteCodeRepository. Can be either [[CompilationUnit]] if the
+ * class is being compiled or [[Classfile]] if the class was parsed from the compilation classpath.
+ */
+ sealed trait Source
+ object CompilationUnit extends Source
+ object Classfile extends Source
+}
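A hedged usage sketch for the repository above; the surrounding component providing `global` (for example BTypesFromSymbols) and the queried names are assumptions for illustration only:

    import scala.tools.asm.tree.{ClassNode, MethodNode}
    import scala.tools.nsc.backend.jvm.BTypes.InternalName
    import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository

    // assumes a Global instance `global` is in scope; the TrieMap should be registered as a per-run cache
    val repo = new ByteCodeRepository(
      global.classPath,
      collection.concurrent.TrieMap.empty[InternalName, (ClassNode, ByteCodeRepository.Source)])

    val objectNode: ClassNode = repo.classNode("java/lang/Object")
    // returns the MethodNode and the internal name of its declaring class, if found
    val hash: Option[(MethodNode, InternalName)] =
      repo.methodNode("java/lang/String", "hashCode", "()I")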
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
new file mode 100644
index 0000000000..6b4047c0a7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala
@@ -0,0 +1,184 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+package opt
+
+import scala.annotation.{tailrec, switch}
+import scala.collection.mutable
+import scala.reflect.internal.util.Collections._
+import scala.tools.asm.Opcodes
+import scala.tools.asm.tree._
+import scala.collection.convert.decorateAsScala._
+
+object BytecodeUtils {
+
+ object Goto {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (instruction.getOpcode == Opcodes.GOTO) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object JumpNonJsr {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (isJumpNonJsr(instruction)) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object ConditionalJump {
+ def unapply(instruction: AbstractInsnNode): Option[JumpInsnNode] = {
+ if (isConditionalJump(instruction)) Some(instruction.asInstanceOf[JumpInsnNode])
+ else None
+ }
+ }
+
+ object VarInstruction {
+ def unapply(instruction: AbstractInsnNode): Option[VarInsnNode] = {
+ if (isVarInstruction(instruction)) Some(instruction.asInstanceOf[VarInsnNode])
+ else None
+ }
+
+ }
+
+ def isJumpNonJsr(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+    // JSR is deprecated in classfile version 50, disallowed in 51. Historically, it was used to implement finally.
+ op == Opcodes.GOTO || isConditionalJump(instruction)
+ }
+
+ def isConditionalJump(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ (op >= Opcodes.IFEQ && op <= Opcodes.IF_ACMPNE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL
+ }
+
+ def isReturn(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ op >= Opcodes.IRETURN && op <= Opcodes.RETURN
+ }
+
+ def isVarInstruction(instruction: AbstractInsnNode): Boolean = {
+ val op = instruction.getOpcode
+ (op >= Opcodes.ILOAD && op <= Opcodes.ALOAD) || (op >= Opcodes.ISTORE && op <= Opcodes.ASTORE)
+ }
+
+ def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0
+
+ def nextExecutableInstruction(instruction: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = {
+ var result = instruction
+ do { result = result.getNext }
+ while (result != null && !isExecutable(result) && !alsoKeep(result))
+ Option(result)
+ }
+
+ def sameTargetExecutableInstruction(a: JumpInsnNode, b: JumpInsnNode): Boolean = {
+    // Compare the next executable instruction instead of the labels. Identifies a and b as the same target:
+ // LabelNode(a)
+ // LabelNode(b)
+ // Instr
+ nextExecutableInstruction(a.label) == nextExecutableInstruction(b.label)
+ }
+
+ def removeJumpAndAdjustStack(method: MethodNode, jump: JumpInsnNode) {
+ val instructions = method.instructions
+ val op = jump.getOpcode
+ if ((op >= Opcodes.IFEQ && op <= Opcodes.IFGE) || op == Opcodes.IFNULL || op == Opcodes.IFNONNULL) {
+ instructions.insert(jump, getPop(1))
+ } else if ((op >= Opcodes.IF_ICMPEQ && op <= Opcodes.IF_ICMPLE) || op == Opcodes.IF_ACMPEQ || op == Opcodes.IF_ACMPNE) {
+ instructions.insert(jump, getPop(1))
+ instructions.insert(jump, getPop(1))
+ } else {
+ // we can't remove JSR: its execution does not only jump, it also adds a return address to the stack
+ assert(jump.getOpcode == Opcodes.GOTO)
+ }
+ instructions.remove(jump)
+ }
+
+ def finalJumpTarget(source: JumpInsnNode): LabelNode = {
+ @tailrec def followGoto(label: LabelNode, seenLabels: Set[LabelNode]): LabelNode = nextExecutableInstruction(label) match {
+ case Some(Goto(dest)) =>
+ if (seenLabels(dest.label)) dest.label
+ else followGoto(dest.label, seenLabels + dest.label)
+
+ case _ => label
+ }
+ followGoto(source.label, Set(source.label))
+ }
+
+ def negateJumpOpcode(jumpOpcode: Int): Int = (jumpOpcode: @switch) match {
+ case Opcodes.IFEQ => Opcodes.IFNE
+ case Opcodes.IFNE => Opcodes.IFEQ
+
+ case Opcodes.IFLT => Opcodes.IFGE
+ case Opcodes.IFGE => Opcodes.IFLT
+
+ case Opcodes.IFGT => Opcodes.IFLE
+ case Opcodes.IFLE => Opcodes.IFGT
+
+ case Opcodes.IF_ICMPEQ => Opcodes.IF_ICMPNE
+ case Opcodes.IF_ICMPNE => Opcodes.IF_ICMPEQ
+
+ case Opcodes.IF_ICMPLT => Opcodes.IF_ICMPGE
+ case Opcodes.IF_ICMPGE => Opcodes.IF_ICMPLT
+
+ case Opcodes.IF_ICMPGT => Opcodes.IF_ICMPLE
+ case Opcodes.IF_ICMPLE => Opcodes.IF_ICMPGT
+
+ case Opcodes.IF_ACMPEQ => Opcodes.IF_ACMPNE
+ case Opcodes.IF_ACMPNE => Opcodes.IF_ACMPEQ
+
+ case Opcodes.IFNULL => Opcodes.IFNONNULL
+ case Opcodes.IFNONNULL => Opcodes.IFNULL
+ }
+
+ def getPop(size: Int): InsnNode = {
+ val op = if (size == 1) Opcodes.POP else Opcodes.POP2
+ new InsnNode(op)
+ }
+
+ def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = {
+ val res = mutable.Map.empty[LabelNode, Set[AnyRef]]
+ def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref)
+
+ method.instructions.iterator().asScala foreach {
+ case jump: JumpInsnNode => add(jump.label, jump)
+ case line: LineNumberNode => add(line.start, line)
+ case switch: LookupSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch)
+ case switch: TableSwitchInsnNode => switch.labels.asScala.foreach(add(_, switch)); add(switch.dflt, switch)
+ case _ =>
+ }
+ if (method.localVariables != null) {
+ method.localVariables.iterator().asScala.foreach(l => { add(l.start, l); add(l.end, l) })
+ }
+ if (method.tryCatchBlocks != null) {
+ method.tryCatchBlocks.iterator().asScala.foreach(l => { add(l.start, l); add(l.handler, l); add(l.end, l) })
+ }
+
+ res.toMap
+ }
+
+ def substituteLabel(reference: AnyRef, from: LabelNode, to: LabelNode): Unit = {
+ def substList(list: java.util.List[LabelNode]) = {
+ foreachWithIndex(list.asScala.toList) { case (l, i) =>
+ if (l == from) list.set(i, to)
+ }
+ }
+ reference match {
+ case jump: JumpInsnNode => jump.label = to
+ case line: LineNumberNode => line.start = to
+ case switch: LookupSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to
+ case switch: TableSwitchInsnNode => substList(switch.labels); if (switch.dflt == from) switch.dflt = to
+ case local: LocalVariableNode =>
+ if (local.start == from) local.start = to
+ if (local.end == from) local.end = to
+ case handler: TryCatchBlockNode =>
+ if (handler.start == from) handler.start = to
+ if (handler.handler == from) handler.handler = to
+ if (handler.end == from) handler.end = to
+ }
+ }
+}
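A small sketch of how the extractors above might be used to scan a method; the MethodNode value and the enclosing scope are assumptions (the object lives in package scala.tools.nsc.backend.jvm.opt):

    import scala.tools.asm.tree.MethodNode
    import scala.collection.convert.decorateAsScala._
    import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._

    // counts unconditional and conditional jumps in a method
    def countJumps(method: MethodNode): (Int, Int) = {
      var gotos = 0
      var conditionals = 0
      for (insn <- method.instructions.iterator().asScala) insn match {
        case Goto(_)            => gotos += 1        // plain GOTO
        case ConditionalJump(_) => conditionals += 1 // IFEQ..IF_ACMPNE, IFNULL, IFNONNULL
        case _                  =>
      }
      (gotos, conditionals)
    }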
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
index 3acd2d6154..87ad715e4d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala
@@ -7,125 +7,206 @@ package scala.tools.nsc
package backend.jvm
package opt
+import scala.annotation.switch
import scala.tools.asm.{Opcodes, MethodWriter, ClassWriter}
import scala.tools.asm.tree.analysis.{Analyzer, BasicValue, BasicInterpreter}
import scala.tools.asm.tree._
import scala.collection.convert.decorateAsScala._
-import scala.collection.{ mutable => m }
+import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._
+import scala.tools.nsc.settings.ScalaSettings
/**
- * Intra-Method optimizations.
+ * Optimizations within a single method.
+ *
+ * unreachable code
+ * - removes instructions of basic blocks to which no branch instruction points
+ * + enables eliminating some exception handlers and local variable descriptors
+ * > eliminating them is required for correctness, as explained in `removeUnreachableCode`
+ *
+ * empty exception handlers
+ * - removes exception handlers whose try block is empty
+ * + eliminating a handler where the try block is empty and reachable will turn the catch block
+ * unreachable. in this case "unreachable code" is invoked recursively until reaching a fixpoint.
+ * > for try blocks that are unreachable, "unreachable code" removes also the instructions of the
+ * catch block, and the recursive invocation is not necessary.
+ *
+ * simplify jumps
+ *   - various simplifications, see doc comments of individual optimizations
+ * + changing or eliminating jumps may render some code unreachable, therefore "simplify jumps" is
+ * executed in a loop with "unreachable code"
+ *
+ * empty local variable descriptors
+ * - removes entries from the local variable table where the variable is not actually used
+ * + enables eliminating labels that the entry points to (if they are not otherwise referenced)
+ *
+ * empty line numbers
+ * - eliminates line number nodes that describe no executable instructions
+ * + enables eliminating the label of the line number node (if it's not otherwise referenced)
+ *
+ * stale labels
+ * - eliminate labels that are not referenced, merge sequences of label definitions.
*/
-object LocalOpt {
+class LocalOpt(settings: ScalaSettings) {
/**
- * Remove unreachable instructions from all (non-abstract) methods.
+ * Remove unreachable instructions from all (non-abstract) methods and apply various other
+ * cleanups to the bytecode.
*
* @param clazz The class whose methods are optimized
- * @return `true` if unreachable code was elminated in some method, `false` otherwise.
+ * @return `true` if unreachable code was eliminated in some method, `false` otherwise.
*/
- def removeUnreachableCode(clazz: ClassNode): Boolean = {
- clazz.methods.asScala.foldLeft(false) {
- case (changed, method) => removeUnreachableCode(method, clazz.name) || changed
+ def methodOptimizations(clazz: ClassNode): Boolean = {
+ !settings.YoptNone && clazz.methods.asScala.foldLeft(false) {
+ case (changed, method) => methodOptimizations(method, clazz.name) || changed
}
}
/**
* Remove unreachable code from a method.
+ *
* We rely on dead code elimination provided by the ASM framework, as described in the ASM User
* Guide (http://asm.ow2.org/index.html), Section 8.2.1. It runs a data flow analysis, which only
* computes Frame information for reachable instructions. Instructions for which no Frame data is
- * available after the analyis are unreachable.
+ * available after the analysis are unreachable.
*
- * TODO doc: it also removes empty handlers, unused local vars
+ * Also simplifies branching instructions, removes unused local variable descriptors, empty
+ * exception handlers, unnecessary label declarations and empty line number nodes.
*
- * Returns `true` if dead code in `method` has been eliminated.
+ * Returns `true` if the bytecode of `method` was changed.
*/
- private def removeUnreachableCode(method: MethodNode, ownerClassName: String): Boolean = {
+ private def methodOptimizations(method: MethodNode, ownerClassName: String): Boolean = {
if (method.instructions.size == 0) return false // fast path for abstract methods
- val codeRemoved = removeUnreachableCodeImpl(method, ownerClassName)
-
// unreachable-code also removes unused local variable nodes and empty exception handlers.
- // This is required for correctness: such nodes are not allowed to refer to instruction offsets
- // that don't exist (because they have been eliminated).
- val localsRemoved = removeUnusedLocalVariableNodes(method)
- val handlersRemoved = removeEmptyExceptionHandlers(method)
-
- // When eliminating a handler, the catch block becomes unreachable. The recursive invocation
- // removes these blocks.
- // Note that invoking removeUnreachableCode*Impl* a second time is not enough: removing the dead
- // catch block can render other handlers empty, which also have to be removed in turn.
- if (handlersRemoved) removeUnreachableCode(method, ownerClassName)
-
- // assert that we can leave local variable annotations as-is
+ // This is required for correctness, for example:
+ //
+ // def f = { return 0; try { 1 } catch { case _ => 2 } }
+ //
+ // The result after removeUnreachableCodeImpl:
+ //
+ // TRYCATCHBLOCK L0 L1 L2 java/lang/Exception
+ // L4
+ // ICONST_0
+ // IRETURN
+ // L0
+ // L1
+ // L2
+ //
+ // If we don't eliminate the handler, the ClassWriter emits:
+ //
+ // TRYCATCHBLOCK L0 L0 L0 java/lang/Exception
+ // L1
+ // ICONST_0
+ // IRETURN
+ // L0
+ //
+ // This triggers "ClassFormatError: Illegal exception table range in class file C". Similar
+ // for local variables in dead blocks. Maybe that's a bug in the ASM framework.
+
+ var recurse = true
+ var codeHandlersOrJumpsChanged = false
+ while (recurse) {
+ // unreachable-code, empty-handlers and simplify-jumps run until reaching a fixpoint (see doc on class LocalOpt)
+ val (codeRemoved, handlersRemoved, liveHandlerRemoved) = if (settings.YoptUnreachableCode) {
+ val (codeRemoved, liveLabels) = removeUnreachableCodeImpl(method, ownerClassName)
+ val removedHandlers = removeEmptyExceptionHandlers(method)
+ (codeRemoved, removedHandlers.nonEmpty, removedHandlers.exists(h => liveLabels(h.start)))
+ } else {
+ (false, false, false)
+ }
+
+ val jumpsChanged = if (settings.YoptSimplifyJumps) simplifyJumps(method) else false
+
+ codeHandlersOrJumpsChanged ||= (codeRemoved || handlersRemoved || jumpsChanged)
+
+ // The doc comment of class LocalOpt explains why we recurse if jumpsChanged || liveHandlerRemoved
+ recurse = settings.YoptRecurseUnreachableJumps && (jumpsChanged || liveHandlerRemoved)
+ }
+
+ // (*) Removing stale local variable descriptors is required for correctness of unreachable-code
+ val localsRemoved =
+ if (settings.YoptCompactLocals) compactLocalVariables(method)
+ else if (settings.YoptUnreachableCode) removeUnusedLocalVariableNodes(method)() // (*)
+ else false
+
+ val lineNumbersRemoved = if (settings.YoptEmptyLineNumbers) removeEmptyLineNumbers(method) else false
+
+ val labelsRemoved = if (settings.YoptEmptyLabels) removeEmptyLabelNodes(method) else false
+
+ // assert that local variable annotations are empty (we don't emit them) - otherwise we'd have
+ // to eliminate those covering an empty range, similar to removeUnusedLocalVariableNodes.
def nullOrEmpty[T](l: java.util.List[T]) = l == null || l.isEmpty
assert(nullOrEmpty(method.visibleLocalVariableAnnotations), method.visibleLocalVariableAnnotations)
assert(nullOrEmpty(method.invisibleLocalVariableAnnotations), method.invisibleLocalVariableAnnotations)
- codeRemoved || localsRemoved || handlersRemoved
+ codeHandlersOrJumpsChanged || localsRemoved || lineNumbersRemoved || labelsRemoved
}
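The class-level entry point above can be driven directly on an ASM tree. A minimal sketch, assuming access to this LocalOpt instance (and its `settings`) and to the compiler's shaded ASM under `scala.tools.asm`:

    import scala.tools.asm.ClassReader
    import scala.tools.asm.tree.ClassNode

    // Parse class file bytes into a ClassNode and run the local optimizations on all of its methods.
    def optimize(classBytes: Array[Byte]): (ClassNode, Boolean) = {
      val node = new ClassNode()
      new ClassReader(classBytes).accept(node, ClassReader.SKIP_FRAMES)
      (node, methodOptimizations(node)) // Boolean: was any method's bytecode changed?
    }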
- private def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: String): Boolean = {
- val initialSize = method.instructions.size
- if (initialSize == 0) return false
-
+ /**
+ * Removes unreachable basic blocks.
+ *
+ * TODO: rewrite, don't use computeMaxLocalsMaxStack (runs a ClassWriter) / Analyzer. Too slow.
+ */
+ def removeUnreachableCodeImpl(method: MethodNode, ownerClassName: String): (Boolean, Set[LabelNode]) = {
// The data flow analysis requires the maxLocals / maxStack fields of the method to be computed.
computeMaxLocalsMaxStack(method)
val a = new Analyzer[BasicValue](new BasicInterpreter)
a.analyze(ownerClassName, method)
val frames = a.getFrames
+ val initialSize = method.instructions.size
var i = 0
+ var liveLabels = Set.empty[LabelNode]
val itr = method.instructions.iterator()
while (itr.hasNext) {
- val ins = itr.next()
- // Don't remove label nodes: they might be referenced for example in a LocalVariableNode
- if (frames(i) == null && !ins.isInstanceOf[LabelNode]) {
- // Instruction iterators allow removing during iteration.
- // Removing is O(1): instructions are doubly linked list elements.
- itr.remove()
+ itr.next() match {
+ case l: LabelNode =>
+ if (frames(i) != null) liveLabels += l
+
+ case ins =>
+ // label nodes are not removed: they might be referenced for example in a LocalVariableNode
+ if (frames(i) == null || ins.getOpcode == Opcodes.NOP) {
+ // Instruction iterators allow removing during iteration.
+ // Removing is O(1): instructions are doubly linked list elements.
+ itr.remove()
+ }
}
i += 1
}
-
- method.instructions.size != initialSize
- }
-
- /**
- * Remove exception handlers that cover empty code blocks from all methods of `clazz`.
- * Returns `true` if any exception handler was eliminated.
- */
- def removeEmptyExceptionHandlers(clazz: ClassNode): Boolean = {
- clazz.methods.asScala.foldLeft(false) {
- case (changed, method) => removeEmptyExceptionHandlers(method) || changed
- }
+ (method.instructions.size != initialSize, liveLabels)
}
/**
* Remove exception handlers that cover empty code blocks. A block is considered empty if it
* consists only of labels, frames, line numbers, nops and gotos.
*
+ * There are no executable instructions that we can assume don't throw (eg ILOAD). The JVM spec
+ * basically says that a VirtualMachineError may be thrown at any time:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.3
+ *
* Note that no instructions are eliminated.
*
- * @return `true` if some exception handler was eliminated.
+ * @return the set of removed handlers
*/
- def removeEmptyExceptionHandlers(method: MethodNode): Boolean = {
+ def removeEmptyExceptionHandlers(method: MethodNode): Set[TryCatchBlockNode] = {
/** True if there exists code between start and end. */
def containsExecutableCode(start: AbstractInsnNode, end: LabelNode): Boolean = {
- start != end && (start.getOpcode match {
+ start != end && ((start.getOpcode : @switch) match {
// FrameNode, LabelNode and LineNumberNode have opcode == -1.
- case -1 | Opcodes.NOP | Opcodes.GOTO => containsExecutableCode(start.getNext, end)
+ case -1 | Opcodes.GOTO => containsExecutableCode(start.getNext, end)
case _ => true
})
}
- val initialNumberHandlers = method.tryCatchBlocks.size
+ var removedHandlers = Set.empty[TryCatchBlockNode]
val handlersIter = method.tryCatchBlocks.iterator()
while(handlersIter.hasNext) {
val handler = handlersIter.next()
- if (!containsExecutableCode(handler.start, handler.end)) handlersIter.remove()
+ if (!containsExecutableCode(handler.start, handler.end)) {
+ removedHandlers += handler
+ handlersIter.remove()
+ }
}
- method.tryCatchBlocks.size != initialNumberHandlers
+ removedHandlers
}
/**
@@ -135,41 +216,113 @@ object LocalOpt {
* Note that each entry in the local variable table has a start, end and index. Two entries with
* the same index but distinct start / end ranges are different variables; they may not have the
* same type or name.
- *
- * TODO: also re-allocate locals to occupy fewer slots after eliminating unused ones
*/
- def removeUnusedLocalVariableNodes(method: MethodNode): Boolean = {
+ def removeUnusedLocalVariableNodes(method: MethodNode)(firstLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = {
def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = {
start != end && (start match {
- case v: VarInsnNode => v.`var` == varIndex
+ case v: VarInsnNode if v.`var` == varIndex => true
case _ => variableIsUsed(start.getNext, end, varIndex)
})
}
val initialNumVars = method.localVariables.size
val localsIter = method.localVariables.iterator()
- // The parameters and `this` (for instance methods) have the lowest indices in the local variables
- // table. Note that double / long fields occupy two slots, so we sum up the sizes. Since getSize
- // returns 0 for void, we have to add `max 1`.
- val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).map(_.getSize max 1).sum
- val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0
- val endParamIndex = paramsSize + thisSize
while (localsIter.hasNext) {
val local = localsIter.next()
- // parameters and `this` have the lowest indices, starting at 0
- val used = local.index < endParamIndex || variableIsUsed(local.start, local.end, local.index)
- if (!used)
- localsIter.remove()
+ val index = local.index
+ // parameters and `this` (the lowest indices, starting at 0) are never removed or renumbered
+ if (index >= firstLocalIndex) {
+ if (!variableIsUsed(local.start, local.end, index)) localsIter.remove()
+ else if (renumber(index) != index) local.index = renumber(index)
+ }
}
- method.localVariables.size == initialNumVars
+ method.localVariables.size != initialNumVars
}
+ /**
+ * The number of local variable slots used for parameters and for the `this` reference.
+ */
+ private def parametersSize(method: MethodNode): Int = {
+ // Double / long values occupy two slots, so we sum up the sizes. Since getSize returns 0 for
+ // void, we have to add `max 1`.
+ val paramsSize = scala.tools.asm.Type.getArgumentTypes(method.desc).iterator.map(_.getSize max 1).sum
+ val thisSize = if ((method.access & Opcodes.ACC_STATIC) == 0) 1 else 0
+ paramsSize + thisSize
+ }
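As a worked example of the slot arithmetic, take a hypothetical instance method with descriptor "(JLjava/lang/String;)V": the long occupies two slots, the String reference one, and `this` one more, so the first non-parameter local sits at index 4. A standalone sketch using the same ASM calls:

    import scala.tools.asm.{Opcodes, Type}

    val desc   = "(JLjava/lang/String;)V"  // hypothetical: an instance method f(Long, String): Unit
    val access = Opcodes.ACC_PUBLIC        // not ACC_STATIC, so `this` occupies slot 0
    val paramsSize = Type.getArgumentTypes(desc).map(_.getSize max 1).sum // 2 (long) + 1 (String) = 3
    val thisSize   = if ((access & Opcodes.ACC_STATIC) == 0) 1 else 0     // 1
    val firstLocalIndex = paramsSize + thisSize                           // 4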
+
+ /**
+ * Compact the local variable slots used in the method's implementation. This prevents having
+ * unused slots for example after eliminating unreachable code.
+ *
+ * This transformation reduces the size of the frame for invoking the method. For example, if the
+ * method has an ISTORE instruction to the local variable 3, the maxLocals of the method is at
+ * least 4, even if some local variable slots below 3 are not used by any instruction.
+ *
+ * This could be improved by doing proper register allocation.
+ */
+ def compactLocalVariables(method: MethodNode): Boolean = {
+ // This array is built up to map local variable indices from old to new.
+ val renumber = collection.mutable.ArrayBuffer.empty[Int]
+
+ // Add the index of the local variable used by `varIns` to the `renumber` array.
+ def addVar(varIns: VarInsnNode): Unit = {
+ val index = varIns.`var`
+ val isWide = (varIns.getOpcode: @switch) match {
+ case Opcodes.LLOAD | Opcodes.DLOAD | Opcodes.LSTORE | Opcodes.DSTORE => true
+ case _ => false
+ }
+
+ // Ensure the length of `renumber`. Unused variable indices are mapped to -1.
+ val minLength = if (isWide) index + 2 else index + 1
+ for (i <- renumber.length until minLength) renumber += -1
+
+ renumber(index) = index
+ if (isWide) renumber(index + 1) = index
+ }
+
+ // first phase: collect all used local variables. if the variable at index x is used, set
+ // renumber(x) = x, otherwise renumber(x) = -1. if the variable is wide (long or double), set
+ // renumber(x+1) = x.
+
+ val firstLocalIndex = parametersSize(method)
+ for (i <- 0 until firstLocalIndex) renumber += i // parameters and `this` are always used.
+ method.instructions.iterator().asScala foreach {
+ case VarInstruction(varIns) => addVar(varIns)
+ case _ =>
+ }
+
+ // assign the next free slot to each used local variable.
+ // for example, rewrite (0, 1, -1, 3, -1, 5) to (0, 1, -1, 2, -1, 3).
+
+ var nextIndex = firstLocalIndex
+ for (i <- firstLocalIndex until renumber.length if renumber(i) != -1) {
+ renumber(i) = nextIndex
+ nextIndex += 1
+ }
+
+ // Update the local variable descriptors according to the renumber table, and eliminate stale entries
+ val removedLocalVariableDescriptors = removeUnusedLocalVariableNodes(method)(firstLocalIndex, renumber)
+
+ if (nextIndex == renumber.length) removedLocalVariableDescriptors
+ else {
+ // update variable instructions according to the renumber table
+ method.maxLocals = nextIndex
+ method.instructions.iterator().asScala.foreach {
+ case VarInstruction(varIns) =>
+ val oldIndex = varIns.`var`
+ if (oldIndex >= firstLocalIndex && renumber(oldIndex) != oldIndex)
+ varIns.`var` = renumber(varIns.`var`)
+ case _ =>
+ }
+ true
+ }
+ }
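To make the second phase concrete, here is a minimal standalone sketch of the renumbering step, isolated from ASM and run on the example from the comment above:

    // Assign the next free slot to every used local (entries != -1) at or above firstLocalIndex.
    def compact(renumber: Array[Int], firstLocalIndex: Int): Array[Int] = {
      var nextIndex = firstLocalIndex
      for (i <- firstLocalIndex until renumber.length if renumber(i) != -1) {
        renumber(i) = nextIndex
        nextIndex += 1
      }
      renumber
    }

    compact(Array(0, 1, -1, 3, -1, 5), firstLocalIndex = 2) // Array(0, 1, -1, 2, -1, 3)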
/**
* In order to run an Analyzer, the maxLocals / maxStack fields need to be available. The ASM
* framework only computes these values during bytecode generation.
*
- * Sicne there's currently no better way, we run a bytecode generator on the method and extract
+ * Since there's currently no better way, we run a bytecode generator on the method and extract
* the computed values. This required changes to the ASM codebase:
* - the [[MethodWriter]] class was made public
* - accessors for maxLocals / maxStack were added to the MethodWriter class
@@ -187,4 +340,223 @@ object LocalOpt {
method.maxLocals = mw.getMaxLocals
method.maxStack = mw.getMaxStack
}
+
+ /**
+ * Removes LineNumberNodes that don't describe any executable instructions.
+ *
+ * This method expects (and asserts) that the `start` label of each LineNumberNode is the
+ * lexically preceding label declaration.
+ */
+ def removeEmptyLineNumbers(method: MethodNode): Boolean = {
+ def isEmpty(node: AbstractInsnNode): Boolean = node.getNext match {
+ case null => true
+ case l: LineNumberNode => true
+ case n if n.getOpcode >= 0 => false
+ case n => isEmpty(n)
+ }
+
+ val initialSize = method.instructions.size
+ val iterator = method.instructions.iterator()
+ var previousLabel: LabelNode = null
+ while (iterator.hasNext) {
+ iterator.next match {
+ case label: LabelNode => previousLabel = label
+ case line: LineNumberNode if isEmpty(line) =>
+ assert(line.start == previousLabel)
+ iterator.remove()
+ case _ =>
+ }
+ }
+ method.instructions.size != initialSize
+ }
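A small sketch of the behaviour on a hand-built method, assuming access to removeEmptyLineNumbers and the shaded ASM tree API: the line-10 node is followed only by a label and another line number, so it is considered empty and dropped, while line 11 describes the ICONST_0 and survives.

    import scala.tools.asm.Opcodes
    import scala.tools.asm.tree._

    val m  = new MethodNode(Opcodes.ACC_PUBLIC, "f", "()I", null, null)
    val l0 = new LabelNode()
    val l1 = new LabelNode()
    m.instructions.add(l0)
    m.instructions.add(new LineNumberNode(10, l0)) // empty: no executable instruction before line 11
    m.instructions.add(l1)
    m.instructions.add(new LineNumberNode(11, l1))
    m.instructions.add(new InsnNode(Opcodes.ICONST_0))
    m.instructions.add(new InsnNode(Opcodes.IRETURN))
    removeEmptyLineNumbers(m)                      // true: the line-10 node was removed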
+
+ /**
+ * Removes unreferenced label declarations and squashes sequences of label definitions.
+ *
+ * [ops]; Label(a); Label(b); [ops];
+ * => subs([ops], b, a); Label(a); subs([ops], b, a);
+ */
+ def removeEmptyLabelNodes(method: MethodNode): Boolean = {
+ val references = labelReferences(method)
+
+ val initialSize = method.instructions.size
+ val iterator = method.instructions.iterator()
+ var prev: LabelNode = null
+ while (iterator.hasNext) {
+ iterator.next match {
+ case label: LabelNode =>
+ if (!references.contains(label)) iterator.remove()
+ else if (prev != null) {
+ references(label).foreach(substituteLabel(_, label, prev))
+ iterator.remove()
+ } else prev = label
+
+ case instruction =>
+ if (instruction.getOpcode >= 0) prev = null
+ }
+ }
+ method.instructions.size != initialSize
+ }
+
+ /**
+ * Apply various simplifications to branching instructions.
+ */
+ def simplifyJumps(method: MethodNode): Boolean = {
+ var changed = false
+
+ val allHandlers = method.tryCatchBlocks.asScala.toSet
+
+ // A set of all exception handlers that guard the current instruction, required for simplifyGotoReturn
+ var activeHandlers = Set.empty[TryCatchBlockNode]
+
+ // Instructions that need to be removed. simplifyBranchOverGoto returns an instruction to be
+ // removed. It cannot remove it itself because the instruction may be the successor of the current
+ // instruction of the iterator, which is not supported in ASM.
+ var instructionsToRemove = Set.empty[AbstractInsnNode]
+
+ val iterator = method.instructions.iterator()
+ while (iterator.hasNext) {
+ val instruction = iterator.next()
+
+ instruction match {
+ case l: LabelNode =>
+ activeHandlers ++= allHandlers.filter(_.start == l)
+ activeHandlers = activeHandlers.filter(_.end != l)
+ case _ =>
+ }
+
+ if (instructionsToRemove(instruction)) {
+ iterator.remove()
+ instructionsToRemove -= instruction
+ } else if (isJumpNonJsr(instruction)) { // fast path - all of the below only treat jumps
+ var jumpRemoved = simplifyThenElseSameTarget(method, instruction)
+
+ if (!jumpRemoved) {
+ changed = collapseJumpChains(instruction) || changed
+ jumpRemoved = removeJumpToSuccessor(method, instruction)
+
+ if (!jumpRemoved) {
+ val staleGoto = simplifyBranchOverGoto(method, instruction)
+ instructionsToRemove ++= staleGoto
+ changed ||= staleGoto.nonEmpty
+ changed = simplifyGotoReturn(method, instruction, inTryBlock = activeHandlers.nonEmpty) || changed
+ }
+ }
+ changed ||= jumpRemoved
+ }
+ }
+ assert(instructionsToRemove.isEmpty, "some optimization required removing a previously traversed instruction. add `instructionsToRemove.foreach(method.instructions.remove)`")
+ changed
+ }
+
+ /**
+ * Removes a conditional jump if it is followed by a GOTO to the same destination.
+ *
+ * CondJump l; [nops]; GOTO l; [...]
+ * => POP*; [nops]; GOTO l; [...]
+ *
+ * Introduces 1 or 2 POP instructions, depending on the number of values consumed by the CondJump.
+ */
+ private def simplifyThenElseSameTarget(method: MethodNode, instruction: AbstractInsnNode): Boolean = instruction match {
+ case ConditionalJump(jump) =>
+ nextExecutableInstruction(instruction) match {
+ case Some(Goto(elseJump)) if sameTargetExecutableInstruction(jump, elseJump) =>
+ removeJumpAndAdjustStack(method, jump)
+ true
+
+ case _ => false
+ }
+ case _ => false
+ }
+
+ /**
+ * Replace jumps to a sequence of GOTO instructions by a jump to the final destination.
+ *
+ * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...]
+ * => Jump n; [rest unchanged]
+ *
+ * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop.
+ */
+ private def collapseJumpChains(instruction: AbstractInsnNode): Boolean = instruction match {
+ case JumpNonJsr(jump) =>
+ val target = finalJumpTarget(jump)
+ if (jump.label == target) false else {
+ jump.label = target
+ true
+ }
+
+ case _ => false
+ }
+
+ /**
+ * Eliminates unnecessary jump instructions
+ *
+ * Jump l; [nops]; l: [...]
+ * => POP*; [nops]; l: [...]
+ *
+ * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump.
+ */
+ private def removeJumpToSuccessor(method: MethodNode, instruction: AbstractInsnNode) = instruction match {
+ case JumpNonJsr(jump) if nextExecutableInstruction(jump, alsoKeep = Set(jump.label)) == Some(jump.label) =>
+ removeJumpAndAdjustStack(method, jump)
+ true
+ case _ => false
+ }
+
+ /**
+ * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch
+ * and eliminates the GOTO.
+ *
+ * CondJump l; [nops, no labels]; GOTO m; [nops]; l: [...]
+ * => NegatedCondJump m; [nops, no labels]; [nops]; l: [...]
+ *
+ * Note that no label definitions are allowed in the first [nops] section. Otherwise, there could
+ * be some other jump to the GOTO, and eliminating it would change behavior.
+ *
+ * For technical reasons, we cannot remove the GOTO here (*). Instead, this method returns an Option
+ * containing the GOTO that needs to be eliminated.
+ *
+ * (*) The ASM instruction iterator (used in the caller [[simplifyJumps]]) has an undefined
+ * behavior if the successor of the current instruction is removed, which may be the case here
+ */
+ private def simplifyBranchOverGoto(method: MethodNode, instruction: AbstractInsnNode): Option[JumpInsnNode] = instruction match {
+ case ConditionalJump(jump) =>
+ // don't skip over labels, see doc comment
+ nextExecutableInstruction(jump, alsoKeep = _.isInstanceOf[LabelNode]) match {
+ case Some(Goto(goto)) =>
+ if (nextExecutableInstruction(goto, alsoKeep = Set(jump.label)) == Some(jump.label)) {
+ val newJump = new JumpInsnNode(negateJumpOpcode(jump.getOpcode), goto.label)
+ method.instructions.set(jump, newJump)
+ Some(goto)
+ } else None
+
+ case _ => None
+ }
+ case _ => None
+ }
+
+ /**
+ * Inlines xRETURN and ATHROW
+ *
+ * GOTO l; [any ops]; l: xRETURN/ATHROW
+ * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW
+ *
+ * Inlining is only done if the GOTO instruction is not part of a try block; otherwise the
+ * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw
+ * an IllegalMonitorStateException, as described here:
+ * http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.return
+ */
+ private def simplifyGotoReturn(method: MethodNode, instruction: AbstractInsnNode, inTryBlock: Boolean): Boolean = !inTryBlock && (instruction match {
+ case Goto(jump) =>
+ nextExecutableInstruction(jump.label) match {
+ case Some(target) =>
+ if (isReturn(target) || target.getOpcode == Opcodes.ATHROW) {
+ method.instructions.set(jump, target.clone(null))
+ true
+ } else false
+
+ case _ => false
+ }
+ case _ => false
+ })
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala
new file mode 100644
index 0000000000..7002e43d98
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/OptimizerReporting.scala
@@ -0,0 +1,24 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2014 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import asm.tree._
+
+/**
+ * Reporting utilities used in the optimizer.
+ */
+object OptimizerReporting {
+ def methodSignature(className: String, methodName: String, methodDescriptor: String): String = {
+ className + "::" + methodName + methodDescriptor
+ }
+
+ def methodSignature(className: String, method: MethodNode): String = methodSignature(className, method.name, method.desc)
+
+ def inlineFailure(reason: String): Nothing = MissingRequirementError.signal(reason)
+ def assertionError(message: String): Nothing = throw new AssertionError(message)
+}
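A small usage sketch of the signature formatting, with hypothetical class and method names:

    OptimizerReporting.methodSignature("scala/Predef$", "println", "(Ljava/lang/Object;)V")
    // => "scala/Predef$::println(Ljava/lang/Object;)V"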
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
index 1fadcb8920..0e6ee76eb2 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -7,7 +7,6 @@ package scala
package tools.nsc
package backend.opt
-import scala.tools.nsc.backend.icode.analysis.LubException
import scala.annotation.tailrec
/**
@@ -19,7 +18,7 @@ import scala.annotation.tailrec
*
* With some more work it could be extended to
* - cache stable values (final fields, modules) in locals
- * - replace the copy propagation in ClosureElilmination
+ * - replace the copy propagation in ClosureElimination
* - fold constants
* - eliminate unnecessary stores and loads
* - propagate knowledge gathered from conditionals for further optimization
@@ -438,7 +437,7 @@ abstract class ConstantOptimization extends SubComponent {
// TODO if we do all that we need to be careful in the
// case that success and failure are the same target block
// because we're using a Map and don't want one possible state to clobber the other
- // alternative mayb we should just replace the conditional with a jump if both targets are the same
+ // alternative maybe we should just replace the conditional with a jump if both targets are the same
def mightEqual = val1 mightEqual val2
def mightNotEqual = val1 mightNotEqual val2
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 4b419b210c..3704acb055 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -223,7 +223,7 @@ abstract class DeadCodeElimination extends SubComponent {
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
- // adds the instrutions that define the stack values about to be consumed to the work list to
+ // adds the instructions that define the stack values about to be consumed to the work list to
// be marked useful
def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
debuglog(s"\t${bb1(idx1)} is consumed by $instr")
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 351eb23c4c..8f6fc65706 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -26,7 +26,7 @@ import scala.reflect.internal.util.NoSourceFile
* where `p` is defined in a library L, and is accessed from a library C (for Client),
* where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
* The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
- * (the accesibility of methods and constructors isn't touched by the inliner).
+ * (the accessibility of methods and constructors isn't touched by the inliner).
*
* Thus we add one more goal to our list:
* (c) Compile C (either optimized or not) against any of L or L',
@@ -290,7 +290,7 @@ abstract class Inliners extends SubComponent {
/**
* A transformation local to the body of the IMethod received as argument.
- * An linining decision consists in replacing a callsite with the body of the callee.
+ * An inlining decision consists in replacing a callsite with the body of the callee.
* Please notice that, because `analyzeMethod()` itself may modify a method body,
* the particular callee bodies that end up being inlined depend on the particular order in which methods are visited
* (no topological sorting over the call-graph is attempted).
diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
new file mode 100644
index 0000000000..3f06264e3c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/AggregateFlatClassPath.scala
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.net.URL
+import scala.annotation.tailrec
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import scala.tools.nsc.util.ClassRepresentation
+
+/**
+ * A classpath unifying multiple class- and sourcepath entries.
+ * A flat classpath can obtain entries for classes and sources independently,
+ * so it tries to perform operations optimally - iterating only the collections
+ * that are needed at a given moment, and only as far as necessary.
+ * @param aggregates classpath instances containing entries which this class processes
+ */
+case class AggregateFlatClassPath(aggregates: Seq[FlatClassPath]) extends FlatClassPath {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ @tailrec
+ def find(aggregates: Seq[FlatClassPath]): Option[AbstractFile] =
+ if (aggregates.nonEmpty) {
+ val classFile = aggregates.head.findClassFile(className)
+ if (classFile.isDefined) classFile
+ else find(aggregates.tail)
+ } else None
+
+ find(aggregates)
+ }
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ @tailrec
+ def findEntry[T <: ClassRepClassPathEntry](aggregates: Seq[FlatClassPath], getEntries: FlatClassPath => Seq[T]): Option[T] =
+ if (aggregates.nonEmpty) {
+ val entry = getEntries(aggregates.head)
+ .find(_.name == simpleClassName)
+ if (entry.isDefined) entry
+ else findEntry(aggregates.tail, getEntries)
+ } else None
+
+ val classEntry = findEntry(aggregates, classesGetter(pkg))
+ val sourceEntry = findEntry(aggregates, sourcesGetter(pkg))
+
+ mergeClassesAndSources(classEntry.toList, sourceEntry.toList).headOption
+ }
+
+ override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs)
+
+ override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct
+
+ override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*)
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct
+ aggregatedPackages
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] =
+ getDistinctEntries(classesGetter(inPackage))
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] =
+ getDistinctEntries(sourcesGetter(inPackage))
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip
+ val distinctPackages = packages.flatten.distinct
+ val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*)
+ FlatClassPathEntries(distinctPackages, distinctClassesAndSources)
+ }
+
+ /**
+ * Returns only one entry for each name. If there's both a source and a class entry, it
+ * creates an entry containing both of them. If there is more than one class or source entry
+ * for the same class, the first entry of each type found on the classpath is used.
+ */
+ private def mergeClassesAndSources(entries: Seq[ClassRepClassPathEntry]*): Seq[ClassRepClassPathEntry] = {
+ // based on the implementation from MergedClassPath
+ var count = 0
+ val indices = collection.mutable.HashMap[String, Int]()
+ val mergedEntries = new ArrayBuffer[ClassRepClassPathEntry](1024)
+
+ for {
+ partOfEntries <- entries
+ entry <- partOfEntries
+ } {
+ val name = entry.name
+ if (indices contains name) {
+ val index = indices(name)
+ val existing = mergedEntries(index)
+
+ if (existing.binary.isEmpty && entry.binary.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get)
+ if (existing.source.isEmpty && entry.source.isDefined)
+ mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get)
+ }
+ else {
+ indices(name) = count
+ mergedEntries += entry
+ count += 1
+ }
+ }
+ mergedEntries.toIndexedSeq
+ }
+
+ private def getDistinctEntries[EntryType <: ClassRepClassPathEntry](getEntries: FlatClassPath => Seq[EntryType]): Seq[EntryType] = {
+ val seenNames = collection.mutable.HashSet[String]()
+ val entriesBuffer = new ArrayBuffer[EntryType](1024)
+ for {
+ cp <- aggregates
+ entry <- getEntries(cp) if !seenNames.contains(entry.name)
+ } {
+ entriesBuffer += entry
+ seenNames += entry.name
+ }
+ entriesBuffer.toIndexedSeq
+ }
+
+ private def classesGetter(pkg: String) = (cp: FlatClassPath) => cp.classes(pkg)
+ private def sourcesGetter(pkg: String) = (cp: FlatClassPath) => cp.sources(pkg)
+}
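A minimal usage sketch with hypothetical directories (DirectoryFlatClassPath and DirectoryFlatSourcePath are introduced further down in this change); if both a class file and a source file are found for a name, the result is a single merged entry carrying both:

    import java.io.File

    val cp = AggregateFlatClassPath(Seq(
      DirectoryFlatClassPath(new File("out/classes")),   // hypothetical output directory
      DirectoryFlatSourcePath(new File("src/library"))   // hypothetical source directory
    ))
    cp.findClass("scala.collection.immutable.List")      // Some(entry) if found in either aggregate
    cp.findClassFile("scala.collection.immutable.List")  // the class file only, if present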
diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
new file mode 100644
index 0000000000..9bf4e3f779
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+
+/**
+ * A trait that contains factory methods for classpath elements of type T.
+ *
+ * The logic has been abstracted from ClassPath#ClassPathContext so that it's possible
+ * to have a common trait that supports both recursive and flat classpath representations.
+ *
+ * Therefore, we expect that T will be either ClassPath[U] or FlatClassPath.
+ */
+trait ClassPathFactory[T] {
+
+ /**
+ * Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): T
+
+ /**
+ * Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[T]
+
+ def expandPath(path: String, expandStar: Boolean = true): List[String] = ClassPath.expandPath(path, expandStar)
+
+ def expandDir(extdir: String): List[String] = ClassPath.expandDir(extdir)
+
+ def contentsOfDirsInPath(path: String): List[T] =
+ for {
+ dir <- expandPath(path, expandStar = false)
+ name <- expandDir(dir)
+ entry <- Option(AbstractFile.getDirectory(name))
+ } yield newClassPath(entry)
+
+ def classesInExpandedPath(path: String): IndexedSeq[T] =
+ classesInPathImpl(path, expand = true).toIndexedSeq
+
+ def classesInPath(path: String) = classesInPathImpl(path, expand = false)
+
+ def classesInManifest(useManifestClassPath: Boolean) =
+ if (useManifestClassPath) ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url))
+ else Nil
+
+ // Internal
+ protected def classesInPathImpl(path: String, expand: Boolean) =
+ for {
+ file <- expandPath(path, expand)
+ dir <- Option(AbstractFile.getDirectory(file))
+ } yield newClassPath(dir)
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
new file mode 100644
index 0000000000..81d2f7320f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.io.FileFilter
+import java.net.URL
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.PlainFile
+import scala.tools.nsc.util.ClassRepresentation
+import FileUtils._
+
+/**
+ * A trait for looking up classpath entries of a given type in directories.
+ * It provides common logic for classes handling class and source files.
+ * It makes use of the fact that, with nested directories, it's easy to find a file
+ * once we have the name of its package.
+ */
+trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+ val dir: File
+ assert(dir != null, "Directory file in DirectoryFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(dir.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(dir.getPath)
+
+ import FlatClassPath.RootPackage
+ private def getDirectory(forPackage: String): Option[File] = {
+ if (forPackage == RootPackage) {
+ Some(dir)
+ } else {
+ val packageDirName = FileUtils.dirPath(forPackage)
+ val packageDir = new File(dir, packageDirName)
+ if (packageDir.exists && packageDir.isDirectory) {
+ Some(packageDir)
+ } else None
+ }
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val dirForPackage = getDirectory(inPackage)
+ val nestedDirs: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles(DirectoryFileLookup.packageDirectoryFileFilter)
+ }
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ val entries = nestedDirs map { file =>
+ PackageEntryImpl(prefix + file.getName)
+ }
+ entries
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles(fileFilter)
+ }
+ val entries = files map { file =>
+ val wrappedFile = new scala.reflect.io.File(file)
+ createFileEntry(new PlainFile(wrappedFile))
+ }
+ entries
+ }
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val dirForPackage = getDirectory(inPackage)
+ val files: Array[File] = dirForPackage match {
+ case None => Array.empty
+ case Some(directory) => directory.listFiles()
+ }
+ val packagePrefix = PackageNameUtils.packagePrefix(inPackage)
+ val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ for (file <- files) {
+ if (file.isPackage) {
+ val pkgEntry = PackageEntryImpl(packagePrefix + file.getName)
+ packageBuf += pkgEntry
+ } else if (fileFilter.accept(file)) {
+ val wrappedFile = new scala.reflect.io.File(file)
+ val abstractFile = new PlainFile(wrappedFile)
+ fileBuf += createFileEntry(abstractFile)
+ }
+ }
+ FlatClassPathEntries(packageBuf, fileBuf)
+ }
+
+ protected def createFileEntry(file: AbstractFile): FileEntryType
+ protected def fileFilter: FileFilter
+}
+
+object DirectoryFileLookup {
+
+ private[classpath] object packageDirectoryFileFilter extends FileFilter {
+ override def accept(pathname: File): Boolean = pathname.isPackage
+ }
+}
+
+case class DirectoryFlatClassPath(dir: File)
+ extends DirectoryFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = findClassFile(className) map ClassFileEntryImpl
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val classFile = new File(s"$dir/$relativePath.class")
+ if (classFile.exists) {
+ val wrappedClassFile = new scala.reflect.io.File(classFile)
+ val abstractClassFile = new PlainFile(wrappedClassFile)
+ Some(abstractClassFile)
+ } else None
+ }
+
+ override protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def fileFilter: FileFilter = DirectoryFlatClassPath.classFileFilter
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+}
+
+object DirectoryFlatClassPath {
+
+ private val classFileFilter = new FileFilter {
+ override def accept(pathname: File): Boolean = pathname.isClass
+ }
+}
+
+case class DirectoryFlatSourcePath(dir: File)
+ extends DirectoryFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def fileFilter: FileFilter = DirectoryFlatSourcePath.sourceFileFilter
+
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ findSourceFile(className) map SourceFileEntryImpl
+ }
+
+ private def findSourceFile(className: String): Option[AbstractFile] = {
+ val relativePath = FileUtils.dirPath(className)
+ val sourceFile = Stream("scala", "java")
+ .map(ext => new File(s"$dir/$relativePath.$ext"))
+ .collectFirst { case file if file.exists() => file }
+
+ sourceFile.map { file =>
+ val wrappedSourceFile = new scala.reflect.io.File(file)
+ val abstractSourceFile = new PlainFile(wrappedSourceFile)
+ abstractSourceFile
+ }
+ }
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+}
+
+object DirectoryFlatSourcePath {
+
+ private val sourceFileFilter = new FileFilter {
+ override def accept(pathname: File): Boolean = endsScalaOrJava(pathname.getName)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
new file mode 100644
index 0000000000..ee2528e15c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.{ File => JFile }
+import java.net.URL
+import scala.reflect.internal.FatalError
+import scala.reflect.io.AbstractFile
+
+/**
+ * Common methods related to Java files and abstract files used in the context of classpath
+ */
+object FileUtils {
+ implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name)
+
+ def isClass: Boolean = !file.isDirectory && file.hasExtension("class")
+
+ def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java"))
+
+ // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip?
+ def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip")
+
+ /**
+ * Safe method returning a sequence containing one URL representing this file when the underlying file exists,
+ * and returning the given default value otherwise.
+ */
+ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL)
+ }
+
+ implicit class FileOps(val file: JFile) extends AnyVal {
+ def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName)
+
+ def isClass: Boolean = file.isFile && file.getName.endsWith(".class")
+ }
+
+ def stripSourceExtension(fileName: String): String = {
+ if (endsScala(fileName)) stripClassExtension(fileName)
+ else if (endsJava(fileName)) stripJavaExtension(fileName)
+ else throw new FatalError("Unexpected source file ending: " + fileName)
+ }
+
+ def dirPath(forPackage: String) = forPackage.replace('.', '/')
+
+ def endsClass(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".class"
+
+ def endsScalaOrJava(fileName: String): Boolean =
+ endsScala(fileName) || endsJava(fileName)
+
+ def endsJava(fileName: String): Boolean =
+ fileName.length > 5 && fileName.substring(fileName.length - 5) == ".java"
+
+ def endsScala(fileName: String): Boolean =
+ fileName.length > 6 && fileName.substring(fileName.length - 6) == ".scala"
+
+ def stripClassExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length
+
+ def stripJavaExtension(fileName: String): String =
+ fileName.substring(0, fileName.length - 5)
+
+ // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed
+ // because then some tests in partest don't pass
+ private def mayBeValidPackage(dirName: String): Boolean =
+ (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.')
+}
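A few worked examples of the helpers above; note that ".scala" and ".class" are both six characters long, which is why stripSourceExtension can reuse stripClassExtension for Scala sources:

    FileUtils.dirPath("scala.collection.immutable")   // "scala/collection/immutable"
    FileUtils.endsScala("List.scala")                  // true
    FileUtils.stripSourceExtension("List.scala")       // "List"
    FileUtils.stripSourceExtension("ArrayList.java")   // "ArrayList"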
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
new file mode 100644
index 0000000000..cb201617d2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPath.scala
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, ClassRepresentation }
+
+/**
+ * A base trait for the particular flat classpath representation implementations.
+ *
+ * We call this variant of a classpath representation flat because it's possible to
+ * query the whole classpath using just single instance extending this trait.
+ *
+ * This is an alternative design compared to scala.tools.nsc.util.ClassPath
+ */
+trait FlatClassPath extends ClassFileLookup[AbstractFile] {
+ /** Empty string represents root package */
+ private[nsc] def packages(inPackage: String): Seq[PackageEntry]
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry]
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry]
+
+ /** Allows getting entries for packages, and for classes merged with sources, possibly in one pass. */
+ private[nsc] def list(inPackage: String): FlatClassPathEntries
+
+ // A default implementation which should be overridden if a more efficient
+ // solution exists for a given type of FlatClassPath
+ override def findClass(className: String): Option[ClassRepresentation[AbstractFile]] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+
+ val foundClassFromClassFiles = classes(pkg)
+ .find(_.name == simpleClassName)
+
+ def findClassInSources = sources(pkg)
+ .find(_.name == simpleClassName)
+
+ foundClassFromClassFiles orElse findClassInSources
+ }
+
+ override def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ def asClassPathStrings: Seq[String]
+}
+
+object FlatClassPath {
+ val RootPackage = ""
+}
+
+case class FlatClassPathEntries(packages: Seq[PackageEntry], classesAndSources: Seq[ClassRepClassPathEntry])
+
+object FlatClassPathEntries {
+ import scala.language.implicitConversions
+ // to have working unzip method
+ implicit def entry2Tuple(entry: FlatClassPathEntries) = (entry.packages, entry.classesAndSources)
+}
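A minimal sketch of what the implicit conversion buys (assuming code inside the scala.tools.nsc package, since the entry implementations below are private[nsc]): a Seq[FlatClassPathEntries] can be unzipped directly into packages and classes/sources, exactly as AggregateFlatClassPath.list does.

    val perClassPath: Seq[FlatClassPathEntries] = Seq(
      FlatClassPathEntries(Seq(PackageEntryImpl("scala")), Seq.empty),
      FlatClassPathEntries(Seq(PackageEntryImpl("java")),  Seq.empty)
    )
    val (packages, classesAndSources) = perClassPath.unzip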
+
+sealed trait ClassRepClassPathEntry extends ClassRepresentation[AbstractFile]
+
+trait ClassFileEntry extends ClassRepClassPathEntry {
+ def file: AbstractFile
+}
+
+trait SourceFileEntry extends ClassRepClassPathEntry {
+ def file: AbstractFile
+}
+
+trait PackageEntry {
+ def name: String
+}
+
+private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry {
+ override def name = FileUtils.stripClassExtension(file.name) // class name
+
+ override def binary: Option[AbstractFile] = Some(file)
+ override def source: Option[AbstractFile] = None
+}
+
+private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry {
+ override def name = FileUtils.stripSourceExtension(file.name)
+
+ override def binary: Option[AbstractFile] = None
+ override def source: Option[AbstractFile] = Some(file)
+}
+
+private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepClassPathEntry {
+ override def name = FileUtils.stripClassExtension(classFile.name)
+
+ override def binary: Option[AbstractFile] = Some(classFile)
+ override def source: Option[AbstractFile] = Some(srcFile)
+}
+
+private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry
+
+private[nsc] trait NoSourcePaths {
+ def asSourcePathString: String = ""
+ private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty
+}
+
+private[nsc] trait NoClassPaths {
+ def findClassFile(className: String): Option[AbstractFile] = None
+ private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
new file mode 100644
index 0000000000..7f67381d4d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/FlatClassPathFactory.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.util.ClassPath
+import FileUtils.AbstractFileOps
+
+/**
+ * Provides factory methods for flat classpath. When creating classpath instances for a given path,
+ * it uses the proper type of classpath depending on the types of the particular files containing sources or classes.
+ */
+class FlatClassPathFactory(settings: Settings) extends ClassPathFactory[FlatClassPath] {
+
+ override def newClassPath(file: AbstractFile): FlatClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarFlatClassPathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryFlatClassPath(file.file)
+ else
+ sys.error(s"Unsupported classpath element: $file")
+
+ override def sourcesInPath(path: String): List[FlatClassPath] =
+ for {
+ file <- expandPath(path, expandStar = false)
+ dir <- Option(AbstractFile getDirectory file)
+ } yield createSourcePath(dir)
+
+ private def createSourcePath(file: AbstractFile): FlatClassPath =
+ if (file.isJarOrZip)
+ ZipAndJarFlatSourcePathFactory.create(file, settings)
+ else if (file.isDirectory)
+ new DirectoryFlatSourcePath(file.file)
+ else
+ sys.error(s"Unsupported sourcepath element: $file")
+}
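A small usage sketch, assuming an already configured Settings instance (defaults are used here for brevity): the factory expands a classpath string and yields one FlatClassPath per directory or jar found on it.

    import scala.tools.nsc.Settings

    val settings = new Settings()                     // assumption: default settings suffice for the sketch
    val factory  = new FlatClassPathFactory(settings)
    // "lib/*" is a hypothetical path; the star is expanded to the jars in that directory
    val classPaths: IndexedSeq[FlatClassPath] = factory.classesInExpandedPath("lib/*")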
diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
new file mode 100644
index 0000000000..c907d565d2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import scala.tools.nsc.classpath.FlatClassPath.RootPackage
+
+/**
+ * Common methods related to package names represented as String
+ */
+object PackageNameUtils {
+
+ /**
+ * @param fullClassName full class name with package
+ * @return (package, simple class name)
+ */
+ def separatePkgAndClassNames(fullClassName: String): (String, String) = {
+ val lastDotIndex = fullClassName.lastIndexOf('.')
+ if (lastDotIndex == -1)
+ (RootPackage, fullClassName)
+ else
+ (fullClassName.substring(0, lastDotIndex), fullClassName.substring(lastDotIndex + 1))
+ }
+
+ def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "."
+}
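A couple of worked examples (the root package is represented by the empty string):

    PackageNameUtils.separatePkgAndClassNames("scala.collection.immutable.List")
    // => ("scala.collection.immutable", "List")
    PackageNameUtils.separatePkgAndClassNames("List")   // => ("", "List")
    PackageNameUtils.packagePrefix("scala.collection")  // "scala.collection."
    PackageNameUtils.packagePrefix("")                  // ""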
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
new file mode 100644
index 0000000000..84e21a3ccd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -0,0 +1,180 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.annotation.tailrec
+import scala.reflect.io.{ AbstractFile, FileZipArchive, ManifestResources }
+import scala.tools.nsc.Settings
+import FileUtils._
+
+/**
+ * A trait providing an optional cache for classpath entries obtained from zip and jar files.
+ * It's possible to create such a cache because entries in such files won't change (at
+ * least they will be the same each time the classpath is loaded during the lifetime of the JVM process)
+ * - unlike class and source files in directories, which can be modified and recompiled.
+ * It allows us to e.g. significantly reduce the memory used by presentation compilers in Scala IDE
+ * when many projects share a lot of common dependencies.
+ */
+sealed trait ZipAndJarFileLookupFactory {
+
+ private val cache = collection.mutable.Map.empty[AbstractFile, FlatClassPath]
+
+ def create(zipFile: AbstractFile, settings: Settings): FlatClassPath = {
+ if (settings.YdisableFlatCpCaching) createForZipFile(zipFile)
+ else createUsingCache(zipFile, settings)
+ }
+
+ protected def createForZipFile(zipFile: AbstractFile): FlatClassPath
+
+ private def createUsingCache(zipFile: AbstractFile, settings: Settings): FlatClassPath = cache.synchronized {
+ def newClassPathInstance = {
+ if (settings.verbose || settings.Ylogcp)
+ println(s"$zipFile is not yet in the classpath cache")
+ createForZipFile(zipFile)
+ }
+ cache.getOrElseUpdate(zipFile, newClassPathInstance)
+ }
+}
+
+/**
+ * Manages creation of flat classpath for class files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
+
+ private case class ZipArchiveFlatClassPath(zipFile: File)
+ extends ZipArchiveFileLookup[ClassFileEntryImpl]
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass
+ }
+
+ /**
+ * This type of classpath is closely related to the support for JSR-223.
+ * Its usage can be observed e.g. when running:
+ * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala
+ * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry:
+ * Name: scala/Function2$mcFJD$sp.class
+ */
+ private case class ManifestResourcesFlatClassPath(file: ManifestResources)
+ extends FlatClassPath
+ with NoSourcePaths {
+
+ override def findClassFile(className: String): Option[AbstractFile] = {
+ val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
+ classes(pkg).find(_.name == simpleClassName).map(_.file)
+ }
+
+ override def asClassPathStrings: Seq[String] = Seq(file.path)
+
+ override def asURLs: Seq[URL] = file.toURLs()
+
+ import ManifestResourcesFlatClassPath.PackageFileInfo
+ import ManifestResourcesFlatClassPath.PackageInfo
+
+ /**
+ * A cache mapping package name to abstract file for package directory and subpackages of given package.
+ *
+ * ManifestResources can iterate through the collections of entries from e.g. a remote jar file.
+ * We can't just specify the path to a concrete directory, so we can't simply 'jump' into
+ * a given package when it's needed. On the other hand, we can iterate over entries to get
+ * AbstractFiles, iterate over the entries of these files, etc.
+ *
+ * Instead of traversing the tree of AbstractFiles once and caching all entries, or traversing it each
+ * time we need the subpackages or classes of a given package, we traverse it once and cache only the packages.
+ * Classes for a given package can then be loaded easily when they are needed.
+ */
+ private lazy val cachedPackages: collection.mutable.HashMap[String, PackageFileInfo] = {
+ val packages = collection.mutable.HashMap[String, PackageFileInfo]()
+
+ def getSubpackages(dir: AbstractFile): List[AbstractFile] =
+ (for (file <- dir if file.isPackage) yield file)(collection.breakOut)
+
+ @tailrec
+ def traverse(packagePrefix: String,
+ filesForPrefix: List[AbstractFile],
+ subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match {
+ case pkgFile :: remainingFiles =>
+ val subpackages = getSubpackages(pkgFile)
+ val fullPkgName = packagePrefix + pkgFile.name
+ packages.put(fullPkgName, PackageFileInfo(pkgFile, subpackages))
+ val newPackagePrefix = fullPkgName + "."
+ subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages))
+ traverse(packagePrefix, remainingFiles, subpackagesQueue)
+ case Nil if subpackagesQueue.nonEmpty =>
+ val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue()
+ traverse(packagePrefix, filesForPrefix, subpackagesQueue)
+ case _ =>
+ }
+
+ val subpackages = getSubpackages(file)
+ packages.put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages))
+ traverse(FlatClassPath.RootPackage, subpackages, collection.mutable.Queue())
+ packages
+ }
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(_, subpackages)) =>
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ subpackages.map(packageFile => PackageEntryImpl(prefix + packageFile.name))
+ }
+
+ override private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = cachedPackages.get(inPackage) match {
+ case None => Seq.empty
+ case Some(PackageFileInfo(pkg, _)) =>
+ (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file))(collection.breakOut)
+ }
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = FlatClassPathEntries(packages(inPackage), classes(inPackage))
+ }
+
+ private object ManifestResourcesFlatClassPath {
+ case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile])
+ case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath =
+ if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
+ else ZipArchiveFlatClassPath(zipFile.file)
+
+ private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match {
+ case manifestRes: ManifestResources =>
+ ManifestResourcesFlatClassPath(manifestRes)
+ case _ =>
+ val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile"
+ throw new IllegalArgumentException(errorMsg)
+ }
+}
+
+/**
+ * Manages creation of flat classpath for source files placed in zip and jar files.
+ * It should be the only way of creating them as it provides caching.
+ */
+object ZipAndJarFlatSourcePathFactory extends ZipAndJarFileLookupFactory {
+
+ private case class ZipArchiveFlatSourcePath(zipFile: File)
+ extends ZipArchiveFileLookup[SourceFileEntryImpl]
+ with NoClassPaths {
+
+ override def asSourcePathString: String = asClassPathString
+
+ override private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = files(inPackage)
+
+ override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file)
+ override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
+ }
+
+ override protected def createForZipFile(zipFile: AbstractFile): FlatClassPath = ZipArchiveFlatSourcePath(zipFile.file)
+}
diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
new file mode 100644
index 0000000000..1d0de57779
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.classpath
+
+import java.io.File
+import java.net.URL
+import scala.collection.Seq
+import scala.reflect.io.AbstractFile
+import scala.reflect.io.FileZipArchive
+import FileUtils.AbstractFileOps
+
+/**
+ * A trait for looking up classpath entries of a given type in zip and jar files.
+ * It provides common logic for classes handling class and source files.
+ * It's aware of things like the META-INF directory, which is correctly skipped.
+ */
+trait ZipArchiveFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatClassPath {
+ val zipFile: File
+
+ assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
+
+ override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL)
+ override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath)
+
+ private val archive = new FileZipArchive(zipFile)
+
+ override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = {
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if entry.isPackage
+ } yield PackageEntryImpl(prefix + entry.name)
+ }
+
+ protected def files(inPackage: String): Seq[FileEntryType] =
+ for {
+ dirEntry <- findDirEntry(inPackage).toSeq
+ entry <- dirEntry.iterator if isRequiredFileType(entry)
+ } yield createFileEntry(entry)
+
+ override private[nsc] def list(inPackage: String): FlatClassPathEntries = {
+ val foundDirEntry = findDirEntry(inPackage)
+
+ foundDirEntry map { dirEntry =>
+ val pkgBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
+ val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
+ val prefix = PackageNameUtils.packagePrefix(inPackage)
+
+ for (entry <- dirEntry.iterator) {
+ if (entry.isPackage)
+ pkgBuf += PackageEntryImpl(prefix + entry.name)
+ else if (isRequiredFileType(entry))
+ fileBuf += createFileEntry(entry)
+ }
+ FlatClassPathEntries(pkgBuf, fileBuf)
+ } getOrElse FlatClassPathEntries(Seq.empty, Seq.empty)
+ }
+
+ private def findDirEntry(pkg: String) = {
+ val dirName = s"${FileUtils.dirPath(pkg)}/"
+ archive.allDirs.get(dirName)
+ }
+
+ protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType
+ protected def isRequiredFileType(file: AbstractFile): Boolean
+}
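
As a self-contained illustration of what list(inPackage) computes, here is a toy analogue over java.util.zip that partitions the direct children of one jar directory into sub-packages and class files; the helper name and logic are illustrative only, not compiler API:

    import java.util.zip.ZipFile
    import scala.collection.JavaConverters._

    def toyList(jar: java.io.File, inPackage: String): (Seq[String], Seq[String]) = {
      val dirName  = inPackage.replace('.', '/') + "/"       // same key shape as findDirEntry above
      val entries  = new ZipFile(jar).entries.asScala.toList
      val children = entries.filter { e =>
        e.getName.startsWith(dirName) && e.getName != dirName &&
          !e.getName.stripPrefix(dirName).dropRight(1).contains('/')   // direct children only
      }
      val (dirs, files) = children.partition(_.isDirectory)
      (dirs.map(_.getName), files.map(_.getName).filter(_.endsWith(".class")))
    }
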
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 6e3d013e52..4b1805479d 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package plugins
-import scala.reflect.io.{ File, Path }
+import scala.reflect.io.Path
import scala.tools.nsc.util.ClassPath
import scala.tools.util.PathResolver.Defaults
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 0b218b711c..5bf611a7b0 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -29,7 +29,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
case INFO => null
}
- private def clabel(severity: Severity): String = {
+ protected def clabel(severity: Severity): String = {
val label0 = label(severity)
if (label0 eq null) "" else label0 + ": "
}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 4727e6d867..060a24d8d4 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -35,7 +35,11 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
case s: AbsSettings => this.userSetSettings == s.userSetSettings
case _ => false
}
- override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n")).mkString
+ override def toString() = {
+ val uss = userSetSettings
+ val indent = if (uss.nonEmpty) " " * 2 else ""
+ uss.mkString(f"Settings {%n$indent", f"%n$indent", f"%n}%n")
+ }
def toConciseString = userSetSettings.mkString("(", " ", ")")
def checkDependencies =
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 8c2b510bfd..fffbb4333f 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -22,13 +22,15 @@ class FscSettings(error: String => Unit) extends Settings(error) {
val reset = BooleanSetting("-reset", "Reset compile server caches")
val shutdown = BooleanSetting("-shutdown", "Shutdown compile server")
val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "")
+ val port = IntSetting ("-port", "Search for and start the compile server on the given port only",
+ 0, Some((0, Int.MaxValue)), (_: String) => None)
val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket")
val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)",
30, Some((0, Int.MaxValue)), (_: String) => None)
// For improved help output, separating fsc options from the others.
def fscSpecific = Set[Settings#Setting](
- currentDir, reset, shutdown, server, preferIPv4, idleMins
+ currentDir, reset, shutdown, server, port, preferIPv4, idleMins
)
val isFscSpecific: String => Boolean = fscSpecific map (_.name)
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index bbe21477cb..b4987e1240 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -125,14 +125,26 @@ class MutableSettings(val errorFn: String => Unit)
case Some(cmd) => setter(cmd)(args)
}
- // if arg is of form -Xfoo:bar,baz,quux
- def parseColonArg(s: String): Option[List[String]] = {
- val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None)
-
- // any non-Nil return value means failure and we return s unmodified
- tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
+ // -Xfoo: clears Clearables
+ def clearIfExists(cmd: String): Option[List[String]] = lookupSetting(cmd) match {
+ case Some(c: Clearable) => c.clear() ; Some(Nil)
+ case Some(s) => s.errorAndValue(s"Missing argument to $cmd", None)
+ case None => None
}
+ // if arg is of form -Xfoo:bar,baz,quux
+ // the entire arg is consumed, so return None for failure
+ // any non-Nil return value means failure and we return s unmodified
+ def parseColonArg(s: String): Option[List[String]] =
+ if (s endsWith ":") {
+ clearIfExists(s.init)
+ } else {
+ for {
+ (p, args) <- StringOps.splitWhere(s, _ == ':', doDropIndex = true)
+ rest <- tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
+ } yield rest
+ }
+
// if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
def parseNormalArg(p: String, args: List[String]): Option[List[String]] =
tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _)
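
A small sketch of the new trailing-colon form handled by clearIfExists; it assumes -Ylog is backed by a PhasesSetting that implements Clearable (settings that take a colon argument but are not Clearable fall into the "Missing argument" branch above):

    import scala.tools.nsc.Settings

    val s = new Settings((msg: String) => Console.err.println(msg))
    s.processArguments(List("-Ylog:typer"), processAll = true)   // "-Xfoo:bar" form: sets the phases
    s.processArguments(List("-Ylog:"), processAll = true)        // "-Xfoo:" form: clear() resets them
    // An unknown "-Xbar:" still returns None and is reported as an unrecognized option.
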
@@ -217,7 +229,8 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
- def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, initial: ScalaVersion, default: Option[ScalaVersion] = None) =
+ add(new ScalaVersionSetting(name, arg, descr, initial, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -494,28 +507,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
- /** A setting represented by a Scala version, (`default` unless set) */
+ /** A setting represented by a Scala version.
+ * The `initial` value is used if the setting is not specified.
+ * The `default` value is used if the option is given without an argument (e.g. `-Xmigration`).
+ */
class ScalaVersionSetting private[nsc](
name: String,
val arg: String,
descr: String,
- default: ScalaVersion)
+ initial: ScalaVersion,
+ default: Option[ScalaVersion])
extends Setting(name, descr) {
type T = ScalaVersion
- protected var v: T = NoScalaVersion
+ protected var v: T = initial
+ // This method is invoked if there are no colonated args. In this case the default value is
+ // used. No arguments are consumed.
override def tryToSet(args: List[String]) = {
- value = default
+ default match {
+ case Some(d) => value = d
+ case None => errorFn(s"$name requires an argument, the syntax is $helpSyntax")
+ }
Some(args)
}
override def tryToSetColon(args: List[String]) = args match {
- case Nil => value = default; Some(Nil)
- case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ case x :: xs => value = ScalaVersion(x, errorFn); Some(xs)
+ case nil => Some(nil)
}
- override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
-
def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
withHelpSyntax(s"${name}:<${arg}>")
@@ -532,6 +552,7 @@ class MutableSettings(val errorFn: String => Unit)
def prepend(s: String) = prependPath.value = join(s, prependPath.value)
def append(s: String) = appendPath.value = join(appendPath.value, s)
+ override def isDefault = super.isDefault && prependPath.isDefault && appendPath.isDefault
override def value = join(
prependPath.value,
super.value,
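
A minimal sketch of the initial-versus-default distinction introduced here, using the -Xmigration and -Xsource declarations that appear in ScalaSettings further below:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.settings.{AnyScalaVersion, NoScalaVersion}

    val s = new Settings((msg: String) => Console.err.println(msg))
    assert(s.Xmigration.value == NoScalaVersion)                     // flag absent: the `initial` value
    s.processArguments(List("-Xmigration"), processAll = true)
    assert(s.Xmigration.value == AnyScalaVersion)                    // bare flag: the `default` value
    s.processArguments(List("-Xmigration:2.10"), processAll = true)  // explicit version is parsed
    s.processArguments(List("-Xsource"), processAll = true)          // no default: reports an error
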
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 466e397dd7..a5b722612d 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -92,7 +92,8 @@ trait ScalaSettings extends AbsScalaSettings
* The previous "-source" option is intended to be used mainly
* though this helper.
*/
- lazy val isScala211: Boolean = (source.value >= ScalaVersion("2.11.0"))
+ def isScala211: Boolean = source.value >= ScalaVersion("2.11.0")
+ def isScala212: Boolean = source.value >= ScalaVersion("2.12.0")
/**
* -X "Advanced" settings
@@ -111,7 +112,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
+ val Xmigration = ScalaVersionSetting ("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", initial = NoScalaVersion, default = Some(AnyScalaVersion))
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -133,7 +134,7 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
val strictInference = BooleanSetting ("-Xstrict-inference", "Don't infer known-unsound types")
- val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", ScalaVersion("2.11")) withPostSetHook ( _ => isScala211)
+ val source = ScalaVersionSetting ("-Xsource", "version", "Treat compiler input as Scala source for the specified version, see SI-8126.", initial = ScalaVersion("2.11"))
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
@@ -167,7 +168,7 @@ trait ScalaSettings extends AbsScalaSettings
val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.")
- val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)")
+ val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally suppressed due to high volume)")
val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
val log = PhasesSetting ("-Ylog", "Log operations during")
val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
@@ -198,9 +199,12 @@ trait ScalaSettings extends AbsScalaSettings
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects")
val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
- val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
+ val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212)
val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. See SI-7899.").withDeprecationMessage(removalIn212)
+ val YclasspathImpl = ChoiceSetting ("-YclasspathImpl", "implementation", "Choose classpath scanning method.", List(ClassPathRepresentationType.Recursive, ClassPathRepresentationType.Flat), ClassPathRepresentationType.Recursive)
+ val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.")
+
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.")
@@ -210,21 +214,26 @@ trait ScalaSettings extends AbsScalaSettings
val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
object YoptChoices extends MultiChoiceEnumeration {
- val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code")
+ val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers protecting no instructions, debug information of eliminated variables.")
+ val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.")
+ val recurseUnreachableJumps = Choice("recurse-unreachable-jumps", "Recursively apply unreachable-code and simplify-jumps (if enabled) until reaching a fixpoint.")
+ val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.")
+ val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.")
+ val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.")
- val lNone = Choice("l:none", "Don't enable any optimizations")
+ val lNone = Choice("l:none", "Don't enable any optimizations.")
private val defaultChoices = List(unreachableCode)
- val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
+ val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices)
- private val methodChoices = List(lDefault)
- val lMethod = Choice("l:method", "Intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
+ private val methodChoices = List(unreachableCode, simplifyJumps, recurseUnreachableJumps, emptyLineNumbers, emptyLabels, compactLocals)
+ val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices)
private val projectChoices = List(lMethod)
- val lProject = Choice("l:project", "Cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices)
+ val lProject = Choice("l:project", "Enable cross-method optimizations within the current project: "+ projectChoices.mkString(","), expandsTo = projectChoices)
private val classpathChoices = List(lProject)
- val lClasspath = Choice("l:classpath", "Cross-method optmizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices)
+ val lClasspath = Choice("l:classpath", "Enable cross-method optimizations across the entire classpath: "+ classpathChoices.mkString(","), expandsTo = classpathChoices)
}
val Yopt = MultiChoiceSetting(
@@ -233,7 +242,13 @@ trait ScalaSettings extends AbsScalaSettings
descr = "Enable optimizations",
domain = YoptChoices)
- def YoptUnreachableCode: Boolean = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode)
+ def YoptNone = Yopt.isSetByUser && Yopt.value.isEmpty
+ def YoptUnreachableCode = !Yopt.isSetByUser || Yopt.contains(YoptChoices.unreachableCode)
+ def YoptSimplifyJumps = Yopt.contains(YoptChoices.simplifyJumps)
+ def YoptRecurseUnreachableJumps = Yopt.contains(YoptChoices.recurseUnreachableJumps)
+ def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers)
+ def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels)
+ def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals)
private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
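
A sketch of how the expanded choices behave, assuming MultiChoiceSetting expansion works as the expandsTo wiring above suggests:

    import scala.tools.nsc.Settings

    val s = new Settings((msg: String) => Console.err.println(msg))
    s.processArguments(List("-Yopt:l:method"), processAll = true)
    // l:method expands to every intra-method optimization listed in methodChoices
    assert(s.YoptUnreachableCode && s.YoptSimplifyJumps && s.YoptCompactLocals)
    assert(!s.YoptNone)
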
@@ -262,6 +277,8 @@ trait ScalaSettings extends AbsScalaSettings
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+ val YpatmatExhaustdepth = IntSetting("-Ypatmat-exhaust-depth", "off", 20, Some((10, Int.MaxValue)),
+ str => Some(if(str.equalsIgnoreCase("off")) Int.MaxValue else str.toInt))
val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.")
// TODO 2.12 Remove
@@ -316,3 +333,8 @@ trait ScalaSettings extends AbsScalaSettings
val Discard = "discard"
}
}
+
+object ClassPathRepresentationType {
+ val Flat = "flat"
+ val Recursive = "recursive"
+}
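
A small sketch of how the two new -Y flags are meant to be driven and consumed; the consuming match mirrors the SymbolLoaders change further below:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.settings.ClassPathRepresentationType

    val s = new Settings((msg: String) => Console.err.println(msg))
    s.processArguments(List("-YclasspathImpl:flat", "-YdisableFlatCpCaching"), processAll = true)

    s.YclasspathImpl.value match {
      case ClassPathRepresentationType.Flat      => // look up symbols via platform.flatClassPath
      case ClassPathRepresentationType.Recursive => // keep using the old platform.classPath
    }
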
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index 4f45043c5e..43bdad5882 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -34,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
* to segregate builds
*/
case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
- def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+ def unparse = s"${major}.${minor}.${rev}${build.unparse}"
def compare(that: ScalaVersion): Int = that match {
case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
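
The effect of the unparse fix, assuming the usual ScalaBuild constructors where Final.unparse is the empty string and prerelease builds carry their own "-" separator:

    import scala.tools.nsc.settings._

    assert(SpecificScalaVersion(2, 11, 4, Final).unparse == "2.11.4")      // previously "2.11.4."
    assert(SpecificScalaVersion(2, 11, 0, RC(4)).unparse == "2.11.0-RC4")  // unchanged shape
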
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index c400e8c29c..d174dc86c7 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -30,7 +30,7 @@ trait Warnings {
// Experimental lint warnings that are turned off, but which could be turned on programmatically.
// These warnings are said to blind those who dare enable them.
// They are not activated by -Xlint and can't be enabled on the command line.
- val warnValueOverrides = { // currently turned off as experimental. creaded using constructor (new BS), so not available on the command line.
+ val warnValueOverrides = { // Currently turned off as experimental. Created using constructor (new BS), so not available on the command line.
val flag = new BooleanSetting("value-overrides", "Generated value class method overrides an implementation")
flag.value = false
flag
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 82c2a4d6ed..8fd2ea45e4 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -6,13 +6,15 @@
package scala.tools.nsc
package symtab
+import classfile.ClassfileParser
import java.io.IOException
import scala.compat.Platform.currentTime
-import scala.tools.nsc.util.{ ClassPath }
-import classfile.ClassfileParser
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
import scala.reflect.io.{ AbstractFile, NoAbstractFile }
+import scala.tools.nsc.classpath.FlatClassPath
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.{ ClassPath, ClassRepresentation }
/** This class ...
*
@@ -86,8 +88,7 @@ abstract class SymbolLoaders {
// require yjp.jar at runtime. See SI-2089.
if (settings.termConflict.isDefault)
throw new TypeError(
- root+" contains object and package with same name: "+
- name+"\none of them needs to be removed from classpath"
+ s"$root contains object and package with same name: $name\none of them needs to be removed from classpath"
)
else if (settings.termConflict.value == "package") {
warning(
@@ -154,7 +155,7 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation[AbstractFile]) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
@@ -169,7 +170,7 @@ abstract class SymbolLoaders {
}
/** Create a new loader from a binary classfile.
- * This is intented as a hook allowing to support loading symbols from
+ * This is intended as a hook allowing to support loading symbols from
* files other than .class files.
*/
protected def newClassLoader(bin: AbstractFile): SymbolLoader =
@@ -250,7 +251,7 @@ abstract class SymbolLoaders {
* Load contents of a package
*/
class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
- protected def description = "package loader "+ classpath.name
+ protected def description = s"package loader ${classpath.name}"
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
@@ -276,6 +277,39 @@ abstract class SymbolLoaders {
}
}
+ /**
+ * Loads contents of a package
+ */
+ class PackageLoaderUsingFlatClassPath(packageName: String, classPath: FlatClassPath) extends SymbolLoader with FlagAgnosticCompleter {
+ protected def description = {
+ val shownPackageName = if (packageName == FlatClassPath.RootPackage) "<root package>" else packageName
+ s"package loader $shownPackageName"
+ }
+
+ protected def doComplete(root: Symbol) {
+ assert(root.isPackageClass, root)
+ root.setInfo(new PackageClassInfoType(newScope, root))
+
+ val classPathEntries = classPath.list(packageName)
+
+ if (!root.isRoot)
+ for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry)
+ if (!root.isEmptyPackageClass) {
+ for (pkg <- classPathEntries.packages) {
+ val fullName = pkg.name
+
+ val name =
+ if (packageName == FlatClassPath.RootPackage) fullName
+ else fullName.substring(packageName.length + 1)
+ val packageLoader = new PackageLoaderUsingFlatClassPath(fullName, classPath)
+ enterPackage(root, name, packageLoader)
+ }
+
+ openPackageModule(root)
+ }
+ }
+ }
+
class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends {
val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
@@ -293,8 +327,13 @@ abstract class SymbolLoaders {
*
*/
private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type }
+
val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
- val classPath = platform.classPath
+
+ override def classFileLookup: util.ClassFileLookup[AbstractFile] = settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Recursive => platform.classPath
+ case ClassPathRepresentationType.Flat => platform.flatClassPath
+ }
}
protected def description = "class file "+ classfile.toString
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 14be8374b9..4d08be3c24 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -16,8 +16,7 @@ import scala.annotation.switch
import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
import scala.tools.nsc.io.AbstractFile
-
-import util.ClassPath
+import scala.tools.nsc.util.ClassFileLookup
/** This abstract class implements a class file parser.
*
@@ -43,8 +42,8 @@ abstract class ClassfileParser {
*/
protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
- /** The compiler classpath. */
- def classPath: ClassPath[AbstractFile]
+ /** The class file lookup mechanism used by the compiler. */
+ def classFileLookup: ClassFileLookup[AbstractFile]
import definitions._
import scala.reflect.internal.ClassfileConstants._
@@ -352,7 +351,7 @@ abstract class ClassfileParser {
}
private def loadClassSymbol(name: Name): Symbol = {
- val file = classPath findClassFile ("" +name) getOrElse {
+ val file = classFileLookup findClassFile name.toString getOrElse {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
@@ -588,7 +587,7 @@ abstract class ClassfileParser {
info = MethodType(newParams, clazz.tpe)
}
- // Note: the info may be overrwritten later with a generic signature
+ // Note: the info may be overwritten later with a generic signature
// parsed from SignatureATTR
sym setInfo info
propagatePackageBoundary(jflags, sym)
@@ -769,7 +768,7 @@ abstract class ClassfileParser {
classTParams = tparams
val parents = new ListBuffer[Type]()
while (index < end) {
- parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
}
ClassInfoType(parents.toList, instanceScope, sym)
}
@@ -1047,8 +1046,8 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = classPath.findClassFile(entry.externalName.toString) getOrElse {
- throw new AssertionError(entry.externalName)
+ val file = classFileLookup.findClassFile(entry.externalName.toString) getOrElse {
+ throw new AssertionError(s"Class file for ${entry.externalName} not found")
}
enterClassAndModule(entry, file)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index cbe427775a..bd1fa4e707 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -130,7 +130,7 @@ abstract class ICodeReader extends ClassfileParser {
log("ICodeReader reading " + cls)
val name = cls.javaClassName
- classPath.findClassFile(name) match {
+ classFileLookup.findClassFile(name) match {
case Some(classFile) => parse(classFile, cls)
case _ => MissingRequirementError.notFound("Could not find bytecode for " + cls)
}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 2b7c6cca2c..f786ffb8f3 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,6 +8,7 @@ package transform
import symtab._
import Flags._
+import scala.tools.nsc.util.ClassPath
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -67,25 +68,30 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
val implName = tpnme.implClassName(iface.name)
val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS
- val impl0 = (
+ val impl0 = {
if (!inClass) NoSymbol
- else iface.owner.info.decl(implName) match {
- case NoSymbol => NoSymbol
- case implSym =>
- // Unlink a pre-existing symbol only if the implementation class is
- // visible on the compilation classpath. In general this is true under
- // -optimise and not otherwise, but the classpath can use arbitrary
- // logic so the classpath must be queried.
- if (classPath.context.isValidName(implName + ".class")) {
- iface.owner.info.decls unlink implSym
- NoSymbol
- }
- else {
- log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
- implSym
- }
+ else {
+ val typeInfo = iface.owner.info
+ typeInfo.decl(implName) match {
+ case NoSymbol => NoSymbol
+ case implSym =>
+ // Unlink a pre-existing symbol only if the implementation class is
+ // visible on the compilation classpath. In general this is true under
+ // -optimise and not otherwise, but the classpath can use arbitrary
+ // logic so the classpath must be queried.
+ // TODO this is not taken into account by flat classpath yet
+ classPath match {
+ case cp: ClassPath[_] if !cp.context.isValidName(implName + ".class") =>
+ log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
+ implSym
+ case _ =>
+ typeInfo.decls unlink implSym
+ NoSymbol
+ }
+ }
}
- )
+ }
+
val impl = impl0 orElse {
val impl = iface.owner.newImplClass(implName, iface.pos, implFlags)
if (iface.thisSym != iface) {
@@ -345,6 +351,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
while (owner != sym && owner != impl) owner = owner.owner;
if (owner == impl) This(impl) setPos tree.pos
else tree
+ //TODO what about this commented out code?
/* !!!
case Super(qual, mix) =>
val mix1 = mix
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 1664fe0e0d..c29826551b 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -520,7 +520,9 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
* And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler
* have little in common.
*/
- case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
+ case Apply(fn @ Select(qual, _), (arg @ Literal(Constant(symname: String))) :: Nil)
+ if treeInfo.isQualifierSafeToElide(qual) && fn.symbol == Symbol_apply && !currentClass.isTrait =>
+
def transformApply = {
// add the symbol name to a map if it's not there already
val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index f471440293..362cbde04f 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -535,7 +535,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
* whether `sym` denotes a param-accessor (ie a field) that fulfills all of:
* (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and
* (b) isn't subject to specialization. We might be processing statements for:
- * (b.1) the constructur in the generic (super-)class; or
+ * (b.1) the constructor in the generic (super-)class; or
* (b.2) the constructor in the specialized (sub-)class.
* (c) isn't part of a DelayedInit subclass.
*/
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index 12e7b23f48..d2c511a2d1 100644
--- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -9,7 +9,7 @@ import scala.reflect.internal.Symbols
import scala.collection.mutable.LinkedHashMap
/**
- * This transformer is responisble for turning lambdas into anonymous classes.
+ * This transformer is responsible for turning lambdas into anonymous classes.
* The main assumption it makes is that a lambda {args => body} has been turned into
* {args => liftedBody()} where lifted body is a top level method that implements the body of the lambda.
* Currently Uncurry is responsible for that transformation.
@@ -17,7 +17,7 @@ import scala.collection.mutable.LinkedHashMap
* From a lambda, Delambdafy will create
* 1) a static forwarder at the top level of the class that contained the lambda
* 2) a new top level class that
- a) has fields and a constructor taking the captured environment (including possbily the "this"
+ a) has fields and a constructor taking the captured environment (including possibly the "this"
* reference)
* b) an apply method that calls the static forwarder
* c) if needed a bridge method for the apply method
@@ -30,7 +30,6 @@ import scala.collection.mutable.LinkedHashMap
abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
import global._
import definitions._
- import CODE._
val analyzer: global.analyzer.type = global.analyzer
@@ -100,7 +99,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
super.transform(newExpr)
// when we encounter a template (basically the thing that holds body of a class/trait)
- // we need to updated it to include newly created accesor methods after transforming it
+ // we need to updated it to include newly created accessor methods after transforming it
case Template(_, _, _) =>
try {
// during this call accessorMethods will be populated from the Function case
@@ -250,7 +249,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
else "$" + funOwner.name + "$"
)
val oldClassPart = oldClass.name.decode
- // make sure the class name doesn't contain $anon, otherwsie isAnonymousClass/Function may be true
+ // make sure the class name doesn't contain $anon, otherwise isAnonymousClass/Function may be true
val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon"))
val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
@@ -260,7 +259,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol]
captures foreach {capture =>
- val sym = lambdaClass.newVariable(capture.name.toTermName, capture.pos, SYNTHETIC)
+ val sym = lambdaClass.newVariable(unit.freshTermName(capture.name.toString + "$"), capture.pos, SYNTHETIC)
sym setInfo capture.info
captureProxies2 += ((capture, sym))
}
@@ -435,7 +434,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre
}
/**
- * Get the symbol of the target lifted lambad body method from a function. I.e. if
+ * Get the symbol of the target lifted lambda body method from a function. I.e. if
* the function is {args => anonfun(args)} then this method returns anonfun's symbol
*/
private def targetMethod(fun: Function): Symbol = fun match {
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 3d8b2f02f3..5c72bb3258 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -98,7 +98,7 @@ abstract class Erasure extends AddInterfaces
val len = sig.length
val copy: Array[Char] = sig.toCharArray
var changed = false
- while (i < sig.length) {
+ while (i < len) {
val ch = copy(i)
if (ch == '.' && last != '>') {
copy(i) = '$'
@@ -185,6 +185,25 @@ abstract class Erasure extends AddInterfaces
private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+ /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
+ */
+ def minimizeParents(parents: List[Type]): List[Type] = {
+ var rest = parents
+ var leaves = collection.mutable.ListBuffer.empty[Type]
+ while(rest.nonEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
+ if(!nonLeaf) {
+ leaves = leaves filterNot { t => candidate.typeSymbol isSubClass t.typeSymbol }
+ leaves += candidate
+ }
+ rest = rest.tail
+ }
+ leaves.toList
+ }
+
+
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
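
A standalone toy analogue of minimizeParents, run over a hypothetical supertype table to show which parents survive:

    // Hypothetical direct-supertype table; not compiler data.
    val supers = Map(
      "ArrayList"  -> Set("List", "Collection"),
      "List"       -> Set("Collection"),
      "Collection" -> Set.empty[String]
    )
    def isSubClass(a: String, b: String): Boolean =
      a == b || supers.getOrElse(a, Set.empty).exists(isSubClass(_, b))

    def minimize(parents: List[String]): List[String] =
      parents.foldLeft(List.empty[String]) { (leaves, candidate) =>
        if (leaves.exists(isSubClass(_, candidate))) leaves            // candidate already implied by a leaf
        else leaves.filterNot(isSubClass(candidate, _)) :+ candidate   // candidate subsumes some leaves
      }

    minimize(List("Collection", "ArrayList", "List"))   // List("ArrayList")
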
@@ -192,16 +211,24 @@ abstract class Erasure extends AddInterfaces
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = {
- val ps = (
- if (isTraitSignature) {
+ def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+
+ // a signature should always start with a class
+ def ensureClassAsFirstParent(tps: List[Type]) = tps match {
+ case Nil => ObjectTpe :: Nil
+ case head :: tail if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps
+ case _ => tps
+ }
+
+ val minParents = minimizeParents(parents)
+ val validParents =
+ if (isTraitSignature)
// java is unthrilled about seeing interfaces inherit from classes
- val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
- // traits should always list Object.
- if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
- else ok
- }
- else parents
- )
+ minParents filter (p => isInterfaceOrTrait(p.typeSymbol))
+ else minParents
+
+ val ps = ensureClassAsFirstParent(validParents)
+
(ps map boxedSig).mkString
}
def boxedSig(tp: Type) = jsig(tp, primitiveOK = false)
@@ -403,14 +430,13 @@ abstract class Erasure extends AddInterfaces
* a name clash. The present method guards against these name clashes.
*
* @param member The original member
- * @param other The overidden symbol for which the bridge was generated
+ * @param other The overridden symbol for which the bridge was generated
* @param bridge The bridge
*/
def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Seq[(Position, String)] = {
def fulldef(sym: Symbol) =
if (sym == NoSymbol) sym.toString
else s"$sym: ${sym.tpe} in ${sym.owner}"
- var noclash = true
val clashErrors = mutable.Buffer[(Position, String)]()
def clashError(what: String) = {
val pos = if (member.owner == root) member.pos else root.pos
@@ -1127,7 +1153,7 @@ abstract class Erasure extends AddInterfaces
}
}
- /** The main transform function: Pretransfom the tree, and then
+ /** The main transform function: Pretransform the tree, and then
* re-type it at phase erasure.next.
*/
override def transform(tree: Tree): Tree = {
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index e47fdac938..f3cab8184c 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -441,7 +441,7 @@ abstract class ExplicitOuter extends InfoTransform
else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5)
case Select(qual, name) =>
- // make not private symbol acessed from inner classes, as well as
+ // make not private symbol accessed from inner classes, as well as
// symbols accessed from @inline methods
//
// See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass`
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index fa53ef48b5..6149e40fa7 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -77,8 +77,11 @@ abstract class Flatten extends InfoTransform {
if (sym.isTerm && !sym.isStaticModule) {
decls1 enter sym
if (sym.isModule) {
- // Nested, non-static moduls are transformed to methods.
- assert(sym.isMethod, s"Non-static $sym should have the lateMETHOD flag from RefChecks")
+ // In theory, we could assert(sym.isMethod), because nested, non-static modules are
+ // transformed to methods (lateMETHOD flag added in RefChecks). But this requires
+ // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols
+ // too eagerly (SI-8907).
+
// Note that module classes are not entered into the 'decls' of the ClassInfoType
// of the outer class, only the module symbols are. So the current loop does
// not visit module classes. Therefore we set the LIFTED flag here for module
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index d69c9d9a65..fa0c1f797b 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -402,7 +402,7 @@ abstract class LambdaLift extends InfoTransform {
}
/* SI-6231: Something like this will be necessary to eliminate the implementation
- * restiction from paramGetter above:
+ * restriction from paramGetter above:
* We need to pass getters to the interface of an implementation class.
private def fixTraitGetters(lifted: List[Tree]): List[Tree] =
for (stat <- lifted) yield stat match {
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index bbd11efa7e..e4082eb376 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package transform
-import symtab.Flags._
import scala.reflect.internal.SymbolPairs
/** A class that yields a kind of iterator (`Cursor`),
@@ -36,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs {
*/
override protected def matches(lo: Symbol, high: Symbol) = lo.isType || (
(lo.owner != high.owner) // don't try to form pairs from overloaded members
- && !high.isPrivate // private or private[this] members never are overriden
+ && !high.isPrivate // private or private[this] members never are overridden
&& !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member.
&& relatively.matches(lo, high)
) // TODO we don't call exclude(high), should we?
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 908aa69310..1691b01e3e 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -861,11 +861,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
newOverload(sym, specMember, env)
- // if this is a class, we insert the normalized member in scope,
- // if this is a method, there's no attached scope for it (EmptyScope)
- val decls = owner.info.decls
- if (decls != EmptyScope)
- decls.enter(specMember)
specMember
}
}
@@ -899,7 +894,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
- owner.info.decls.enter(specMember)
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -1296,7 +1290,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* // even in the specialized variant, the local X class
* // doesn't extend Parent$mcI$sp, since its symbol has
* // been created after specialization and was not seen
- * // by specialzation's info transformer.
+ * // by specialization's info transformer.
* ...
* }
* }
@@ -1504,20 +1498,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val residualTargs = symbol.info.typeParams zip baseTargs collect {
case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
}
- // See SI-5583. Don't know why it happens now if it didn't before.
- if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
- baseTree
- }
- else {
- ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
- )
+ ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
+ )
- val tree1 = gen.mkTypeApply(specTree, residualTargs)
- debuglog("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
- }
+ val tree1 = gen.mkTypeApply(specTree, residualTargs)
+ debuglog("rewrote " + tree + " to " + tree1)
+ localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
curTree = tree
diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala
index e2508b8d08..4673be6de7 100644
--- a/src/compiler/scala/tools/nsc/transform/Statics.scala
+++ b/src/compiler/scala/tools/nsc/transform/Statics.scala
@@ -1,9 +1,6 @@
package scala.tools.nsc
package transform
-import symtab._
-import Flags._
-
import collection.mutable.Buffer
abstract class Statics extends Transform with ast.TreeDSL {
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index ef534f70fd..16ea3ea90f 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -129,6 +129,13 @@ abstract class TailCalls extends Transform {
}
override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+ final def noTailContext() = clonedTailContext(false)
+ final def yesTailContext() = clonedTailContext(true)
+ protected def clonedTailContext(tailPos: Boolean): TailContext = this match {
+ case _ if this.tailPos == tailPos => this
+ case clone: ClonedTailContext => clone.that.clonedTailContext(tailPos)
+ case _ => new ClonedTailContext(this, tailPos)
+ }
}
object EmptyTailContext extends TailContext {
@@ -174,7 +181,7 @@ abstract class TailCalls extends Transform {
}
def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
}
- class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+ class ClonedTailContext(val that: TailContext, override val tailPos: Boolean) extends TailContext {
def method = that.method
def tparams = that.tparams
def methodPos = that.methodPos
@@ -183,9 +190,6 @@ abstract class TailCalls extends Transform {
}
private var ctx: TailContext = EmptyTailContext
- private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
- private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
-
override def transformUnit(unit: CompilationUnit): Unit = {
try {
@@ -206,16 +210,16 @@ abstract class TailCalls extends Transform {
finally this.ctx = saved
}
- def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
- def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
+ def yesTailTransform(tree: Tree): Tree = transform(tree, ctx.yesTailContext())
+ def noTailTransform(tree: Tree): Tree = transform(tree, ctx.noTailContext())
def noTailTransforms(trees: List[Tree]) = {
- val nctx = noTailContext()
- trees map (t => transform(t, nctx))
+ val nctx = ctx.noTailContext()
+ trees mapConserve (t => transform(t, nctx))
}
override def transform(tree: Tree): Tree = {
/* A possibly polymorphic apply to be considered for tail call transformation. */
- def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
+ def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree], mustTransformArgs: Boolean = true) = {
val receiver: Tree = fun match {
case Select(qual, _) => qual
case _ => EmptyTree
@@ -223,7 +227,7 @@ abstract class TailCalls extends Transform {
def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen
def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen
def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos
- def transformArgs = noTailTransforms(args)
+ def transformArgs = if (mustTransformArgs) noTailTransforms(args) else args
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
/* Records failure reason in Context for reporting.
@@ -265,6 +269,10 @@ abstract class TailCalls extends Transform {
!(sym.hasAccessorFlag || sym.isConstructor)
}
+ // intentionally shadowing imports from definitions for performance
+ val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Boolean_or, Boolean_and}
+
tree match {
case ValDef(_, _, _, _) =>
if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass))
@@ -312,8 +320,13 @@ abstract class TailCalls extends Transform {
// the assumption is once we encounter a case, the remainder of the block will consist of cases
// the prologue may be empty, usually it is the valdef that stores the scrut
val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ val transformedPrologue = noTailTransforms(prologue)
+ val transformedCases = transformTrees(cases)
+ val transformedStats =
+ if ((prologue eq transformedPrologue) && (cases eq transformedCases)) stats // allow reuse of `tree` if the subtransform was an identity
+ else transformedPrologue ++ transformedCases
treeCopy.Block(tree,
- noTailTransforms(prologue) ++ transformTrees(cases),
+ transformedStats,
transform(expr)
)
@@ -380,7 +393,7 @@ abstract class TailCalls extends Transform {
if (res ne arg)
treeCopy.Apply(tree, fun, res :: Nil)
else
- rewriteApply(fun, fun, Nil, args)
+ rewriteApply(fun, fun, Nil, args, mustTransformArgs = false)
case Apply(fun, args) =>
rewriteApply(fun, fun, Nil, args)
@@ -421,6 +434,10 @@ abstract class TailCalls extends Transform {
def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
+ // intentionally shadowing imports from definitions for performance
+ private val runDefinitions = currentRun.runDefinitions
+ import runDefinitions.{Boolean_or, Boolean_and}
+
override def traverse(tree: Tree) = tree match {
// we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)`
case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol =>
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
index f83b6f857e..3b23306386 100644
--- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -1,7 +1,6 @@
package scala.tools.nsc
package transform
-import scala.reflect.internal._
import scala.tools.nsc.ast.TreeDSL
import scala.tools.nsc.Global
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 0899507bab..0b53dc37de 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -10,15 +10,14 @@ package tools.nsc.transform.patmat
import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.util.Position
import scala.reflect.internal.util.HashSet
trait Logic extends Debugging {
import PatternMatchingStats._
private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
- private def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
- def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ private def alignedColumns(cols: Seq[Any]): Seq[String] = {
+ def toString(x: Any) = if (x == null) "" else x.toString
if (cols.isEmpty || cols.tails.isEmpty) cols map toString
else {
val colLens = cols map (c => toString(c).length)
@@ -33,7 +32,7 @@ trait Logic extends Debugging {
}
}
- def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ def alignAcrossRows(xss: List[List[Any]], sep: String, lineSep: String = "\n"): String = {
val maxLen = max(xss map (_.length))
val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
@@ -47,7 +46,7 @@ trait Logic extends Debugging {
type Tree
class Prop
- case class Eq(p: Var, q: Const) extends Prop
+ final case class Eq(p: Var, q: Const) extends Prop
type Const
@@ -72,6 +71,8 @@ trait Logic extends Debugging {
def unapply(v: Var): Some[Tree]
}
+ def reportWarning(message: String): Unit
+
// resets hash consing -- only supposed to be called by TreeMakersToProps
def prepareNewAnalysis(): Unit
@@ -86,7 +87,7 @@ trait Logic extends Debugging {
def mayBeNull: Boolean
// compute the domain and return it (call registerNull first!)
- def domainSyms: Option[mutable.LinkedHashSet[Sym]]
+ def domainSyms: Option[Set[Sym]]
// the symbol for this variable being equal to its statically known type
// (only available if registerEquality has been called for that type before)
@@ -104,43 +105,157 @@ trait Logic extends Debugging {
// would be nice to statically check whether a prop is equational or pure,
// but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
- case class And(a: Prop, b: Prop) extends Prop
- case class Or(a: Prop, b: Prop) extends Prop
- case class Not(a: Prop) extends Prop
+ final case class And(ops: Set[Prop]) extends Prop
+ object And {
+ def apply(ops: Prop*) = new And(ops.toSet)
+ }
+
+ final case class Or(ops: Set[Prop]) extends Prop
+ object Or {
+ def apply(ops: Prop*) = new Or(ops.toSet)
+ }
+
+ final case class Not(a: Prop) extends Prop
case object True extends Prop
case object False extends Prop
// symbols are propositions
- abstract case class Sym(variable: Var, const: Const) extends Prop {
+ final class Sym private[PropositionalLogic] (val variable: Var, val const: Const) extends Prop {
+
+ override def equals(other: scala.Any): Boolean = other match {
+ case that: Sym => this.variable == that.variable &&
+ this.const == that.const
+ case _ => false
+ }
+
+ override def hashCode(): Int = {
+ variable.hashCode * 41 + const.hashCode
+ }
+
private val id: Int = Sym.nextSymId
- override def toString = variable +"="+ const +"#"+ id
+ override def toString = s"$variable=$const#$id"
}
- class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+
object Sym {
private val uniques: HashSet[Sym] = new HashSet("uniques", 512)
def apply(variable: Var, const: Const): Sym = {
- val newSym = new UniqueSym(variable, const)
+ val newSym = new Sym(variable, const)
(uniques findEntryOrUpdate newSym)
}
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ def nextSymId = {_symId += 1; _symId}; private var _symId = 0
implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id)
}
- def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
- def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
+ def /\(props: Iterable[Prop]) = if (props.isEmpty) True else And(props.toSeq: _*)
+ def \/(props: Iterable[Prop]) = if (props.isEmpty) False else Or(props.toSeq: _*)
+
+ /**
+ * Simplifies a propositional formula according to the following rules:
+ * - eliminate double negation (avoids unnecessary Tseitin variables)
+ * - flatten trees of the same connective (avoids unnecessary Tseitin variables)
+ * - remove constants and connectives that are in fact constant because of their operands
+ * - eliminate duplicate operands
+ * - convert the formula into NNF: all sub-expressions have a positive polarity,
+ * which makes them amenable to the subsequent Plaisted transformation
+ * and increases the chances of recognizing that the formula is already in CNF
+ *
+ * Complexity: DFS over formula tree
+ *
+ * See http://www.decision-procedures.org/slides/propositional_logic-2x3.pdf
+ */
+ def simplify(f: Prop): Prop = {
+
+ // limit size to avoid blow up
+ def hasImpureAtom(ops: Seq[Prop]): Boolean = ops.size < 10 &&
+ ops.combinations(2).exists {
+ case Seq(a, Not(b)) if a == b => true
+ case Seq(Not(a), b) if a == b => true
+ case _ => false
+ }
+
+ // push negation inside formula
+ def negationNormalFormNot(p: Prop): Prop = p match {
+ case And(ops) => Or(ops.map(negationNormalFormNot)) // De Morgan
+ case Or(ops) => And(ops.map(negationNormalFormNot)) // De Morgan
+ case Not(p) => negationNormalForm(p)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop): Prop = p match {
+ case And(ops) => And(ops.map(negationNormalForm))
+ case Or(ops) => Or(ops.map(negationNormalForm))
+ case Not(negated) => negationNormalFormNot(negated)
+ case True
+ | False
+ | (_: Sym) => p
+ }
+
+ def simplifyProp(p: Prop): Prop = p match {
+ case And(fv) =>
+ // recurse for nested And (pulls all Ands up)
+ val ops = fv.map(simplifyProp) - True // ignore `True`
+
+ // build up Set in order to remove duplicates
+ val opsFlattened = ops.flatMap {
+ case And(fv) => fv
+ case f => Set(f)
+ }.toSeq
+
+ if (hasImpureAtom(opsFlattened) || opsFlattened.contains(False)) {
+ False
+ } else {
+ opsFlattened match {
+ case Seq() => True
+ case Seq(f) => f
+ case ops => And(ops: _*)
+ }
+ }
+ case Or(fv) =>
+ // recurse for nested Or (pulls all Ors up)
+ val ops = fv.map(simplifyProp) - False // ignore `False`
+
+ val opsFlattened = ops.flatMap {
+ case Or(fv) => fv
+ case f => Set(f)
+ }.toSeq
+
+ if (hasImpureAtom(opsFlattened) || opsFlattened.contains(True)) {
+ True
+ } else {
+ opsFlattened match {
+ case Seq() => False
+ case Seq(f) => f
+ case ops => Or(ops: _*)
+ }
+ }
+ case Not(Not(a)) =>
+ simplify(a)
+ case Not(p) =>
+ Not(simplify(p))
+ case p =>
+ p
+ }
+
+ val nnf = negationNormalForm(f)
+ simplifyProp(nnf)
+ }
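For readers not steeped in the path-dependent `Prop` types, here is a reduced standalone sketch of the flattening and constant-folding rules performed by `simplify` (the NNF conversion and the `hasImpureAtom` contradiction check are left out); the ADT and names are made up for illustration only:

```scala
sealed trait P
final case class SAnd(ops: Set[P])  extends P
final case class SOr(ops: Set[P])   extends P
final case class SNot(p: P)         extends P
final case class SVar(name: String) extends P
case object STrue  extends P
case object SFalse extends P

def simplifyToy(p: P): P = p match {
  case SAnd(ops) =>
    // simplify operands, drop True, pull nested conjunctions up
    val flat = ops.map(simplifyToy).flatMap { case SAnd(inner) => inner; case o => Set(o) } - STrue
    if (flat.contains(SFalse)) SFalse
    else if (flat.isEmpty) STrue
    else if (flat.size == 1) flat.head
    else SAnd(flat)
  case SOr(ops) =>
    // dual rules: drop False, pull nested disjunctions up
    val flat = ops.map(simplifyToy).flatMap { case SOr(inner) => inner; case o => Set(o) } - SFalse
    if (flat.contains(STrue)) STrue
    else if (flat.isEmpty) SFalse
    else if (flat.size == 1) flat.head
    else SOr(flat)
  case SNot(SNot(a)) => simplifyToy(a) // double negation
  case SNot(a)       => SNot(simplifyToy(a))
  case other         => other
}

// SAnd(Set(SVar("a"), SAnd(Set(SVar("b"), STrue)))) simplifies to SAnd(Set(SVar("a"), SVar("b")))
```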
trait PropTraverser {
def apply(x: Prop): Unit = x match {
- case And(a, b) => apply(a); apply(b)
- case Or(a, b) => apply(a); apply(b)
+ case And(ops) => ops foreach apply
+ case Or(ops) => ops foreach apply
case Not(a) => apply(a)
case Eq(a, b) => applyVar(a); applyConst(b)
+ case s: Sym => applySymbol(s)
case _ =>
}
def applyVar(x: Var): Unit = {}
def applyConst(x: Const): Unit = {}
+ def applySymbol(x: Sym): Unit = {}
}
def gatherVariables(p: Prop): Set[Var] = {
@@ -151,36 +266,27 @@ trait Logic extends Debugging {
vars.toSet
}
+ def gatherSymbols(p: Prop): Set[Sym] = {
+ val syms = new mutable.HashSet[Sym]()
+ (new PropTraverser {
+ override def applySymbol(s: Sym) = syms += s
+ })(p)
+ syms.toSet
+ }
+
trait PropMap {
def apply(x: Prop): Prop = x match { // TODO: mapConserve
- case And(a, b) => And(apply(a), apply(b))
- case Or(a, b) => Or(apply(a), apply(b))
+ case And(ops) => And(ops map apply)
+ case Or(ops) => Or(ops map apply)
case Not(a) => Not(apply(a))
case p => p
}
}
- // to govern how much time we spend analyzing matches for unreachability/exhaustivity
- object AnalysisBudget {
- private val budgetProp = scala.sys.Prop[String]("scalac.patmat.analysisBudget")
- private val budgetOff = "off"
- val max: Int = {
- val DefaultBudget = 256
- budgetProp.option match {
- case Some(`budgetOff`) =>
- Integer.MAX_VALUE
- case Some(x) =>
- x.toInt
- case None =>
- DefaultBudget
- }
- }
-
- abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
-
- object exceeded extends Exception(
- s"(The analysis required more space than allowed. Please try with scalac -D${budgetProp.key}=${AnalysisBudget.max*2} or -D${budgetProp.key}=${budgetOff}.)")
-
+ // TODO: remove since deprecated
+ val budgetProp = scala.sys.Prop[String]("scalac.patmat.analysisBudget")
+ if (budgetProp.isSet) {
+ reportWarning(s"Please remove -D${budgetProp.key}, it is ignored.")
}
// convert finite domain propositional logic with subtyping to pure boolean propositional logic
@@ -201,10 +307,10 @@ trait Logic extends Debugging {
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
// may throw an AnalysisBudget.Exception
- def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
- val vars = mutable.LinkedHashSet[Var]()
+ val vars = new mutable.HashSet[Var]
object gatherEqualities extends PropTraverser {
override def apply(p: Prop) = p match {
@@ -225,10 +331,10 @@ trait Logic extends Debugging {
props foreach gatherEqualities.apply
if (modelNull) vars foreach (_.registerNull())
- val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
+ val pure = props map (p => rewriteEqualsToProp(p))
- val eqAxioms = formulaBuilder
- @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
+ val eqAxioms = mutable.ArrayBuffer[Prop]()
+ @inline def addAxiom(p: Prop) = eqAxioms += p
debug.patmat("removeVarEq vars: "+ vars)
vars.foreach { v =>
@@ -254,49 +360,32 @@ trait Logic extends Debugging {
}
}
- debug.patmat("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
- debug.patmat("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
+ debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}")
+ debug.patmat(s"pure:${pure.mkString("\n")}")
if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
- (toFormula(eqAxioms), pure)
+ (And(eqAxioms: _*), pure)
}
+ type Solvable
- // an interface that should be suitable for feeding a SAT solver when the time comes
- type Formula
- type FormulaBuilder
-
- // creates an empty formula builder to which more formulae can be added
- def formulaBuilder: FormulaBuilder
-
- // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
- // toFormula(f) == andFormula(f1, andFormula(..., fN))
- def addFormula(buff: FormulaBuilder, f: Formula): Unit
- def toFormula(buff: FormulaBuilder): Formula
-
- // the conjunction of formulae `a` and `b`
- def andFormula(a: Formula, b: Formula): Formula
-
- // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
- def simplifyFormula(a: Formula): Formula
-
- // may throw an AnalysisBudget.Exception
- def propToSolvable(p: Prop): Formula = {
- val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
- andFormula(eqAxioms, pure)
+ def propToSolvable(p: Prop): Solvable = {
+ val (eqAxiom, pure :: Nil) = removeVarEq(List(p), modelNull = false)
+ eqFreePropToSolvable(And(eqAxiom, pure))
}
- // may throw an AnalysisBudget.Exception
- def eqFreePropToSolvable(p: Prop): Formula
- def cnfString(f: Formula): String
+ def eqFreePropToSolvable(f: Prop): Solvable
- type Model = collection.immutable.SortedMap[Sym, Boolean]
+ type Model = Map[Sym, Boolean]
val EmptyModel: Model
val NoModel: Model
- def findModelFor(f: Formula): Model
- def findAllModelsFor(f: Formula): List[Model]
+ final case class Solution(model: Model, unassigned: List[Sym])
+
+ def findModelFor(solvable: Solvable): Model
+
+ def findAllModelsFor(solvable: Solvable): List[Solution]
}
}
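A `Solution` now pairs a (possibly partial) model with the symbols the solver left unassigned, meaning either value for those symbols satisfies the remaining clauses. A purely illustrative sketch of what that represents, with strings standing in for `Sym`:

```scala
final case class ToySolution(model: Map[String, Boolean], unassigned: List[String])

// enumerate every total assignment represented by a partial model
def completions(s: ToySolution): List[Map[String, Boolean]] =
  s.unassigned.foldLeft(List(s.model)) { (acc, sym) =>
    acc.flatMap(m => List(m + (sym -> true), m + (sym -> false)))
  }

// completions(ToySolution(Map("a" -> true), List("b")))
//   == List(Map("a" -> true, "b" -> true), Map("a" -> true, "b" -> false))
```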
@@ -341,9 +430,9 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
// once we go to run-time checks (on Const's), convert them to checkable types
// TODO: there seems to be bug for singleton domains (variable does not show up in model)
- lazy val domain: Option[mutable.LinkedHashSet[Const]] = {
- val subConsts: Option[mutable.LinkedHashSet[Const]] = enumerateSubtypes(staticTp).map { tps =>
- mutable.LinkedHashSet(tps: _*).map{ tp =>
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
val domainC = TypeConst(tp)
registerEquality(domainC)
domainC
@@ -486,7 +575,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
}
// accessing after calling registerNull will result in inconsistencies
- lazy val domainSyms: Option[collection.mutable.LinkedHashSet[Sym]] = domain map { _ map symForEqualsTo }
+ lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
@@ -546,7 +635,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (!t.symbol.isStable) {
// Create a fresh type for each unstable value, since we can never correlate it to another value.
- // For example `case X => case X =>` should not complaing about the second case being unreachable,
+ // For example `case X => case X =>` should not complain about the second case being unreachable,
// if X is mutable.
freshExistentialSubtype(t.tpe)
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index b2dc6e4e52..34ebbc7463 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -6,10 +6,11 @@
package scala.tools.nsc.transform.patmat
+import scala.annotation.tailrec
+import scala.collection.immutable.{IndexedSeq, Iterable}
import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.util.Position
trait TreeAndTypeAnalysis extends Debugging {
import global._
@@ -173,7 +174,6 @@ trait TreeAndTypeAnalysis extends Debugging {
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
// we consider tuple types with at least one component of a checkable type as a checkable type
def uncheckableType(tp: Type): Boolean = {
- def tupleComponents(tp: Type) = tp.normalize.typeArgs
val checkable = (
(isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
|| enumerateSubtypes(tp).nonEmpty)
@@ -268,7 +268,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
// the type of the binder passed to the first invocation
// determines the type of the tree that'll be returned for that binder as of then
final def binderToUniqueTree(b: Symbol) =
- unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+ unique(accumSubst(normalize(gen.mkAttributedStableRef(b))), b.tpe)
// note that the sequencing of operations is important: must visit in same order as match execution
// binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
@@ -365,7 +365,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
def handleUnknown(tm: TreeMaker) = handler(tm)
}
- // used for CSE -- rewrite all unknowns to False (the most conserative option)
+ // used for CSE -- rewrite all unknowns to False (the most conservative option)
object conservative extends TreeMakerToProp {
def handleUnknown(tm: TreeMaker) = False
}
@@ -399,7 +399,7 @@ trait MatchAnalysis extends MatchApproximation {
trait MatchAnalyzer extends MatchApproximator {
def uncheckedWarning(pos: Position, msg: String) = currentRun.reporting.uncheckedWarning(pos, msg)
- def warn(pos: Position, ex: AnalysisBudget.Exception, kind: String) = uncheckedWarning(pos, s"Cannot check match for $kind.\n${ex.advice}")
+ def reportWarning(message: String) = global.reporter.warning(typer.context.tree.pos, message)
// TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
// right now hackily implement this by pruning counter-examples
@@ -429,49 +429,44 @@ trait MatchAnalysis extends MatchApproximation {
val propsCasesOk = approximate(True) map caseWithoutBodyToProp
val propsCasesFail = approximate(False) map (t => Not(caseWithoutBodyToProp(t)))
- try {
- val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
- val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
-
- val prefix = formulaBuilder
- addFormula(prefix, eqAxioms)
-
- var prefixRest = symbolicCasesFail
- var current = symbolicCasesOk
- var reachable = true
- var caseIndex = 0
-
- debug.patmat("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
- debug.patmat("equality axioms:\n"+ cnfString(eqAxiomsOk))
-
- // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
- // termination: prefixRest.length decreases by 1
- while (prefixRest.nonEmpty && reachable) {
- val prefHead = prefixRest.head
- caseIndex += 1
- prefixRest = prefixRest.tail
- if (prefixRest.isEmpty) reachable = true
- else {
- addFormula(prefix, prefHead)
- current = current.tail
- val model = findModelFor(andFormula(current.head, toFormula(prefix)))
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
+ val eqAxioms = simplify(And(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
- // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
- // if (NoModel ne model) debug.patmat("reached: "+ modelString(model))
+ val prefix = mutable.ArrayBuffer[Prop]()
+ prefix += eqAxioms
- reachable = NoModel ne model
- }
- }
+ var prefixRest = symbolicCasesFail
+ var current = symbolicCasesOk
+ var reachable = true
+ var caseIndex = 0
+
+ debug.patmat("reachability, vars:\n" + ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
+ debug.patmat(s"equality axioms:\n$eqAxiomsOk")
+
+ // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
+ // termination: prefixRest.length decreases by 1
+ while (prefixRest.nonEmpty && reachable) {
+ val prefHead = prefixRest.head
+ caseIndex += 1
+ prefixRest = prefixRest.tail
+ if (prefixRest.isEmpty) reachable = true
+ else {
+ prefix += prefHead
+ current = current.tail
+ val and = And((current.head +: prefix): _*)
+ val model = findModelFor(eqFreePropToSolvable(and))
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
+ // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
+ // if (NoModel ne model) debug.patmat("reached: "+ modelString(model))
- if (reachable) None else Some(caseIndex)
- } catch {
- case ex: AnalysisBudget.Exception =>
- warn(prevBinder.pos, ex, "unreachability")
- None // CNF budget exceeded
+ reachable = NoModel ne model
+ }
}
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
+
+ if (reachable) None else Some(caseIndex)
}
// exhaustivity
@@ -519,22 +514,25 @@ trait MatchAnalysis extends MatchApproximation {
// debug.patmat("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
// debug.patmat("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
- try {
- // find the models (under which the match fails)
- val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
+ // find the models (under which the match fails)
+ val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
- val scrutVar = Var(prevBinderTree)
- val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+ val scrutVar = Var(prevBinderTree)
+ val counterExamples = {
+ matchFailModels.flatMap {
+ model =>
+ val varAssignments = expandModel(model)
+ varAssignments.flatMap(modelToCounterExample(scrutVar) _)
+ }
+ }
- val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
+ // sorting before pruning is important here in order to
+ // keep neg/t7020.scala stable
+ // since e.g. List(_, _) would cover List(1, _)
+ val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString)
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
- pruned
- } catch {
- case ex : AnalysisBudget.Exception =>
- warn(prevBinder.pos, ex, "exhaustivity")
- Nil // CNF budget exceeded
- }
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
+ pruned
}
}
@@ -599,6 +597,8 @@ trait MatchAnalysis extends MatchApproximation {
case object WildcardExample extends CounterExample { override def toString = "_" }
case object NoExample extends CounterExample { override def toString = "??" }
+ // returns a mapping from variable to
+ // equal and notEqual symbols
def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
val (trues, falses) = xs.partition(_._2)
@@ -612,20 +612,110 @@ trait MatchAnalysis extends MatchApproximation {
v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
}.mkString("\n")
- // return constructor call when the model is a true counter example
- // (the variables don't take into account type information derived from other variables,
- // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
- // since we didn't realize the tail of the outer cons was a Nil)
- def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ /**
+ * The models we get from the DPLL solver need to be mapped back to counter examples.
+ * However, there is no precalculated mapping from model to counter example. Even worse,
+ * not every valid model corresponds to a valid counter example.
+ * The reason is that restricting the valid models further would for example require
+ * a quadratic number of additional clauses. So to keep the optimistic case fast
+ * (i.e., all cases are covered in a pattern match), the infeasible counter examples
+ * are filtered later.
+ *
+ * The DPLL procedure keeps the literals that do not contribute to the solution
+ * unassigned, e.g., for `(a \/ b)`
+ * only {a = true} or {b = true} is required and the other variable can have any value.
+ *
+ * This function does a smart expansion of the model and avoids models that
+ * have conflicting mappings.
+ *
+ * For example, in the case of the given set of symbols (taken from `t7020.scala`):
+ * "V2=2#16"
+ * "V2=6#19"
+ * "V2=5#18"
+ * "V2=4#17"
+ * "V2=7#20"
+ *
+ * One possibility would be to group the symbols by domain but
+ * this would only work for equality tests and would not be compatible
+ * with type tests.
+ * Another observation leads to a much simpler algorithm:
+ * Only one of these symbols can be set to true,
+ * since `V2` can at most be equal to one of {2,6,5,4,7}.
+ */
+ def expandModel(solution: Solution): List[Map[Var, (Seq[Const], Seq[Const])]] = {
+
+ val model = solution.model
+
// x1 = ...
// x1.hd = ...
// x1.tl = ...
// x1.hd.hd = ...
// ...
val varAssignment = modelToVarAssignment(model)
+ debug.patmat("var assignment for model " + model + ":\n" + varAssignmentString(varAssignment))
+
+ // group symbols that assign values to the same variables (i.e., symbols are mutually exclusive)
+ // (thus the groups are sets of disjoint assignments to variables)
+ val groupedByVar: Map[Var, List[Sym]] = solution.unassigned.groupBy(_.variable)
+
+ val expanded = for {
+ (variable, syms) <- groupedByVar.toList
+ } yield {
- debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+ val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil)
+ def addVarAssignment(equalTo: List[Const], notEqualTo: List[Const]) = {
+ Map(variable ->(equal ++ equalTo, notEqual ++ notEqualTo))
+ }
+
+ // this assignment is needed in case
+ // an assignment for this variable already exists
+ val allNotEqual = addVarAssignment(Nil, syms.map(_.const))
+
+ // this assignment is conflicting on purpose:
+ // a list counter example could contain wildcards: e.g. `List(_,_)`
+ val allEqual = addVarAssignment(syms.map(_.const), Nil)
+
+ if(equal.isEmpty) {
+ val oneHot = for {
+ s <- syms
+ } yield {
+ addVarAssignment(List(s.const), syms.filterNot(_ == s).map(_.const))
+ }
+ allEqual :: allNotEqual :: oneHot
+ } else {
+ allEqual :: allNotEqual :: Nil
+ }
+ }
+
+ if (expanded.isEmpty) {
+ List(varAssignment)
+ } else {
+ // we need the cartesian product here,
+ // since we want to report all missing cases
+ // (i.e., combinations)
+ val cartesianProd = expanded.reduceLeft((xs, ys) =>
+ for {map1 <- xs
+ map2 <- ys} yield {
+ map1 ++ map2
+ })
+
+ // add expanded variables
+ // note that we can just use `++`
+ // since the Maps have disjoint keySets
+ for {
+ m <- cartesianProd
+ } yield {
+ varAssignment ++ m
+ }
+ }
+ }
+
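The expansion described in the comment above can be pictured with a toy version that ignores `Var`/`Const` and the already-assigned part of the model: for each variable, at most one of its unassigned `V = c` symbols may be true, so we emit one-hot choices plus a none-of-them choice and then combine the variables with a cartesian product. Names and types below are illustrative only, not the compiler's:

```scala
def expandToy(unassigned: Map[String, List[Int]]): List[Map[String, (List[Int], List[Int])]] = {
  val perVar = unassigned.toList.map { case (variable, consts) =>
    // one-hot: the variable equals exactly one of the candidate constants
    val oneHot = consts.map { c =>
      val assignment = (List(c), consts.filterNot(_ == c))
      Map(variable -> assignment)
    }
    // or it equals none of them
    val noneAssignment = (List.empty[Int], consts)
    Map(variable -> noneAssignment) :: oneHot
  }
  // combine the per-variable choices: every missing combination must be reported
  perVar.foldLeft(List(Map.empty[String, (List[Int], List[Int])])) { (acc, choices) =>
    for (m1 <- acc; m2 <- choices) yield m1 ++ m2
  }
}

// expandToy(Map("V2" -> List(2, 6))) yields assignments where V2 = 2, V2 = 6,
// or V2 is neither; never both at once, matching the argument above.
```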
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(varAssignment: Map[Var, (Seq[Const], Seq[Const])]): Option[CounterExample] = {
// chop a path into a list of symbols
def chop(path: Tree): List[Symbol] = path match {
case Ident(_) => List(path.symbol)
@@ -674,6 +764,7 @@ trait MatchAnalysis extends MatchApproximation {
private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty
// need to prune since the model now incorporates all super types of a constant (needed for reachability)
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
+ private lazy val inSameDomain = uniqueEqualTo forall (const => variable.domainSyms.exists(_.exists(_.const.tp =:= const.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
@@ -694,13 +785,13 @@ trait MatchAnalysis extends MatchApproximation {
// NoExample if the constructor call is ill-typed
// (thus statically impossible -- can we incorporate this into the formula?)
// beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
- def toCounterExample(beBrief: Boolean = false): CounterExample =
- if (!allFieldAssignmentsLegal) NoExample
+ def toCounterExample(beBrief: Boolean = false): Option[CounterExample] =
+ if (!allFieldAssignmentsLegal) Some(NoExample)
else {
debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal)))
val res = prunedEqualTo match {
// a definite assignment to a value
- case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
+ case List(eq: ValueConst) if fields.isEmpty => Some(ValueExample(eq))
// constructor call
// or we did not gather any information about equality but we have information about the fields
@@ -713,30 +804,50 @@ trait MatchAnalysis extends MatchApproximation {
// figure out the constructor arguments from the field assignment
val argLen = (caseFieldAccs.length min ctorParams.length)
- (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ val examples = (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse Some(WildcardExample)).toList
+ sequence(examples)
}
cls match {
- case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true))
- case _ => ConstructorExample(cls, args())
+ case ConsClass =>
+ args().map {
+ case List(NoExample, l: ListExample) =>
+ // special case for neg/t7020.scala:
+ // if we find a counter example `??::*` we report `*::*` instead
+ // since the `??` originates from uniqueEqualTo containing several instances of the same type
+ List(WildcardExample, l)
+ case args => args
+ }.map(ListExample)
+ case _ if isTupleSymbol(cls) => args(brevity = true).map(TupleExample)
+ case _ if cls.isSealed && cls.isAbstractClass =>
+ // don't report sealed abstract classes, since
+ // 1) they can't be instantiated
+ // 2) we are already reporting any missing subclass (since we know the full domain)
+ // (see patmatexhaust.scala)
+ None
+ case _ => args().map(ConstructorExample(cls, _))
}
// a definite assignment to a type
- case List(eq) if fields.isEmpty => TypeExample(eq)
+ case List(eq) if fields.isEmpty => Some(TypeExample(eq))
// negative information
case Nil if nonTrivialNonEqualTo.nonEmpty =>
// negation tends to get pretty verbose
- if (beBrief) WildcardExample
+ if (beBrief) Some(WildcardExample)
else {
val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
- NegativeExample(eqTo, nonTrivialNonEqualTo)
+ Some(NegativeExample(eqTo, nonTrivialNonEqualTo))
}
+ // if uniqueEqualTo contains more than one symbol of the same domain
+ // then we can safely ignore these counter examples since we will eventually encounter
+ // both counter examples separately
+ case _ if inSameDomain => None
+
// not a valid counter-example, possibly since we have a definite type but there was a field mismatch
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
- case _ => NoExample
+ case _ => Some(NoExample)
}
debug.patmatResult("described as")(res)
}
@@ -752,12 +863,12 @@ trait MatchAnalysis extends MatchApproximation {
}
def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {
- if (!suppression.unreachable) {
+ if (!suppression.suppressUnreachable) {
unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
reportUnreachable(cases(caseIndex).last.pos)
}
}
- if (!suppression.exhaustive) {
+ if (!suppression.suppressExhaustive) {
val counterExamples = exhaustive(prevBinder, cases, pt)
if (counterExamples.nonEmpty)
reportMissingCases(prevBinder.pos, counterExamples)
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index e9c81f4728..b3aef8a20e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -46,16 +46,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
val cond = test.prop
def simplify(c: Prop): Set[Prop] = c match {
- case And(a, b) => simplify(a) ++ simplify(b)
- case Or(_, _) => Set(False) // TODO: make more precise
- case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
+ case And(ops) => ops.toSet flatMap simplify
+ case Or(ops) => Set(False) // TODO: make more precise
+ case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
case _ => Set(c)
}
val conds = simplify(cond)
if (conds(False)) false // stop when we encounter a definite "no" or a "not sure"
else {
- val nonTrivial = conds filterNot (_ == True)
+ val nonTrivial = conds - True
if (nonTrivial nonEmpty) {
tested ++= nonTrivial
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index d862805a07..6302e34ac9 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -208,7 +208,7 @@ trait MatchTranslation {
case _ => (cases, None)
}
- checkMatchVariablePatterns(nonSyntheticCases)
+ if (!settings.XnoPatmatAnalysis) checkMatchVariablePatterns(nonSyntheticCases)
// we don't transform after uncurry
// (that would require more sophistication when generating trees,
@@ -248,7 +248,10 @@ trait MatchTranslation {
if (caseDefs forall treeInfo.isCatchCase) caseDefs
else {
val swatches = { // switch-catches
- val bindersAndCases = caseDefs map { caseDef =>
+ // SI-7459 must duplicate here as we haven't committed to switch emission; just figuring out
+ // whether we can emit one ends up mutating `caseDefs` down in the use of `substituteSymbols` in
+ // `TypedSubstitution#Substitution`. That is called indirectly by `emitTypeSwitch`.
+ val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
val caseScrutSym = freshSym(pos, pureType(ThrowableTpe))
@@ -518,7 +521,7 @@ trait MatchTranslation {
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors = binder.caseFieldAccessors
- if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
+ if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
}
@@ -544,10 +547,17 @@ trait MatchTranslation {
// wrong when isSeq, and resultInMonad should always be correct since it comes
// directly from the extractor's result type
val binder = freshSym(pos, pureType(resultInMonad))
+ val potentiallyMutableBinders: Set[Symbol] =
+ if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !aligner.isSeq)
+ Set.empty
+ else
+ // Ensures we capture unstable bound variables eagerly. These can arise under name-based patmat or by indexing into mutable Seqs. See run/t9003.scala
+ subPatBinders.toSet
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
subPatBinders,
subPatRefs(binder),
+ potentiallyMutableBinders,
aligner.isBool,
checkedLength,
patBinderOrCasted,
@@ -573,7 +583,7 @@ trait MatchTranslation {
// duplicated with the extractor Unapplied
case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
treeCopy.Apply(t, x, binderRef(i.pos) :: Nil)
- // SI-7868 Account for numeric widening, e.g. <unappplySelector>.toInt
+ // SI-7868 Account for numeric widening, e.g. <unapplySelector>.toInt
case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) =>
treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil)
case _ =>
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 1974befb45..b703b5bc6d 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -21,9 +21,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
import global._
import definitions._
- final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
+ final case class Suppression(suppressExhaustive: Boolean, suppressUnreachable: Boolean)
object Suppression {
val NoSuppression = Suppression(false, false)
+ val FullSuppression = Suppression(true, true)
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -166,8 +167,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
val usedBinders = new mutable.HashSet[Symbol]()
// all potentially stored subpat binders
val potentiallyStoredBinders = stored.unzip._1.toSet
+ def ref(sym: Symbol) =
+ if (potentiallyStoredBinders(sym)) usedBinders += sym
// compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
- in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+ in.foreach {
+ case tt: TypeTree =>
+ tt.tpe foreach { // SI-7459 e.g. case Prod(t) => new t.u.Foo
+ case SingleType(_, sym) => ref(sym)
+ case _ =>
+ }
+ case t => ref(t.symbol)
+ }
if (usedBinders.isEmpty) in
else {
@@ -192,13 +202,14 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
+ val potentiallyMutableBinders: Set[Symbol],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
val prevBinder: Symbol,
val ignoredSubPatBinders: Set[Symbol]
) extends FunTreeMaker with PreserveSubPatBinders {
- def extraStoredBinders: Set[Symbol] = Set()
+ def extraStoredBinders: Set[Symbol] = potentiallyMutableBinders
debug.patmat(s"""
|ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
@@ -516,7 +527,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
// a foldLeft to accumulate the localSubstitution left-to-right
- // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
var accumSubst: Substitution = initial
treeMakers foreach { maker =>
@@ -541,7 +552,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (suppression, requireSwitch): (Suppression, Boolean) =
- if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false)
+ if (settings.XnoPatmatAnalysis) (Suppression.FullSuppression, false)
else scrut match {
case Typed(tree, tpt) =>
val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
@@ -550,14 +561,30 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case _ => false
}
val suppression = Suppression(suppressExhaustive, supressUnreachable)
+ val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe)
// matches with two or fewer cases need not apply for switchiness (if-then-else will do)
- val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
+ // `case 1 | 2` is considered as two cases.
+ def exceedsTwoCasesOrAlts = {
+ // avoids traversing the entire list if there are more than 3 elements
+ def lengthMax3[T](l: List[T]): Int = l match {
+ case a :: b :: c :: _ => 3
+ case cases =>
+ cases.map({
+ case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts)
+ case c => 1
+ }).sum
+ }
+ lengthMax3(casesNoSubstOnly) > 2
+ }
+ val requireSwitch = hasSwitchAnnotation && exceedsTwoCasesOrAlts
+ if (hasSwitchAnnotation && !requireSwitch)
+ reporter.warning(scrut.pos, "matches with two cases or fewer are emitted using if-then-else instead of switch")
(suppression, requireSwitch)
case _ =>
(Suppression.NoSuppression, false)
}
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse{
if (requireSwitch) reporter.warning(scrut.pos, "could not emit switch for @switch annotated match")
if (casesNoSubstOnly nonEmpty) {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index ef50e083a1..d35aad964d 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -12,7 +12,7 @@ import scala.language.postfixOps
import scala.tools.nsc.transform.TypingTransformers
import scala.tools.nsc.transform.Transform
import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.Types
+import scala.reflect.internal.{Mode, Types}
import scala.reflect.internal.util.Position
/** Translate pattern matching.
@@ -198,33 +198,57 @@ trait Interface extends ast.TreeDSL {
}
class Substitution(val from: List[Symbol], val to: List[Tree]) {
- import global.{Transformer, Ident, NoType}
+ import global.{Transformer, Ident, NoType, TypeTree, SingleType}
// We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
// and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
def apply(tree: Tree): Tree = {
// according to -Ystatistics 10% of translateMatch's time is spent in this method...
// since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
- if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
- else (new Transformer {
+ val toIdents = to.forall(_.isInstanceOf[Ident])
+ val containsSym = tree.exists {
+ case i@Ident(_) => from contains i.symbol
+ case tt: TypeTree => tt.tpe.exists {
+ case SingleType(_, sym) =>
+ (from contains sym) && {
+ if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree `$tt`, subst= $this")
+ true
+ }
+ case _ => false
+ }
+ case _ => false
+ }
+ val toSyms = to.map(_.symbol)
+ object substIdentsForTrees extends Transformer {
private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
if (origTp == null || origTp == NoType) to
// important: only type when actually substing and when original tree was typed
// (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
else typer.typed(to)
+ def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode)
+ lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe)
+
override def transform(tree: Tree): Tree = {
def subst(from: List[Symbol], to: List[Tree]): Tree =
if (from.isEmpty) tree
- else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
+ else if (tree.symbol == from.head) typedIfOrigTyped(typedStable(to.head).setPos(tree.pos), tree.tpe)
else subst(from.tail, to.tail)
- tree match {
+ val tree1 = tree match {
case Ident(_) => subst(from, to)
case _ => super.transform(tree)
}
+ tree1.modifyType(_.substituteTypes(from, toTypes))
}
- }).transform(tree)
+ }
+ if (containsSym) {
+ if (to.forall(_.isInstanceOf[Ident]))
+ tree.duplicate.substituteSymbols(from, to.map(_.symbol)) // SI-7459 catches `case t => new t.Foo`
+ else
+ substIdentsForTrees.transform(tree)
+ }
+ else tree
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
index 79f5e3bee8..8924394b72 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala
@@ -73,9 +73,7 @@ trait ScalacPatternExpanders {
* Unfortunately the MethodType does not carry the information of whether
* it was unapplySeq, so we have to funnel that information in separately.
*/
- def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = {
- val whole = firstParamType(method)
- val result = method.finalResultType
+ def unapplyMethodTypes(whole: Type, result: Type, isSeq: Boolean): Extractor = {
val expanded = (
if (result =:= BooleanTpe) Nil
else typeOfMemberNamedGet(result) match {
@@ -124,11 +122,11 @@ trait ScalacPatternExpanders {
case _ => sel
}
val patterns = newPatterns(args)
- val isSeq = sel.symbol.name == nme.unapplySeq
val isUnapply = sel.symbol.name == nme.unapply
+
val extractor = sel.symbol.name match {
- case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false)
- case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true)
+ case nme.unapply => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = false)
+ case nme.unapplySeq => unapplyMethodTypes(firstParamType(fn.tpe), sel.tpe, isSeq = true)
case _ => applyMethodTypes(fn.tpe)
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 31b1ffa912..27217f0dc2 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -6,238 +6,419 @@
package scala.tools.nsc.transform.patmat
-import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
import scala.reflect.internal.util.Statistics
import scala.language.postfixOps
+import scala.collection.mutable
import scala.reflect.internal.util.Collections._
-// naive CNF translation and simple DPLL solver
+// a literal is a (possibly negated) variable
+class Lit(val v: Int) extends AnyVal {
+ def unary_- : Lit = Lit(-v)
+
+ def variable: Int = Math.abs(v)
+
+ def positive = v >= 0
+
+ override def toString(): String = s"Lit#$v"
+}
+
+object Lit {
+ def apply(v: Int): Lit = new Lit(v)
+
+ implicit val LitOrdering: Ordering[Lit] = Ordering.by(_.v)
+}
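The `Lit` value class uses a DIMACS-like convention: the magnitude of the wrapped `Int` identifies the variable and the sign carries the polarity. Assuming the class above is in scope, a literal and its negation relate as follows (illustrative snippet):

```scala
val x: Lit = Lit(3)   // variable 3, positive polarity
val notX: Lit = -x    // wraps -3: same variable, negated
// notX.variable == 3, notX.positive == false, and -notX == Lit(3)
```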
+
+/** Solves the pattern matcher exhaustivity problem via DPLL.
+ */
trait Solving extends Logic {
+
import PatternMatchingStats._
+
trait CNF extends PropositionalLogic {
- import scala.collection.mutable.ArrayBuffer
- type FormulaBuilder = ArrayBuffer[Clause]
- def formulaBuilder = ArrayBuffer[Clause]()
- def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
- def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
- def toFormula(buff: FormulaBuilder): Formula = buff
- // CNF: a formula is a conjunction of clauses
- type Formula = FormulaBuilder
- def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
+ type Clause = Set[Lit]
- type Clause = collection.Set[Lit]
// a clause is a disjunction of distinct literals
- def clause(l: Lit*): Clause = (
- if (l.lengthCompare(1) <= 0) {
- l.toSet // SI-8531 Avoid LinkedHashSet's bulk for 0 and 1 element clauses
- } else {
- // neg/t7020.scala changes output 1% of the time, the non-determinism is quelled with this linked set
- mutable.LinkedHashSet(l: _*)
+ def clause(l: Lit*): Clause = l.toSet
+
+ /** Conjunctive normal form (of a Boolean formula).
+ * A formula in this form is amenable to a SAT solver
+ * (i.e., a solver that decides satisfiability of a formula).
+ */
+ type Cnf = Array[Clause]
+
+ class SymbolMapping(symbols: Set[Sym]) {
+ val variableForSymbol: Map[Sym, Int] = {
+ symbols.zipWithIndex.map {
+ case (sym, i) => sym -> (i + 1)
+ }.toMap
+ }
+
+ val symForVar: Map[Int, Sym] = variableForSymbol.map(_.swap)
+
+ val relevantVars: Set[Int] = symForVar.keySet.map(math.abs)
+
+ def lit(sym: Sym): Lit = Lit(variableForSymbol(sym))
+
+ def size = symbols.size
+ }
+
+ case class Solvable(cnf: Cnf, symbolMapping: SymbolMapping)
+
+ trait CnfBuilder {
+ private[this] val buff = ArrayBuffer[Clause]()
+
+ var literalCount: Int
+
+ /**
+ * @return new Tseitin variable
+ */
+ def newLiteral(): Lit = {
+ literalCount += 1
+ Lit(literalCount)
}
- )
-
- type Lit
- def Lit(sym: Sym, pos: Boolean = true): Lit
-
- def andFormula(a: Formula, b: Formula): Formula = a ++ b
- def simplifyFormula(a: Formula): Formula = a.distinct
-
- private def merge(a: Clause, b: Clause) = a ++ b
-
- // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
- // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
- def eqFreePropToSolvable(p: Prop): Formula = {
- def negationNormalFormNot(p: Prop, budget: Int): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Not(p) => negationNormalForm(p, budget - 1)
- case True => False
- case False => True
- case s: Sym => Not(s)
+
+ lazy val constTrue: Lit = {
+ val constTrue = newLiteral()
+ addClauseProcessed(clause(constTrue))
+ constTrue
+ }
+
+ def constFalse: Lit = -constTrue
+
+ def isConst(l: Lit): Boolean = l == constTrue || l == constFalse
+
+ def addClauseProcessed(clause: Clause) {
+ if (clause.nonEmpty) {
+ buff += clause
}
+ }
+
+ def buildCnf: Array[Clause] = buff.toArray
- def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Not(negated) => negationNormalFormNot(negated, budget - 1)
- case True
- | False
- | (_ : Sym) => p
+ }
+
+ /** Plaisted transformation: used for conversion of a
+ * propositional formula into conjunctive normal form (CNF)
+ * (the input format for a SAT solver).
+ * A simple conversion into CNF via Shannon expansion would
+ * also be possible but its worst-case complexity is exponential
+ * (in the number of variables) and thus even simple problems
+ * could become intractable.
+ * The Plaisted transformation results in an _equisatisfiable_
+ * CNF-formula (it generates auxiliary variables)
+ * but runs with linear complexity.
+ * The commonly known Tseitin transformation uses bi-implication,
+ * whereas the Plaisted transformation uses implication only, thus
+ * the resulting CNF formula has (on average) only half of the clauses
+ * of a Tseitin transformation.
+ * The Plaisted transformation uses the polarities of sub-expressions
+ * to figure out which part of the bi-implication can be omitted.
+ * However, if all sub-expressions have positive polarity
+ * (e.g., after transformation into negation normal form)
+ * then the conversion is rather simple and the pseudo-normalization
+ * via NNF increases the chances that only one side of the bi-implication
+ * is needed.
+ */
+ class TransformToCnf(symbolMapping: SymbolMapping) extends CnfBuilder {
+
+ // new literals start after formula symbols
+ var literalCount: Int = symbolMapping.size
+
+ def convertSym(sym: Sym): Lit = symbolMapping.lit(sym)
+
+ def apply(p: Prop): Solvable = {
+
+ def convert(p: Prop): Lit = {
+ p match {
+ case And(fv) =>
+ and(fv.map(convert))
+ case Or(fv) =>
+ or(fv.map(convert))
+ case Not(a) =>
+ not(convert(a))
+ case sym: Sym =>
+ convertSym(sym)
+ case True =>
+ constTrue
+ case False =>
+ constFalse
+ case _: Eq =>
+ throw new MatchError(p)
+ }
}
- val TrueF = formula()
- val FalseF = formula(clause())
- def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, pos = false)))
-
- def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
- def distribute(a: Formula, b: Formula, budget: Int): Formula =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else
- (a, b) match {
- // true \/ _ = true
- // _ \/ true = true
- case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
- // lit \/ lit
- case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
- // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
- // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
- case (cs, ds) =>
- val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
- big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
- }
+ def and(bv: Set[Lit]): Lit = {
+ if (bv.isEmpty) {
+ // this case can actually happen because `removeVarEq` could add no constraints
+ constTrue
+ } else if (bv.size == 1) {
+ bv.head
+ } else if (bv.contains(constFalse)) {
+ constFalse
+ } else {
+ // op1 /\ op2 /\ ... /\ opx <==>
+ // (o -> op1) /\ (o -> op2) ... (o -> opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o)
+ // (!o \/ op1) /\ (!o \/ op2) ... (!o \/ opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o)
+ val new_bv = bv - constTrue // ignore `True`
+ val o = newLiteral() // auxiliary Tseitin variable
+ new_bv.map(op => addClauseProcessed(clause(op, -o)))
+ o
+ }
+ }
- if (budget <= 0) throw AnalysisBudget.exceeded
-
- p match {
- case True => TrueF
- case False => FalseF
- case s: Sym => lit(s)
- case Not(s: Sym) => negLit(s)
- case And(a, b) =>
- val cnfA = conjunctiveNormalForm(a, budget - 1)
- val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
- cnfA ++ cnfB
- case Or(a, b) =>
- val cnfA = conjunctiveNormalForm(a)
- val cnfB = conjunctiveNormalForm(b)
- distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
+ def or(bv: Set[Lit]): Lit = {
+ if (bv.isEmpty) {
+ constFalse
+ } else if (bv.size == 1) {
+ bv.head
+ } else if (bv.contains(constTrue)) {
+ constTrue
+ } else {
+ // op1 \/ op2 \/ ... \/ opx <==>
+ // (op1 -> o) /\ (op2 -> o) ... (opx -> o) /\ (op1 \/ op2 \/... \/ opx \/ !o)
+ // (!op1 \/ o) /\ (!op2 \/ o) ... (!opx \/ o) /\ (op1 \/ op2 \/... \/ opx \/ !o)
+ val new_bv = bv - constFalse // ignore `False`
+ val o = newLiteral() // auxiliary Tseitin variable
+ addClauseProcessed(new_bv + (-o))
+ o
+ }
}
+
+ // no need for auxiliary variable
+ def not(a: Lit): Lit = -a
+
+ // add intermediate variable since we want the formula to be SAT!
+ addClauseProcessed(clause(convert(p)))
+
+ Solvable(buildCnf, symbolMapping)
}
+ }
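The clause shapes emitted by `and`/`or` above are easier to see on plain integer literals. The sketch below keeps only the one-sided (Plaisted) implications and drops the constant-folding and single-operand shortcuts of the real code; all names are made up for illustration:

```scala
import scala.collection.mutable.ArrayBuffer

class MiniCnfBuilder(var literalCount: Int) {
  val clauses = ArrayBuffer[Set[Int]]()
  def newLiteral(): Int = { literalCount += 1; literalCount }

  // o stands for op1 /\ ... /\ opN: emit (!o \/ op_i) for every operand
  def and(ops: Set[Int]): Int = {
    val o = newLiteral()
    ops.foreach(op => clauses += Set(-o, op))
    o
  }

  // o stands for op1 \/ ... \/ opN: emit a single clause (!o \/ op1 \/ ... \/ opN)
  def or(ops: Set[Int]): Int = {
    val o = newLiteral()
    clauses += (ops + (-o))
    o
  }
}

// For (x1 /\ x2) \/ x3 over three formula variables:
//   val b   = new MiniCnfBuilder(3)
//   val a   = b.and(Set(1, 2))  // aux 4, clauses {-4, 1} and {-4, 2}
//   val top = b.or(Set(a, 3))   // aux 5, clause {4, 3, -5}
//   b.clauses += Set(top)       // finally assert the top-level variable
```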
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
- val res = conjunctiveNormalForm(negationNormalForm(p))
+ class AlreadyInCNF(symbolMapping: SymbolMapping) {
- if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
+ object ToLiteral {
+ def unapply(f: Prop): Option[Lit] = f match {
+ case Not(ToLiteral(lit)) => Some(-lit)
+ case sym: Sym => Some(symbolMapping.lit(sym))
+ case _ => None
+ }
+ }
- //
- if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
+ object ToDisjunction {
+ def unapply(f: Prop): Option[Array[Clause]] = f match {
+ case Or(fv) =>
+ val cl = fv.foldLeft(Option(clause())) {
+ case (Some(clause), ToLiteral(lit)) =>
+ Some(clause + lit)
+ case (_, _) =>
+ None
+ }
+ cl.map(Array(_))
+ case True => Some(Array()) // empty, no clauses needed
+ case False => Some(Array(clause())) // empty clause can't be satisfied
+ case ToLiteral(lit) => Some(Array(clause(lit)))
+ case _ => None
+ }
+ }
-// debug.patmat("cnf for\n"+ p +"\nis:\n"+cnfString(res))
- res
+ /**
+ * Checks if propositional formula is already in CNF
+ */
+ object ToCnf {
+ def unapply(f: Prop): Option[Solvable] = f match {
+ case ToDisjunction(clauses) => Some(Solvable(clauses, symbolMapping) )
+ case And(fv) =>
+ val clauses = fv.foldLeft(Option(mutable.ArrayBuffer[Clause]())) {
+ case (Some(cnf), ToDisjunction(clauses)) =>
+ Some(cnf ++= clauses)
+ case (_, _) =>
+ None
+ }
+ clauses.map(c => Solvable(c.toArray, symbolMapping))
+ case _ => None
+ }
+ }
+ }
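The `AlreadyInCNF` extractors accept exactly the formulas that are a conjunction of clauses (disjunctions of literals). A standalone illustration of that shape check, using `Int` literals and a throwaway ADT rather than the compiler's `Prop`:

```scala
sealed trait F
final case class Conj(ops: Set[F]) extends F
final case class Disj(ops: Set[F]) extends F
final case class Atom(lit: Int)    extends F // negative Int = negated literal

def asClause(f: F): Option[Set[Int]] = f match {
  case Atom(l) => Some(Set(l))
  case Disj(ops) =>
    ops.foldLeft(Option(Set.empty[Int])) {
      case (Some(cl), Atom(l)) => Some(cl + l)
      case _                   => None // nested connective: not a plain clause
    }
  case _ => None
}

def asCnf(f: F): Option[List[Set[Int]]] = f match {
  case Conj(ops) =>
    val clauses = ops.toList.map(asClause)
    if (clauses.forall(_.isDefined)) Some(clauses.flatten) else None
  case other => asClause(other).map(List(_))
}

// asCnf(Conj(Set(Disj(Set(Atom(1), Atom(-2))), Atom(3)))) recognizes the input as CNF
// with clauses {1, -2} and {3}; a Conj nested inside a Disj yields None instead.
```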
+
+ def eqFreePropToSolvable(p: Prop): Solvable = {
+
+ // collect all variables since after simplification / CNF conversion
+ // they could have been removed from the formula
+ val symbolMapping = new SymbolMapping(gatherSymbols(p))
+
+ val simplified = simplify(p)
+ val cnfExtractor = new AlreadyInCNF(symbolMapping)
+ simplified match {
+ case cnfExtractor.ToCnf(solvable) =>
+ // this is needed because t6942 would generate too many clauses with Tseitin
+ // already in CNF, just add clauses
+ solvable
+ case p =>
+ new TransformToCnf(symbolMapping).apply(p)
+ }
}
}
// simple solver using DPLL
trait Solver extends CNF {
- // a literal is a (possibly negated) variable
- def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
- class Lit(val sym: Sym, val pos: Boolean) {
- override def toString = if (!pos) "-"+ sym.toString else sym.toString
- override def equals(o: Any) = o match {
- case o: Lit => (o.sym eq sym) && (o.pos == pos)
- case _ => false
- }
- override def hashCode = sym.hashCode + pos.hashCode
+ import scala.collection.mutable.ArrayBuffer
- def unary_- = Lit(sym, !pos)
+ def cnfString(f: Array[Clause]): String = {
+ val lits: Array[List[String]] = f map (_.map(_.toString).toList)
+ val xss: List[List[String]] = lits toList
+ val aligned: String = alignAcrossRows(xss, "\\/", " /\\\n")
+ aligned
}
- def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
-
// adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
- val EmptyModel = collection.immutable.SortedMap.empty[Sym, Boolean]
+
+ // empty set of clauses is trivially satisfied
+ val EmptyModel = Map.empty[Sym, Boolean]
+
+ // no model: this originates from the encounter of an empty clause, i.e.,
+ // it happens if all variables of a clause have been assigned in a way that makes the corresponding literals false;
+ // there is then no possibility to satisfy that clause, so the whole formula is UNSAT
val NoModel: Model = null
+ // this model contains the auxiliary variables as well
+ type TseitinModel = Set[Lit]
+ val EmptyTseitinModel = Set.empty[Lit]
+ val NoTseitinModel: TseitinModel = null
+
// returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
- def findAllModelsFor(f: Formula): List[Model] = {
- val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
+ def findAllModelsFor(solvable: Solvable): List[Solution] = {
+ debug.patmat("find all models for\n"+ cnfString(solvable.cnf))
+
+ // we must take all variables from the non-simplified formula,
+ // otherwise, if we get `True` as the simplified formula, we would not expand the variables
+ // that no longer occur in it...
+ val relevantVars: Set[Int] = solvable.symbolMapping.relevantVars
+
// debug.patmat("vars "+ vars)
// the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
- def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+ // (i.e. the blocking clause - used for ALL-SAT)
+ def negateModel(m: TseitinModel) = {
+ // filter out auxiliary Tseitin variables
+ val relevantLits = m.filter(l => relevantVars.contains(l.variable))
+ relevantLits.map(lit => -lit)
+ }
- def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
- if (recursionDepthAllowed == 0) models
- else {
- debug.patmat("find all models for\n"+ cnfString(f))
- val model = findModelFor(f)
+ final case class TseitinSolution(model: TseitinModel, unassigned: List[Int]) {
+ def projectToSolution(symForVar: Map[Int, Sym]) = Solution(projectToModel(model, symForVar), unassigned map symForVar)
+ }
+ def findAllModels(clauses: Array[Clause],
+ models: List[TseitinSolution],
+ recursionDepthAllowed: Int = global.settings.YpatmatExhaustdepth.value): List[TseitinSolution]=
+ if (recursionDepthAllowed == 0) {
+ val maxDPLLdepth = global.settings.YpatmatExhaustdepth.value
+ reportWarning("(Exhaustivity analysis reached max recursion depth, not all missing cases are reported. " +
+ s"Please try with scalac -Ypatmat-exhaust-depth ${maxDPLLdepth * 2} or -Ypatmat-exhaust-depth off.)")
+ models
+ } else {
+ debug.patmat("find all models for\n" + cnfString(clauses))
+ val model = findTseitinModelFor(clauses)
// if we found a solution, conjunct the formula with the model's negation and recurse
- if (model ne NoModel) {
- val unassigned = (vars -- model.keySet).toList
+ if (model ne NoTseitinModel) {
+ // note that we should not expand the auxiliary variables (from Tseitin transformation)
+ // since they are existentially quantified in the final solution
+ val unassigned: List[Int] = (relevantVars -- model.map(lit => lit.variable)).toList
debug.patmat("unassigned "+ unassigned +" in "+ model)
- def force(lit: Lit) = {
- val model = withLit(findModelFor(dropUnit(f, lit)), lit)
- if (model ne NoModel) List(model)
- else Nil
- }
- val forced = unassigned flatMap { s =>
- force(Lit(s, pos = true)) ++ force(Lit(s, pos = false))
- }
- debug.patmat("forced "+ forced)
+
+ val solution = TseitinSolution(model, unassigned)
val negated = negateModel(model)
- findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
+ findAllModels(clauses :+ negated, solution :: models, recursionDepthAllowed - 1)
}
else models
}
- findAllModels(f, Nil)
+ val tseitinSolutions = findAllModels(solvable.cnf, Nil)
+ tseitinSolutions.map(_.projectToSolution(solvable.symbolMapping.symForVar))
}
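The all-models loop above is the classic blocking-clause (ALL-SAT) technique: once a model is found, the negation of its assignment to the relevant (non-auxiliary) variables is appended as a new clause, so the next DPLL run must produce a different model. A tiny illustration with `Int` literals:

```scala
// keep only literals over relevant variables, then flip them into a clause
def blockingClause(model: Set[Int], relevantVars: Set[Int]): Set[Int] =
  model.filter(lit => relevantVars(math.abs(lit))).map(lit => -lit)

// blockingClause(Set(1, -2, 7), relevantVars = Set(1, 2)) == Set(-1, 2):
// appending it forbids every model that still assigns {1 = true, 2 = false}.
```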
- private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit): Formula = {
+ private def withLit(res: TseitinModel, l: Lit): TseitinModel = {
+ if (res eq NoTseitinModel) NoTseitinModel else res + l
+ }
+
+ /** Drop trivially true clauses, simplify others by dropping negation of `unitLit`.
+ *
+ * Disjunctions that contain the literal we're making true in the returned model are trivially true.
+ * Clauses can be simplified by dropping the negation of the literal we're making true
+ * (since False \/ X == X)
+ */
+ private def dropUnit(clauses: Array[Clause], unitLit: Lit): Array[Clause] = {
val negated = -unitLit
- // drop entire clauses that are trivially true
- // (i.e., disjunctions that contain the literal we're making true in the returned model),
- // and simplify clauses by dropping the negation of the literal we're making true
- // (since False \/ X == X)
- val dropped = formulaBuilderSized(f.size)
- for {
- clause <- f
- if !(clause contains unitLit)
- } dropped += (clause - negated)
- dropped
+ val simplified = new ArrayBuffer[Clause](clauses.size)
+ clauses foreach {
+ case trivial if trivial contains unitLit => // drop
+ case clause => simplified += clause - negated
+ }
+ simplified.toArray
+ }
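
A worked example of the simplification dropUnit performs, using a hypothetical standalone version over plain Int-literal clauses:

    // Making literal 1 true: clauses containing 1 are trivially satisfied and
    // dropped; -1 can no longer help, so it is removed from the remaining clauses.
    def dropUnitInts(clauses: List[Set[Int]], unitLit: Int): List[Set[Int]] =
      clauses.collect { case c if !c.contains(unitLit) => c - (-unitLit) }

    // dropUnitInts(List(Set(1, 2), Set(-1, 3), Set(4)), unitLit = 1)
    //   == List(Set(3), Set(4))
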
+
+ def findModelFor(solvable: Solvable): Model = {
+ projectToModel(findTseitinModelFor(solvable.cnf), solvable.symbolMapping.symForVar)
}
- def findModelFor(f: Formula): Model = {
- @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
+ def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = {
+ @inline def orElse(a: TseitinModel, b: => TseitinModel) = if (a ne NoTseitinModel) a else b
- debug.patmat("DPLL\n"+ cnfString(f))
+ debug.patmat(s"DPLL\n${cnfString(clauses)}")
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
- val satisfiableWithModel: Model =
- if (f isEmpty) EmptyModel
- else if(f exists (_.isEmpty)) NoModel
- else f.find(_.size == 1) match {
+ val satisfiableWithModel: TseitinModel =
+ if (clauses isEmpty) EmptyTseitinModel
+ else if (clauses exists (_.isEmpty)) NoTseitinModel
+ else clauses.find(_.size == 1) match {
case Some(unitClause) =>
val unitLit = unitClause.head
- // debug.patmat("unit: "+ unitLit)
- withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
+ withLit(findTseitinModelFor(dropUnit(clauses, unitLit)), unitLit)
case _ =>
// partition symbols according to whether they appear in positive and/or negative literals
- // SI-7020 Linked- for deterministic counter examples.
- val pos = new mutable.LinkedHashSet[Sym]()
- val neg = new mutable.LinkedHashSet[Sym]()
- mforeach(f)(lit => if (lit.pos) pos += lit.sym else neg += lit.sym)
+ val pos = new mutable.HashSet[Int]()
+ val neg = new mutable.HashSet[Int]()
+ mforeach(clauses)(lit => if (lit.positive) pos += lit.variable else neg += lit.variable)
// appearing in both positive and negative
- val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
+ val impures = pos intersect neg
// appearing only in either positive/negative positions
- val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures
+ val pures = (pos ++ neg) -- impures
if (pures nonEmpty) {
- val pureSym = pures.head
+ val pureVar = pures.head
// turn it back into a literal
// (since equality on literals is in terms of equality
// of the underlying symbol and its positivity, simply construct a new Lit)
- val pureLit = Lit(pureSym, pos(pureSym))
+ val pureLit = Lit(if (neg(pureVar)) -pureVar else pureVar)
// debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
- val simplified = f.filterNot(_.contains(pureLit))
- withLit(findModelFor(simplified), pureLit)
+ val simplified = clauses.filterNot(_.contains(pureLit))
+ withLit(findTseitinModelFor(simplified), pureLit)
} else {
- val split = f.head.head
+ val split = clauses.head.head
// debug.patmat("split: "+ split)
- orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
+ orElse(findTseitinModelFor(clauses :+ clause(split)), findTseitinModelFor(clauses :+ clause(-split)))
}
}
if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
satisfiableWithModel
}
+
+ private def projectToModel(model: TseitinModel, symForVar: Map[Int, Sym]): Model =
+ if (model == NoTseitinModel) NoModel
+ else if (model == EmptyTseitinModel) EmptyModel
+ else {
+ val mappedModels = model.toList collect {
+ case lit if symForVar isDefinedAt lit.variable => (symForVar(lit.variable), lit.positive)
+ }
+ if (mappedModels.isEmpty) {
+ // mappedModels can be empty when the formula was a constant like `True` (no relevant variables)
+ EmptyModel
+ } else {
+ mappedModels.toMap
+ }
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 3a77cab919..fc632e0d0d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -11,12 +11,28 @@ import scala.language.postfixOps
/** On pattern matcher checkability:
*
+ * The spec says that case _: List[Int] should always issue
+ * an unchecked warning:
+ *
+ * > Types which are not of one of the forms described above are
+ * > also accepted as type patterns. However, such type patterns
+ * > will be translated to their erasure (§3.7). The Scala compiler
+ * > will issue an “unchecked” warning for these patterns to flag
+ * > the possible loss of type-safety.
+ *
+ * But the implementation goes a little further to omit warnings
+ * based on the static type of the scrutinee. As a trivial example:
+ *
+ * def foo(s: Seq[Int]) = s match { case _: List[Int] => }
+ *
+ * need not issue this warning.
+ *
* Consider a pattern match of this form: (x: X) match { case _: P => }
*
* There are four possibilities to consider:
* [P1] X will always conform to P
* [P2] x will never conform to P
- * [P3] X <: P if some runtime test is true
+ * [P3] X will conform to P if some runtime test is true
* [P4] X cannot be checked against P
*
* The first two cases correspond to those when there is enough
@@ -28,6 +44,11 @@ import scala.language.postfixOps
* which is essentially the intersection of X and |P|, where |P| is
* the erasure of P. If XR <: P, then no warning is emitted.
*
+ * We evaluate "X will conform to P" by checking `X <: P_wild`, where
+ * P_wild is the result of substituting wildcard types in place of
+ * pattern type variables. This is intentionally stricter than
+ * (X matchesPattern P), see SI-8597 for motivating test cases.
+ *
* Examples of how this info is put to use:
* sealed trait A[T] ; class B[T] extends A[T]
* def f(x: B[Int]) = x match { case _: A[Int] if true => }
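
To make the cases concrete, a hypothetical example (the class names below are invented for illustration and do not appear in the compiler):

    sealed trait Animal
    class Dog extends Animal

    def p1(x: Dog)      = x match { case _: Animal    => } // [P1] a Dog always conforms to Animal: no warning
    def p3(x: Animal)   = x match { case _: Dog       => } // [P3] decided by a runtime test that erasure can perform: no warning
    def p4(x: Any)      = x match { case _: List[Int] => } // [P4] the Int argument cannot be checked at runtime: unchecked warning
    def ok(s: Seq[Int]) = s match { case _: List[Int] => } // as argued above: a Seq[Int] that is a List is a List[Int], so no warning
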
@@ -100,7 +121,7 @@ trait Checkable {
private def typeArgsInTopLevelType(tp: Type): List[Type] = {
val tps = tp match {
case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType
- case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg)
+ case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg)
case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args
case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying)
case _ => Nil
@@ -108,14 +129,31 @@ trait Checkable {
tps filterNot isUnwarnableTypeArg
}
+ private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = {
+ def typeVarToWildcard(tp: Type) = {
+ // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala
+ if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp
+ }
+ val pattTpWild = pattTp.map(typeVarToWildcard)
+ scrut <:< pattTpWild
+ }
+
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
+ def PErased = {
+ P match {
+ case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P)
+ case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*)
+ }
+ }
+ def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym)
+
+
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
- def P1 = X matchesPattern P
+ def P1 = scrutConformsToPatternType(X, P)
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
- def P3 = isNonRefinementClassType(P) && (XR matchesPattern P)
+ def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P)
def P4 = !(P1 || P2 || P3)
def summaryString = f"""
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 56ed0ee16c..2f4771e9d4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -75,7 +75,7 @@ abstract class ConstantFolder {
case nme.AND => Constant(x.booleanValue & y.booleanValue)
case nme.EQ => Constant(x.booleanValue == y.booleanValue)
case nme.NE => Constant(x.booleanValue != y.booleanValue)
- case _ => null
+ case _ => null
}
private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.intValue | y.intValue)
@@ -95,14 +95,20 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.intValue * y.intValue)
case nme.DIV => Constant(x.intValue / y.intValue)
case nme.MOD => Constant(x.intValue % y.intValue)
- case _ => null
+ case _ => null
}
private def foldLongOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.OR => Constant(x.longValue | y.longValue)
case nme.XOR => Constant(x.longValue ^ y.longValue)
case nme.AND => Constant(x.longValue & y.longValue)
- case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSL if x.tag <= IntTag
+ => Constant(x.intValue << y.longValue)
+ case nme.LSL => Constant(x.longValue << y.longValue)
+ case nme.LSR if x.tag <= IntTag
+ => Constant(x.intValue >>> y.longValue)
case nme.LSR => Constant(x.longValue >>> y.longValue)
+ case nme.ASR if x.tag <= IntTag
+ => Constant(x.intValue >> y.longValue)
case nme.ASR => Constant(x.longValue >> y.longValue)
case nme.EQ => Constant(x.longValue == y.longValue)
case nme.NE => Constant(x.longValue != y.longValue)
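
For context on the shift cases above: a shift takes its result type from the (promoted) left operand, so an Int shifted by a Long is still an Int, and folding it through longValue would produce a constant of the wrong type. A sketch of the behaviour the fix targets (values invented for the example):

    object ShiftFolding {
      final val i: Int  = 1  << 2L   // folds to the Int constant 4 (left operand is an Int)
      final val l: Long = 1L << 2    // folds to the Long constant 4L
      def main(args: Array[String]): Unit = println((i, l))   // prints (4,4)
    }
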
@@ -115,7 +121,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.longValue * y.longValue)
case nme.DIV => Constant(x.longValue / y.longValue)
case nme.MOD => Constant(x.longValue % y.longValue)
- case _ => null
+ case _ => null
}
private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.floatValue == y.floatValue)
@@ -129,7 +135,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.floatValue * y.floatValue)
case nme.DIV => Constant(x.floatValue / y.floatValue)
case nme.MOD => Constant(x.floatValue % y.floatValue)
- case _ => null
+ case _ => null
}
private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant = op match {
case nme.EQ => Constant(x.doubleValue == y.doubleValue)
@@ -143,7 +149,7 @@ abstract class ConstantFolder {
case nme.MUL => Constant(x.doubleValue * y.doubleValue)
case nme.DIV => Constant(x.doubleValue / y.doubleValue)
case nme.MOD => Constant(x.doubleValue % y.doubleValue)
- case _ => null
+ case _ => null
}
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
@@ -162,7 +168,7 @@ abstract class ConstantFolder {
case _ => null
}
catch {
- case ex: ArithmeticException => null
+ case _: ArithmeticException => null
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 20e462bbce..5c36bd9d28 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -73,7 +73,7 @@ trait ContextErrors {
// 2) provide the type of the implicit parameter for which we got diverging expansion
// (pt at the point of divergence gives less information to the user)
// Note: it is safe to delay error message generation in this case
- // becasue we don't modify implicits' infos.
+ // because we don't modify implicits' infos.
case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol)
extends TreeTypeError {
def errMsg: String = errMsgForPt(pt0)
@@ -885,22 +885,31 @@ trait ContextErrors {
val WrongNumber, NoParams, ArgsDoNotConform = Value
}
- private def issueAmbiguousTypeErrorUnlessErroneous(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String): Unit =
- if (!(pre.isErroneous || sym1.isErroneous || sym2.isErroneous)) {
- if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) {
- val methodName = nme.defaultGetterToMethod(sym1.name)
- context.issueAmbiguousError(AmbiguousTypeError(sym1.enclClass.pos,
- "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName +
- " define default arguments"))
- } else {
- context.issueAmbiguousError(AmbiguousTypeError(pos,
- ("ambiguous reference to overloaded definition,\n" +
- "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) +
- "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) +
- "\nmatch " + rest)
- ))
- }
- }
+ private def issueAmbiguousTypeErrorUnlessErroneous(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String): Unit = {
+ // To avoid stack overflows (SI-8890), we MUST (at least) report when either `validTargets` or `ambiguousBuffered` holds.
+ // More details:
+ // If `!context.ambiguousErrors`, `reporter.issueAmbiguousError` (which `context.issueAmbiguousError` forwards to)
+ // buffers ambiguous errors. In this case, to avoid looping, we must issue even if `!validTargets`. (TODO: why?)
+ // When not buffering (and thus reporting to the user), we shouldn't issue unless `validTargets`,
+ // otherwise we report two different errors that trace back to the same root cause,
+ // and unless `validTargets`, we don't know for sure the ambiguity is real anyway.
+ val validTargets = !(pre.isErroneous || sym1.isErroneous || sym2.isErroneous)
+ val ambiguousBuffered = !context.ambiguousErrors
+ if (validTargets || ambiguousBuffered)
+ context.issueAmbiguousError(
+ if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) {
+ val methodName = nme.defaultGetterToMethod(sym1.name)
+ AmbiguousTypeError(sym1.enclClass.pos,
+ s"in ${sym1.enclClass}, multiple overloaded alternatives of $methodName define default arguments")
+
+ } else {
+ AmbiguousTypeError(pos,
+ "ambiguous reference to overloaded definition,\n" +
+ s"both ${sym1.fullLocationString} of type ${pre.memberType(sym1)}\n" +
+ s"and ${sym2.fullLocationString} of type ${pre.memberType(sym2)}\n" +
+ s"match $rest")
+ })
+ }
def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError =
AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index c86eaffccf..b3e207b334 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -104,7 +104,7 @@ trait Contexts { self: Analyzer =>
// there must be a scala.xml package when xml literals were parsed in this unit
if (unit.hasXml && ScalaXmlPackage == NoSymbol)
- reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see http://docs.scala-lang.org/overviews/core/scala-2.11.html#scala-xml.")
+ reporter.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala-xml for details.")
// scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
// We detect `scala-xml` by looking for `scala.xml.TopScope` and
@@ -145,7 +145,7 @@ trait Contexts { self: Analyzer =>
* - A variety of bits that track the current error reporting policy (more on this later);
* whether or not implicits/macros are enabled, whether we are in a self or super call or
* in a constructor suffix. These are represented as bits in the mask `contextMode`.
- * - Some odds and ends: undetermined type pararameters of the current line of type inference;
+ * - Some odds and ends: undetermined type parameters of the current line of type inference;
* contextual augmentation for error messages, tracking of the nesting depth.
*
* And behaviour:
@@ -154,19 +154,19 @@ trait Contexts { self: Analyzer =>
* to buffer these for use in 'silent' type checking, when some recovery might be possible.
* - `Context` is something of a Zipper for the tree we are typechecking: its `enclosingContextChain`
* is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`)
- * and to collect in-scope implicit defintions (`implicitss`)
+ * and to collect in-scope implicit definitions (`implicitss`)
* Supporting these are `imports`, which represents all `Import` trees in the enclosing context chain.
- * - In a similar vein, we can assess accessiblity (`isAccessible`.)
+ * - In a similar vein, we can assess accessibility (`isAccessible`.)
*
* More on error buffering:
* When are type errors recoverable? In quite a few places, it turns out. Some examples:
* trying to type an application with/without the expected type, or with/without implicit views
* enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`.
*
- * Intially, starting from the `typer` phase, the contexts either buffer or report errors;
+ * Initially, starting from the `typer` phase, the contexts either buffer or report errors;
* afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
* fine grained control is needed based on the kind of error; ambiguity errors are often
- * suppressed during exploraratory typing, such as determining whether `a == b` in an argument
+ * suppressed during exploratory typing, such as determining whether `a == b` in an argument
* position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
* applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to
* a function type with/without implicit views.
@@ -330,7 +330,7 @@ trait Contexts { self: Analyzer =>
// if set, errors will not be reporter/thrown
def bufferErrors = reporter.isBuffering
- def reportErrors = !bufferErrors
+ def reportErrors = !(bufferErrors || reporter.isThrowing)
// whether to *report* (which is separate from buffering/throwing) ambiguity errors
def ambiguousErrors = this(AmbiguousErrors)
@@ -480,6 +480,9 @@ trait Contexts { self: Analyzer =>
// SI-8245 `isLazy` need to skip lazy getters to ensure `return` binds to the right place
c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod
+ if (tree != outer.tree)
+ c(TypeConstructorAllowed) = false
+
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
@@ -796,7 +799,7 @@ trait Contexts { self: Analyzer =>
isAccessible(sym, pre) &&
!(imported && {
val e = scope.lookupEntry(name)
- (e ne null) && (e.owner == scope)
+ (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists)
})
private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
@@ -1206,6 +1209,7 @@ trait Contexts { self: Analyzer =>
def makeImmediate: ContextReporter = this
def makeBuffering: ContextReporter = this
def isBuffering: Boolean = false
+ def isThrowing: Boolean = false
/** Emit an ambiguous error according to context.ambiguousErrors
*
@@ -1343,6 +1347,7 @@ trait Contexts { self: Analyzer =>
* TODO: get rid of it, use ImmediateReporter and a check for reporter.hasErrors where necessary
*/
private[typechecker] class ThrowingReporter extends ContextReporter {
+ override def isThrowing = true
protected def handleError(pos: Position, msg: String): Unit = throw new TypeError(pos, msg)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index e2ad578252..4435ed0b60 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -3,7 +3,7 @@
* @author Martin Odersky
*/
-//todo: rewrite or disllow new T where T is a mixin (currently: <init> not a member of T)
+//todo: rewrite or disallow new T where T is a mixin (currently: <init> not a member of T)
//todo: use inherited type info also for vars and values
//todo: disallow C#D in superclass
//todo: treat :::= correctly
@@ -159,7 +159,7 @@ trait Implicits {
* @param tree The tree representing the implicit
* @param subst A substituter that represents the undetermined type parameters
* that were instantiated by the winning implicit.
- * @param undetparams undeterminted type parameters
+ * @param undetparams undetermined type parameters
*/
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter, val undetparams: List[Symbol]) {
override def toString = "SearchResult(%s, %s)".format(tree,
@@ -1475,8 +1475,10 @@ trait Implicits {
})
private lazy val typeParamNames: List[String] = sym.typeParams.map(_.decodedName)
+ private def typeArgsAtSym(paramTp: Type) = paramTp.baseType(sym).typeArgs
+
+ def format(paramName: Name, paramTp: Type): String = format(typeArgsAtSym(paramTp) map (_.toString))
- def format(paramName: Name, paramTp: Type): String = format(paramTp.typeArgs map (_.toString))
def format(typeArgs: List[String]): String =
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index ee2775ee26..cf97474d9a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -295,11 +295,17 @@ trait Infer extends Checkable {
&& !isByNameParamType(tp)
&& isCompatible(tp, dropByName(pt))
)
+ def isCompatibleSam(tp: Type, pt: Type): Boolean = {
+ val samFun = typer.samToFunctionType(pt)
+ (samFun ne NoType) && isCompatible(tp, samFun)
+ }
+
val tp1 = normalize(tp)
( (tp1 weak_<:< pt)
|| isCoercible(tp1, pt)
|| isCompatibleByName(tp, pt)
+ || isCompatibleSam(tp, pt)
)
}
def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
@@ -1011,7 +1017,7 @@ trait Infer extends Checkable {
/** Substitute free type variables `undetparams` of type constructor
* `tree` in pattern, given prototype `pt`.
*
- * @param tree the constuctor that needs to be instantiated
+ * @param tree the constructor that needs to be instantiated
* @param undetparams the undetermined type parameters
* @param pt0 the expected result type of the instance
*/
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index ba183fe3e6..0aa62d771e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -212,7 +212,9 @@ trait MethodSynthesis {
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
- abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", "))
+ context.error(cd.pos, s"Internal error: Symbol for synthetic factory method not found among ${context.unit.synthetics.keys.mkString(", ")}")
+ // Soldier on for the sake of the presentation compiler
+ List(cd)
}
case _ =>
stat :: Nil
@@ -355,8 +357,9 @@ trait MethodSynthesis {
def derivedSym: Symbol = {
// Only methods will do! Don't want to pick up any stray
// companion objects of the same name.
- val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic)
- assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
+ val result = enclClass.info decl name filter (x => x.isMethod && x.isSynthetic)
+ if (result == NoSymbol || result.isOverloaded)
+ context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}")
result
}
def derivedTree: DefDef =
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index fdff2f3076..711cfba24d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -171,7 +171,7 @@ trait Namers extends MethodSynthesis {
val newFlags = (sym.flags & LOCKED) | flags
sym.rawInfo match {
case tr: TypeRef =>
- // !!! needed for: pos/t5954d; the uniques type cache will happilly serve up the same TypeRef
+ // !!! needed for: pos/t5954d; the uniques type cache will happily serve up the same TypeRef
// over this mutated symbol, and we witness a stale cache for `parents`.
tr.invalidateCaches()
case _ =>
@@ -296,7 +296,7 @@ trait Namers extends MethodSynthesis {
}
tree.symbol match {
case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context)
- case sym => enterExistingSym(sym)
+ case sym => enterExistingSym(sym, tree)
}
}
@@ -413,6 +413,7 @@ trait Namers extends MethodSynthesis {
if (isRedefinition) {
updatePosFlags(existing, tree.pos, tree.mods.flags)
setPrivateWithin(tree, existing)
+ clearRenamedCaseAccessors(existing)
existing
}
else assignAndEnterSymbol(tree) setFlag inConstructorFlag
@@ -583,7 +584,7 @@ trait Namers extends MethodSynthesis {
// more than one hidden name, the second will not be warned.
// So it is the position of the actual hidden name.
//
- // Note: java imports have precence over definitions in the same package
+ // Note: java imports have precedence over definitions in the same package
// so don't warn for them. There is a corresponding special treatment
// in the shadowing rules in typedIdent to (SI-7232). In any case,
// we shouldn't be emitting warnings for .java source files.
@@ -736,7 +737,9 @@ trait Namers extends MethodSynthesis {
}
// Hooks which are overridden in the presentation compiler
- def enterExistingSym(sym: Symbol): Context = this.context
+ def enterExistingSym(sym: Symbol, tree: Tree): Context = {
+ this.context
+ }
def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
@@ -1643,6 +1646,7 @@ trait Namers extends MethodSynthesis {
def symbolAllowsDeferred = (
sym.isValueParameter
|| sym.isTypeParameterOrSkolem
+ || (sym.isAbstractType && sym.owner.isClass)
|| context.tree.isInstanceOf[ExistentialTypeTree]
)
// Does the symbol owner require no undefined members?
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index b6387fd56b..50f658f68d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -384,7 +384,7 @@ trait NamesDefaults { self: Analyzer =>
* of arguments.
*
* @param args The list of arguments
- * @param params The list of parameter sybols of the invoked method
+ * @param params The list of parameter symbols of the invoked method
* @param argName A function that extracts the name of an argument expression, if it is a named argument.
*/
def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name]): (List[Symbol], Boolean) = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index bb8c3c3c6d..fa4a764f1b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -336,7 +336,7 @@ trait PatternTypers {
val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
// must call doTypedUnapply directly, as otherwise we get undesirable rewrites
// and re-typechecks of the target of the unapply call in PATTERNmode,
- // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
+ // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
// but an arbitrary tree as is the case here
val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index af4e9e8927..d2931ff9e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -543,7 +543,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def checkOverrideDeprecated() {
- if (other.hasDeprecatedOverridingAnnotation) {
+ if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
currentRun.reporting.deprecationWarning(member.pos, other, msg)
@@ -1095,7 +1095,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectTpe <:< common)
+ if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe && ObjectTpe <:< receiver.tpe))
unrelatedTypes()
}
// warn if actual has a case parent that is not same as receiver's;
@@ -1404,7 +1404,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (symbol.isDeprecated) {
val concrOvers =
symbol.allOverriddenSymbols.filter(sym =>
- !sym.isDeprecated && !sym.isDeferred)
+ !sym.isDeprecated && !sym.isDeferred && !sym.hasDeprecatedOverridingAnnotation && !sym.enclClass.hasDeprecatedInheritanceAnnotation)
if(!concrOvers.isEmpty)
currentRun.reporting.deprecationWarning(
tree.pos,
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 57f27a05fd..ea44b9dc39 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -61,7 +61,7 @@ trait StdAttachments {
val metadata = MacroExpansionAttachment(expandee, expanded)
expandee updateAttachment metadata
expanded match {
- case expanded: Tree => expanded updateAttachment metadata
+ case expanded: Tree if !expanded.isEmpty => expanded updateAttachment metadata
case _ => // do nothing
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index aafe73d88d..8f13507fa9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -54,6 +54,9 @@ trait SyntheticMethods extends ast.TreeDSL {
/** Does not force the info of `caseclazz` */
final def caseAccessorName(caseclazz: Symbol, paramName: TermName) =
(renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName))
+ final def clearRenamedCaseAccessors(caseclazz: Symbol): Unit = {
+ renamedCaseAccessors -= caseclazz
+ }
/** Add the synthetic methods to case classes.
*/
@@ -150,7 +153,7 @@ trait SyntheticMethods extends ast.TreeDSL {
def thatCast(eqmeth: Symbol): Tree =
gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe)
- /* The equality method core for case classes and inline clases.
+ /* The equality method core for case classes and inline classes.
* 1+ args:
* (that.isInstanceOf[this.C]) && {
* val x$1 = that.asInstanceOf[this.C]
@@ -359,7 +362,14 @@ trait SyntheticMethods extends ast.TreeDSL {
for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) {
val original = ddef.symbol
- val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
+ val i = original.owner.caseFieldAccessors.indexOf(original)
+ def freshAccessorName = {
+ devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}")
+ context.unit.freshTermName(original.name + "$")
+ }
+ def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName
+ val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex
+ val newAcc = deriveMethod(ddef.symbol, name => newName) { newAcc =>
newAcc.makePublic
newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE)
ddef.rhs.duplicate
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index 90ec3a89b8..57dc74d2a0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -11,7 +11,6 @@ trait Tags {
self: Typer =>
private val runDefinitions = currentRun.runDefinitions
- import runDefinitions._
private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
@@ -66,7 +65,7 @@ trait Tags {
// if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail
if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree
else {
- val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass
+ val tagSym = if (concrete) runDefinitions.TypeTagClass else runDefinitions.WeakTypeTagClass
val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
val taggedTp = appliedType(tagTp, List(tp))
resolveTag(pos, taggedTp, allowMaterialization)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 1dac27639c..0f90c6a478 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -607,7 +607,7 @@ trait TypeDiagnostics {
if (!c.owner.exists || c.owner.isClass || c.owner.isMethod || (c.owner.isType && !c.owner.isParameter)) c
else enclClassOrMethodOrTypeMember(c.outer)
- val tt = tparams.filter(_.name != typeNames.WILDCARD).foreach { tp =>
+ tparams.filter(_.name != typeNames.WILDCARD).foreach { tp =>
// we don't care about type params shadowing other type params in the same declaration
enclClassOrMethodOrTypeMember(context).outer.lookupSymbol(tp.name, s => s != tp.symbol && s.hasRawInfo && reallyExists(s)) match {
case LookupSucceeded(_, sym2) => context.warning(tp.pos,
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index fe6038bc00..7f86a59164 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -745,6 +745,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case _ =>
}
+ /**
+ * Convert a SAM type to the corresponding FunctionType,
+ * extrapolating BoundedWildcardTypes in the process
+ * (no type precision is lost by the extrapolation,
+ * but this facilitates dealing with the types arising from Java's use-site variance).
+ */
+ def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = {
+ val samSym = sam orElse samOf(tp)
+
+ def correspondingFunctionSymbol = {
+ val numVparams = samSym.info.params.length
+ if (numVparams > definitions.MaxFunctionArity) NoSymbol
+ else FunctionClass(numVparams)
+ }
+
+ if (samSym.exists && samSym.owner != correspondingFunctionSymbol) // don't treat Functions as SAMs
+ wildcardExtrapolation(normalize(tp memberInfo samSym))
+ else NoType
+ }
+
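
For orientation, the kind of conversion samToFunctionType supports, sketched with a made-up SAM trait (whether assigning a function literal to such a type needs a flag like -Xexperimental depends on the compiler version):

    object SamSketch {
      trait StringPredicate { def test(s: String): Boolean }  // a SAM type: one abstract method
      // samToFunctionType views StringPredicate as String => Boolean, so a function
      // literal can be checked for compatibility against it:
      val nonEmpty: StringPredicate = (s: String) => s.nonEmpty
      def main(args: Array[String]): Unit = println(nonEmpty.test("hi"))  // true
    }
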
/** Perform the following adaptations of expression, pattern or type `tree` wrt to
* given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
@@ -828,7 +848,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
+ if (!meth.isConstructor && (isFunctionType(pt) || samOf(pt).exists)) { // (4.2)
debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
@@ -854,13 +874,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def adaptType(): Tree = {
// @M When not typing a type constructor (!context.inTypeConstructorAllowed)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // or raw type, types must be of kind *,
// and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
def properTypeRequired = (
tree.hasSymbolField
&& !context.inTypeConstructorAllowed
- && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ && !context.unit.isJava
)
// @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
// (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
@@ -898,24 +918,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def insertApply(): Tree = {
assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
- def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
-
- // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
- val qual = adapted match {
- case This(_) =>
- gen.stabilize(adapted)
- case Ident(_) =>
- val owner = adapted.symbol.owner
- val pre =
- if (owner.isPackageClass) owner.thisType
- else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
- else NoPrefix
- stabilize0(pre)
- case Select(qualqual, _) =>
- stabilize0(qualqual.tpe)
- case other =>
- other
- }
+ val qual = gen.stabilize(adapted)
typedPos(tree.pos, mode, pt) {
Select(qual setPos tree.pos.makeTransparent, nme.apply)
}
@@ -930,7 +933,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// Ignore type errors raised in later phases that are due to mismatching types with existential skolems
// We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
+ // Here's my hypothesis why this happens. The pattern matcher defines a variable of type
//
// val x: T = expr
//
@@ -1104,7 +1107,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
adaptConstant(value)
case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
inferExprAlternative(tree, pt)
- adapt(tree, mode, pt, original)
+ adaptAfterOverloadResolution(tree, mode, pt, original)
case NullaryMethodType(restpe) => // (2)
adapt(tree setType restpe, mode, pt, original)
case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
@@ -1137,6 +1140,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
}
}
+ // This just exists to help keep track of the spots where we have to adapt a tree after
+ // overload resolution. These proved hard to find during the fix for SI-8267.
+ def adaptAfterOverloadResolution(tree: Tree, mode: Mode, pt: Type = WildcardType, original: Tree = EmptyTree): Tree = {
+ adapt(tree, mode, pt, original)
+ }
+
def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
inferExprInstance(tree, context.extractUndetparams(), pt)
adapt(tree, mode, pt)
@@ -1514,7 +1523,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+ val dummy = context.outer.owner.newLocalDummy(templ.pos)
+ val cscope = context.outer.makeNewScope(ctor, dummy)
+ if (dummy.isTopLevel) currentRun.symSource(dummy) = currentUnit.source.file
val cbody2 = { // called both during completion AND typing.
val typer1 = newTyper(cscope)
// XXX: see about using the class's symbol....
@@ -1653,7 +1664,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val sameSourceFile = context.unit.source.file == psym.sourceFile
- if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
+ if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation &&
+ !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
context.deprecationWarning(parent.pos, psym, msg)
@@ -1724,7 +1736,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
if (!clazz.owner.isPackageClass)
context.error(clazz.pos, "inner classes cannot be classfile annotations")
- else restrictionWarning(cdef.pos, unit,
+ // Ignore @SerialVersionUID, because it is special-cased and handled completely differently.
+ // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement
+ // of constant argument values "for free". Related to SI-7041.
+ else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit,
"""|subclassing Classfile does not
|make your annotation visible at runtime. If that is what
|you want, you must write the annotation class in Java.""".stripMargin)
@@ -2673,7 +2688,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* `{
* def apply$body(p1: T1, ..., pN: TN): T = body
* new S {
- * def apply(p1: T1, ..., pN: TN): T = apply$body(p1,..., pN)
+ * def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN)
* }
* }`
*
@@ -2683,6 +2698,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
*
* The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `samClassTp`,
* and `resPt` is derived from `samClassTp` -- it may be fully defined, or not...
+ * If it is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters.
+ *
+ * The types T1' ... TN' and T' are derived from the method signature of the sam method,
+ * as seen from the fully defined `samClassTpFullyDefined`.
*
* The function's body is put in a method outside of the class definition to enforce scoping.
* S's members should not be in scope in `body`.
@@ -2694,6 +2713,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* However T must be fully defined before we type the instantiation, as it'll end up as a parent type,
* which must be fully defined. Would be nice to have some kind of mechanism to insert type vars in a block of code,
* and have the instantiation of the first occurrence propagate to the rest of the block.
+ *
+ * TODO: by-name params
+ * scala> trait LazySink { def accept(a: => Any): Unit }
+ * defined trait LazySink
+ *
+ * scala> val f: LazySink = (a) => (a, a)
+ * f: LazySink = $anonfun$1@1fb26910
+ *
+ * scala> f(println("!"))
+ * <console>:10: error: LazySink does not take parameters
+ * f(println("!"))
+ * ^
+ *
+ * scala> f.accept(println("!"))
+ * !
+ * !
*/
def synthesizeSAMFunction(sam: Symbol, fun: Function, resPt: Type, samClassTp: Type, mode: Mode): Tree = {
// assert(fun.vparams forall (vp => isFullyDefined(vp.tpt.tpe))) -- by construction, as we take them from sam's info
@@ -2774,14 +2809,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
samClassTp
}
- // `final override def ${sam.name}($p1: $T1, ..., $pN: $TN): $resPt = ${sam.name}\$body'($p1, ..., $pN)`
+ // what's the signature of the method that we should actually be overriding?
+ val samMethTp = samClassTpFullyDefined memberInfo sam
+ // Before the mutation, `tp <:< vpar.tpt.tpe` should hold.
+ // TODO: error message when this is not the case, as the expansion won't type check
+ // - Ti' <:< Ti and T <: T' must hold for the samDef body to type check
+ val funArgTps = foreach2(samMethTp.paramTypes, fun.vparams)((tp, vpar) => vpar.tpt setType tp)
+
+ // `final override def ${sam.name}($p1: $T1', ..., $pN: $TN'): ${samMethTp.finalResultType} = ${sam.name}\$body'($p1, ..., $pN)`
val samDef =
DefDef(Modifiers(FINAL | OVERRIDE | SYNTHETIC),
sam.name.toTermName,
Nil,
List(fun.vparams),
- TypeTree(samBodyDef.tpt.tpe) setPos sampos.focus,
- Apply(Ident(bodyName), fun.vparams map (p => Ident(p.name)))
+ TypeTree(samMethTp.finalResultType) setPos sampos.focus,
+ Apply(Ident(bodyName), fun.vparams map gen.paramToArg)
)
val serializableParentAddendum =
@@ -2811,6 +2853,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
)
}
+ // TODO: improve error reporting -- when we're in silent mode (from `silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError`)
+ // the errors in the function don't get out...
+ if (block exists (_.isErroneous))
+ context.error(fun.pos, s"Could not derive subclass of $samClassTp\n (with SAM `def $sam$samMethTp`)\n based on: $fun.")
+
classDef.symbol addAnnotation SerialVersionUIDAnnotation
block
}
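
Roughly, the expansion synthesizeSAMFunction produces, hand-written here for a made-up SAM type (a sketch, not actual compiler output; the real expansion also adds @SerialVersionUID and derives the parameter and result types as described above):

    object SamExpansionSketch {
      trait IntSink { def accept(i: Int): Unit }
      // `val snk: IntSink = i => println(i)` expands roughly to:
      val snk: IntSink = {
        def acceptBody(i: Int): Unit = println(i)   // body hoisted out so IntSink's members are not in scope in it
        new IntSink with Serializable {
          final override def accept(i: Int): Unit = acceptBody(i)
        }
      }
      def main(args: Array[String]): Unit = snk.accept(42)
    }
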
@@ -2831,7 +2878,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* as `(a => a): Int => Int` should not (yet) get the sam treatment.
*/
val sam =
- if (!settings.Xexperimental || pt.typeSymbol == FunctionSymbol) NoSymbol
+ if (pt.typeSymbol == FunctionSymbol) NoSymbol
else samOf(pt)
/* The SAM case comes first so that this works:
@@ -2841,15 +2888,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
* Note that the arity of the sam must correspond to the arity of the function.
*/
val samViable = sam.exists && sameLength(sam.info.params, fun.vparams)
+ val ptNorm = if (samViable) samToFunctionType(pt, sam) else pt
val (argpts, respt) =
- if (samViable) {
- val samInfo = pt memberInfo sam
- (samInfo.paramTypes, samInfo.resultType)
- } else {
- pt baseType FunctionSymbol match {
- case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
- case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
- }
+ ptNorm baseType FunctionSymbol match {
+ case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
+ case _ => (fun.vparams map (_ => if (pt == ErrorType) ErrorType else NoType), WildcardType)
}
if (!FunctionSymbol.exists)
@@ -3175,7 +3218,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
if (sym1 != NoSymbol) sym = sym1
}
if (sym == NoSymbol) fun
- else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType)
+ else adaptAfterOverloadResolution(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode)
} else fun
}
@@ -3220,11 +3263,27 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
setError(tree)
else {
inferMethodAlternative(fun, undetparams, argTpes, pt)
- doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
+ doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, mode, pt)
}
}
handleOverloaded
+ case _ if isPolymorphicSignature(fun.symbol) =>
+ // Mimics Java's treatment of polymorphic signatures as described in
+ // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3
+ //
+ // One can think of these methods as being infinitely overloaded. We create
+ // a fictitious new cloned method symbol for each call site that takes on a signature
+ // governed by a) the argument types and b) the expected type
+ val args1 = typedArgs(args, forArgMode(fun, mode))
+ val pts = args1.map(_.tpe.deconst)
+ val clone = fun.symbol.cloneSymbol
+ val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt))
+ val resultType = if (isFullyDefined(pt)) pt else ObjectTpe
+ clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType))
+ val fun1 = fun.setSymbol(clone).setType(clone.info)
+ doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType)
+
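
The call sites this case targets are the signature-polymorphic methods of java.lang.invoke.MethodHandle (Java 7+). A sketch of how such a call is typed, with an explicit expected type supplied by ascription (the example values are invented):

    import java.lang.invoke.{MethodHandles, MethodType}

    object PolySigSketch {
      def main(args: Array[String]): Unit = {
        val lookup = MethodHandles.lookup()
        val mt     = MethodType.methodType(classOf[String], classOf[String])
        val concat = lookup.findVirtual(classOf[String], "concat", mt)
        // invokeExact gets a per-call-site signature (String, String)String, derived
        // from the argument types and the ascribed expected type, as described above
        val greeting = (concat.invokeExact("Hello, ", "world"): String)
        println(greeting)   // Hello, world
      }
    }
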
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
@@ -3720,8 +3779,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
case TypeRef(pre, sym, args) =>
if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias)
else {
- if (pre.isVolatile)
- InferTypeWithVolatileTypeSelectionError(tree, pre)
+ if (pre.isVolatile) pre match {
+ case SingleType(_, sym) if sym.isSynthetic && isPastTyper =>
+ debuglog(s"ignoring volatility of prefix in pattern matcher generated inferred type: $tp") // See pos/t7459c.scala
+ case _ =>
+ InferTypeWithVolatileTypeSelectionError(tree, pre)
+ }
mapOver(tp)
}
case _ =>
@@ -3803,7 +3866,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
inferPolyAlternatives(fun, mapList(args)(treeTpe))
- val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
+
+ // SI-8267 `memberType` can introduce existentials *around* a PolyType/MethodType, see AsSeenFromMap#captureThis.
+ // If we had selected a non-overloaded symbol, `memberType` would have been called in `makeAccessible`
+ // and the resulting existential type would have been skolemized in `adapt` *before* we typechecked
+ // the enclosing type-/ value- application.
+ //
+ // However, if the selection is overloaded, we defer calling `memberType` until we can select a single
+ // alternative here. It is therefore necessary to skolemize the existential here.
+ //
+ val fun1 = adaptAfterOverloadResolution(fun, mode.forFunMode | TAPPmode)
+
+ val tparams = fun1.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
val args1 = if (sameLength(args, tparams)) {
//@M: in case TypeApply we can't check the kind-arities of the type arguments,
// as we don't know which alternative to choose... here we do
@@ -3817,7 +3891,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// ...actually this was looping anyway, see bug #278.
return TypedApplyWrongNumberOfTpeParametersError(fun, fun)
- typedTypeApply(tree, mode, fun, args1)
+ typedTypeApply(tree, mode, fun1, args1)
case SingleType(_, _) =>
typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
case PolyType(tparams, restpe) if tparams.nonEmpty =>
@@ -5116,16 +5190,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
}
- if (!refTyped.isErrorTyped)
- tree setType refTyped.tpe.resultType
-
- if (treeInfo.admitsTypeSelection(refTyped)) tree
- else UnstableTreeError(refTyped)
+ if (refTyped.isErrorTyped) {
+ setError(tree)
+ } else {
+ tree setType refTyped.tpe.resultType.deconst
+ if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(tree)
+ }
}
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
val qual1 = typedType(tree.qualifier, mode)
- if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name))
+ else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
else typedSelect(tree, qual1, tree.name)
}
@@ -5137,7 +5214,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
def typedExistentialTypeTree(tree: ExistentialTypeTree) = {
val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
- _.typedExistentialTypeTree(tree, mode)
+ typer =>
+ if (context.inTypeConstructorAllowed)
+ typer.context.withinTypeConstructorAllowed(typer.typedExistentialTypeTree(tree, mode))
+ else
+ typer.typedExistentialTypeTree(tree, mode)
}
checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
tree1
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index cc2d9141ce..fc1f45e358 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -142,17 +142,30 @@ trait Unapplies extends ast.TreeDSL {
/** The unapply method corresponding to a case class
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
- val tparams = constrTparamsInvariant(cdef)
- val method = constrParamss(cdef) match {
+ val tparams = constrTparamsInvariant(cdef)
+ val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
}
- val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
- val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
+ val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
+ val resultType = if (!settings.isScala212) TypeTree() else { // fix for SI-6541 under -Xsource:2.12
+ def repeatedToSeq(tp: Tree) = tp match {
+ case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps)
+ case _ => tp
+ }
+ constrParamss(cdef) match {
+ case Nil | Nil :: _ =>
+ gen.rootScalaDot(tpnme.Boolean)
+ case params :: _ =>
+ val constrParamTypes = params.map(param => repeatedToSeq(param.tpt))
+ AppliedTypeTree(gen.rootScalaDot(tpnme.Option), List(treeBuilder.makeTupleType(constrParamTypes)))
+ }
+ }
+ val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
- DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
+ DefDef(caseMods, method, tparams, List(cparams), resultType, body)
)
}
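
Concretely, the result type generated under -Xsource:2.12 can be approximated by hand like this (a sketch with an invented case class; the real method is synthesized on the companion object):

    case class Person(name: String, age: Int)
    // hand-written approximation of the companion's synthetic unapply, now with
    // an explicit result type instead of an inferred one (the SI-6541 fix mentioned above):
    object PersonUnapplySketch {
      def unapply(x: Person): Option[(String, Int)] =
        if (x == null) None else Some((x.name, x.age))
    }
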
diff --git a/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
new file mode 100644
index 0000000000..4451651229
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/ClassFileLookup.scala
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2014 Contributor. All rights reserved.
+ */
+package scala.tools.nsc.util
+
+import scala.tools.nsc.io.AbstractFile
+import java.net.URL
+
+/**
+ * Simple interface that allows us to abstract over how class file lookup is performed
+ * in different classpath representations.
+ */
+// TODO at the end, after the possible removal of the old classpath representation, this class shouldn't be generic
+// T should be just changed to AbstractFile
+trait ClassFileLookup[T] {
+ def findClassFile(name: String): Option[AbstractFile]
+
+ /**
+ * Returns classes backed by either class files or source files (as our base ClassRepresentation),
+ * so it is not as strictly tied to class files as findClassFile is.
+ */
+ def findClass(name: String): Option[ClassRepresentation[T]]
+
+ /**
+ * A sequence of URLs representing this classpath.
+ */
+ def asURLs: Seq[URL]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClassPathString: String
+
+ // for compatibility purposes
+ @deprecated("Use asClassPathString instead of this one", "2.11.5")
+ def asClasspathString: String = asClassPathString
+
+ /** The whole sourcepath in the form of one String.
+ */
+ def asSourcePathString: String
+}
+
+/**
+ * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
+ */
+// TODO at the end, after the possible removal of the old classpath implementation, this class shouldn't be generic
+// T should be just changed to AbstractFile
+trait ClassRepresentation[T] {
+ def binary: Option[T]
+ def source: Option[AbstractFile]
+
+ def name: String
+}
+
+object ClassRepresentation {
+ def unapply[T](classRep: ClassRepresentation[T]): Option[(Option[T], Option[AbstractFile])] =
+ Some((classRep.binary, classRep.source))
+}
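
A toy (hypothetical) implementation, only to show how ClassRepresentation and its extractor are intended to be used:

    import scala.tools.nsc.io.AbstractFile
    import scala.tools.nsc.util.ClassRepresentation

    object ClassRepresentationSketch {
      // case class parameters provide the trait's abstract members
      case class SimpleClassRep(name: String,
                                binary: Option[AbstractFile],
                                source: Option[AbstractFile]) extends ClassRepresentation[AbstractFile]

      def describe(rep: ClassRepresentation[AbstractFile]): String = rep match {
        case ClassRepresentation(Some(_), Some(_)) => s"${rep.name}: class file and source"
        case ClassRepresentation(Some(_), None)    => s"${rep.name}: class file only"
        case ClassRepresentation(None,    Some(_)) => s"${rep.name}: source only"
        case _                                     => s"${rep.name}: nothing found"
      }
    }
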
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index e89f08ec6b..8d4d07759f 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -7,16 +7,18 @@
package scala.tools.nsc
package util
+import io.{ AbstractFile, Directory, File, Jar }
+import java.net.MalformedURLException
import java.net.URL
+import java.util.regex.PatternSyntaxException
import scala.collection.{ mutable, immutable }
-import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.reflect.internal.util.StringOps.splitWhere
-import Jar.isJarOrZip
+import scala.tools.nsc.classpath.FileUtils
+
import File.pathSeparator
-import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
-import java.net.MalformedURLException
-import java.util.regex.PatternSyntaxException
-import scala.reflect.runtime.ReflectionUtils
+import FileUtils.endsClass
+import FileUtils.endsScalaOrJava
+import Jar.isJarOrZip
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -89,7 +91,7 @@ object ClassPath {
/** A class modeling aspects of a ClassPath which should be
* propagated to any classpaths it creates.
*/
- abstract class ClassPathContext[T] {
+ abstract class ClassPathContext[T] extends classpath.ClassPathFactory[ClassPath[T]] {
/** A filter which can be used to exclude entities from the classpath
* based on their name.
*/
@@ -99,75 +101,47 @@ object ClassPath {
*/
def validClassFile(name: String) = endsClass(name) && isValidName(name)
def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.charAt(0) != '.')
- def validSourceFile(name: String) = endsScala(name) || endsJava(name)
+ def validSourceFile(name: String) = endsScalaOrJava(name)
/** From the representation to its identifier.
*/
def toBinaryName(rep: T): String
- /** Create a new classpath based on the abstract file.
- */
- def newClassPath(file: AbstractFile): ClassPath[T]
-
- /** Creators for sub classpaths which preserve this context.
- */
def sourcesInPath(path: String): List[ClassPath[T]] =
for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
new SourcePath[T](dir, this)
-
- def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
- for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
- newClassPath(entry)
-
- def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
- classesInPathImpl(path, expand = true).toIndexedSeq
-
- def classesInPath(path: String) = classesInPathImpl(path, expand = false)
-
- // Internal
- private def classesInPathImpl(path: String, expand: Boolean) =
- for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
- newClassPath(dir)
-
- def classesInManifest(used: Boolean) =
- if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
}
- def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+ def manifests: List[java.net.URL] = {
+ import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
+ Thread.currentThread().getContextClassLoader()
+ .getResources("META-INF/MANIFEST.MF")
+ .filter(_.getProtocol == "jar").toList
+ }
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
val name = rep.name
assert(endsClass(name), name)
- name.substring(0, name.length - 6)
+ FileUtils.stripClassExtension(name)
}
+
def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
object DefaultJavaContext extends JavaContext
- private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
- private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
- private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
-
/** From the source file to its identifier.
*/
- def toSourceName(f: AbstractFile): String = {
- val name = f.name
-
- if (endsScala(name)) name.substring(0, name.length - 6)
- else if (endsJava(name)) name.substring(0, name.length - 5)
- else throw new FatalError("Unexpected source file ending: " + name)
- }
+ def toSourceName(f: AbstractFile): String = FileUtils.stripSourceExtension(f.name)
}
+
import ClassPath._
/**
* Represents a package which contains classes and other packages
*/
-abstract class ClassPath[T] {
- type AnyClassRep = ClassPath[T]#ClassRep
-
+abstract class ClassPath[T] extends ClassFileLookup[T] {
/**
* The short name of the package (without prefix)
*/
@@ -179,28 +153,37 @@ abstract class ClassPath[T] {
*/
def origin: Option[String] = None
- /** A list of URLs representing this classpath.
- */
- def asURLs: List[URL]
-
- /** The whole classpath in the form of one String.
- */
- def asClasspathString: String
-
/** Info which should be propagated to any sub-classpaths.
*/
def context: ClassPathContext[T]
/** Lists of entities.
*/
- def classes: IndexedSeq[AnyClassRep]
+ def classes: IndexedSeq[ClassRepresentation[T]]
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
+ /** The entries this classpath is composed of. In class `ClassPath` it's just the singleton list containing `this`.
+ * Subclasses such as `MergedClassPath` typically return lists with more elements.
+ */
+ def entries: IndexedSeq[ClassPath[T]] = IndexedSeq(this)
+
+ /** Merge this classpath (e.g. the platform classpath) and the entries built from `urls` into one merged classpath */
+ def mergeUrlsIntoClassPath(urls: URL*): MergedClassPath[T] = {
+ // Collect our new jars/directories and add them to the existing set of classpaths
+ val allEntries =
+ (entries ++
+ urls.map(url => context.newClassPath(io.AbstractFile.getURL(url)))
+ ).distinct
+
+ // Combine all of our classpaths (old and new) into one merged classpath
+ new MergedClassPath(allEntries, context)
+ }
+
/**
* Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
- case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
+ case class ClassRep(binary: Option[T], source: Option[AbstractFile]) extends ClassRepresentation[T] {
def name: String = binary match {
case Some(x) => context.toBinaryName(x)
case _ =>
@@ -219,25 +202,27 @@ abstract class ClassPath[T] {
* Find a ClassRep given a class name of the form "package.subpackage.ClassName".
* Does not support nested classes on .NET
*/
- def findClass(name: String): Option[AnyClassRep] =
+ override def findClass(name: String): Option[ClassRepresentation[T]] =
splitWhere(name, _ == '.', doDropIndex = true) match {
case Some((pkg, rest)) =>
val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
rep map {
- case x: ClassRep => x
+ case x: ClassRepresentation[T] => x
case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
}
case _ =>
classes find (_.name == name)
}
- def findClassFile(name: String): Option[AbstractFile] =
+ override def findClassFile(name: String): Option[AbstractFile] =
findClass(name) match {
- case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x)
+ case Some(ClassRepresentation(Some(x: AbstractFile), _)) => Some(x)
case _ => None
}
- def sortString = join(split(asClasspathString).sorted: _*)
+ override def asSourcePathString: String = sourcepaths.mkString(pathSeparator)
+
+ def sortString = join(split(asClassPathString).sorted: _*)
override def equals(that: Any) = that match {
case x: ClassPath[_] => this.sortString == x.sortString
case _ => false
@@ -249,10 +234,12 @@ abstract class ClassPath[T] {
* A Classpath containing source files
*/
class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
+ import FileUtils.AbstractFileOps
+
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
- def asClasspathString = dir.path
+ def asURLs = dir.toURLs()
+ def asClassPathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq(dir)
private def traverse() = {
@@ -275,10 +262,12 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
* A directory (or a .jar file) containing classfiles and packages
*/
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
+ import FileUtils.AbstractFileOps
+
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
- def asClasspathString = dir.path
+ def asURLs = dir.toURLs(default = Seq(new URL(name)))
+ def asClassPathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
// calculates (packages, classes) in one traversal.
@@ -322,9 +311,10 @@ extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), o
* A classpath unifying multiple class- and sourcepath entries.
*/
class MergedClassPath[T](
- val entries: IndexedSeq[ClassPath[T]],
+ override val entries: IndexedSeq[ClassPath[T]],
val context: ClassPathContext[T])
extends ClassPath[T] {
+
def this(entries: TraversableOnce[ClassPath[T]], context: ClassPathContext[T]) =
this(entries.toIndexedSeq, context)
@@ -333,12 +323,12 @@ extends ClassPath[T] {
lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
- override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
+ override def asClassPathString: String = join(entries map (_.asClassPathString) : _*)
- lazy val classes: IndexedSeq[AnyClassRep] = {
+ lazy val classes: IndexedSeq[ClassRepresentation[T]] = {
var count = 0
val indices = mutable.HashMap[String, Int]()
- val cls = new mutable.ArrayBuffer[AnyClassRep](1024)
+ val cls = new mutable.ArrayBuffer[ClassRepresentation[T]](1024)
for (e <- entries; c <- e.classes) {
val name = c.name
@@ -347,9 +337,9 @@ extends ClassPath[T] {
val existing = cls(idx)
if (existing.binary.isEmpty && c.binary.isDefined)
- cls(idx) = existing.copy(binary = c.binary)
+ cls(idx) = ClassRep(binary = c.binary, source = existing.source)
if (existing.source.isEmpty && c.source.isDefined)
- cls(idx) = existing.copy(source = c.source)
+ cls(idx) = ClassRep(binary = existing.binary, source = c.source)
}
else {
indices(name) = count
@@ -387,10 +377,12 @@ extends ClassPath[T] {
}
new MergedClassPath[T](newEntries, context)
}
+
def show() {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
- asClasspathString split ':' foreach (x => println(" " + x))
+ asClassPathString split ':' foreach (x => println(" " + x))
}
+
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
}
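The mergeUrlsIntoClassPath hook added above can be exercised as in the following sketch (names such as existingCp and extraJar are assumed for illustration):

  import java.net.URL
  import scala.tools.nsc.io.AbstractFile
  import scala.tools.nsc.util.{ ClassPath, MergedClassPath }

  object MergeSketch {
    // Extend an already-built classpath with an additional jar or directory,
    // e.g. the way a REPL could add a jar at runtime.
    def withExtraJar(existingCp: ClassPath[AbstractFile], extraJar: URL): MergedClassPath[AbstractFile] =
      existingCp.mergeUrlsIntoClassPath(extraJar)
    // The result keeps existingCp.entries, appends an entry built from extraJar,
    // and drops duplicates via distinct.
  }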
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index ba44126df2..352816803f 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -8,7 +8,7 @@ package util
import scala.reflect.internal.Chars._
-/** Utilitity methods for doc comment strings
+/** Utility methods for doc comment strings
*/
object DocStrings {
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 3ae21b6b98..8d8418945a 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -1,17 +1,17 @@
package scala.tools
package reflect
+import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.PathResolver
+import scala.tools.util.PathResolverFactory
object ReflectMain extends Driver {
private def classloaderFromSettings(settings: Settings) = {
- val classpath = new PathResolver(settings).result
- ScalaClassLoader.fromURLs(classpath.asURLs, getClass.getClassLoader)
+ val classPathURLs = PathResolverFactory.create(settings).resultAsURLs
+ ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader)
}
override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
deleted file mode 100644
index 3cfc1eb2a1..0000000000
--- a/src/compiler/scala/tools/util/Javap.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import scala.tools.nsc.util.ScalaClassLoader
-import java.io.PrintWriter
-
-trait JpResult {
- def isError: Boolean
- def value: Any
- def show(): Unit
-}
-
-trait Javap {
- def loader: ScalaClassLoader
- def printWriter: PrintWriter
- def apply(args: Seq[String]): List[JpResult]
- def tryFile(path: String): Option[Array[Byte]]
- def tryClass(path: String): Array[Byte]
-}
-
-object NoJavap extends Javap {
- def loader: ScalaClassLoader = getClass.getClassLoader
- def printWriter: PrintWriter = new PrintWriter(System.err, true)
- def apply(args: Seq[String]): List[JpResult] = Nil
- def tryFile(path: String): Option[Array[Byte]] = None
- def tryClass(path: String): Array[Byte] = Array()
-}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 5526660509..8e5b1e0a5c 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -7,14 +7,17 @@ package scala
package tools
package util
+import java.net.URL
import scala.tools.reflect.WrappedProperties.AccessControl
-import scala.tools.nsc.{ Settings }
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.tools.nsc.Settings
+import scala.tools.nsc.util.{ ClassFileLookup, ClassPath, JavaClassPath }
import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import scala.reflect.runtime.ReflectionUtils
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
+import scala.tools.nsc.classpath.{ AggregateFlatClassPath, ClassPathFactory, FlatClassPath, FlatClassPathFactory }
+import scala.tools.nsc.settings.ClassPathRepresentationType
// Loosely based on the draft specification at:
// https://wiki.scala-lang.org/display/SIW/Classpath
@@ -48,9 +51,8 @@ object PathResolver {
/** Values found solely by inspecting environment or property variables.
*/
object Environment {
- private def searchForBootClasspath = (
+ private def searchForBootClasspath =
systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
- )
/** Environment variables which java pays attention to so it
* seems we do as well.
@@ -104,7 +106,7 @@ object PathResolver {
else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
else ""
- // XXX It must be time for someone to figure out what all these things
+ // TODO It must be time for someone to figure out what all these things
// are intended to do. This is disabled here because it was causing all
// the scala jars to end up on the classpath twice: one on the boot
// classpath as set up by the runner (or regular classpath under -nobootcp)
@@ -170,39 +172,48 @@ object PathResolver {
!ReflectionUtils.scalacShouldntLoadClassfile(name)
}
- // called from scalap
+ @deprecated("This method is no longer used be scalap and will be deleted", "2.11.5")
def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
val s = new Settings()
s.classpath.value = path
- new PathResolver(s, context) result
+ new PathResolver(s, context).result
}
/** With no arguments, show the interesting values in Environment and Defaults.
* If there are arguments, show those in Calculated as if those options had been
* given to a scala runner.
*/
- def main(args: Array[String]): Unit = {
+ def main(args: Array[String]): Unit =
if (args.isEmpty) {
println(Environment)
println(Defaults)
- }
- else {
+ } else {
val settings = new Settings()
val rest = settings.processArguments(args.toList, processAll = false)._2
- val pr = new PathResolver(settings)
- println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
+ val pr = PathResolverFactory.create(settings)
+ println("COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show()
+
+ pr.result match {
+ case cp: JavaClassPath =>
+ cp.show()
+ case cp: AggregateFlatClassPath =>
+ println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}")
+ }
}
- }
}
-class PathResolver(settings: Settings, context: JavaContext) {
- import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+trait PathResolverResult {
+ def result: ClassFileLookup[AbstractFile]
- def this(settings: Settings) = this(settings,
- if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
- else DefaultJavaContext)
+ def resultAsURLs: Seq[URL] = result.asURLs
+}
+
+abstract class PathResolverBase[BaseClassPathType <: ClassFileLookup[AbstractFile], ResultClassPathType <: BaseClassPathType]
+(settings: Settings, classPathFactory: ClassPathFactory[BaseClassPathType])
+ extends PathResolverResult {
+
+ import PathResolver.{ AsLines, Defaults, ppcp }
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
@@ -232,6 +243,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+
/** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
* [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
* [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
@@ -250,16 +262,14 @@ class PathResolver(settings: Settings, context: JavaContext) {
* - Otherwise, if CLASSPATH is set, it is that
* - If neither of those, then "." is used.
*/
- def userClassPath = (
- if (!settings.classpath.isDefault)
- settings.classpath.value
+ def userClassPath =
+ if (!settings.classpath.isDefault) settings.classpath.value
else sys.env.getOrElse("CLASSPATH", ".")
- )
- import context._
+ import classPathFactory._
// Assemble the elements!
- def basis = List[Traversable[ClassPath[AbstractFile]]](
+ def basis = List[Traversable[BaseClassPathType]](
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -278,7 +288,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
| javaBootClassPath = ${ppcp(javaBootClassPath)}
| javaExtDirs = ${ppcp(javaExtDirs)}
| javaUserClassPath = ${ppcp(javaUserClassPath)}
- | useJavaClassPath = $useJavaClassPath
+ | useJavaClassPath = $useJavaClassPath
| scalaBootClassPath = ${ppcp(scalaBootClassPath)}
| scalaExtDirs = ${ppcp(scalaExtDirs)}
| userClassPath = ${ppcp(userClassPath)}
@@ -288,8 +298,10 @@ class PathResolver(settings: Settings, context: JavaContext) {
def containers = Calculated.containers
- lazy val result = {
- val cp = new JavaClassPath(containers.toIndexedSeq, context)
+ import PathResolver.MkLines
+
+ def result: ResultClassPathType = {
+ val cp = computeResult()
if (settings.Ylogcp) {
Console print f"Classpath built from ${settings.toConciseString} %n"
Console print s"Defaults: ${PathResolver.Defaults}"
@@ -301,5 +313,37 @@ class PathResolver(settings: Settings, context: JavaContext) {
cp
}
- def asURLs = result.asURLs
+ @deprecated("Use resultAsURLs instead of this one", "2.11.5")
+ def asURLs: List[URL] = resultAsURLs.toList
+
+ protected def computeResult(): ResultClassPathType
+}
+
+class PathResolver(settings: Settings, context: JavaContext)
+ extends PathResolverBase[ClassPath[AbstractFile], JavaClassPath](settings, context) {
+
+ def this(settings: Settings) =
+ this(settings,
+ if (settings.YnoLoadImplClass) PathResolver.NoImplClassJavaContext
+ else DefaultJavaContext)
+
+ override protected def computeResult(): JavaClassPath =
+ new JavaClassPath(containers.toIndexedSeq, context)
+}
+
+class FlatClassPathResolver(settings: Settings, flatClassPathFactory: ClassPathFactory[FlatClassPath])
+ extends PathResolverBase[FlatClassPath, AggregateFlatClassPath](settings, flatClassPathFactory) {
+
+ def this(settings: Settings) = this(settings, new FlatClassPathFactory(settings))
+
+ override protected def computeResult(): AggregateFlatClassPath = AggregateFlatClassPath(containers.toIndexedSeq)
+}
+
+object PathResolverFactory {
+
+ def create(settings: Settings): PathResolverResult =
+ settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat => new FlatClassPathResolver(settings)
+ case ClassPathRepresentationType.Recursive => new PathResolver(settings)
+ }
}
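A short usage sketch of the factory introduced above (the argument handling is illustrative only; the factory picks the flat or recursive implementation based on -YclasspathImpl):

  import java.net.URL
  import scala.tools.nsc.Settings
  import scala.tools.util.PathResolverFactory

  object ResolveSketch {
    def classpathUrls(args: List[String]): Seq[URL] = {
      val settings = new Settings()
      settings.processArguments(args, processAll = false)
      // resultAsURLs works for both representations via ClassFileLookup.asURLs.
      PathResolverFactory.create(settings).resultAsURLs
    }
  }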
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1d39a59cf4..7858bf0658 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -28,12 +28,12 @@ trait CompileOutputCommon {
* @author Martin Odersky
* @version 1.0
*/
-abstract class SocketServer extends CompileOutputCommon {
+abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon {
def shutdown: Boolean
def session(): Unit
def timeout(): Unit = () // called after a timeout is detected for subclasses to cleanup
// a hook for subclasses
- protected def createServerSocket(): ServerSocket = new ServerSocket(0)
+ protected def createServerSocket(): ServerSocket = new ServerSocket(fixPort)
var in: BufferedReader = _
var out: PrintWriter = _
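A sketch of a subclass taking advantage of the new fixPort parameter (the EchoServer name and the verbose override are assumptions for illustration, not part of the patch):

  import scala.tools.util.SocketServer

  // Pins the server socket to a known port instead of an ephemeral one;
  // passing 0 preserves the previous behaviour.
  class EchoServer(port: Int) extends SocketServer(port) {
    override def verbose = false            // CompileOutputCommon's logging flag (assumed)
    def shutdown = false                    // keep serving
    def session(): Unit = out.println(in.readLine())
  }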
diff --git a/src/intellij-14/README b/src/intellij-14/README
new file mode 100644
index 0000000000..310a766a20
--- /dev/null
+++ b/src/intellij-14/README
@@ -0,0 +1,12 @@
+Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
+
+Compilation within IDEA is performed in "-Dlocker.skip=1" mode: the sources are built
+directly using the STARR compiler.
+
+The following steps are required to use IntelliJ IDEA on Scala trunk:
+ - Run "ant init". This will download some JARs to ./build/deps, which are
+ included in IntelliJ's classpath.
+ - Run src/intellij-14/setup.sh
+ - Open ./src/intellij-14/scala.ipr in IntelliJ
+ - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the
+ Java 1.6 SDK
diff --git a/src/intellij-14/actors.iml.SAMPLE b/src/intellij-14/actors.iml.SAMPLE
new file mode 100644
index 0000000000..3da7a5f777
--- /dev/null
+++ b/src/intellij-14/actors.iml.SAMPLE
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../actors">
+ <sourceFolder url="file://$MODULE_DIR$/../actors" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/asm.iml.SAMPLE b/src/intellij-14/asm.iml.SAMPLE
new file mode 100644
index 0000000000..9b2fd58ce7
--- /dev/null
+++ b/src/intellij-14/asm.iml.SAMPLE
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../asm">
+ <sourceFolder url="file://$MODULE_DIR$/../asm/src" isTestSource="false" />
+ <sourceFolder url="file://$MODULE_DIR$/../asm" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/compiler.iml.SAMPLE b/src/intellij-14/compiler.iml.SAMPLE
new file mode 100644
index 0000000000..858ca2f2c2
--- /dev/null
+++ b/src/intellij-14/compiler.iml.SAMPLE
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../compiler">
+ <sourceFolder url="file://$MODULE_DIR$/../compiler" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="library" name="ant" level="project" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/diff.sh b/src/intellij-14/diff.sh
new file mode 100755
index 0000000000..54f9248608
--- /dev/null
+++ b/src/intellij-14/diff.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+#
+# Diffs the SAMPLE files against the working project config.
+#
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+for f in "$SCRIPT_DIR"/*.{iml,ipr}; do
+ echo $f; diff -u $f.SAMPLE $f;
+done
diff --git a/src/intellij-14/forkjoin.iml.SAMPLE b/src/intellij-14/forkjoin.iml.SAMPLE
new file mode 100644
index 0000000000..42507b2911
--- /dev/null
+++ b/src/intellij-14/forkjoin.iml.SAMPLE
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../forkjoin">
+ <sourceFolder url="file://$MODULE_DIR$/../forkjoin" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/interactive.iml.SAMPLE b/src/intellij-14/interactive.iml.SAMPLE
new file mode 100644
index 0000000000..db12a7dc9b
--- /dev/null
+++ b/src/intellij-14/interactive.iml.SAMPLE
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../interactive">
+ <sourceFolder url="file://$MODULE_DIR$/../interactive" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="scaladoc" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/library.iml.SAMPLE b/src/intellij-14/library.iml.SAMPLE
new file mode 100644
index 0000000000..08cccba4b9
--- /dev/null
+++ b/src/intellij-14/library.iml.SAMPLE
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../library">
+ <sourceFolder url="file://$MODULE_DIR$/../library" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/manual.iml.SAMPLE b/src/intellij-14/manual.iml.SAMPLE
new file mode 100644
index 0000000000..2e67076e28
--- /dev/null
+++ b/src/intellij-14/manual.iml.SAMPLE
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../manual">
+ <sourceFolder url="file://$MODULE_DIR$/../manual" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="library" name="ant" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/partest-extras.iml.SAMPLE b/src/intellij-14/partest-extras.iml.SAMPLE
new file mode 100644
index 0000000000..b3537a949a
--- /dev/null
+++ b/src/intellij-14/partest-extras.iml.SAMPLE
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../partest-extras">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-extras" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="library" name="partest" level="project" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/partest-javaagent.iml.SAMPLE b/src/intellij-14/partest-javaagent.iml.SAMPLE
new file mode 100644
index 0000000000..3a387aab0f
--- /dev/null
+++ b/src/intellij-14/partest-javaagent.iml.SAMPLE
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../partest-javaagent">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-javaagent" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/reflect.iml.SAMPLE b/src/intellij-14/reflect.iml.SAMPLE
new file mode 100644
index 0000000000..87da13777b
--- /dev/null
+++ b/src/intellij-14/reflect.iml.SAMPLE
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../reflect">
+ <sourceFolder url="file://$MODULE_DIR$/../reflect" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/repl.iml.SAMPLE b/src/intellij-14/repl.iml.SAMPLE
new file mode 100644
index 0000000000..5a7476b1ef
--- /dev/null
+++ b/src/intellij-14/repl.iml.SAMPLE
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../repl">
+ <sourceFolder url="file://$MODULE_DIR$/../repl" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="library" name="repl-deps" level="project" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/scala.iml.SAMPLE b/src/intellij-14/scala.iml.SAMPLE
new file mode 100644
index 0000000000..9e8718dd45
--- /dev/null
+++ b/src/intellij-14/scala.iml.SAMPLE
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../..">
+ <excludeFolder url="file://$MODULE_DIR$/../../build" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/scala.ipr.SAMPLE b/src/intellij-14/scala.ipr.SAMPLE
new file mode 100644
index 0000000000..7c2022f3a9
--- /dev/null
+++ b/src/intellij-14/scala.ipr.SAMPLE
@@ -0,0 +1,261 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+ <component name="CompilerConfiguration">
+ <option name="DEFAULT_COMPILER" value="Javac" />
+ <resourceExtensions />
+ <wildcardResourcePatterns>
+ <entry name="!?*.java" />
+ <entry name="!?*.form" />
+ <entry name="!?*.class" />
+ <entry name="!?*.groovy" />
+ <entry name="!?*.scala" />
+ <entry name="!?*.flex" />
+ <entry name="!?*.kt" />
+ <entry name="!?*.clj" />
+ </wildcardResourcePatterns>
+ <annotationProcessing>
+ <profile default="true" name="Default" enabled="false">
+ <processorPath useClasspath="true" />
+ </profile>
+ </annotationProcessing>
+ </component>
+ <component name="CopyrightManager" default="" />
+ <component name="DaemonCodeAnalyzer">
+ <disable_hints />
+ </component>
+ <component name="DependencyValidationManager">
+ <option name="SKIP_IMPORT_STATEMENTS" value="false" />
+ </component>
+ <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
+ <component name="EntryPointsManager">
+ <entry_points version="2.0" />
+ </component>
+ <component name="ProjectLevelVcsManager" settingsEditedManually="false">
+ <OptionsSetting value="true" id="Add" />
+ <OptionsSetting value="true" id="Remove" />
+ <OptionsSetting value="true" id="Checkout" />
+ <OptionsSetting value="true" id="Update" />
+ <OptionsSetting value="true" id="Status" />
+ <OptionsSetting value="true" id="Edit" />
+ <ConfirmationsSetting value="0" id="Add" />
+ <ConfirmationsSetting value="0" id="Remove" />
+ </component>
+ <component name="ProjectModuleManager">
+ <modules>
+ <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
+ <module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
+ <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
+ <module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
+ <module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
+ <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
+ <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
+ <module fileurl="file://$PROJECT_DIR$/partest-extras.iml" filepath="$PROJECT_DIR$/partest-extras.iml" />
+ <module fileurl="file://$PROJECT_DIR$/partest-javaagent.iml" filepath="$PROJECT_DIR$/partest-javaagent.iml" />
+ <module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
+ <module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test-junit.iml" filepath="$PROJECT_DIR$/test-junit.iml" />
+ </modules>
+ </component>
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
+ <output url="file://$PROJECT_DIR$/../../out" />
+ </component>
+ <component name="PropertiesComponent">
+ <property name="GoToClass.includeLibraries" value="false" />
+ <property name="GoToClass.toSaveIncludeLibraries" value="false" />
+ <property name="GoToFile.includeJavaFiles" value="false" />
+ <property name="MemberChooser.sorted" value="false" />
+ <property name="MemberChooser.showClasses" value="true" />
+ <property name="MemberChooser.copyJavadoc" value="false" />
+ <property name="options.lastSelected" value="configurable.group.appearance" />
+ <property name="options.splitter.main.proportions" value="0.3" />
+ <property name="options.splitter.details.proportions" value="0.2" />
+ <property name="options.searchVisible" value="true" />
+ </component>
+ <component name="RunManager">
+ <configuration default="true" type="#org.jetbrains.idea.devkit.run.PluginConfigurationType" factoryName="Plugin">
+ <module name="" />
+ <option name="VM_PARAMETERS" value="-Xmx512m -Xms256m -XX:MaxPermSize=250m -ea" />
+ <option name="PROGRAM_PARAMETERS" />
+ <method />
+ </configuration>
+ <configuration default="true" type="Remote" factoryName="Remote">
+ <option name="USE_SOCKET_TRANSPORT" value="true" />
+ <option name="SERVER_MODE" value="false" />
+ <option name="SHMEM_ADDRESS" value="javadebug" />
+ <option name="HOST" value="localhost" />
+ <option name="PORT" value="5005" />
+ <method />
+ </configuration>
+ <configuration default="true" type="Applet" factoryName="Applet">
+ <module name="" />
+ <option name="MAIN_CLASS_NAME" />
+ <option name="HTML_FILE_NAME" />
+ <option name="HTML_USED" value="false" />
+ <option name="WIDTH" value="400" />
+ <option name="HEIGHT" value="300" />
+ <option name="POLICY_FILE" value="$CARDEA_HOME$/bin/appletviewer.policy" />
+ <option name="VM_PARAMETERS" />
+ <option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
+ <option name="ALTERNATIVE_JRE_PATH" />
+ <method />
+ </configuration>
+ <configuration default="true" type="TestNG" factoryName="TestNG">
+ <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
+ <module name="" />
+ <option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
+ <option name="ALTERNATIVE_JRE_PATH" />
+ <option name="SUITE_NAME" />
+ <option name="PACKAGE_NAME" />
+ <option name="MAIN_CLASS_NAME" />
+ <option name="METHOD_NAME" />
+ <option name="GROUP_NAME" />
+ <option name="TEST_OBJECT" value="CLASS" />
+ <option name="VM_PARAMETERS" value="-ea" />
+ <option name="PARAMETERS" />
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
+ <option name="OUTPUT_DIRECTORY" />
+ <option name="ANNOTATION_TYPE" />
+ <option name="ENV_VARIABLES" />
+ <option name="PASS_PARENT_ENVS" value="true" />
+ <option name="TEST_SEARCH_SCOPE">
+ <value defaultName="moduleWithDependencies" />
+ </option>
+ <option name="USE_DEFAULT_REPORTERS" value="false" />
+ <option name="PROPERTIES_FILE" />
+ <envs />
+ <properties />
+ <listeners />
+ <method />
+ </configuration>
+ <configuration default="true" type="Application" factoryName="Application">
+ <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
+ <option name="MAIN_CLASS_NAME" />
+ <option name="VM_PARAMETERS" />
+ <option name="PROGRAM_PARAMETERS" />
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
+ <option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
+ <option name="ALTERNATIVE_JRE_PATH" />
+ <option name="ENABLE_SWING_INSPECTOR" value="false" />
+ <option name="ENV_VARIABLES" />
+ <option name="PASS_PARENT_ENVS" value="true" />
+ <module name="" />
+ <envs />
+ <method />
+ </configuration>
+ <configuration default="true" type="JUnit" factoryName="JUnit">
+ <extension name="coverage" enabled="false" merge="false" sample_coverage="true" runner="idea" />
+ <module name="" />
+ <option name="ALTERNATIVE_JRE_PATH_ENABLED" value="false" />
+ <option name="ALTERNATIVE_JRE_PATH" />
+ <option name="PACKAGE_NAME" />
+ <option name="MAIN_CLASS_NAME" />
+ <option name="METHOD_NAME" />
+ <option name="TEST_OBJECT" value="class" />
+ <option name="VM_PARAMETERS" value="-ea" />
+ <option name="PARAMETERS" />
+ <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
+ <option name="ENV_VARIABLES" />
+ <option name="PASS_PARENT_ENVS" value="true" />
+ <option name="TEST_SEARCH_SCOPE">
+ <value defaultName="moduleWithDependencies" />
+ </option>
+ <envs />
+ <patterns />
+ <method />
+ </configuration>
+ <list size="0" />
+ <configuration name="&lt;template&gt;" type="WebApp" default="true" selected="false">
+ <Host>localhost</Host>
+ <Port>5050</Port>
+ </configuration>
+ </component>
+ <component name="ScalaCompilerConfiguration">
+ <parameters>
+ <parameter value="-sourcepath" />
+ <parameter value="$PROJECT_DIR$/../library" />
+ </parameters>
+ </component>
+ <component name="VcsContentAnnotationSettings">
+ <option name="myLimit" value="2678400000" />
+ </component>
+ <component name="VcsDirectoryMappings">
+ <mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
+ </component>
+ <component name="VcsManagerConfiguration">
+ <option name="myTodoPanelSettings">
+ <TodoPanelSettings />
+ </option>
+ </component>
+ <component name="libraryTable">
+ <library name="ant">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="junit">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" />
+ </library>
+ <library name="partest">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/partest" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/partest" recursive="false" />
+ </library>
+ <library name="repl-deps">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/repl" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/repl" recursive="false" />
+ </library>
+ <library name="scaladoc-deps">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/scaladoc" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/scaladoc" recursive="false" />
+ </library>
+ <library name="starr" type="Scala">
+ <properties>
+ <compiler-classpath>
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-2.11.2.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-2.11.2.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-2.11.2.jar" />
+ </compiler-classpath>
+ </properties>
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../build/deps/starr/scala-library-2.11.2.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-2.11.2.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="starr-no-deps" type="Scala">
+ <properties>
+ <compiler-classpath>
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-compiler-2.11.2.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-library-2.11.2.jar" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/starr/scala-reflect-2.11.2.jar" />
+ </compiler-classpath>
+ </properties>
+ <CLASSES />
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ </component>
+</project> \ No newline at end of file
diff --git a/src/intellij-14/scaladoc.iml.SAMPLE b/src/intellij-14/scaladoc.iml.SAMPLE
new file mode 100644
index 0000000000..1e7621ffed
--- /dev/null
+++ b/src/intellij-14/scaladoc.iml.SAMPLE
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../scaladoc">
+ <sourceFolder url="file://$MODULE_DIR$/../scaladoc" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="partest" level="project" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/scalap.iml.SAMPLE b/src/intellij-14/scalap.iml.SAMPLE
new file mode 100644
index 0000000000..e09b8d11b6
--- /dev/null
+++ b/src/intellij-14/scalap.iml.SAMPLE
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../scalap">
+ <sourceFolder url="file://$MODULE_DIR$/../scalap" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/setup.sh b/src/intellij-14/setup.sh
new file mode 100755
index 0000000000..ec303778ed
--- /dev/null
+++ b/src/intellij-14/setup.sh
@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+#
+# Generates IntelliJ IDEA project files based on the checked-in samples.
+#
+
+set -e
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel."
+read
+
+for f in "$SCRIPT_DIR"/*.SAMPLE; do
+ g=${f%.SAMPLE}
+ cp $f $g
+done
diff --git a/src/intellij-14/test-junit.iml.SAMPLE b/src/intellij-14/test-junit.iml.SAMPLE
new file mode 100644
index 0000000000..786f02e2e2
--- /dev/null
+++ b/src/intellij-14/test-junit.iml.SAMPLE
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test/junit">
+ <sourceFolder url="file://$MODULE_DIR$/../../test/junit" isTestSource="true" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="library" name="junit" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="partest-extras" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/test.iml.SAMPLE b/src/intellij-14/test.iml.SAMPLE
new file mode 100644
index 0000000000..a384d72266
--- /dev/null
+++ b/src/intellij-14/test.iml.SAMPLE
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test">
+ <excludeFolder url="file://$MODULE_DIR$/../../test/junit" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="partest-extras" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="library" name="partest" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="starr-no-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij-14/update.sh b/src/intellij-14/update.sh
new file mode 100755
index 0000000000..eb6fea782f
--- /dev/null
+++ b/src/intellij-14/update.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+#
+# Updates the .SAMPLE files with the current project files.
+#
+
+set -e
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+
+echo "About to create overwrite the .ipr.SAMPLE and .iml.SAMPLE files with the current project files. Press enter to continue or CTRL-C to cancel."
+read
+
+for f in "$SCRIPT_DIR"/*.{iml,ipr}; do
+ cp $f $f.SAMPLE
+done
+
+for f in "$SCRIPT_DIR"/*.SAMPLE; do
+ g=${f%.SAMPLE}
+ if [[ ! -f $g ]]; then
+ echo "Stale sample file, deleting $f"
+ rm $f
+ fi
+done
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index c0614c946c..0cd3fdae6a 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -218,6 +218,7 @@
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
<module fileurl="file://$PROJECT_DIR$/test-junit.iml" filepath="$PROJECT_DIR$/test-junit.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test-osgi.iml" filepath="$PROJECT_DIR$/test-osgi.iml" />
</modules>
</component>
<component name="ProjectResources">
diff --git a/src/intellij/test-osgi.iml.SAMPLE b/src/intellij/test-osgi.iml.SAMPLE
new file mode 100644
index 0000000000..a589aaa0a9
--- /dev/null
+++ b/src/intellij/test-osgi.iml.SAMPLE
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test/osgi">
+ <sourceFolder url="file://$MODULE_DIR$/../../test/osgi/src" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="repl" />
+ <orderEntry type="module" module-name="partest-extras" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="library" name="junit" level="project" />
+ <orderEntry type="library" name="scaladoc-deps" level="project" />
+ <orderEntry type="library" name="scala-sdk" level="project" />
+ <orderEntry type="library" scope="PROVIDED" name="pax.exam-deps" level="project" />
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij/test/files/neg/virtpatmat_exhaust_big.check b/src/intellij/test/files/neg/virtpatmat_exhaust_big.check
new file mode 100644
index 0000000000..fddc85a362
--- /dev/null
+++ b/src/intellij/test/files/neg/virtpatmat_exhaust_big.check
@@ -0,0 +1,7 @@
+virtpatmat_exhaust_big.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: Z11()
+ def foo(z: Z) = z match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/src/intellij/test/files/neg/virtpatmat_exhaust_big.flags b/src/intellij/test/files/neg/virtpatmat_exhaust_big.flags
new file mode 100644
index 0000000000..b5a8748652
--- /dev/null
+++ b/src/intellij/test/files/neg/virtpatmat_exhaust_big.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
diff --git a/src/intellij/test/files/neg/virtpatmat_exhaust_big.scala b/src/intellij/test/files/neg/virtpatmat_exhaust_big.scala
new file mode 100644
index 0000000000..dd639eb56e
--- /dev/null
+++ b/src/intellij/test/files/neg/virtpatmat_exhaust_big.scala
@@ -0,0 +1,32 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+}
diff --git a/src/intellij/test/files/pos/virtpatmat_exhaust_big.scala b/src/intellij/test/files/pos/virtpatmat_exhaust_big.scala
new file mode 100644
index 0000000000..41aef3226e
--- /dev/null
+++ b/src/intellij/test/files/pos/virtpatmat_exhaust_big.scala
@@ -0,0 +1,34 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+// drop any case and it will report an error
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z11() | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+}
+-
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index fdedaa600c..a192dd3738 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -66,7 +66,9 @@ trait InteractiveAnalyzer extends Analyzer {
// that case the definitions that were already attributed as
// well as any default parameters of such methods need to be
// re-entered in the current scope.
- override def enterExistingSym(sym: Symbol): Context = {
+ //
+ // Tested in test/files/presentation/t8941b
+ override def enterExistingSym(sym: Symbol, tree: Tree): Context = {
if (sym != null && sym.owner.isTerm) {
enterIfNotThere(sym)
if (sym.isLazy)
@@ -74,8 +76,17 @@ trait InteractiveAnalyzer extends Analyzer {
for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
defAtt.defaultGetters foreach enterIfNotThere
+ } else if (sym != null && sym.isClass && sym.isImplicit) {
+ val owningInfo = sym.owner.info
+ val existingDerivedSym = owningInfo.decl(sym.name.toTermName).filter(sym => sym.isSynthetic && sym.isMethod)
+ existingDerivedSym.alternatives foreach (owningInfo.decls.unlink)
+ val defTree = tree match {
+ case dd: DocDef => dd.definition // See SI-9011, Scala IDE's presentation compiler incorporates ScalaDocGlobal with InteractiveGlobal, so we have to unwrap DocDefs.
+ case _ => tree
+ }
+ enterImplicitWrapper(defTree.asInstanceOf[ClassDef])
}
- super.enterExistingSym(sym)
+ super.enterExistingSym(sym, tree)
}
override def enterIfNotThere(sym: Symbol) {
val scope = context.scope
@@ -123,8 +134,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
else NullLogger
import log.logreplay
- debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath)
- debugLog("classpath: "+classPath)
+ debugLog(s"logger: ${log.getClass} writing to ${(new java.io.File(logName)).getAbsolutePath}")
+ debugLog(s"classpath: $classPath")
private var curTime = System.nanoTime
private def timeStep = {
@@ -516,7 +527,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
/** The current presentation compiler runner */
@volatile private[interactive] var compileRunner: Thread = newRunnerThread()
- /** Check that the currenyly executing thread is the presentation compiler thread.
+ /** Check that the currently executing thread is the presentation compiler thread.
*
* Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
*/
@@ -733,7 +744,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
- private def reloadSource(source: SourceFile) {
+ private[interactive] def reloadSource(source: SourceFile) {
val unit = new RichCompilationUnit(source)
unitOfFile(source.file) = unit
toBeRemoved -= source.file
@@ -782,7 +793,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
/** A fully attributed tree located at position `pos` */
- private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
+ private[interactive] def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match {
case None =>
reloadSources(List(pos.source))
try typedTreeAt(pos)
@@ -1182,7 +1193,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
- /** Parses and enters given source file, stroring parse tree in response */
+ /** Parses and enters given source file, storing parse tree in response */
private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) {
respond(response) {
onUnitOf(source) { unit =>
diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
index 83f3fab925..ddc0c8a068 100644
--- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -6,7 +6,7 @@ import scala.language.implicitConversions
import scala.reflect.ClassTag
/** An abstract class for writing and reading Scala objects to and
- * from a legible representation. The presesentation follows the following grammar:
+ * from a legible representation. The representation follows the following grammar:
* {{{
* Pickled = `true` | `false` | `null` | NumericLit | StringLit |
* Labelled | Pickled `,` Pickled
@@ -85,7 +85,7 @@ abstract class Pickler[T] {
object Pickler {
/** A base class representing unpickler result. It has two subclasses:
- * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures,
+ * `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
* where a value of the given type `T` could not be unpickled from input.
* @tparam T the type of unpickled values in case of success.
*/
@@ -154,7 +154,7 @@ object Pickler {
*/
def pkl[T: Pickler] = implicitly[Pickler[T]]
- /** A class represenenting `~`-pairs */
+ /** A class representing `~`-pairs */
case class ~[+S, +T](fst: S, snd: T)
/** A wrapper class to be able to use `~` s an infix method */
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index d4b9c17eab..c4aa511cd7 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -121,7 +121,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
* @throws NoSuchElementException if no `Value` with a matching
* name is in this `Enumeration`
*/
- final def withName(s: String): Value = values.find(_.toString == s).get
+ final def withName(s: String): Value = values.find(_.toString == s).getOrElse(
+ throw new NoSuchElementException(s"No value found for '$s'"))
/** Creates a fresh value, part of this enumeration. */
protected final def Value: Value = Value(nextId)
@@ -239,6 +240,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
*
* @param nnIds The set of ids of values (adjusted so that the lowest value does
* not fall below zero), organized as a `BitSet`.
+ * @define Coll `collection.immutable.SortedSet`
*/
class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
extends AbstractSet[Value]
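
The `withName` change above swaps a bare `Option.get` for a descriptive failure. A minimal sketch of the resulting behaviour, using a made-up enumeration that is not part of the patch:

    object Color extends Enumeration {
      val Red, Green, Blue = Value
    }

    Color.withName("Red")    // Color.Red
    Color.withName("Mauve")  // throws NoSuchElementException("No value found for 'Mauve'")
                             // instead of the uninformative None.get failure
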
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 66900e7258..f134f5ce3d 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -94,6 +94,7 @@ object Option {
* @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
* representation type `Repr` and the new element type `B`.
*/
+@SerialVersionUID(-114498752079829388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
sealed abstract class Option[+A] extends Product with Serializable {
self =>
@@ -107,7 +108,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
/** Returns the option's value.
* @note The option must be nonEmpty.
- * @throws Predef.NoSuchElementException if the option is empty.
+ * @throws java.util.NoSuchElementException if the option is empty.
*/
def get: A
@@ -124,8 +125,8 @@ sealed abstract class Option[+A] extends Product with Serializable {
* Although the use of null is discouraged, code written to use
* $option must often interface with code that expects and returns nulls.
* @example {{{
- * val initalText: Option[String] = getInitialText
- * val textField = new JComponent(initalText.orNull,20)
+ * val initialText: Option[String] = getInitialText
+ * val textField = new JComponent(initialText.orNull,20)
* }}}
*/
@inline final def orNull[A1 >: A](implicit ev: Null <:< A1): A1 = this getOrElse ev(null)
@@ -328,6 +329,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @author Martin Odersky
* @version 1.0, 16/07/2003
*/
+@SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
final case class Some[+A](x: A) extends Option[A] {
def isEmpty = false
def get = x
@@ -339,6 +341,7 @@ final case class Some[+A](x: A) extends Option[A] {
* @author Martin Odersky
* @version 1.0, 16/07/2003
*/
+@SerialVersionUID(5066590221178148012L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
case object None extends Option[Nothing] {
def isEmpty = true
def get = throw new NoSuchElementException("None.get")
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 59c89df3fa..060ecbfead 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -220,7 +220,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
}
/** `???` can be used for marking methods that remain to be implemented.
- * @throws A `NotImplementedError`
+ * @throws NotImplementedError
*/
def ??? : Nothing = throw new NotImplementedError
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 0798587772..9cd38ed148 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -22,7 +22,7 @@ trait Product extends Any with Equals {
* product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 < n < k`.
*
* @param n the index of the element to return
- * @throws `IndexOutOfBoundsException`
+ * @throws IndexOutOfBoundsException
* @return the element `n` elements after the first element
*/
def productElement(n: Int): Any
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 2632994a34..e60fa2f290 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -58,7 +58,7 @@ case class StringContext(parts: String*) {
/** Checks that the length of the given argument `args` is one less than the number
* of `parts` supplied to the enclosing `StringContext`.
* @param `args` The arguments to be checked.
- * @throws An `IllegalArgumentException` if this is not the case.
+ * @throws IllegalArgumentException if this is not the case.
*/
def checkLengths(args: Seq[Any]): Unit =
if (parts.length != args.length + 1)
@@ -85,10 +85,11 @@ case class StringContext(parts: String*) {
* will print the string `1 + 1 = 2`.
*
* @param `args` The arguments to be inserted into the resulting string.
- * @throws An `IllegalArgumentException`
+ * @throws IllegalArgumentException
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * @throws StringContext.InvalidEscapeException
+ * if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*/
def s(args: Any*): String = standardInterpolator(treatEscapes, args)
@@ -109,7 +110,7 @@ case class StringContext(parts: String*) {
* }}}
*
* @param `args` The arguments to be inserted into the resulting string.
- * @throws An `IllegalArgumentException`
+ * @throws IllegalArgumentException
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
*/
@@ -144,10 +145,11 @@ case class StringContext(parts: String*) {
* }}}
*
* @param `args` The arguments to be inserted into the resulting string.
- * @throws An `IllegalArgumentException`
+ * @throws IllegalArgumentException
* if the number of `parts` in the enclosing `StringContext` does not exceed
* the number of arguments `arg` by exactly 1.
- * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * @throws StringContext.InvalidEscapeException
+ * if a `parts` string contains a backslash (`\`) character
* that does not start a valid escape sequence.
*
* Note: The `f` method works by assembling a format string from all the `parts` strings and using
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index c3bad60072..cf1de0c8e6 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -47,7 +47,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* @param idx The index to select.
* @return the element of this $coll at index `idx`, where `0` indicates the first element.
- * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`.
*/
def apply(idx: Int): A
@@ -397,7 +397,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @inheritdoc
*
* Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
* `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
*
* $willNotTerminateInf
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index ca098e57b9..8b9d3e7a17 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -63,7 +63,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
/** Selects the first element of this $coll.
* $orderDependent
* @return the first element of this $coll.
- * @throws `NoSuchElementException` if the $coll is empty.
+ * @throws NoSuchElementException if the $coll is empty.
*/
def head: A
@@ -83,7 +83,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* $orderDependent
* @return a $coll consisting of all elements of this $coll
* except the first one.
- * @throws `UnsupportedOperationException` if the $coll is empty.
+ * @throws UnsupportedOperationException if the $coll is empty.
*/
def tail: Repr
@@ -105,7 +105,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* $orderDependent
* @return a $coll consisting of all elements of this $coll
* except the last one.
- * @throws `UnsupportedOperationException` if the $coll is empty.
+ * @throws UnsupportedOperationException if the $coll is empty.
*/
def init: Repr
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 0cd91409cf..8c7c754af8 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -268,7 +268,7 @@ trait GenTraversableOnce[+A] extends Any {
* op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
+ * @throws UnsupportedOperationException if this $coll is empty.
*/
def reduceRight[B >: A](op: (A, B) => B): B
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 42cb37aa24..a7e06b4d1a 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -141,10 +141,10 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
def drop(n: Int): Repr = slice(n, length)
override /*IterableLike*/
- def takeRight(n: Int): Repr = slice(length - n, length)
+ def takeRight(n: Int): Repr = slice(length - math.max(n, 0), length)
override /*IterableLike*/
- def dropRight(n: Int): Repr = slice(0, length - n)
+ def dropRight(n: Int): Repr = slice(0, length - math.max(n, 0))
override /*TraversableLike*/
def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
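
The `math.max(n, 0)` clamp above makes negative arguments to `takeRight`/`dropRight` behave like zero. A small sketch for a collection that picks up `IndexedSeqOptimized` (a wrapped `Array` does, via `ArrayLike`):

    val xs = Array(1, 2, 3)

    xs.takeRight(-1).toList  // List()        -- take nothing when n is negative
    xs.dropRight(-1).toList  // List(1, 2, 3) -- drop nothing, keep the whole sequence
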
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 668190f700..c254ed7480 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -69,6 +69,10 @@ trait IterableViewLike[+A,
trait Appended[B >: A] extends super.Appended[B] with Transformed[B] {
def iterator = self.iterator ++ rest
}
+
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ def iterator = fst.toIterator ++ self
+ }
trait Filtered extends super.Filtered with Transformed[A] {
def iterator = self.iterator filter pred
@@ -110,6 +114,7 @@ trait IterableViewLike[+A,
} with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -150,10 +155,10 @@ trait IterableViewLike[+A,
sliding(size, 1) // we could inherit this, but that implies knowledge of the way the super class is implemented.
override def dropRight(n: Int): This =
- take(thisSeq.length - n)
+ take(thisSeq.length - math.max(n, 0))
override def takeRight(n: Int): This =
- drop(thisSeq.length - n)
+ drop(thisSeq.length - math.max(n, 0))
override def stringPrefix = "IterableView"
}
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index adb97e27e3..34a025e5b8 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -364,7 +364,14 @@ trait Iterator[+A] extends TraversableOnce[A] {
* it omits the first `n` values.
* @note Reuse: $consumesAndProducesIterator
*/
- def drop(n: Int): Iterator[A] = sliceIterator(n, -1)
+ def drop(n: Int): Iterator[A] = {
+ var j = 0
+ while (j < n && hasNext) {
+ next()
+ j += 1
+ }
+ this
+ }
/** Creates an iterator returning an interval of the values produced by this iterator.
*
@@ -511,7 +518,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
- /** Produces a collection containing cummulative results of applying the
+ /** Produces a collection containing cumulative results of applying the
* operator going left to right.
*
* $willNotTerminateInf
@@ -534,8 +541,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
} else Iterator.empty.next()
}
- /** Produces a collection containing cummulative results of applying the operator going right to left.
- * The head of the collection is the last cummulative result.
+ /** Produces a collection containing cumulative results of applying the operator going right to left.
+ * The head of the collection is the last cumulative result.
*
* $willNotTerminateInf
* $orderDependent
@@ -1187,9 +1194,8 @@ trait Iterator[+A] extends TraversableOnce[A] {
* $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
- require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}")
var i = start
- val end = start + math.min(len, xs.length - start)
+ val end = start + math.min(len, xs.length - start)
while (i < end && hasNext) {
xs(i) = next()
i += 1
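
`drop` above now advances the receiver in place and returns it, rather than wrapping it in a slice iterator. A sketch of the semantics as given by the new code:

    val it = Iterator(1, 2, 3, 4)
    val rest = it.drop(2)  // the first two elements are consumed right here
    rest eq it             // true: the same iterator comes back, already advanced
    rest.toList            // List(3, 4)
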
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index a4fa58b13c..875f6e1c02 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -37,8 +37,8 @@ import convert._
* val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
* assert(sl eq sl2)
* }}}
- * The following conversions also are supported, but the
- * direction Scala to Java is done my a more specifically named method:
+ * The following conversions are also supported, but the
+ * direction from Scala to Java is done by the more specifically named methods:
* `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
*
* - `scala.collection.Iterable` <=> `java.util.Collection`
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index 49fbb902ab..5a7bb5891e 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -15,7 +15,14 @@ import generic._
import mutable.Builder
/** A base trait for linear sequences.
+ *
* $linearSeqInfo
+ *
+ * @define linearSeqInfo
+ * Linear sequences have reasonably efficient `head`, `tail`, and `isEmpty` methods.
+ * If these methods provide the fastest way to traverse the collection, a
+ * collection `Coll` that extends this trait should also extend
+ * `LinearSeqOptimized[A, Coll[A]]`.
*/
trait LinearSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index ff7985bf0d..96e2135fd1 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -14,22 +14,10 @@ import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
- * $linearSeqInfo
- *
- * This trait just implements `iterator` in terms of `isEmpty, ``head`, and `tail`.
- * However, see `LinearSeqOptimized` for an implementation trait that overrides operations
+ * This trait just implements `iterator` and `corresponds` in terms of `isEmpty, ``head`, and `tail`.
+ * However, see `LinearSeqOptimized` for an implementation trait that overrides many more operations
* to make them run faster under the assumption of fast linear access with `head` and `tail`.
*
- * @define linearSeqInfo
- * Linear sequences are defined in terms of three abstract methods, which are assumed
- * to have efficient implementations. These are:
- * {{{
- * def isEmpty: Boolean
- * def head: A
- * def tail: Repr
- * }}}
- * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
- *
* Linear sequences do not add any new methods to `Seq`, but promise efficient implementations
* of linear access patterns.
* @author Martin Odersky
@@ -58,12 +46,18 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
val result = these.head; these = these.tail; result
} else Iterator.empty.next()
- /** Have to clear `these` so the iterator is exhausted like
- * it would be without the optimization.
- */
override def toList: List[A] = {
+ /* Have to clear `these` so the iterator is exhausted like
+ * it would be without the optimization.
+ *
+ * Calling "newBuilder.result()" in toList method
+ * prevents original seq from garbage collection,
+ * so we use these.take(0) here.
+ *
+ * Check SI-8924 for details
+ */
val xs = these.toList
- these = newBuilder.result()
+ these = these.take(0)
xs
}
}
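
The `these.take(0)` trick above keeps the iterator looking exhausted after `toList` without retaining a fresh builder result (SI-8924). A rough illustration, assuming the default `LinearSeqLike` iterator is in play, as it is for `List`:

    val it = List(1, 2, 3).iterator
    val xs = it.toList  // List(1, 2, 3), via the optimized toList override
    it.hasNext          // false: `these` was reset to an empty tail by take(0),
                        // so the iterator behaves as if fully consumed
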
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 8635b090b9..9c336e8e31 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -13,10 +13,24 @@ import mutable.ListBuffer
import immutable.List
import scala.annotation.tailrec
-/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
- * the implementation of several methods under the assumption of fast linear access.
+/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
+ * the implementation of various methods under the assumption of fast linear access.
+ *
+ * $linearSeqOptim
+ *
+ * @define linearSeqOptim
+ * Linear-optimized sequences implement most operations in in terms of three methods,
+ * which are assumed to have efficient implementations. These are:
+ * {{{
+ * def isEmpty: Boolean
+ * def head: A
+ * def tail: Repr
+ * }}}
+ * Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
+ * Note that default implementations are provided via inheritance, but these
+ * should be overridden for performance.
+ *
*
- * $linearSeqInfo
*/
trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr =>
@@ -30,7 +44,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
*
* $willNotTerminateInf
*
- * Note: the execution of `length` may take time proportial to the length of the sequence.
+ * Note: the execution of `length` may take time proportional to the length of the sequence.
*/
def length: Int = {
var these = self
@@ -43,8 +57,8 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
/** Selects an element by its index in the $coll.
- * Note: the execution of `apply` may take time proportial to the index value.
- * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ * Note: the execution of `apply` may take time proportional to the index value.
+ * @throws IndexOutOfBoundsException if `idx` does not satisfy `0 <= idx < length`.
*/
def apply(n: Int): A = {
val rest = drop(n)
@@ -235,13 +249,16 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
override /*IterableLike*/
def sameElements[B >: A](that: GenIterable[B]): Boolean = that match {
case that1: LinearSeq[_] =>
- var these = this
- var those = that1
- while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
- these = these.tail
- those = those.tail
+ // Probably immutable, so check reference identity first (it's quick anyway)
+ (this eq that1) || {
+ var these = this
+ var those = that1
+ while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
+ these = these.tail
+ those = those.tail
+ }
+ these.isEmpty && those.isEmpty
}
- these.isEmpty && those.isEmpty
case _ =>
super.sameElements(that)
}
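
The `this eq that1` check added above short-circuits the element-by-element walk when both sides are the same object. A minimal sketch:

    val xs = List(1, 2, 3)
    xs.sameElements(xs)             // true, answered by the reference check alone
    xs.sameElements(List(1, 2, 3))  // true, still verified element by element
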
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 38a598321f..b474abc12a 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -222,7 +222,7 @@ self =>
* but it might be overridden in subclasses.
*
* @param key the given key value for which a binding is missing.
- * @throws `NoSuchElementException`
+ * @throws NoSuchElementException
*/
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index fdfb1f2efc..329273df5b 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -140,7 +140,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (isEmpty) Iterator(repr)
else new PermutationsItr
- /** Iterates over combinations.
+ /** Iterates over combinations. A _combination_ of length `n` is a subsequence of
+ * the original sequence, with the elements taken in order. Thus, `"xy"` and `"yy"`
+ * are both length-2 combinations of `"xyy"`, but `"yx"` is not. If there is
+ * more than one way to generate the same subsequence, only one will be returned.
+ *
+ * For example, `"xyyy"` has three different ways to generate `"xy"` depending on
+ * whether the first, second, or third `"y"` is selected. However, since all are
+ * identical, only one will be chosen. Which of the three will be taken is an
+ * implementation detail that is not defined.
*
* @return An Iterator which traverses the possible n-element combinations of this $coll.
* @example `"abbbc".combinations(2) = Iterator(ab, ac, bb, bc)`
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index e719f19c78..587ec133a5 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -55,7 +55,7 @@ trait SeqViewLike[+A,
trait Sliced extends super.Sliced with Transformed[A] {
def length = iterator.size
def apply(idx: Int): A =
- if (idx + from < until) self.apply(idx + from)
+ if (idx >= 0 && idx + from < until) self.apply(idx + from)
else throw new IndexOutOfBoundsException(idx.toString)
override def foreach[U](f: A => U) = iterator foreach f
@@ -83,6 +83,7 @@ trait SeqViewLike[+A,
}
def length = index(self.length)
def apply(idx: Int) = {
+ if (idx < 0 || idx >= self.length) throw new IndexOutOfBoundsException(idx.toString)
val row = findRow(idx, 0, self.length - 1)
mapping(self(row)).seq.toSeq(idx - index(row))
}
@@ -95,6 +96,14 @@ trait SeqViewLike[+A,
if (idx < self.length) self(idx) else restSeq(idx - self.length)
}
+ trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B] {
+ protected[this] lazy val fstSeq = fst.toSeq
+ def length: Int = fstSeq.length + self.length
+ def apply(idx: Int): B =
+ if (idx < fstSeq.length) fstSeq(idx)
+ else self.apply(idx - fstSeq.length)
+ }
+
trait Filtered extends super.Filtered with Transformed[A] {
protected[this] lazy val index = {
var len = 0
@@ -178,21 +187,12 @@ trait SeqViewLike[+A,
final override protected[this] def viewIdentifier = "P"
}
- trait Prepended[B >: A] extends Transformed[B] {
- protected[this] val fst: B
- override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
- def length: Int = 1 + self.length
- def apply(idx: Int): B =
- if (idx == 0) fst
- else self.apply(idx - 1)
- final override protected[this] def viewIdentifier = "A"
- }
-
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -211,7 +211,6 @@ trait SeqViewLike[+A,
val patch = _patch
val replaced = _replaced
} with AbstractTransformed[B] with Patched[B]
- protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
// see comment in IterableViewLike.
protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
@@ -241,7 +240,7 @@ trait SeqViewLike[+A,
}
override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
- newPrepended(elem).asInstanceOf[That]
+ newPrepended(elem :: Nil).asInstanceOf[That]
override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
++(Iterator.single(elem))(bf)
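
With `Prepended` now taking a whole `GenTraversable` and `+:` routed through `newPrepended(elem :: Nil)`, prepending to a sequence view stays a view; the `Sliced` change also rejects negative indices. A hedged sketch of both, as I read the patch:

    val view = Seq(1, 2, 3).view

    (0 +: view).toList    // List(0, 1, 2, 3), built through the shared Prepended transformer
    view.slice(1, 3)(-1)  // now throws IndexOutOfBoundsException instead of quietly
                          // indexing into the underlying sequence
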
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index a8731a51b1..5a07874fd6 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -419,7 +419,7 @@ trait TraversableLike[+A, +Repr] extends Any
/** Selects the first element of this $coll.
* $orderDependent
* @return the first element of this $coll.
- * @throws `NoSuchElementException` if the $coll is empty.
+ * @throws NoSuchElementException if the $coll is empty.
*/
def head: A = {
var result: () => A = () => throw new NoSuchElementException
@@ -473,7 +473,7 @@ trait TraversableLike[+A, +Repr] extends Any
* $orderDependent
* @return a $coll consisting of all elements of this $coll
* except the last one.
- * @throws `UnsupportedOperationException` if the $coll is empty.
+ * @throws UnsupportedOperationException if the $coll is empty.
*/
def init: Repr = {
if (isEmpty) throw new UnsupportedOperationException("empty.init")
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 13cd99d910..2eab58009c 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -159,7 +159,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty. */
+ * @throws UnsupportedOperationException if this $coll is empty. */
def reduceLeft[B >: A](op: (B, A) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceLeft")
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 5926c69ebf..0901d749c3 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -189,6 +189,15 @@ trait TraversableViewLike[+A,
}
final override protected[this] def viewIdentifier = "A"
}
+
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: GenTraversable[B]
+ def foreach[U](f: B => U) {
+ fst foreach f
+ self foreach f
+ }
+ final override protected[this] def viewIdentifier = "A"
+ }
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
@@ -222,11 +231,15 @@ trait TraversableViewLike[+A,
final override protected[this] def viewIdentifier = "D"
}
- override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ override def ++[B >: A, That](xs: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
newAppended(xs.seq.toTraversable).asInstanceOf[That]
-// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
-// else super.++[B, That](that)(bf)
- }
+
+ override def ++:[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs.seq.toTraversable).asInstanceOf[That]
+
+ // Need second one because of optimization in TraversableLike
+ override def ++:[B >: A, That](xs: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(xs).asInstanceOf[That]
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
@@ -253,6 +266,7 @@ trait TraversableViewLike[+A,
*/
protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected def newPrepended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val fst = that } with AbstractTransformed[B] with Prepended[B]
protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index 02e5dd01f5..f0a5f57225 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -20,7 +20,7 @@ package collection.concurrent
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll `ConcurrentMap`
+ * @define Coll `concurrent.Map`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
@@ -86,4 +86,15 @@ trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
* @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
*/
def replace(k: A, v: B): Option[B]
+
+ override def getOrElseUpdate(key: A, op: =>B): B = get(key) match {
+ case Some(v) => v
+ case None =>
+ val v = op
+ putIfAbsent(key, v) match {
+ case Some(nv) => nv
+ case None => v
+ }
+ }
+
}
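
The default `getOrElseUpdate` added above is built on `putIfAbsent`, so a value installed concurrently by another thread wins over the one just computed. A sketch of its use with `TrieMap`, the standard `concurrent.Map` implementation (the `cache` name below is illustrative, not from the patch):

    import scala.collection.concurrent.TrieMap

    val cache = TrieMap.empty[String, Int]

    // op is only evaluated when the key is absent; if another thread installs a
    // value first, putIfAbsent returns that value and the fresh one is discarded.
    cache.getOrElseUpdate("answer", 42)  // 42
    cache.getOrElseUpdate("answer", 0)   // still 42
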
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 9916fe9843..e97a2ff1fc 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -30,8 +30,9 @@ trait WrapAsJava {
* @return A Java Iterator view of the argument.
*/
implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
- case _ => IteratorWrapper(it)
+ case null => null
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
}
/**
@@ -48,8 +49,9 @@ trait WrapAsJava {
* @return A Java Enumeration view of the argument.
*/
implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case null => null
case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
- case _ => IteratorWrapper(it)
+ case _ => IteratorWrapper(it)
}
/**
@@ -66,8 +68,9 @@ trait WrapAsJava {
* @return A Java Iterable view of the argument.
*/
implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
- case _ => IterableWrapper(i)
+ case null => null
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
}
/**
@@ -82,8 +85,9 @@ trait WrapAsJava {
* @return A Java Collection view of the argument.
*/
implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
- case _ => new IterableWrapper(it)
+ case null => null
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
}
/**
@@ -100,8 +104,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
}
/**
@@ -118,8 +123,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
}
/**
@@ -136,8 +142,9 @@ trait WrapAsJava {
* @return A Java List view of the argument.
*/
implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
- case _ => new SeqWrapper(seq)
+ case null => null
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
}
/**
@@ -154,8 +161,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
+ case _ => new MutableSetWrapper(s)
}
/**
@@ -172,8 +180,9 @@ trait WrapAsJava {
* @return A Java Set view of the argument.
*/
implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case null => null
case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
+ case _ => new SetWrapper(s)
}
/**
@@ -190,9 +199,9 @@ trait WrapAsJava {
* @return A Java Map view of the argument.
*/
implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
+ case _ => new MutableMapWrapper(m)
}
/**
@@ -210,9 +219,9 @@ trait WrapAsJava {
* @return A Java `Dictionary` view of the argument.
*/
implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
+ case null => null
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
}
/**
@@ -230,9 +239,9 @@ trait WrapAsJava {
* @return A Java `Map` view of the argument.
*/
implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
- case _ => new MapWrapper(m)
+ case _ => new MapWrapper(m)
}
/**
@@ -251,8 +260,9 @@ trait WrapAsJava {
* @return A Java `ConcurrentMap` view of the argument.
*/
implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case null => null
case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
+ case _ => new ConcurrentMapWrapper(m)
}
}
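
The `case null` arms above make the wrap conversions null-transparent. A rough sketch using the matching `JavaConversions` entry point (variable names are illustrative):

    import scala.collection.JavaConversions.seqAsJavaList

    val s: Seq[Int] = null
    seqAsJavaList(s)  // now null, rather than a SeqWrapper around null
                      // that would throw NullPointerException on first use
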
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ab151a6778..7332b71af1 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -30,8 +30,9 @@ trait WrapAsScala {
* @return A Scala `Iterator` view of the argument.
*/
implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(it)
+ case _ => JIteratorWrapper(it)
}
/**
@@ -48,8 +49,9 @@ trait WrapAsScala {
* @return A Scala Iterator view of the argument.
*/
implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case null => null
case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
+ case _ => JEnumerationWrapper(i)
}
/**
@@ -67,8 +69,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
+ case _ => JIterableWrapper(i)
}
/**
@@ -82,8 +85,9 @@ trait WrapAsScala {
* @return A Scala Iterable view of the argument.
*/
implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case null => null
case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
+ case _ => JCollectionWrapper(i)
}
/**
@@ -101,8 +105,9 @@ trait WrapAsScala {
* @return A Scala mutable `Buffer` view of the argument.
*/
implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
+ case null => null
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ => new JListWrapper(l)
}
/**
@@ -119,8 +124,9 @@ trait WrapAsScala {
* @return A Scala mutable Set view of the argument.
*/
implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case null => null
case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
+ case _ => new JSetWrapper(s)
}
/**
@@ -144,9 +150,9 @@ trait WrapAsScala {
* @return A Scala mutable Map view of the argument.
*/
implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
+ case null => null
case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
+ case _ => new JMapWrapper(m)
}
/**
@@ -163,8 +169,9 @@ trait WrapAsScala {
* @return A Scala mutable ConcurrentMap view of the argument.
*/
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
+ case null => null
+ case cmw: ConcurrentMapWrapper[A, B] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
}
/**
@@ -179,8 +186,9 @@ trait WrapAsScala {
* @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case null => null
case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
+ case _ => new JDictionaryWrapper(p)
}
/**
@@ -194,7 +202,8 @@ trait WrapAsScala {
* @return A Scala mutable Map[String, String] view of the argument.
*/
implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
+ case null => null
+ case _ => new JPropertiesWrapper(p)
}
}
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index cd48cd23f4..54455c531a 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -25,7 +25,7 @@ import scala.language.higherKinds
* @author Martin Odersky
* @since 2.8
* @define coll collection
- * @define Coll CC
+ * @define Coll Traversable
*/
trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
@@ -45,7 +45,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Selects the first element of this $coll.
*
* @return the first element of this $coll.
- * @throws `NoSuchElementException` if the $coll is empty.
+ * @throws NoSuchElementException if the $coll is empty.
*/
def head: A
@@ -202,7 +202,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* element type of this $coll is a `Traversable`.
* @return a two-dimensional $coll of ${coll}s which has as ''n''th row
* the ''n''th column of this $coll.
- * @throws `IllegalArgumentException` if all collections in this $coll
+ * @throws IllegalArgumentException if all collections in this $coll
* are not of the same size.
*/
@migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0")
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 837143784b..49b4397cf2 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -406,7 +406,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// create a new HashSet1 with the hash we already know
new HashSet1(ks1.head, hash)
case _ =>
- // create a new HashSetCollison with the hash we already know and the new keys
+ // create a new HashSetCollision with the hash we already know and the new keys
new HashSetCollision1(hash, ks1)
}
}
@@ -426,7 +426,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// create a new HashSet1 with the hash we already know
new HashSet1(ks1.head, hash)
case _ =>
- // create a new HashSetCollison with the hash we already know and the new keys
+ // create a new HashSetCollision with the hash we already know and the new keys
new HashSetCollision1(hash, ks1)
}
}
@@ -445,7 +445,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
// Should only have HSC1 if size > 1
this
case _ =>
- // create a new HashSetCollison with the hash we already know and the new keys
+ // create a new HashSetCollision with the hash we already know and the new keys
new HashSetCollision1(hash, ks1)
}
} else this
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index a8f1149615..89b4ee1145 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -80,6 +80,7 @@ import java.io.{ObjectOutputStream, ObjectInputStream}
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
+@SerialVersionUID(-6084104484083858598L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
sealed abstract class List[+A] extends AbstractSeq[A]
with LinearSeq[A]
with Product
@@ -290,7 +291,6 @@ sealed abstract class List[+A] extends AbstractSeq[A]
if (this eq Nil) Nil.asInstanceOf[That] else {
var rest = this
var h: ::[B] = null
- var x: A = null.asInstanceOf[A]
// Special case for first element
do {
val x: Any = pf.applyOrElse(rest.head, List.partialNotApplied)
@@ -428,13 +428,14 @@ case object Nil extends List[Nothing] {
}
/** A non empty list characterized by a head and a tail.
- * @param hd the first element of the list
+ * @param head the first element of the list
* @param tl the list containing the remaining elements of this list after the first one.
* @tparam B the type of the list elements.
* @author Martin Odersky
* @version 1.0, 15/07/2003
* @since 2.8
*/
+@SerialVersionUID(509929039250432923L) // value computed by serialver for 2.11.2, annotation added in 2.11.4
final case class ::[B](override val head: B, private[scala] var tl: List[B]) extends List[B] {
override def tail : List[B] = tl
override def isEmpty: Boolean = false
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 7c40e84280..c5773338f5 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -29,7 +29,11 @@ object ListMap extends ImmutableMapFactory[ListMap] {
new MapCanBuildFrom[A, B]
def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]]
- private object EmptyListMap extends ListMap[Any, Nothing] { }
+ @SerialVersionUID(-8256686706655863282L)
+ private object EmptyListMap extends ListMap[Any, Nothing] {
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
+ }
}
/** This class implements immutable maps using a list-based data structure.
@@ -159,7 +163,6 @@ extends AbstractMap[A, B]
*/
override def apply(k: A): B1 = apply0(this, k)
-
@tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
else if (k == cur.key) cur.value
@@ -176,7 +179,16 @@ extends AbstractMap[A, B]
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
else if (cur.next.nonEmpty) get0(cur.next, k) else None
-
+
+
+ override def contains(key: A): Boolean = contains0(this, key)
+
+ @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean =
+ if (k == cur.key) true
+ else if (cur.next.nonEmpty) contains0(cur.next, k)
+ else false
+
+
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
@@ -186,6 +198,7 @@ extends AbstractMap[A, B]
new m.Node[B2](k, v)
}
+
/** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 89d1a9640e..a6e6fba0a5 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -111,7 +111,7 @@ class ListSet[A] extends AbstractSet[A]
/** Creates a new iterator over all elements contained in this set.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the new iterator
*/
def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -127,12 +127,12 @@ class ListSet[A] extends AbstractSet[A]
}
/**
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
*/
override def head: A = throw new NoSuchElementException("Set has no elements")
/**
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
*/
override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 5178d5a862..63ddcb18cf 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -94,6 +94,8 @@ object Map extends ImmutableMapFactory[Map] {
private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
override def size: Int = 0
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
def get(key: Any): Option[Nothing] = None
def iterator: Iterator[(Any, Nothing)] = Iterator.empty
override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
@@ -103,6 +105,8 @@ object Map extends ImmutableMapFactory[Map] {
class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 1
+ override def apply(key: A) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = key == key1
def get(key: A): Option[B] =
if (key == key1) Some(value1) else None
def iterator = Iterator((key1, value1))
@@ -119,6 +123,11 @@ object Map extends ImmutableMapFactory[Map] {
class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 2
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -140,6 +149,12 @@ object Map extends ImmutableMapFactory[Map] {
class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 3
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
@@ -164,6 +179,13 @@ object Map extends ImmutableMapFactory[Map] {
class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 4
+ override def apply(key: A) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) || (key == key4)
def get(key: A): Option[B] =
if (key == key1) Some(value1)
else if (key == key2) Some(value2)
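
The `apply`/`contains` overrides above let the small immutable maps answer directly instead of allocating an `Option` through `get`; observable behaviour is unchanged. A quick sketch:

    val m = Map("a" -> 1, "b" -> 2)  // a Map.Map2 under the hood

    m("a")           // 1, without going through get/Some
    m.contains("c")  // false, likewise answered by the override
    m("c")           // still throws NoSuchElementException("key not found: c")
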
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index fb9f6703a9..8910ee16b9 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -158,7 +158,7 @@ extends scala.collection.AbstractSeq[T]
* @note Calling this method will force the entire sequence to be read.
*/
def length: Int = {
- while (!latest.isLast) addMore()
+ while (!latest.isLast && latest.end < end) addMore()
(latest.end min end) - start
}
@@ -175,7 +175,8 @@ extends scala.collection.AbstractSeq[T]
*/
override def isDefinedAt(index: Int) =
index >= 0 && index < end - start && {
- val p = page(index + start); index + start < p.end
+ val absidx = index + start
+ absidx >= 0 && absidx < page(absidx).end
}
/** The subsequence from index `start` up to `end -1` if `end`
@@ -192,6 +193,9 @@ extends scala.collection.AbstractSeq[T]
if (f.next eq null) f.addMore(more)
f = f.next
}
+ // Warning -- not refining `more` means that slices can freely request and obtain
+ // data outside of their slice. This is part of the design of PagedSeq
+ // (to read pages!) but can be surprising.
new PagedSeq(more, f, s, e)
}
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 264304db68..98266716cc 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -53,7 +53,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
*
* @param n index of the element to return
* @return the element at position `n` in this queue.
- * @throws Predef.NoSuchElementException if the queue is too short.
+ * @throws java.util.NoSuchElementException if the queue is too short.
*/
override def apply(n: Int): A = {
val len = out.length
@@ -120,7 +120,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
/** Returns a tuple with the first element in the queue,
* and a new queue with this element removed.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the first element of the queue.
*/
def dequeue: (A, Queue[A]) = out match {
@@ -139,7 +139,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
/** Returns the first element in the queue, or throws an error if there
* is no element contained in the queue.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the first element.
*/
def front: A = head
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 9fa877055f..0b380517f8 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -33,7 +33,13 @@ import scala.collection.parallel.immutable.ParRange
* `init`) are also permitted on overfull ranges.
*
* @param start the start of this range.
- * @param end the exclusive end of the range.
+ * @param end the end of the range. For exclusive ranges, e.g.
+ * `Range(0,3)` or `(0 until 3)`, this is one
+ * step past the last element in the range. For inclusive
+ * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`,
+ * it may be in the range if it is not skipped by the step size.
+ * To find the last element inside a non-empty range,
+ * use `last` instead.
* @param step the step for the range.
*
* @author Martin Odersky
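A short illustration of the documented meaning of `end`, using plain ranges:

    (0 until 3).end                // 3, one step past the last element (last == 2)
    (0 to 3).end                   // 3, and here it is also the last element
    Range.inclusive(0, 10, 3).end  // 10, not in the range because the step skips it
    Range(0, 10, 3).last           // 9 -- use `last` for the final element of a non-empty range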
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index b77b16f23f..1c28093b2c 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -95,7 +95,7 @@ class Stack[+A] protected (protected val elems: List[A])
/** Returns the top element of the stack. An error is signaled if
* there is no element on the stack.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the top element.
*/
def top: A =
@@ -105,7 +105,7 @@ class Stack[+A] protected (protected val elems: List[A])
/** Removes the top element from the stack.
* Note: should return `(A, Stack[A])` as for queues (mics)
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the new stack without the former top element.
*/
def pop: Stack[A] =
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 91a4e1c43d..8be0b2fee2 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -225,7 +225,7 @@ self =>
* }}}
*
* @return The first element of the `Stream`.
- * @throws Predef.NoSuchElementException if the stream is empty.
+ * @throws java.util.NoSuchElementException if the stream is empty.
*/
def head: A
@@ -236,7 +236,7 @@ self =>
* returns the lazy result.
*
* @return The tail of the `Stream`.
- * @throws Predef.UnsupportedOperationException if the stream is empty.
+ * @throws UnsupportedOperationException if the stream is empty.
*/
def tail: Stream[A]
@@ -524,57 +524,9 @@ self =>
*/
override def filter(p: A => Boolean): Stream[A] = filterImpl(p, isFlipped = false) // This override is only left in 2.11 because of binary compatibility, see PR #3925
- override final def withFilter(p: A => Boolean): StreamWithFilter = new StreamWithFilter(p)
-
- /** A lazier implementation of WithFilter than TraversableLike's.
- */
- final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
-
- override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return cons(f(head), tailMap(tail))
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailMap(Stream.this))
- else super.map(f)(bf)
- }
-
- override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailFlatMap(coll: Stream[A]): Stream[B] = {
- var head: A = null.asInstanceOf[A]
- var tail: Stream[A] = coll
- while (true) {
- if (tail.isEmpty)
- return Stream.Empty
- head = tail.head
- tail = tail.tail
- if (p(head))
- return f(head).toStream append tailFlatMap(tail)
- }
- throw new RuntimeException()
- }
-
- if (isStreamBuilder(bf)) asThat(tailFlatMap(Stream.this))
- else super.flatMap(f)(bf)
- }
-
- override def foreach[B](f: A => B) =
- for (x <- self)
- if (p(x)) f(x)
-
- override def withFilter(q: A => Boolean): StreamWithFilter =
- new StreamWithFilter(x => p(x) && q(x))
- }
+ /** A FilterMonadic which allows GC of the head of stream during processing */
+ @noinline // Workaround SI-9137, see https://github.com/scala/scala/pull/4284#issuecomment-73180791
+ override final def withFilter(p: A => Boolean): FilterMonadic[A, Stream[A]] = new Stream.StreamWithFilter(this, p)
/** A lazier Iterator than LinearSeqLike's. */
override def iterator: Iterator[A] = new StreamIterator(self)
@@ -878,7 +830,7 @@ self =>
* @return A new `Stream` containing everything but the last element. If your
* `Stream` represents an infinite series, this method will not return.
*
- * @throws `Predef.UnsupportedOperationException` if the stream is empty.
+ * @throws UnsupportedOperationException if the stream is empty.
*/
override def init: Stream[A] =
if (isEmpty) super.init
@@ -946,7 +898,7 @@ self =>
*
* @param p the test predicate.
* @return A new `Stream` representing the results of applying `p` to the
- * oringal `Stream`.
+ * original `Stream`.
*
* @example {{{
* // Assume we have a Stream that takes the first 20 natural numbers
@@ -1293,6 +1245,29 @@ object Stream extends SeqFactory[Stream] {
private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
-}
+ /** An implementation of `FilterMonadic` allowing GC of the filtered-out elements of
+ * the `Stream` as it is processed.
+ *
+ * Because this is not an inner class of `Stream` with a reference to the original
+ * head, it is now possible for GC to collect the leading elements that do not
+ * satisfy the filter while the tail is still being processed (see SI-8990).
+ */
+ private[immutable] final class StreamWithFilter[A](sl: => Stream[A], p: A => Boolean) extends FilterMonadic[A, Stream[A]] {
+ private var s = sl // set to null to allow GC after filtered
+ private lazy val filtered = { val f = s filter p; s = null; f } // don't set s to null if the filter throws
+
+ def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered map f
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
+ filtered flatMap f
+
+ def foreach[U](f: A => U): Unit =
+ filtered foreach f
+
+ def withFilter(q: A => Boolean): FilterMonadic[A, Stream[A]] =
+ new StreamWithFilter[A](filtered, q)
+ }
+
+}
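A minimal sketch of the new withFilter in use; the point of SI-8990 is that the leading filtered-out elements are no longer pinned by an inner-class reference while the result is consumed:

    def xs = Stream.range(0, 5000000)        // a def, so this code does not retain the head itself
    val hits = xs.withFilter(_ % 1000 == 0)  // Stream.StreamWithFilter, a FilterMonadic
    hits.foreach(_ => ())                    // leading filtered-out elements may be GC'd meanwhile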
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index c2eb85815d..4d7eaeff2a 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -53,6 +53,7 @@ extends SeqView[A, Coll]
/** boilerplate */
protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newPrepended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { protected[this] val fst = that } with AbstractTransformed[B] with Prepended[B]
protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
@@ -67,7 +68,6 @@ extends SeqView[A, Coll]
protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
override def stringPrefix = "StreamView"
}
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 738b294ce6..f0daaf25a5 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -135,23 +135,29 @@ self =>
def linesIterator: Iterator[String] =
linesWithSeparators map (line => new WrappedString(line).stripLineEnd)
- /** Returns this string with first character converted to upper case */
+ * Returns this string with its first character converted to upper case.
+ * If the first character of the string is capitalized, it is returned unchanged.
+ */
def capitalize: String =
if (toString == null) null
else if (toString.length == 0) ""
+ else if (toString.charAt(0).isUpper) toString
else {
val chars = toString.toCharArray
chars(0) = chars(0).toUpper
new String(chars)
}
- /** Returns this string with the given `prefix` stripped. */
+ /** Returns this string with the given `prefix` stripped. If this string does not
+ * start with `prefix`, it is returned unchanged.
+ */
def stripPrefix(prefix: String) =
if (toString.startsWith(prefix)) toString.substring(prefix.length)
else toString
/** Returns this string with the given `suffix` stripped. If this string does not
- * end with `suffix`, it is returned unchanged. */
+ * end with `suffix`, it is returned unchanged.
+ */
def stripSuffix(suffix: String) =
if (toString.endsWith(suffix)) toString.substring(0, toString.length() - suffix.length)
else toString
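Quick examples of the documented no-op cases:

    "scala".capitalize                         // "Scala"
    "Scala".capitalize                         // unchanged: first character is already upper case
    "scala-library.jar".stripPrefix("scala-")  // "library.jar"
    "scala-library.jar".stripPrefix("akka-")   // unchanged: prefix not present
    "build.sbt".stripSuffix(".sbt")            // "build"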
@@ -224,31 +230,31 @@ self =>
def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
/**
- * @throws `java.lang.IllegalArgumentException` - If the string does not contain a parsable boolean.
+ * @throws java.lang.IllegalArgumentException - If the string does not contain a parsable boolean.
*/
def toBoolean: Boolean = parseBoolean(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable byte.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable byte.
*/
def toByte: Byte = java.lang.Byte.parseByte(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable short.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable short.
*/
def toShort: Short = java.lang.Short.parseShort(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable int.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable int.
*/
def toInt: Int = java.lang.Integer.parseInt(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable long.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable long.
*/
def toLong: Long = java.lang.Long.parseLong(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable float.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable float.
*/
def toFloat: Float = java.lang.Float.parseFloat(toString)
/**
- * @throws `java.lang.NumberFormatException` - If the string does not contain a parsable double.
+ * @throws java.lang.NumberFormatException - If the string does not contain a parsable double.
*/
def toDouble: Double = java.lang.Double.parseDouble(toString)
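The corrected @throws links in action:

    "42".toInt          // 42
    "true".toBoolean    // true
    // "4.2".toInt throws java.lang.NumberFormatException
    // "maybe".toBoolean throws java.lang.IllegalArgumentException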
@@ -281,7 +287,7 @@ self =>
* understands.
*
* @param args the arguments used to instantiate the pattern.
- * @throws `java.lang.IllegalArgumentException`
+ * @throws java.lang.IllegalArgumentException
*/
def format(args : Any*): String =
java.lang.String.format(toString, args map unwrapArg: _*)
@@ -298,7 +304,7 @@ self =>
*
* @param l an instance of `java.util.Locale`
* @param args the arguments used to instantiate the pattern.
- * @throws `java.lang.IllegalArgumentException`
+ * @throws java.lang.IllegalArgumentException
*/
def formatLocal(l: java.util.Locale, args: Any*): String =
java.lang.String.format(l, toString, args map unwrapArg: _*)
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 8cc99a53e6..662075cd93 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -101,8 +101,8 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
else new TreeMap(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 681dbbd1a8..7378211db0 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -87,8 +87,8 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
else newSet(RB.slice(tree, from, until))
}
- override def dropRight(n: Int) = take(size - n)
- override def takeRight(n: Int) = drop(size - n)
+ override def dropRight(n: Int) = take(size - math.max(n, 0))
+ override def takeRight(n: Int) = drop(size - math.max(n, 0))
override def splitAt(n: Int) = (take(n), drop(n))
private[this] def countWhile(p: A => Boolean): Int = {
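A sketch of the clamping on TreeSet (TreeMap behaves the same way); a negative `n` is now treated as 0 instead of widening the slice arithmetic:

    import scala.collection.immutable.TreeSet
    val s = TreeSet(1, 2, 3)
    s.dropRight(-1)   // TreeSet(1, 2, 3): take(size - 0)
    s.takeRight(-1)   // TreeSet():        drop(size - 0)
    s.takeRight(2)    // TreeSet(2, 3), unchanged behaviour for non-negative n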
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala
index 2c7e76c5f5..ed6ca1939d 100644
--- a/src/library/scala/collection/mutable/AnyRefMap.scala
+++ b/src/library/scala/collection/mutable/AnyRefMap.scala
@@ -224,7 +224,7 @@ extends AbstractMap[K, V]
override def put(key: K, value: V): Option[V] = {
val h = hashOf(key)
val k = key
- var i = seekEntryOrOpen(h, k)
+ val i = seekEntryOrOpen(h, k)
if (i < 0) {
val j = i & IndexMask
_hashes(j) = h
@@ -251,7 +251,7 @@ extends AbstractMap[K, V]
override def update(key: K, value: V): Unit = {
val h = hashOf(key)
val k = key
- var i = seekEntryOrOpen(h, k)
+ val i = seekEntryOrOpen(h, k)
if (i < 0) {
val j = i & IndexMask
_hashes(j) = h
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 2d43b352c5..011fd415ee 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -30,8 +30,8 @@ import parallel.mutable.ParArray
*
* @tparam A the type of this arraybuffer's elements.
*
- * @define Coll `ArrayBuffer`
- * @define coll arraybuffer
+ * @define Coll `mutable.ArrayBuffer`
+ * @define coll array buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
* is defined in object `ArrayBuffer`.
@@ -128,21 +128,22 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this }
/** Inserts new elements at the index `n`. Opposed to method
- * `update`, this method will not replace an element with a
+ * `update`, this method will not replace an element with a new
* one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
* @param seq the traversable object providing all elements to insert.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
if (n < 0 || n > size0) throw new IndexOutOfBoundsException(n.toString)
- val xs = seq.toList
- val len = xs.length
- ensureSize(size0 + len)
+ val len = seq.size
+ val newSize = size0 + len
+ ensureSize(newSize)
+
copy(n, n + len, size0 - n)
- xs.copyToArray(array.asInstanceOf[scala.Array[Any]], n)
- size0 += len
+ seq.copyToArray(array.asInstanceOf[Array[Any]], n)
+ size0 = newSize
}
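A usage sketch; the rewrite sizes the buffer once and copies `seq` directly instead of first converting it to a List:

    import scala.collection.mutable.ArrayBuffer
    val buf = ArrayBuffer(1, 2, 5)
    buf.insertAll(2, List(3, 4))   // buf is now ArrayBuffer(1, 2, 3, 4, 5)
    // buf.insertAll(10, List(6)) would throw IndexOutOfBoundsException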
/** Removes the element on a given index position. It takes time linear in
@@ -150,7 +151,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
*
* @param n the index which refers to the first element to delete.
* @param count the number of elements to delete
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
override def remove(n: Int, count: Int) {
require(count >= 0, "removing negative number of elements")
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 00491ef20e..2bc41b5802 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -40,9 +40,8 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
arrayElementClass(repr.getClass)
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
- var l = math.min(len, repr.length)
- if (xs.length - start < l) l = xs.length - start max 0
- Array.copy(repr, 0, xs, start, l)
+ val l = len min repr.length min (xs.length - start)
+ if (l > 0) Array.copy(repr, 0, xs, start, l)
}
override def toArray[U >: T : ClassTag]: Array[U] = {
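A sketch of the clamped copy: the length is bounded by both arrays, and the copy is skipped entirely when nothing fits.

    val src = Array(1, 2, 3, 4, 5)
    val dst = new Array[Int](3)
    src.copyToArray(dst, 1, 10)   // copies only 2 elements (dst has 2 slots left); dst == Array(0, 1, 2)
    src.copyToArray(dst, 5, 1)    // computed length is negative, so Array.copy is not called at all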
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 577a838315..5a50f4fb27 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -87,7 +87,7 @@ extends AbstractSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
override def clone(): ArraySeq[A] = {
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 43d23acc1a..faa4155317 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -110,7 +110,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def |= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) | other.word(i)
this
@@ -121,7 +121,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def &= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) & other.word(i)
this
@@ -132,7 +132,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def ^= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) ^ other.word(i)
this
@@ -143,7 +143,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int]
* @return the bitset itself.
*/
def &~= (other: BitSet): this.type = {
- ensureCapacity(other.nwords)
+ ensureCapacity(other.nwords - 1)
for (i <- 0 until other.nwords)
elems(i) = elems(i) & ~other.word(i)
this
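A sketch of why the `- 1`: the growth target is a word index, and the highest word touched here is other.nwords - 1.

    import scala.collection.mutable.BitSet
    val a = BitSet(1, 2)
    val b = BitSet(100)   // b.nwords == 2, highest word index is 1
    a |= b                // grows a to word index 1 and ORs in b's words
    // a == BitSet(1, 2, 100)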
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 671b79f8c2..fd95e74fbc 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -41,7 +41,7 @@ import generic._
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
@SerialVersionUID(-8144992287952814767L)
class DoubleLinkedList[A]() extends AbstractSeq[A]
with LinearSeq[A]
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index a43fe34c99..aafe34f50a 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -56,10 +56,10 @@ import scala.annotation.migration
* @define Coll `DoubleLinkedList`
* @define coll double linked list
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self =>
- /** A reference to the node in the linked list preceeding the current node. */
+ /** A reference to the node in the linked list preceding the current node. */
var prev: This = _
// returns that list if this list is empty
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 31a4749960..7acdeeff18 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -50,7 +50,7 @@ self =>
trait Sliced extends super.Sliced with Transformed[A] {
override def length = endpoints.width
def update(idx: Int, elem: A) =
- if (idx + from < until) self.update(idx + from, elem)
+ if (idx >= 0 && idx + from < until) self.update(idx + from, elem)
else throw new IndexOutOfBoundsException(idx.toString)
}
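A small sketch, assuming a mutable IndexedSeq view; the extra `idx >= 0` guard stops negative indices from writing before the slice:

    import scala.collection.mutable
    val xs: mutable.IndexedSeq[Int] = mutable.ArrayBuffer(0, 1, 2, 3, 4)
    val view = xs.view(1, 4)   // a mutable view over indices 1 until 4
    view(0) = 10               // writes through to xs(1)
    // view(-1) = 99 now throws IndexOutOfBoundsException instead of silently updating xs(0)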
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 092698ac0b..b3500367af 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -76,7 +76,7 @@ import generic._
* }}}
*/
@SerialVersionUID(-7308240733518833071L)
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
class LinkedList[A]() extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, LinkedList]
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 987b83d23b..a9d385bc5b 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -55,7 +55,7 @@ import scala.annotation.tailrec
*
* }}}
*/
-@deprecated("Low-level linked lists are deprecated due to idiosyncracies in interface and incomplete features.", "2.11.0")
+@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0")
trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self =>
var elem: A = _
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index a611048da2..1906c47f61 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -132,7 +132,7 @@ final class ListBuffer[A]
*
* @param n the index of the element to replace.
* @param x the new element.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def update(n: Int, x: A) {
// We check the bounds early, so that we don't trigger copying.
@@ -217,7 +217,7 @@ final class ListBuffer[A]
*
* @param n the index where a new element will be inserted.
* @param seq the iterable object providing all elements to insert.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
// We check the bounds early, so that we don't trigger copying.
@@ -330,7 +330,7 @@ final class ListBuffer[A]
* @param n the index which refers to the element to delete.
* @return n the element that was formerly at position `n`.
* @note an element must exist at position `n`.
- * @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
+ * @throws IndexOutOfBoundsException if `n` is out of bounds.
*/
def remove(n: Int): A = {
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString())
diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala
index eea33e3044..1eb12d817c 100644
--- a/src/library/scala/collection/mutable/LongMap.scala
+++ b/src/library/scala/collection/mutable/LongMap.scala
@@ -19,7 +19,7 @@ import generic.CanBuildFrom
* on a map that will no longer have elements removed but will be
* used heavily may save both time and storage space.
*
- * This map is not indended to contain more than 2^29 entries (approximately
+ * This map is not intended to contain more than 2^29 entries (approximately
* 500 million). The maximum capacity is 2^30, but performance will degrade
* rapidly as 2^30 is approached.
*
@@ -81,7 +81,7 @@ extends AbstractMap[Long, V]
private def toIndex(k: Long): Int = {
// Part of the MurmurHash3 32 bit finalizer
val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt
- var x = (h ^ (h >>> 16)) * 0x85EBCA6B
+ val x = (h ^ (h >>> 16)) * 0x85EBCA6B
(x ^ (x >>> 13)) & mask
}
@@ -311,7 +311,7 @@ extends AbstractMap[Long, V]
}
}
else {
- var i = seekEntryOrOpen(key)
+ val i = seekEntryOrOpen(key)
if (i < 0) {
val j = i & IndexMask
_keys(j) = key
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 8ba31d47b6..42000e5918 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -18,6 +18,8 @@ import scala.collection.parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
* $mapTags
+ * @define Coll `mutable.Map`
+ * @define coll mutable map
* @since 2.8
*
* @define mapNote
@@ -143,7 +145,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
/** Creates a new map containing the key/value mappings provided by the specified traversable object
* and all the key/value mappings of this map.
*
- * Note that existing mappings from this map with the same key as those in `xs` will be overriden.
+ * Note that existing mappings from this map with the same key as those in `xs` will be overridden.
*
* @param xs the traversable object.
* @return a new map containing mappings of this map and those provided by `xs`.
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 78dfc35268..ac2ebf31d8 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -65,10 +65,9 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
*/
protected def makeSet: Set[B] = new HashSet[B]
- /** Assigns the specified `value` to a specified `key`, replacing
- * the existing value assigned to that `key` if it is equal to
- * the specified value. Otherwise, simply adds another binding to
- * the `key`.
+ /** Assigns the specified `value` to a specified `key`. If the key
+ * already has a binding equal to `value`, nothing is changed;
+ * otherwise a new binding is added for that `key`.
*
* @param key The key to which to bind the new value.
* @param value The value to bind to the key.
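The reworded contract in code form, using the standard MultiMap mixin pattern:

    import scala.collection.mutable.{ HashMap, MultiMap, Set }
    val mm = new HashMap[String, Set[Int]] with MultiMap[String, Int]
    mm.addBinding("a", 1)
    mm.addBinding("a", 1)   // an equal binding already exists: nothing changes
    mm.addBinding("a", 2)   // a second binding is added for "a"
    // mm("a") == Set(1, 2)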
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index a0d3ee0ef0..b852a4747b 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -22,6 +22,8 @@ import immutable.{List, Nil}
* @author Martin Odersky
* @version 2.8
* @since 1
+ * @define Coll `mutable.MutableList`
+ * @define coll mutable list
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]]
* section on `Mutable Lists` for more information.
*/
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index aade2ed6fb..24f5761cf5 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -31,7 +31,7 @@ object OpenHashMap {
/** A mutable hash map based on an open hashing scheme. The precise scheme is
* undefined, but it should make a reasonable effort to ensure that an insert
- * with consecutive hash codes is not unneccessarily penalised. In particular,
+ * with consecutive hash codes is not unnecessarily penalised. In particular,
* mappings of consecutive integer keys should work without significant
* performance loss.
*
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index b949bec48a..d3c4161e3b 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -16,6 +16,11 @@ import generic._
* To prioritize elements of type A there must be an implicit
* Ordering[A] available at creation.
*
+ * Only the `dequeue` and `dequeueAll` methods will return elements in priority
+ * order (while removing elements from the heap). Standard collection methods
+ * including `drop` and `iterator` will remove or traverse the heap in whichever
+ * order seems most convenient.
+ *
* @tparam A type of the elements in this priority queue.
* @param ord implicit ordering used to compare the elements of type `A`.
*
@@ -121,7 +126,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
/** Returns the element with the highest priority in the queue,
* and removes this element from the queue.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the element with the highest priority.
*/
def dequeue(): A =
@@ -242,13 +247,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return a priority queue with the same elements.
*/
override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
-
- // def printstate() {
- // println("-----------------------")
- // println("Size: " + resarr.p_size0)
- // println("Internal array: " + resarr.p_array.toList)
- // println(toString)
- // }
}
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 7c890fe309..03d387a535 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -58,7 +58,7 @@ extends MutableList[A]
/** Returns the first element in the queue, and removes this element
* from the queue.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the first element of the queue.
*/
def dequeue(): A =
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index c3047522e2..85a299216e 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -74,7 +74,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
*/
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
val len1 = len min (xs.length - start) min length
- Array.copy(array, 0, xs, start, len1)
+ if (len1 > 0) Array.copy(array, 0, xs, start, len1)
}
//##########################################################################
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index cbe7a639dd..40a5c93064 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -219,7 +219,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
/** Send a message to this scriptable object.
*
* @param cmd the message to send.
- * @throws `Predef.UnsupportedOperationException`
+ * @throws UnsupportedOperationException
* if the message was not understood.
*/
@deprecated("Scripting is deprecated.", "2.11.0")
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 53b6c59939..1a92f23b7b 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -125,7 +125,7 @@ extends AbstractSeq[A]
* the element from the stack. An error is signaled if there is no
* element on the stack.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the top element
*/
def top: A =
@@ -133,7 +133,7 @@ extends AbstractSeq[A]
/** Removes the top element from the stack.
*
- * @throws Predef.NoSuchElementException
+ * @throws java.util.NoSuchElementException
* @return the top element
*/
def pop(): A = {
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 498e9e461e..c56d40786e 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -22,6 +22,8 @@ import immutable.StringLike
* @author Martin Odersky
* @version 2.8
* @since 2.7
+ * @define Coll `mutable.IndexedSeq`
+ * @define coll string builder
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]]
* section on `StringBuilders` for more information.
*/
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index 26b061b2a5..6a2b6de75a 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -18,7 +18,7 @@ package scala
*
* == Using Collections ==
*
- * It is convienient to treat all collections as either
+ * It is convenient to treat all collections as either
* a [[scala.collection.Traversable]] or [[scala.collection.Iterable]], as
* these traits define the vast majority of operations
* on a collection.
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2ceeb18eef..a5ba8c49ad 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -23,9 +23,6 @@ import scala.collection.parallel.mutable.ParArrayCombiner
*
* @author Aleksandar Prokopec
* @since 2.9
- *
- * @define Coll `ParIterable`
- * @define coll parallel iterable
*/
trait ParIterable[+T]
extends GenIterable[T]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 445edd23cb..016255dca4 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -150,7 +150,8 @@ import scala.collection.parallel.ParallelCollectionImplicits._
* @define indexsignalling
* This method will use `indexFlag` signalling capabilities. This means
* that splitters may set and read the `indexFlag` state.
- *
+ * @define Coll `ParIterable`
+ * @define coll parallel iterable
*/
trait ParIterableLike[+T, +Repr <: ParIterable[T], +Sequential <: Iterable[T] with IterableLike[T, Sequential]]
extends GenIterableLike[T, Repr]
@@ -743,7 +744,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* The index flag is initially set to maximum integer value.
*
* @param pred the predicate used to test the elements
- * @return the longest prefix of this $coll of elements that satisy the predicate `pred`
+ * @return the longest prefix of this $coll of elements that satisfy the predicate `pred`
*/
def takeWhile(pred: T => Boolean): Repr = {
val cbf = combinerFactory
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index d2b15c727a..ee1334ba55 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -24,6 +24,8 @@ import scala.collection.generic.Signalling
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 4e9a2e5751..4feda5ff07 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -20,6 +20,8 @@ import scala.collection.Set
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `ParSet`
+ * @define coll parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index 42027f5bac..5d99394a50 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -22,6 +22,8 @@ import scala.collection.generic.Shrinkable
*
* @tparam K the key type of the map
* @tparam V the value type of the map
+ * @define Coll `ParMap`
+ * @define coll parallel map
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 9367f1424d..4e2d3e0e4c 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -14,9 +14,6 @@ import scala.collection.parallel.Combiner
/** A mutable variant of `ParSet`.
*
- * @define Coll `mutable.ParSet`
- * @define coll mutable parallel set
- *
* @author Aleksandar Prokopec
*/
trait ParSet[T]
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 13af5ed649..08aa3b024b 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -21,6 +21,8 @@ import scala.collection.generic.Shrinkable
* $sideeffects
*
* @tparam T the element type of the set
+ * @define Coll `mutable.ParSet`
+ * @define coll mutable parallel set
*
* @author Aleksandar Prokopec
* @since 2.9
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 91c54fa8f1..d77dcb0658 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -206,7 +206,7 @@ package parallel {
* Methods `beforeCombine` and `afterCombine` are called before and after
* combining the buckets, respectively, given that the argument to `combine`
* is not `this` (as required by the `combine` contract).
- * They can be overriden in subclasses to provide custom behaviour by modifying
+ * They can be overridden in subclasses to provide custom behaviour by modifying
* the receiver (which will be the return value).
*/
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 875d811b9b..4c82d6e15b 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -70,9 +70,9 @@ object Platform {
* @param elemClass the `Class` object of the component type of the array
* @param length the length of the new array.
* @return an array of the given component type as an `AnyRef`.
- * @throws `java.lang.NullPointerException` If `elemClass` is `null`.
- * @throws `java.lang.IllegalArgumentException` if componentType is [[scala.Unit]] or `java.lang.Void.TYPE`
- * @throws `java.lang.NegativeArraySizeException` if the specified length is negative
+ * @throws java.lang.NullPointerException If `elemClass` is `null`.
+ * @throws java.lang.IllegalArgumentException if componentType is [[scala.Unit]] or `java.lang.Void.TYPE`
+ * @throws java.lang.NegativeArraySizeException if the specified length is negative
*/
@inline
def createArray(elemClass: Class[_], length: Int): AnyRef =
@@ -80,7 +80,7 @@ object Platform {
/** Assigns the value of 0 to each element in the array.
* @param arr A non-null Array[Int].
- * @throws `java.lang.NullPointerException` If `arr` is `null`.
+ * @throws java.lang.NullPointerException If `arr` is `null`.
*/
@inline
def arrayclear(arr: Array[Int]) { java.util.Arrays.fill(arr, 0) }
@@ -92,9 +92,9 @@ object Platform {
*
* @param name the fully qualified name of the desired class.
* @return the `Class` object for the class with the specified name.
- * @throws `java.lang.LinkageError` if the linkage fails
- * @throws `java.lang.ExceptionInInitializerError` if the initialization provoked by this method fails
- * @throws `java.lang.ClassNotFoundException` if the class cannot be located
+ * @throws java.lang.LinkageError if the linkage fails
+ * @throws java.lang.ExceptionInInitializerError if the initialization provoked by this method fails
+ * @throws java.lang.ClassNotFoundException if the class cannot be located
* @example {{{
* val a = scala.compat.Platform.getClassForName("java.lang.Integer") // returns the Class[_] for java.lang.Integer
* }}}
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index d728a7f97a..df2d68c9c6 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -112,8 +112,9 @@ object ExecutionContext {
* The explicit global `ExecutionContext`. Invoke `global` when you want to provide the global
* `ExecutionContext` explicitly.
*
- * The default `ExecutionContext` implementation is backed by a port of
- * [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*
* @return the global `ExecutionContext`
*/
@@ -124,15 +125,16 @@ object ExecutionContext {
* The implicit global `ExecutionContext`. Import `global` when you want to provide the global
* `ExecutionContext` implicitly.
*
- * The default `ExecutionContext` implementation is backed by a port of
- * [[http://gee.cs.oswego.edu/dl/jsr166/dist/jsr166-4jdk7docs/java/util/concurrent/ForkJoinPool.html java.util.concurrent.ForkJoinPool]].
+ * The default `ExecutionContext` implementation is backed by a work-stealing thread pool. By default,
+ * the thread pool uses a target number of worker threads equal to the number of
+ * [[https://docs.oracle.com/javase/8/docs/api/java/lang/Runtime.html#availableProcessors-- available processors]].
*/
implicit lazy val global: ExecutionContext = impl.ExecutionContextImpl.fromExecutor(null: Executor)
}
/** Creates an `ExecutionContext` from the given `ExecutorService`.
*
- * @param e the `ExecutorService` to use
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @param reporter a function for error reporting
* @return the `ExecutionContext` using the given `ExecutorService`
*/
@@ -149,14 +151,14 @@ object ExecutionContext {
* val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())
* }}}
*
- * @param e the `ExecutorService` to use
+ * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @return the `ExecutionContext` using the given `ExecutorService`
*/
def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
/** Creates an `ExecutionContext` from the given `Executor`.
*
- * @param e the `Executor` to use
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @param reporter a function for error reporting
* @return the `ExecutionContext` using the given `Executor`
*/
@@ -165,7 +167,7 @@ object ExecutionContext {
/** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]].
*
- * @param e the `Executor` to use
+ * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]].
* @return the `ExecutionContext` using the given `Executor`
*/
def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
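Typical use of the global context described above; by default the pool targets Runtime.getRuntime.availableProcessors worker threads.

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global
    val answer = Future { 21 * 2 }   // runs on the work-stealing global pool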
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 2c7f0879ab..6304f35da9 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -592,7 +592,7 @@ object Future {
* The result becomes available once the asynchronous computation is completed.
*
* @tparam T the type of the result
- * @param body the asychronous computation
+ * @param body the asynchronous computation
* @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 494c955833..9634f6d900 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -93,7 +93,7 @@ class SyncVar[A] {
// [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
- @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
+ @deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0")
// NOTE: Used by SBT 0.13.0-M2 and below
def set(x: A): Unit = setVal(x)
@@ -113,7 +113,7 @@ class SyncVar[A] {
// [Heather] the reason why: it doesn't take into consideration
// whether or not the SyncVar is already defined. So, unset has been
// deprecated in order to eventually be able to make "unsetting" private
- @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
+ @deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0")
// NOTE: Used by SBT 0.13.0-M2 and below
def unset(): Unit = synchronized {
isDefined = false
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 4d88253de4..4843d28679 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -47,8 +47,8 @@ package object concurrent {
* Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`.
*
* @param body A piece of code which contains potentially blocking or long running calls.
- * @throws `CancellationException` if the computation was cancelled
- * @throws `InterruptedException` in the case that a wait within the blocking `body` was interrupted
+ * @throws CancellationException if the computation was cancelled
+ * @throws InterruptedException in the case that a wait within the blocking `body` was interrupted
*/
@throws(classOf[Exception])
def blocking[T](body: =>T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission)
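A minimal sketch of `blocking` wrapping a long-running call inside a Future:

    import scala.concurrent.{ Future, blocking }
    import scala.concurrent.ExecutionContext.Implicits.global
    val slow = Future {
      blocking {             // hints that the enclosed code may block, so the pool can compensate
        Thread.sleep(1000)   // stand-in for blocking I/O
      }
    }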
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index dbd6a5f6f2..e38c197196 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -169,9 +169,20 @@ object Source {
createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec)
}
-/** The class `Source` implements an iterable representation of source data.
- * Calling method `reset` returns an identical, resetted source, where
- * possible.
+/** An iterable representation of source data.
+ * It may be reset with the optional `reset` method.
+ *
+ * Subclasses must supply [[scala.io.Source@iter the underlying iterator]].
+ *
+ * Error handling may be customized by overriding the [[scala.io.Source@report report]] method.
+ *
+ * The [[scala.io.Source@ch current input]] and [[scala.io.Source@pos position]],
+ * as well as the [[scala.io.Source@next next character]] methods delegate to
+ * [[scala.io.Source$Positioner the positioner]].
+ *
+ * The default positioner encodes line and column numbers in the position passed to `report`.
+ * This behavior can be changed by supplying a
+ * [[scala.io.Source@withPositioning(pos:Source.this.Positioner):Source.this.type custom positioner]].
*
* @author Burak Emir
* @version 1.0
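A small example of the customization points the new doc text refers to, assuming an in-memory source:

    import scala.io.Source
    val src = Source.fromString("first line\nsecond line\n")
    src.getLines().foreach(println)
    // error handling can be customized by overriding `report`,
    // and `withPositioning` installs a custom Positioner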
diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala
index 64836ecd6e..0f9656436b 100644
--- a/src/library/scala/io/StdIn.scala
+++ b/src/library/scala/io/StdIn.scala
@@ -4,7 +4,7 @@ package io
import java.text.MessageFormat
/** private[scala] because this is not functionality we should be providing
- * in the standard library, at least not in this idiosyncractic form.
+ * in the standard library, at least not in this idiosyncratic form.
* Factored into trait because it is better code structure regardless.
*/
private[scala] trait StdIn {
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
index c638f531bb..2eb5514a18 100644
--- a/src/library/scala/language.scala
+++ b/src/library/scala/language.scala
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala
/**
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
index 1f411c412a..51118b43be 100644
--- a/src/library/scala/languageFeature.scala
+++ b/src/library/scala/languageFeature.scala
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala
import scala.annotation.meta
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 5a81710986..cf95f945ba 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -364,7 +364,7 @@ object BigDecimal {
* to a decimal text representation, and build a `BigDecimal` based on that.
* `BigDecimal.binary` will expand the binary fraction to the requested or default
* precision. `BigDecimal.exact` will expand the binary fraction to the
- * full number of digits, thus producing the exact decimal value corrsponding to
+ * full number of digits, thus producing the exact decimal value corresponding to
* the binary fraction of that floating-point number. `BigDecimal` equality
* matches the decimal expansion of `Double`: `BigDecimal.decimal(0.1) == 0.1`.
* Note that since `0.1f != 0.1`, the same is not true for `Float`. Instead,
@@ -417,7 +417,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
private final def computeHashCode(): Unit = {
computedHashCode =
if (isWhole && (precision - scale) < BigDecimal.maximumHashScale) toBigInt.hashCode
- else if (isValidDouble) doubleValue.##
+ else if (isDecimalDouble) doubleValue.##
else {
val temp = bigDecimal.stripTrailingZeros
scala.util.hashing.MurmurHash3.mixLast( temp.scaleByPowerOfTen(temp.scale).toBigInteger.hashCode, temp.scale )
@@ -477,7 +477,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning.
* By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want.
*/
- @deprecated("Validity has two distinct meanings. Use `isExactBinaryDouble` or `equivalentToDouble` instead.", "2.11")
+ @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11")
def isValidDouble = {
val d = toDouble
!d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 0d7ea8bce2..827cccc77e 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -284,6 +284,9 @@ object Ordering extends LowPriorityOrderingImplicits {
override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x)
override def lt(x: Float, y: Float): Boolean = outer.lt(y, x)
override def gt(x: Float, y: Float): Boolean = outer.gt(y, x)
+ override def min(x: Float, y: Float): Float = outer.max(x, y)
+ override def max(x: Float, y: Float): Float = outer.min(x, y)
+
}
}
implicit object Float extends FloatOrdering
@@ -309,6 +312,8 @@ object Ordering extends LowPriorityOrderingImplicits {
override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x)
override def lt(x: Double, y: Double): Boolean = outer.lt(y, x)
override def gt(x: Double, y: Double): Boolean = outer.gt(y, x)
+ override def min(x: Double, y: Double): Double = outer.max(x, y)
+ override def max(x: Double, y: Double): Double = outer.min(x, y)
}
}
implicit object Double extends DoubleOrdering
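The fix in action: min and max on the reversed orderings now agree with the reversed comparison.

    val rev = Ordering.Double.reverse
    rev.min(1.0, 2.0)   // 2.0 -- the "smallest" under the reversed ordering is the larger value
    rev.max(1.0, 2.0)   // 1.0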
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
index bced505273..2f4aa9cb84 100644
--- a/src/library/scala/reflect/ClassTag.scala
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -2,8 +2,7 @@ package scala
package reflect
import java.lang.{ Class => jClass }
-import scala.language.{implicitConversions, existentials}
-import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
+import scala.runtime.ScalaRunTime.arrayElementClass
/**
*
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 82a3b00ac4..a6df20165d 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -183,7 +183,7 @@ public final class BoxesRunTime
return xc.equals(y);
}
- private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ public static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
if (yc == null)
return xn == null;
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index 2d5f832e1f..a8fdfc1059 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -16,7 +16,7 @@ import java.lang.{ Class => JClass }
import scala.annotation.tailrec
/** An element of a polymorphic object cache.
- * This class is refered to by the `CleanUp` phase. Each `PolyMethodCache` chain
+ * This class is referred to by the `CleanUp` phase. Each `PolyMethodCache` chain
* must only relate to one method as `PolyMethodCache` does not identify
* the method name and argument types. In practice, one variable will be
* generated per call point, and will uniquely relate to the method called
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 6c69ebae9b..a0d89fc0e1 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -252,7 +252,7 @@ object ScalaRunTime {
*
* The primary motivation for this method is to provide a means for
* correctly obtaining a String representation of a value, while
- * avoiding the pitfalls of naïvely calling toString on said value.
+ * avoiding the pitfalls of naively calling toString on said value.
* In particular, it addresses the fact that (a) toString cannot be
* called on null and (b) depending on the apparent type of an
* array, toString may or may not print it in a human-readable form.
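
A short illustration (not part of the patch) of the two pitfalls the comment mentions; `ScalaRunTime` is an internal runtime object, so this is purely for demonstration:

    import scala.runtime.ScalaRunTime

    object StringOfDemo extends App {
      val arr = Array(1, 2, 3)
      println(arr.toString)                     // something like "[I@1b6d3586"
      println(ScalaRunTime.stringOf(arr))       // Array(1, 2, 3)
      println(ScalaRunTime.stringOf(null))      // "null" instead of an NPE
      println(ScalaRunTime.stringOf(List(arr))) // List(Array(1, 2, 3))
    }
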
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index b28f6d4269..512c4fbc27 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -17,6 +17,10 @@ import scala.language.{ higherKinds, implicitConversions }
/** This interface is intended as a minimal interface, not complicated
* by the requirement to resolve type constructors, for implicit search (which only
* needs to find an implicit conversion to Traversable for our purposes.)
+ * @define Coll `ZippedTraversable2`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
*/
trait ZippedTraversable2[+El1, +El2] extends Any {
def foreach[U](f: (El1, El2) => U): Unit
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 7c501380a3..ffd44acf81 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -14,7 +14,12 @@ import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
import scala.language.{ higherKinds, implicitConversions }
-/** See comment on ZippedTraversable2. */
+/** See comment on ZippedTraversable2
+ * @define Coll `ZippedTraversable3`
+ * @define coll collection
+ * @define collectExample
+ * @define willNotTerminateInf
+ */
trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
def foreach[U](f: (El1, El2, El3) => U): Unit
}
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 39f66f5030..d2ebf8c044 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -21,6 +21,8 @@ import scala.language.implicitConversions
* System properties. If a security manager is in place which prevents
* the properties from being read or written, the AccessControlException
* will be caught and discarded.
+ * @define Coll `collection.mutable.Map`
+ * @define coll mutable map
*
* @author Paul Phillips
* @version 2.9
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index 91fa99e3df..5ec2e73cb9 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -119,7 +119,7 @@ package scala.sys {
* ==Handling Input and Output==
*
* In the underlying Java model, once a `Process` has been started, one can
- * get `java.io.InputStream` and `java.io.OutpuStream` representing its
+ * get `java.io.InputStream` and `java.io.OutputStream` representing its
* output and input respectively. That is, what one writes to an
* `OutputStream` is turned into input to the process, and the output of a
* process can be read from an `InputStream` -- of which there are two, one
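
A minimal sketch (not part of the patch) of wiring those streams up with `scala.sys.process.ProcessIO`; it assumes a POSIX `cat` binary on the PATH:

    import java.io.{ InputStream, OutputStream }
    import scala.io.Source
    import scala.sys.process._

    object ProcessIODemo extends App {
      def feed(out: OutputStream): Unit = {
        out.write("hello from scala\n".getBytes("UTF-8"))
        out.close() // closing the OutputStream signals end-of-input to the process
      }
      def drain(in: InputStream): Unit = {
        Source.fromInputStream(in).getLines().foreach(line => println(s"got: $line"))
        in.close()
      }

      val proc = Process("cat").run(new ProcessIO(feed, drain, _.close()))
      println(s"exit code: ${proc.exitValue()}")
    }
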
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index b1a932be7e..e196d403c2 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -274,7 +274,7 @@ object Either {
*/
final case class LeftProjection[+A, +B](e: Either[A, B]) {
/**
- * Returns the value from this `Left` or throws `Predef.NoSuchElementException`
+ * Returns the value from this `Left` or throws `java.util.NoSuchElementException`
* if this is a `Right`.
*
* {{{
@@ -282,7 +282,7 @@ object Either {
* Right(12).left.get // NoSuchElementException
* }}}
*
- * @throws Predef.NoSuchElementException if the projection is [[scala.util.Right]]
+ * @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]]
*/
def get = e match {
case Left(a) => a
@@ -440,14 +440,14 @@ object Either {
/**
* Returns the value from this `Right` or throws
- * `Predef.NoSuchElementException` if this is a `Left`.
+ * `java.util.NoSuchElementException` if this is a `Left`.
*
* {{{
* Right(12).right.get // 12
* Left(12).right.get // NoSuchElementException
* }}}
*
- * @throws Predef.NoSuchElementException if the projection is `Left`.
+ * @throws java.util.NoSuchElementException if the projection is `Left`.
*/
def get = e match {
case Left(_) => throw new NoSuchElementException("Either.right.value on Left")
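
For reference (not part of the patch), the projections also offer non-throwing alternatives to `get`, which sidestep the `java.util.NoSuchElementException` documented above:

    object EitherProjectionDemo extends App {
      val r: Either[String, Int] = Right(12)
      val l: Either[String, Int] = Left("boom")

      println(r.right.get)           // 12
      // l.right.get would throw java.util.NoSuchElementException
      println(l.right.toOption)      // None
      println(l.right.getOrElse(-1)) // -1
      println(l.left.get)            // boom
    }
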
diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala
index 1cf55cb28d..8f811f950e 100644
--- a/src/manual/scala/man1/Command.scala
+++ b/src/manual/scala/man1/Command.scala
@@ -47,7 +47,7 @@ trait Command {
def copyright = Section("COPYRIGHT",
"This is open-source software, available to you under a BSD-like license. " &
- "See accomponying \"copyright\" or \"LICENSE\" file for copying conditions. " &
+ "See accompanying \"copyright\" or \"LICENSE\" file for copying conditions. " &
"There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A " &
"PARTICULAR PURPOSE.")
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 31d25d4801..3954ed588e 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -360,7 +360,7 @@ object scalac extends Command {
"ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"),
Definition(
MItalic("selectivecps"),
- MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"),
+ MItalic("@cps") & "-driven transform of selectiveanf assignments (CPS plugin)"),
Definition(
MItalic("uncurry"),
"uncurry, translate function values to anonymous classes"),
diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 3261cada37..8459419fa5 100644
--- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -116,10 +116,8 @@ abstract class BytecodeTest {
sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
- val classBytes: InputStream = (for {
- classRep <- classpath.findClass(name)
- binary <- classRep.binary
- } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+ val classBytes: InputStream = classpath.findClassFile(name).map(_.input)
+ .getOrElse(sys.error(s"failed to load class '$name'; classpath = $classpath"))
val cr = new ClassReader(classBytes)
val cn = new ClassNode()
@@ -138,7 +136,7 @@ abstract class BytecodeTest {
object BytecodeTest {
/** Parse `file` as a class file, transforms the ASM representation with `f`,
- * and overwrites the orginal file.
+ * and overwrites the original file.
*/
def modifyClassFile(file: JFile)(f: ClassNode => ClassNode) {
val rfile = new reflect.io.File(file)
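
A hedged sketch (not part of the patch) of the read-transform-write cycle that `modifyClassFile` performs, using the shaded ASM classes bundled with the compiler; the `path` argument and the `ModifyClassFileSketch` name are illustrative only:

    import java.nio.file.{ Files, Paths }
    import scala.tools.asm.{ ClassReader, ClassWriter }
    import scala.tools.asm.tree.ClassNode

    object ModifyClassFileSketch {
      // Parse the class file at `path`, let `f` rewrite the ASM tree, write it back.
      def modify(path: String)(f: ClassNode => ClassNode): Unit = {
        val cn = new ClassNode()
        new ClassReader(Files.readAllBytes(Paths.get(path))).accept(cn, 0)

        val cw = new ClassWriter(ClassWriter.COMPUTE_MAXS)
        f(cn).accept(cw)
        Files.write(Paths.get(path), cw.toByteArray)
      }
    }
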
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
index d6b62e1d9e..848103f5cc 100644
--- a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
+++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
@@ -12,7 +12,7 @@ import java.util.Map;
* A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting
* call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class.
*
- * WARANING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
+ * WARNING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
* because it must be universally accessible for instrumentation needs. If you want to profile your test use
* {@link Instrumentation} instead.
*/
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index e73c5ffa91..fbcf7f3e4f 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -60,7 +60,7 @@ package api
*
* object Test extends App {
* val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
- * def jarg(name: String) = jann(newTermName(name)).asInstanceOf[LiteralArgument].value
+ * def jarg(name: String) = jann(TermName(name)).asInstanceOf[LiteralArgument].value
*
* val classRef = jarg("classRef").typeValue
* println(showRaw(classRef)) // TypeRef(ThisType(<empty>), JavaAnnottee, List())
@@ -150,7 +150,7 @@ trait Constants {
*
* object Test extends App {
* val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
- * def jarg(name: String) = jann(newTermName(name)) match {
+ * def jarg(name: String) = jann(TermName(name)) match {
* // Constant is always wrapped into a Literal or LiteralArgument tree node
* case LiteralArgument(ct: Constant) => value
* case _ => sys.error("Not a constant")
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 3230fdbc67..ad03718898 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -84,7 +84,7 @@ trait Exprs { self: Universe =>
*
* It is equivalent to
* {{{
- * Select( expr.tree, newTermName("foo") )
+ * Select( expr.tree, TermName("foo") )
* }}}
*
* The following example code however does not compile
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index bf4d6353df..bcad84a3f0 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -20,20 +20,20 @@ import scala.language.implicitConversions
*
* For example, to create a class named `C` one would write something like:
* {{{
- * ClassDef(Modifiers(NoFlags), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(NoFlags), TypeName("C"), Nil, ...)
* }}}
*
* Here, the flag set is empty.
*
* To make `C` private, one would write something like:
* {{{
- * ClassDef(Modifiers(PRIVATE), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(PRIVATE), TypeName("C"), Nil, ...)
* }}}
*
* Flags can also be combined with the vertical bar operator (`|`).
* For example, a private final class is written something like:
* {{{
- * ClassDef(Modifiers(PRIVATE | FINAL), newTypeName("C"), Nil, ...)
+ * ClassDef(Modifiers(PRIVATE | FINAL), TypeName("C"), Nil, ...)
* }}}
*
* The list of all available flags is defined in [[scala.reflect.api.FlagSets#FlagValues]], available via
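
A compact sketch (not part of the patch) showing the same flag combination at runtime; `FlagSetsDemo` and class `C` are illustrative:

    import scala.reflect.runtime.universe._

    object FlagSetsDemo extends App {
      // Flags come from the universe's Flag bundle and combine with `|`,
      // as in the ClassDef examples above.
      val mods = Modifiers(Flag.PRIVATE | Flag.FINAL)
      println(mods.hasFlag(Flag.PRIVATE)) // true
      println(mods.hasFlag(Flag.FINAL))   // true

      // A quasiquote produces the same kind of Modifiers on the resulting ClassDef.
      val ClassDef(classMods, _, _, _) = q"private final class C"
      println(classMods.hasFlag(Flag.FINAL)) // true
    }
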
diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala
index 673dbce6f5..c6352905d1 100644
--- a/src/reflect/scala/reflect/api/Liftables.scala
+++ b/src/reflect/scala/reflect/api/Liftables.scala
@@ -52,7 +52,7 @@ trait Liftables { self: Universe =>
object Unliftable extends StandardUnliftableInstances {
/** A helper method that simplifies creation of `Unliftable` instances.
* Takes a partial function which is defined on correct representations of `T`
- * and returns corresponing instances.
+ * and returns corresponding instances.
*
* For example to extract a reference to an object as object itself:
*
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 318fdb369a..96aab48e75 100644
--- a/src/reflect/scala/reflect/api/Mirror.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -58,7 +58,7 @@ abstract class Mirror[U <: Universe with Singleton] {
* scala> cm.staticPackage("scala")
* res2: scala.reflect.runtime.universe.ModuleSymbol = package scala
*
- * scala> res2.moduleClass.info member newTypeName("List")
+ * scala> res2.moduleClass.info member TypeName("List")
* res3: scala.reflect.runtime.universe.Symbol = type List
*
* scala> res3.fullName
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index ec420d184c..adaf829b32 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -292,7 +292,7 @@ trait Mirrors { self: Universe =>
* that can be used to create instances of the class, inspect its companion object or perform further reflections.
*
* To get a class symbol by the name of the class you would like to reflect,
- * use `<this mirror>.symbol.info.member(newTypeName(<name of the class>)).asClass`.
+ * use `<this mirror>.symbol.info.member(TypeName(<name of the class>)).asClass`.
* For further information about member lookup refer to `Symbol.info`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
@@ -338,7 +338,7 @@ trait Mirrors { self: Universe =>
* with getting a field or invoking a getter method of the field.
*
* If `symbol` represents a field of a base class with respect to the class of the receiver,
- * and this base field is overriden in the class of the receiver, then this method will retrieve
+ * and this base field is overridden in the class of the receiver, then this method will retrieve
* the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
*/
def get: Any
@@ -352,7 +352,7 @@ trait Mirrors { self: Universe =>
* with setting a field or invoking a setter method of the field.
*
* If `symbol` represents a field of a base class with respect to the class of the receiver,
- * and this base field is overriden in the class of the receiver, then this method will set
+ * and this base field is overridden in the class of the receiver, then this method will set
* the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
*/
def set(value: Any): Unit
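
A minimal runtime-reflection sketch (not part of the patch) of `get` and `set` on a field mirror; class `Point` is a hypothetical example:

    import scala.reflect.runtime.{ universe => ru }

    class Point(var x: Int)

    object FieldMirrorDemo extends App {
      val mirror = ru.runtimeMirror(getClass.getClassLoader)
      val p      = new Point(1)
      val im     = mirror.reflect(p)

      // Look the field up by TermName, as described above, then reflect it.
      val xSym = ru.typeOf[Point].decl(ru.TermName("x")).asTerm
      val fm   = im.reflectField(xSym)

      println(fm.get) // 1
      fm.set(42)
      println(p.x)    // 42
    }
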
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index fe5f47c25d..472da60338 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -17,11 +17,11 @@ import scala.language.implicitConversions
* To search for the `map` method (which is a term) declared in the `List` class, one can do:
*
* {{{
- * scala> typeOf[List[_]].member(newTermName("map"))
+ * scala> typeOf[List[_]].member(TermName("map"))
* res0: reflect.runtime.universe.Symbol = method map
* }}}
*
- * To search for a type member, one can follow the same procedure, using `newTypeName` instead.
+ * To search for a type member, one can follow the same procedure, using `TypeName` instead.
*
* For more information about creating and using `Name`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
*
@@ -30,14 +30,14 @@ import scala.language.implicitConversions
*/
trait Names {
/** An implicit conversion from String to TermName.
- * Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
+ * Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`.
* @group Names
*/
@deprecated("Use explicit `TermName(s)` instead", "2.11.0")
implicit def stringToTermName(s: String): TermName = TermName(s)
/** An implicit conversion from String to TypeName.
- * Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
+ * Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`.
* @group Names
*/
@deprecated("Use explicit `TypeName(s)` instead", "2.11.0")
@@ -72,10 +72,10 @@ trait Names {
* @group API
*/
abstract class NameApi {
- /** Checks wether the name is a term name */
+ /** Checks whether the name is a term name */
def isTermName: Boolean
- /** Checks wether the name is a type name */
+ /** Checks whether the name is a type name */
def isTypeName: Boolean
/** Returns a term name that wraps the same string as `this` */
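
A small sketch (not part of the patch) of term versus type lookup with the two name kinds; trait `Repo` and its members are hypothetical:

    import scala.reflect.runtime.universe._

    trait Repo {
      type Key
      def get(k: Key): Option[String]
    }

    object NameLookupDemo extends App {
      println(typeOf[Repo].member(TermName("get")))             // method get
      println(typeOf[Repo].member(TypeName("Key")))             // type Key
      // The same string as a TermName finds no member of that kind:
      println(typeOf[Repo].member(TermName("Key")) == NoSymbol) // true
    }
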
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 92ae6d8b44..01b9759c70 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -46,15 +46,15 @@ import java.io.{ PrintWriter, StringWriter }
* {{{
* scala> showRaw(tree)
* res1: String = Block(List(
- * ClassDef(Modifiers(FINAL), newTypeName("C"), List(), Template(
- * List(Ident(newTypeName("AnyRef"))),
+ * ClassDef(Modifiers(FINAL), TypeName("C"), List(), Template(
+ * List(Ident(TypeName("AnyRef"))),
* noSelfType,
* List(
* DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
* Block(List(
* Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
* Literal(Constant(())))),
- * DefDef(Modifiers(), newTermName("x"), List(), List(), TypeTree(),
+ * DefDef(Modifiers(), TermName("x"), List(), List(), TypeTree(),
* Literal(Constant(2))))))),
* Literal(Constant(())))
* }}}
@@ -70,23 +70,23 @@ import java.io.{ PrintWriter, StringWriter }
*
* scala> showRaw(cm.mkToolBox().typecheck(tree), printTypes = true)
* res2: String = Block[1](List(
- * ClassDef[2](Modifiers(FINAL), newTypeName("C"), List(), Template[3](
- * List(Ident[4](newTypeName("AnyRef"))),
+ * ClassDef[2](Modifiers(FINAL), TypeName("C"), List(), Template[3](
+ * List(Ident[4](TypeName("AnyRef"))),
* noSelfType,
* List(
* DefDef[2](Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree[3](),
* Block[1](List(
- * Apply[4](Select[5](Super[6](This[3](newTypeName("C")), tpnme.EMPTY), ...))),
+ * Apply[4](Select[5](Super[6](This[3](TypeName("C")), tpnme.EMPTY), ...))),
* Literal[1](Constant(())))),
- * DefDef[2](Modifiers(), newTermName("x"), List(), List(), TypeTree[7](),
+ * DefDef[2](Modifiers(), TermName("x"), List(), List(), TypeTree[7](),
* Literal[8](Constant(2))))))),
* Literal[1](Constant(())))
* [1] TypeRef(ThisType(scala), scala.Unit, List())
* [2] NoType
- * [3] TypeRef(NoPrefix, newTypeName("C"), List())
+ * [3] TypeRef(NoPrefix, TypeName("C"), List())
* [4] TypeRef(ThisType(java.lang), java.lang.Object, List())
* [5] MethodType(List(), TypeRef(ThisType(java.lang), java.lang.Object, List()))
- * [6] SuperType(ThisType(newTypeName("C")), TypeRef(... java.lang.Object ...))
+ * [6] SuperType(ThisType(TypeName("C")), TypeRef(... java.lang.Object ...))
* [7] TypeRef(ThisType(scala), scala.Int, List())
* [8] ConstantType(Constant(2))
* }}}
@@ -112,10 +112,10 @@ import java.io.{ PrintWriter, StringWriter }
* // showRaw has already been discussed above
* scala> showRaw(tpe)
* res1: String = RefinedType(
- * List(TypeRef(ThisType(scala), newTypeName("AnyRef"), List())),
+ * List(TypeRef(ThisType(scala), TypeName("AnyRef"), List())),
* Scope(
- * newTermName("x"),
- * newTermName("y")))
+ * TermName("x"),
+ * TermName("y")))
* }}}
*
* `printIds` and/or `printKinds` can additionally be supplied as arguments in a call to
@@ -124,10 +124,10 @@ import java.io.{ PrintWriter, StringWriter }
* {{{
* scala> showRaw(tpe, printIds = true, printKinds = true)
* res2: String = RefinedType(
- * List(TypeRef(ThisType(scala#2043#PK), newTypeName("AnyRef")#691#TPE, List())),
+ * List(TypeRef(ThisType(scala#2043#PK), TypeName("AnyRef")#691#TPE, List())),
* Scope(
- * newTermName("x")#2540#METH,
- * newTermName("y")#2541#GET))
+ * TermName("x")#2540#METH,
+ * TermName("y")#2541#GET))
* }}}
*
* For more details about `Printer`s and other aspects of Scala reflection, see the
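
A minimal sketch (not part of the patch) of producing the kind of tree used in the examples above and printing it; it assumes scala-compiler is on the classpath for the toolbox:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{ currentMirror => cm }
    import scala.tools.reflect.ToolBox

    object PrintersDemo extends App {
      val tree = q"{ final class C { def x = 2 }; () }"

      println(show(tree))    // user-friendly Scala-like syntax
      println(showRaw(tree)) // raw AST: Block(List(ClassDef(Modifiers(FINAL), TypeName("C"), ...

      // Typechecking with a toolbox makes printTypes informative, as shown above.
      val typed = cm.mkToolBox().typecheck(tree)
      println(showRaw(typed, printTypes = true))
    }
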
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index 524b7ea14b..bf9cf5e334 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -128,7 +128,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
@@ -156,7 +156,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
@@ -181,7 +181,7 @@ trait StandardDefinitions {
* scala> import scala.reflect.runtime.universe._
* import scala.reflect.runtime.universe._
*
- * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * scala> val m = typeOf[C].member(TermName("m")).asMethod
* m: reflect.runtime.universe.MethodSymbol = method m
*
* scala> m.params(0)(0).info
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index 42cf600c85..c01029d067 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -27,7 +27,7 @@ package api
* scala> class C[T] { def test[U](x: T)(y: U): Int = ??? }
* defined class C
*
- * scala> val test = typeOf[C[Int]].member(newTermName("test")).asMethod
+ * scala> val test = typeOf[C[Int]].member(TermName("test")).asMethod
* test: reflect.runtime.universe.MethodSymbol = method test
*
* scala> test.info
@@ -339,7 +339,7 @@ trait Symbols { self: Universe =>
@deprecated("Use `overrides` instead", "2.11.0")
def allOverriddenSymbols: List[Symbol]
- /** Returns all symbols overriden by this symbol.
+ /** Returns all symbols overridden by this symbol.
*
* @group Basics
*/
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index ff8926651b..9ecd87c17e 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -33,7 +33,7 @@ package api
*
* The following creates an AST representing `print("Hello World")`:
* {{{
- * Apply(Select(Select(This(newTypeName("scala")), newTermName("Predef")), newTermName("print")), List(Literal(Constant("Hello World"))))
+ * Apply(Select(Select(This(TypeName("scala")), TermName("Predef")), TermName("print")), List(Literal(Constant("Hello World"))))
* }}}
*
* The following creates an AST from a literal 5, and then uses `showRaw` to print it in a readable format.
@@ -158,7 +158,7 @@ trait Trees { self: Universe =>
/** Do all parts of this tree satisfy predicate `p`? */
def forAll(p: Tree => Boolean): Boolean
- /** Tests whether two trees are structurall equal.
+ /** Tests whether two trees are structurally equal.
* Note that `==` on trees is reference equality.
*/
def equalsStructure(that : Tree): Boolean
@@ -1098,11 +1098,11 @@ trait Trees { self: Universe =>
* // a dummy node that carries the type of unapplication to patmat
* // the <unapply-selector> here doesn't have an underlying symbol
* // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable
- * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * Apply(Select(Ident(Foo), TermName("unapply")), List(Ident(TermName("<unapply-selector>")))),
* // arguments of the unapply => nothing synthetic here
- * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * List(Bind(TermName("x"), Ident(nme.WILDCARD)))),
* EmptyTree,
- * Ident(newTermName("x")))))
+ * Ident(TermName("x")))))
* }}}
*
* Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
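
A short sketch (not part of the patch) contrasting a hand-built AST, a quasiquote, and the two equality notions mentioned above (it uses `println` rather than `Predef.print` for brevity):

    import scala.reflect.runtime.universe._

    object TreesDemo extends App {
      val manual: Tree =
        Apply(Ident(TermName("println")), List(Literal(Constant("Hello World"))))
      val quoted: Tree = q"""println("Hello World")"""

      println(showRaw(quoted))
      println(manual equalsStructure quoted) // true: same shape
      println(manual == quoted)              // false: == on trees is reference equality
    }
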
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index fcef4dd6be..6863cdfd82 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -15,7 +15,6 @@ import scala.language.postfixOps
/** AnnotationInfo and its helpers */
trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
import definitions._
- import treeInfo._
// Common annotation code between Symbol and Type.
// For methods altering the annotation list, on Symbol it mutates
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 0ca8611719..54f64153c1 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -144,7 +144,7 @@ trait BaseTypeSeqs {
"\n --- because ---\n"+msg)
}
- /** A merker object for a base type sequence that's no yet computed.
+ /** A marker object for a base type sequence that's not yet computed.
* used to catch inheritance cycles
*/
val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array())
@@ -152,7 +152,7 @@ trait BaseTypeSeqs {
/** Create a base type sequence consisting of a single type */
def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp))
- /** Create the base type sequence of a compound type wuth given tp.parents */
+ /** Create the base type sequence of a compound type with given tp.parents */
def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = {
val tsym = tp.typeSymbol
val parents = tp.parents
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 02578e2038..9f4ec3e6d1 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -514,6 +514,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
+ lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle")
+
// Option classes
lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type]
@@ -653,6 +655,7 @@ trait Definitions extends api.StandardDefinitions {
// tends to change the course of events by forcing types.
def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden)
def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
+ def tupleComponents(tp: Type) = tp.dealiasWiden.typeArgs
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
@@ -789,7 +792,7 @@ trait Definitions extends api.StandardDefinitions {
* The class defining the method is a supertype of `tp` that
* has a public no-arg primary constructor.
*/
- def samOf(tp: Type): Symbol = {
+ def samOf(tp: Type): Symbol = if (!settings.Xexperimental) NoSymbol else {
// if tp has a constructor, it must be public and must not take any arguments
// (not even an implicit argument list -- to keep it simple for now)
val tpSym = tp.typeSymbol
@@ -837,7 +840,7 @@ trait Definitions extends api.StandardDefinitions {
def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe))
def typeOfMemberNamedDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe))
def typesOfSelectors(tp: Type) =
- if (isTupleType(tp)) tp.typeArgs
+ if (isTupleType(tp)) tupleComponents(tp)
else getterMemberTypes(tp, productSelectors(tp))
// SI-8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible)
@@ -904,12 +907,14 @@ trait Definitions extends api.StandardDefinitions {
)
}
- def EnumType(sym: Symbol) =
+ def EnumType(sym: Symbol) = {
// given (in java): "class A { enum E { VAL1 } }"
// - sym: the symbol of the actual enumeration value (VAL1)
// - .owner: the ModuleClassSymbol of the enumeration (object E)
// - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
- sym.owner.linkedClassOfClass.tpe
+ // SI-6613 Subsequent runs of the resident compiler demand the phase discipline here.
+ enteringPhaseNotLaterThan(picklerPhase)(sym.owner.linkedClassOfClass).tpe
+ }
/** Given a class symbol C with type parameters T1, T2, ... Tn
* which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn,
@@ -928,7 +933,7 @@ trait Definitions extends api.StandardDefinitions {
// members of class scala.Any
- // TODO these aren't final! They are now overriden in AnyRef/Object. Prior to the fix
+ // TODO these aren't final! They are now overridden in AnyRef/Object. Prior to the fix
// for SI-8129, they were actually *overloaded* by the members in AnyRef/Object.
// We should unfinalize these, override in AnyValClass, and make the overrides final.
// Refchecks never actually looks at these, so its just for consistency.
@@ -1087,6 +1092,10 @@ trait Definitions extends api.StandardDefinitions {
lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation]
lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation]
+ // Java retention annotations
+ lazy val AnnotationRetentionAttr = requiredClass[java.lang.annotation.Retention]
+ lazy val AnnotationRetentionPolicyAttr = requiredClass[java.lang.annotation.RetentionPolicy]
+
// Annotations
lazy val BridgeClass = requiredClass[scala.annotation.bridge]
lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable]
@@ -1111,7 +1120,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaInlineClass = requiredClass[scala.inline]
lazy val ScalaNoInlineClass = requiredClass[scala.noinline]
lazy val SerialVersionUIDAttr = requiredClass[scala.SerialVersionUID]
- lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
+ lazy val SerialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(), List(nme.value -> LiteralAnnotArg(Constant(0))))
lazy val SpecializedClass = requiredClass[scala.specialized]
lazy val ThrowsClass = requiredClass[scala.throws[_]]
lazy val TransientAttr = requiredClass[scala.transient]
@@ -1432,6 +1441,10 @@ trait Definitions extends api.StandardDefinitions {
lazy val isUnbox = unboxMethod.values.toSet[Symbol]
lazy val isBox = boxMethod.values.toSet[Symbol]
+ lazy val Boolean_and = definitions.Boolean_and
+ lazy val Boolean_or = definitions.Boolean_or
+ lazy val Boolean_not = definitions.Boolean_not
+
lazy val Option_apply = getMemberMethod(OptionModule, nme.apply)
lazy val List_apply = DefinitionsClass.this.List_apply
@@ -1497,6 +1510,9 @@ trait Definitions extends api.StandardDefinitions {
lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
+
+ def isPolymorphicSignature(sym: Symbol) = PolySigMethods(sym)
+ private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists)
}
}
}
diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala
index 357abf765f..a330e0accb 100644
--- a/src/reflect/scala/reflect/internal/Depth.scala
+++ b/src/reflect/scala/reflect/internal/Depth.scala
@@ -21,8 +21,20 @@ final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] {
object Depth {
// A don't care value for the depth parameter in lubs/glbs and related operations.
- final val AnyDepth = new Depth(Int.MinValue)
+ // When passed this value, the recursion budget will be inferred from the shape of
+ // the `typeDepth` of the list of types.
+ final val AnyDepthValue = -3
+ final val AnyDepth = new Depth(AnyDepthValue)
+
final val Zero = new Depth(0)
- @inline final def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth)
+ // SI-9018: A negative depth is used to signal that we have breached the recursion limit.
+ // The LUB/GLB implementation will then truncate to Any/Nothing.
+ //
+ // We only really need one of these, but we allow representation of Depth(-1) and Depth(-2)
+ // to mimic the historical choice of 2.10.4.
+ @inline final def apply(depth: Int): Depth = {
+ if (depth < AnyDepthValue) AnyDepth
+ else new Depth(depth)
+ }
}
diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala
index 26f3bfd9d0..ad4cec5b4d 100644
--- a/src/reflect/scala/reflect/internal/Internals.scala
+++ b/src/reflect/scala/reflect/internal/Internals.scala
@@ -9,7 +9,6 @@ import scala.ref.WeakReference
import scala.reflect.api.Universe
import scala.reflect.macros.Attachments
import scala.reflect.internal.util.FreshNameCreator
-import scala.reflect.internal.Flags._
import scala.reflect.internal.util.ListOfNil
trait Internals extends api.Internals {
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 2fca99aff9..0f0f16574e 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -268,7 +268,7 @@ trait Mirrors extends api.Mirrors {
// TODO - having these as objects means they elude the attempt to
// add synchronization in SynchronizedSymbols. But we should either
- // flip on object overrides or find some other accomodation, because
+ // flip on object overrides or find some other accommodation, because
// lazy vals are unnecessarily expensive relative to objects and it
// is very beneficial for a handful of bootstrap symbols to have
// first class identities
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index b50f324074..32d12d305e 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -131,11 +131,11 @@ trait Names extends api.Names {
newTermName(cs, offset, len, cachedString).toTypeName
/** Create a term name from string. */
- @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
+ @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
/** Create a type name from string. */
- @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overriden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
+ @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTypeName(s: String): TypeName = newTermName(s).toTypeName
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index c16d8778d9..4d0e31b037 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -204,7 +204,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
/** Set position of all children of a node
* @param pos A target position.
* Uses the point of the position as the point of all positions it assigns.
- * Uses the start of this position as an Offset position for unpositioed trees
+ * Uses the start of this position as an Offset position for unpositioned trees
* without children.
* @param trees The children to position. All children must be positionable.
*/
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 2ce861898f..98b2c48379 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -546,10 +546,11 @@ trait Printers extends api.Printers { self: SymbolTable =>
import Chars._
val decName = name.decoded
val bslash = '\\'
+ val isDot = (x: Char) => x == '.'
val brackets = List('[',']','(',')','{','}')
def addBackquotes(s: String) =
- if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch)) ||
+ if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch) || isDot(ch)) ||
(name.isOperatorName && decName.exists(isOperatorPart) && decName.exists(isScalaLetter) && !decName.contains(bslash))))
s"`$s`" else s
@@ -761,7 +762,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl
// constructor's modifier
- if (ctorMods.hasFlag(AccessFlags)) {
+ if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) {
print(" ")
printModifiers(ctorMods, primaryCtorParam = false)
}
diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala
index 759bd2e791..c418321234 100644
--- a/src/reflect/scala/reflect/internal/ReificationSupport.scala
+++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala
@@ -7,7 +7,6 @@ import util._
trait ReificationSupport { self: SymbolTable =>
import definitions._
- import internal._
class ReificationSupportImpl extends ReificationSupportApi {
def selectType(owner: Symbol, name: String): TypeSymbol =
@@ -123,7 +122,7 @@ trait ReificationSupport { self: SymbolTable =>
if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
copyValDef(vd)(mods = newmods | extraFlags)
case _ =>
- throw new IllegalArgumentException(s"$tree is not valid represenation of a parameter, " +
+ throw new IllegalArgumentException(s"$tree is not valid representation of a parameter, " +
"""consider reformatting it into q"val $name: $T = $default" shape""")
}
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index 614e71b597..cca33253be 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -27,7 +27,7 @@ trait StdAttachments {
def importAttachment(importer: Importer): this.type
}
- /** Attachment that doesn't contain any reflection artificats and can be imported as-is. */
+ /** Attachment that doesn't contain any reflection artifacts and can be imported as-is. */
trait PlainAttachment extends ImportableAttachment {
def importAttachment(importer: Importer): this.type = this
}
@@ -42,7 +42,7 @@ trait StdAttachments {
*/
case object BackquotedIdentifierAttachment extends PlainAttachment
- /** Identifies trees are either result or intermidiate value of for loop desugaring.
+ /** Identifies trees that are either the result or an intermediate value of for-loop desugaring.
*/
case object ForAttachment extends PlainAttachment
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 99ff6a10b4..ea07fb2a74 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -111,6 +111,7 @@ trait StdNames {
val PACKAGE: NameType = "package"
val ROOT: NameType = "<root>"
val SPECIALIZED_SUFFIX: NameType = "$sp"
+ val CASE_ACCESSOR: NameType = "$access"
// value types (and AnyRef) are all used as terms as well
// as (at least) arguments to the @specialize annotation.
@@ -128,6 +129,7 @@ trait StdNames {
final val AnyRef: NameType = "AnyRef"
final val Array: NameType = "Array"
final val List: NameType = "List"
+ final val Option: NameType = "Option"
final val Seq: NameType = "Seq"
final val Symbol: NameType = "Symbol"
final val WeakTypeTag: NameType = "WeakTypeTag"
@@ -247,6 +249,7 @@ trait StdNames {
final val Unliftable: NameType = "Unliftable"
final val Name: NameType = "Name"
final val Tree: NameType = "Tree"
+ final val Text: NameType = "Text"
final val TermName: NameType = "TermName"
final val Type : NameType = "Type"
final val TypeName: NameType = "TypeName"
@@ -777,6 +780,7 @@ trait StdNames {
val values : NameType = "values"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
+ val xml: NameType = "xml"
val zero: NameType = "zero"
// quasiquote interpolators:
@@ -1144,6 +1148,7 @@ trait StdNames {
final val GetClassLoader: TermName = newTermName("getClassLoader")
final val GetMethod: TermName = newTermName("getMethod")
final val Invoke: TermName = newTermName("invoke")
+ final val InvokeExact: TermName = newTermName("invokeExact")
val Boxed = immutable.Map[TypeName, TypeName](
tpnme.Boolean -> BoxedBoolean,
diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala
index c088e8f57c..4763e77a34 100644
--- a/src/reflect/scala/reflect/internal/SymbolPairs.scala
+++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala
@@ -8,7 +8,6 @@ package reflect
package internal
import scala.collection.mutable
-import Flags._
import util.HashSet
import scala.annotation.tailrec
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 7539b6e046..bea6979431 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -338,7 +338,6 @@ abstract class SymbolTable extends macros.Universe
case _ => false
}
if (pkgModule.isModule && !fromSource) {
- // println("open "+pkgModule)//DEBUG
openPackageModule(pkgModule, pkgClass)
}
}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index e65c55885e..f2aa14b866 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -173,7 +173,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
with HasFlags
with Annotatable[Symbol]
with Attachable {
-
// makes sure that all symbols that runtime reflection deals with are synchronized
private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol]
private def isAprioriThreadsafe = isThreadsafe(AllOps)
@@ -182,7 +181,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
type AccessBoundaryType = Symbol
type AnnotationType = AnnotationInfo
- // TODO - don't allow names to be renamed in this unstructured a fashion.
+ // TODO - don't allow names to be renamed in this unstructured fashion.
// Rename as little as possible. Enforce invariants on all renames.
type TypeOfClonedSymbol >: Null <: Symbol { type NameType = Symbol.this.NameType }
@@ -683,7 +682,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to fix the core of the compiler risk stability a few weeks before the final release.
* upd. Haha, "a few weeks before the final release". This surely sounds familiar :)
*
- * However we do need to fix this for runtime reflection, since this idionsynchrazy is not something
+ * However we do need to fix this for runtime reflection, since this idiosyncrasy is not something
* we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a
* runtime reflection universe, and if yes and if we've not yet loaded the requested info, then to commence initialization.
*/
@@ -735,31 +734,31 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def hasGetter = isTerm && nme.isLocalName(name)
- /** A little explanation for this confusing situation.
- * Nested modules which have no static owner when ModuleDefs
- * are eliminated (refchecks) are given the lateMETHOD flag,
- * which makes them appear as methods after refchecks.
- * Here's an example where one can see all four of FF FT TF TT
- * for (isStatic, isMethod) at various phases.
+ /**
+ * Nested modules which have no static owner when ModuleDefs are eliminated (refchecks) are
+ * given the lateMETHOD flag, which makes them appear as methods after refchecks.
*
- * trait A1 { case class Quux() }
- * object A2 extends A1 { object Flax }
- * // -- namer object Quux in trait A1
- * // -M flatten object Quux in trait A1
- * // S- flatten object Flax in object A2
- * // -M posterasure object Quux in trait A1
- * // -M jvm object Quux in trait A1
- * // SM jvm object Quux in object A2
+ * Note: the lateMETHOD flag is added lazily in the info transformer of the RefChecks phase.
+ * This means that forcing the `sym.info` may change the value of `sym.isMethod`. Forcing the
+ * info is the responsibility of the caller. Doing it eagerly here was tried (0ccdb151f) but
+ * has proven to lead to bugs (SI-8907).
*
- * So "isModuleNotMethod" exists not for its achievement in
- * brevity, but to encapsulate the relevant condition.
+ * Here's an example where one can see all four of FF FT TF TT for (isStatic, isMethod) at
+ * various phases.
+ *
+ * trait A1 { case class Quux() }
+ * object A2 extends A1 { object Flax }
+ * // -- namer object Quux in trait A1
+ * // -M flatten object Quux in trait A1
+ * // S- flatten object Flax in object A2
+ * // -M posterasure object Quux in trait A1
+ * // -M jvm object Quux in trait A1
+ * // SM jvm object Quux in object A2
+ *
+ * So "isModuleNotMethod" exists not for its achievement in brevity, but to encapsulate the
+ * relevant condition.
*/
- def isModuleNotMethod = {
- if (isModule) {
- if (phase.refChecked) this.info // force completion to make sure lateMETHOD is there.
- !isMethod
- } else false
- }
+ def isModuleNotMethod = isModule && !isMethod
// After RefChecks, the `isStatic` check is mostly redundant: all non-static modules should
// be methods (and vice versa). There's a corner case on the vice-versa with mixed-in module
@@ -793,6 +792,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isDefinedInPackage = effectiveOwner.isPackageClass
final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+ // TODO introduce a flag for these?
+ final def isPatternTypeVariable: Boolean =
+ isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock
+
/** change name by appending $$<fully-qualified-name-of-class `base`>
* Do the same for any accessed symbols or setters/getters.
* Implementation in TermSymbol.
@@ -1464,11 +1467,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def info: Type = try {
var cnt = 0
while (validTo == NoPeriod) {
- //if (settings.debug.value) System.out.println("completing " + this);//DEBUG
assert(infos ne null, this.name)
assert(infos.prev eq null, this.name)
val tp = infos.info
- //if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug
if ((_rawflags & LOCKED) != 0L) { // rolled out once for performance
lock {
@@ -1477,6 +1478,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
} else {
_rawflags |= LOCKED
+ // TODO more commented-out lines - this should be solved one way or another
// activeLocks += 1
// lockedSyms += this
}
@@ -1598,13 +1600,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
assert(isCompilerUniverse)
if (infos == null || runId(infos.validFrom) == currentRunId) {
infos
- } else if (isPackageClass) {
- // SI-7801 early phase package scopes are mutated in new runs (Namers#enterPackage), so we have to
- // discard transformed infos, rather than just marking them as from this run.
- val oldest = infos.oldest
- oldest.validFrom = validTo
- this.infos = oldest
- oldest
+ } else if (infos ne infos.oldest) {
+ // SI-8871 Discard all but the first element of type history. Specialization only works in the resident
+ // compiler / REPL if we re-run its info transformer in this run to correctly populate its
+ // per-run caches, e.g. typeEnv
+ adaptInfos(infos.oldest)
} else {
val prev1 = adaptInfos(infos.prev)
if (prev1 ne infos.prev) prev1
@@ -2029,12 +2029,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList
/** The symbol accessed by this accessor (getter or setter) function. */
- final def accessed: Symbol = accessed(owner.info)
-
- /** The symbol accessed by this accessor function, but with given owner type. */
- final def accessed(ownerTp: Type): Symbol = {
+ final def accessed: Symbol = {
assert(hasAccessorFlag, this)
- ownerTp decl localName
+ val localField = owner.info decl localName
+
+ if (localField == NoSymbol && this.hasFlag(MIXEDIN)) {
+ // SI-8087: private[this] fields don't have a `localName`. When searching the accessed field
+ // for a mixin accessor of such a field, we need to look for `name` instead.
+ // The phase travel ensures that the field is found (`owner` is the trait class symbol, the
+ // field gets removed from there in later phases).
+ enteringPhase(picklerPhase)(owner.info).decl(name).suchThat(!_.isAccessor)
+ } else {
+ localField
+ }
}
/** The module corresponding to this module class (note that this
@@ -2156,6 +2163,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (isClass) this else moduleClass
} else owner.enclosingTopLevelClass
+ /** The top-level class or local dummy symbol containing this symbol. */
+ def enclosingTopLevelClassOrDummy: Symbol =
+ if (isTopLevel) {
+ if (isClass) this else moduleClass.orElse(this)
+ } else owner.enclosingTopLevelClassOrDummy
+
/** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
def isCoDefinedWith(that: Symbol) = (
!rawInfoIsNoType
@@ -2805,7 +2818,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def outerSource: Symbol =
// SI-6888 Approximate the name to workaround the deficiencies in `nme.originalName`
- // in the face of clases named '$'. SI-2806 remains open to address the deeper problem.
+ // in the face of classes named '$'. SI-2806 remains open to address the deeper problem.
if (originalName endsWith (nme.OUTER)) initialize.referenced
else NoSymbol
@@ -3420,10 +3433,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
trait StubSymbol extends Symbol {
devWarning("creating stub symbol to defer error: " + missingMessage)
- protected def missingMessage: String
+ def missingMessage: String
/** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
- override final def failIfStub() = {MissingRequirementError.signal(missingMessage)} //
+ override final def failIfStub() =
+ MissingRequirementError.signal(missingMessage)
/** Fail the stub by reporting an error to the reporter, setting the IS_ERROR flag
* on this symbol, and returning the dummy value `alt`.
@@ -3448,8 +3462,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def rawInfo = fail(NoType)
override def companionSymbol = fail(NoSymbol)
}
- class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
- class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubClassSymbol(owner0: Symbol, name0: TypeName, val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubTermSymbol(owner0: Symbol, name0: TermName, val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
trait FreeSymbol extends Symbol {
def origin: String
@@ -3500,6 +3514,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclClassChain = Nil
override def enclClass: Symbol = this
override def enclosingTopLevelClass: Symbol = this
+ override def enclosingTopLevelClassOrDummy: Symbol = this
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
override def associatedFile = NoAbstractFile
@@ -3557,7 +3572,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param syms the prototypical symbols
* @param symFn the function to create new symbols
* @param tpe the prototypical type
- * @return the new symbol-subsituted type
+ * @return the new symbol-substituted type
*/
def deriveType(syms: List[Symbol], symFn: Symbol => Symbol)(tpe: Type): Type = {
val syms1 = deriveSymbols(syms, symFn)
@@ -3572,7 +3587,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param as arguments to be passed to symFn together with symbols from syms (must be same length)
* @param symFn the function to create new symbols based on `as`
* @param tpe the prototypical type
- * @return the new symbol-subsituted type
+ * @return the new symbol-substituted type
*/
def deriveType2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol)(tpe: Type): Type = {
val syms1 = deriveSymbols2(syms, as, symFn)
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 6ddd49045c..4cedfe2665 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -713,7 +713,7 @@ abstract class TreeGen {
val rhsUnchecked = mkUnchecked(rhs)
- // TODO: clean this up -- there is too much information packked into mkPatDef's `pat` argument
+ // TODO: clean this up -- there is too much information packed into mkPatDef's `pat` argument
// when it's a simple identifier (case Some((name, tpt)) -- above),
// pat should have the type ascription that was specified by the user
// however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index c521277f69..7ad5fdf096 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -128,6 +128,7 @@ abstract class TreeInfo {
symOk(tree.symbol)
&& tree.symbol.isStable
&& !definitions.isByNameParamType(tree.tpe)
+ && !definitions.isByName(tree.symbol)
&& (allowVolatile || !tree.symbol.hasVolatileType) // TODO SPEC: not required by spec
)
@@ -588,7 +589,7 @@ abstract class TreeInfo {
private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol
- /** Is this pattern node a synthetic catch-all case, added during PartialFuction synthesis before we know
+ /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
* whether the user provided cases are exhaustive. */
def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
@@ -815,7 +816,7 @@ abstract class TreeInfo {
object Unapplied {
// Duplicated with `spliceApply`
def unapply(tree: Tree): Option[Tree] = tree match {
- // SI-7868 Admit Select() to account for numeric widening, e.g. <unappplySelector>.toInt
+ // SI-7868 Admit Select() to account for numeric widening, e.g. <unapplySelector>.toInt
case Apply(fun, (Ident(nme.SELECTOR_DUMMY)| Select(Ident(nme.SELECTOR_DUMMY), _)) :: Nil)
=> Some(fun)
case Apply(fun, _) => unapply(fun)
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 9dc4baee32..35de3adff6 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -8,8 +8,8 @@ package reflect
package internal
import Flags._
-import pickling.PickleFormat._
import scala.collection.{ mutable, immutable }
+import scala.reflect.macros.Attachments
import util.Statistics
trait Trees extends api.Trees {
@@ -1075,6 +1075,13 @@ trait Trees extends api.Trees {
override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this }
override def tpe_=(t: Type) = setType(t)
+ // We silently ignore attempts to add attachments to `EmptyTree`. See SI-8947 for an
+ // example of a bug in macro expansion that this solves.
+ override def setAttachments(attachments: Attachments {type Pos = Position}): this.type = attachmentWarning()
+ override def updateAttachment[T: ClassTag](attachment: T): this.type = attachmentWarning()
+ override def removeAttachment[T: ClassTag]: this.type = attachmentWarning()
+ private def attachmentWarning(): this.type = {devWarning(s"Attempt to mutate attachments on $self ignored"); this}
+
private def requireLegal(value: Any, allowed: Any, what: String) = (
if (value != allowed) {
log(s"can't set $what for $self to value other than $allowed")
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 325de013d7..4d230cc1cc 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -738,7 +738,7 @@ trait Types
* `substThis(from, to).substSym(symsFrom, symsTo)`.
*
* `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that
- * symbol substitution occured before `ThisType` substitution. Consequently, in substitution
+ * symbol substitution occurred before `ThisType` substitution. Consequently, in substitution
* of a `SingleType(ThisType(`from`), sym), symbols were rebound to `from` rather than `to`.
*/
def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type =
@@ -1701,7 +1701,7 @@ trait Types
*/
private var refs: Array[RefMap] = _
- /** The initialization state of the class: UnInialized --> Initializing --> Initialized
+ /** The initialization state of the class: UnInitialized --> Initializing --> Initialized
* Syncnote: This var need not be protected with synchronized, because
* it is accessed only from expansiveRefs, which is called only from
* Typer.
@@ -1986,7 +1986,7 @@ trait Types
require(sym.isNonClassType, sym)
/* Syncnote: These are pure caches for performance; no problem to evaluate these
- * several times. Hence, no need to protected with synchronzied in a mutli-threaded
+ * several times. Hence, no need to protect them with synchronized in a multi-threaded
* usage scenario.
*/
private var relativeInfoCache: Type = _
@@ -2657,7 +2657,7 @@ trait Types
* nowhere inside a type argument
* - no quantified type argument contains a quantified variable in its bound
* - the typeref's symbol is not itself quantified
- * - the prefix is not quanitified
+ * - the prefix is not quantified
*/
def isRepresentableWithWildcards = {
val qset = quantified.toSet
@@ -3115,7 +3115,7 @@ trait Types
// addressed here: all lower bounds are retained and their intersection calculated when the
// bounds are solved.
//
- // In a side-effect free universe, checking tp and tp.parents beofre checking tp.baseTypeSeq
+ // In a side-effect free universe, checking tp and tp.parents before checking tp.baseTypeSeq
// would be pointless. In this case, each check we perform causes us to lose specificity: in
// the end the best we'll do is the least specific type we tested against, since the typevar
// does not see these checks as "probes" but as requirements to fulfill.
@@ -3346,7 +3346,7 @@ trait Types
*
* SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`,
* which contain `ErasedValueType`-s. In order to correctly consider the overriding
- * and overriden signatures as equivalent in `run/t6385.scala`, it is critical that
+ * and overridden signatures as equivalent in `run/t6385.scala`, it is critical that
* this type contains the erasure of the wrapped type, rather than the unerased type
* of the value class itself, as was originally done.
*
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
index 12b765b7a6..ef22df3f2e 100644
--- a/src/reflect/scala/reflect/internal/Variances.scala
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -32,7 +32,7 @@ trait Variances {
/** Is every symbol in the owner chain between `site` and the owner of `sym`
* either a term symbol or private[this]? If not, add `sym` to the set of
- * esacped locals.
+ * escaped locals.
* @pre sym.isLocalToThis
*/
@tailrec final def checkForEscape(sym: Symbol, site: Symbol) {
diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
index a44bb54734..662d841c91 100644
--- a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
+++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
@@ -5,7 +5,7 @@ package annotations
/**
* An annotation that designates the annotated type should not be checked for violations of
* type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized
- * code the uses an inferred type of an expression as the type of an artifict val/def (for example,
+ * code that uses an inferred type of an expression as the type of an artifact val/def (for example,
* a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]].
*
* @since 2.10.3
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
index 8615e34fad..241638e88e 100644
--- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -196,10 +196,10 @@ object ByteCodecs {
*
* Sometimes returns (length+1) of the decoded array. Example:
*
- * scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * scala> val enc = scala.reflect.internal.pickling.ByteCodecs.encode(Array(1,2,3))
* enc: Array[Byte] = Array(2, 5, 13, 1)
*
- * scala> scala.reflect.generic.ByteCodecs.decode(enc)
+ * scala> scala.reflect.internal.pickling.ByteCodecs.decode(enc)
* res43: Int = 4
*
* scala> enc
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 8d4c3f752f..1fc7aebab0 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -211,7 +211,12 @@ abstract class UnPickler {
def fromName(name: Name) = name.toTermName match {
case nme.ROOT => loadingMirror.RootClass
case nme.ROOTPKG => loadingMirror.RootPackage
- case _ => adjust(owner.info.decl(name))
+ case _ =>
+ val decl = owner match {
+ case stub: StubSymbol => NoSymbol // SI-8502 Don't call .info and fail the stub
+ case _ => owner.info.decl(name)
+ }
+ adjust(decl)
}
def nestedObjectSymbol: Symbol = {
// If the owner is overloaded (i.e. a method), it's not possible to select the
@@ -243,8 +248,14 @@ abstract class UnPickler {
} getOrElse "")
}
+ def localDummy = {
+ if (nme.isLocalDummyName(name))
+ owner.newLocalDummy(NoPosition)
+ else NoSymbol
+ }
+
// (1) Try name.
- fromName(name) orElse {
+ localDummy orElse fromName(name) orElse {
// (2) Try with expanded name. Can happen if references to private
// symbols are read from outside: for instance when checking the children
// of a class. See #1722.
@@ -298,6 +309,7 @@ abstract class UnPickler {
* (.) ...
* (1) `local child` represents local child classes, see comment in Pickler.putSymbol.
* Since it is not a member, it should not be entered in the owner's scope.
+ * (2) Similarly, we ignore local dummy symbols, as seen in SI-8868
*/
def shouldEnterInOwnerScope = {
sym.owner.isClass &&
@@ -307,7 +319,8 @@ abstract class UnPickler {
!sym.isRefinementClass &&
!sym.isTypeParameter &&
!sym.isExistentiallyBound &&
- sym.rawname != tpnme.LOCAL_CHILD // (1)
+ sym.rawname != tpnme.LOCAL_CHILD && // (1)
+ !nme.isLocalDummyName(sym.rawname) // (2)
}
markFlagsCompleted(sym)(mask = AllFlags)
@@ -381,14 +394,24 @@ abstract class UnPickler {
case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz)
}
+ def readThisType(): Type = {
+ val sym = readSymbolRef() match {
+ case stub: StubSymbol if !stub.isClass =>
+ // SI-8502 This allows us to create a stub for an unpickled reference to `missingPackage.Foo`.
+ stub.owner.newStubSymbol(stub.name.toTypeName, stub.missingMessage)
+ case sym => sym
+ }
+ ThisType(sym)
+ }
+
// We're stuck with the order types are pickled in, but with judicious use
// of named parameters we can recapture a declarative flavor in a few cases.
// But it's still a rat's nest of adhockery.
(tag: @switch) match {
case NOtpe => NoType
case NOPREFIXtpe => NoPrefix
- case THIStpe => ThisType(readSymbolRef())
- case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef())
+ case THIStpe => readThisType()
+ case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // SI-7596 account for overloading
case SUPERtpe => SuperType(readTypeRef(), readTypeRef())
case CONSTANTtpe => ConstantType(readConstantRef())
case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes())
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index a494c7f0d0..38893d8db3 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -31,6 +31,9 @@ abstract class MutableSettings extends AbsSettings {
v = arg
postSetHook()
}
+
+ /** Returns Some(value) in the case of a value set by user and None otherwise. */
+ def valueSetByUser: Option[T] = if (isSetByUser) Some(value) else None
}
def Xexperimental: BooleanSetting
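
The new `valueSetByUser` helper lets callers distinguish an explicitly set option from a silently applied default. A small sketch of that contract outside the compiler, using a hypothetical settings-like class (none of these names come from the patch):

    // Hypothetical stand-in for a MutableSettings-style option.
    final class IntOption(default: Int) {
      private var v: Int = default
      private var setByUser = false
      def value: Int = v
      def value_=(x: Int): Unit = { v = x; setByUser = true }
      def isSetByUser: Boolean = setByUser
      // Mirrors the added API: Some(value) only when the user set it explicitly.
      def valueSetByUser: Option[Int] = if (isSetByUser) Some(value) else None
    }

    object ValueSetByUserDemo extends App {
      val opt = new IntOption(default = 8)
      println(opt.valueSetByUser)              // None: still the default
      opt.value = 64
      println(opt.valueSetByUser)              // Some(64): explicitly set
      println(opt.valueSetByUser.getOrElse(8)) // typical caller pattern
    }
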
diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
index de54f3768e..42b13944f6 100644
--- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala
@@ -12,7 +12,7 @@ import TypesStats._
trait FindMembers {
this: SymbolTable =>
- /** Implementatation of `Type#{findMember, findMembers}` */
+ /** Implementation of `Type#{findMember, findMembers}` */
private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) {
protected val initBaseClasses: List[Symbol] = tpe.baseClasses
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 876685e24a..123b44aa05 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -347,7 +347,9 @@ private[internal] trait GlbLubs {
def lubsym(proto: Symbol): Symbol = {
val prototp = lubThisType.memberInfo(proto)
val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
+ // SI-7602 With erroneous code, we could end up with overloaded symbols after filtering
+ // so `suchThat` is unsuitable.
+ t.nonPrivateMember(proto.name).filter(sym =>
sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
if (syms contains NoSymbol) NoSymbol
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index c1c43178e5..f79099213a 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -75,7 +75,7 @@ private[internal] trait TypeConstraints {
/* Syncnote: Type constraints are assumed to be used from only one
* thread. They are not exposed in api.Types and are used only locally
* in operations that are exposed from types. Hence, no syncing of any
- * variables should be ncessesary.
+ * variables should be necessary.
*/
/** Guard these lists against AnyClass and NothingClass appearing,
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index f06420de96..c705ca7069 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -422,6 +422,22 @@ private[internal] trait TypeMaps {
}
}
+ /**
+ * Get rid of BoundedWildcardType where variance allows us to do so.
+ * Invariant: `wildcardExtrapolation(tp) =:= tp`
+ *
+ * For example, the MethodType given by `def bla(x: (_ >: String)): (_ <: Int)`
+ * is both a subtype and a supertype of `def bla(x: String): Int`.
+ */
+ object wildcardExtrapolation extends TypeMap(trackVariance = true) {
+ def apply(tp: Type): Type =
+ tp match {
+ case BoundedWildcardType(TypeBounds(lo, AnyTpe)) if variance.isContravariant => lo
+ case BoundedWildcardType(TypeBounds(NothingTpe, hi)) if variance.isCovariant => hi
+ case tp => mapOver(tp)
+ }
+ }
+
/** Might the given symbol be important when calculating the prefix
* of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
* the result will be `tp` unchanged if `pre` is trivial and `clazz`
diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
index f0c7d0f050..dd4f044818 100644
--- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala
@@ -5,7 +5,6 @@ package transform
trait PostErasure {
val global: SymbolTable
import global._
- import definitions._
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index 10a8b4c812..30dcbc21ca 100644
--- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -5,16 +5,16 @@
package scala
package reflect.internal.util
-import scala.reflect.io.AbstractFile
+import scala.collection.{ mutable, immutable }
+import scala.reflect.io.{ AbstractFile, Streamable }
+import java.net.{ URL, URLConnection, URLStreamHandler }
import java.security.cert.Certificate
import java.security.{ ProtectionDomain, CodeSource }
-import java.net.{ URL, URLConnection, URLStreamHandler }
-import scala.collection.{ mutable, immutable }
+import java.util.{ Collections => JCollections, Enumeration => JEnumeration }
-/**
- * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
+/** A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
*
- * @author Lex Spoon
+ * @author Lex Spoon
*/
class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
extends ClassLoader(parent)
@@ -22,7 +22,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
{
protected def classNameToPath(name: String): String =
if (name endsWith ".class") name
- else name.replace('.', '/') + ".class"
+ else s"${name.replace('.', '/')}.class"
protected def findAbstractFile(name: String): AbstractFile = {
var file: AbstractFile = root
@@ -56,35 +56,25 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
file
}
- // parent delegation in JCL uses getResource; so either add parent.getResAsStream
- // or implement findResource, which we do here as a study in scarlet (my complexion
- // after looking at CLs and URLs)
- override def findResource(name: String): URL = findAbstractFile(name) match {
+ override protected def findClass(name: String): Class[_] = {
+ val bytes = classBytes(name)
+ if (bytes.length == 0)
+ throw new ClassNotFoundException(name)
+ else
+ defineClass(name, bytes, 0, bytes.length, protectionDomain)
+ }
+ override protected def findResource(name: String): URL = findAbstractFile(name) match {
case null => null
- case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
+ case file => new URL(null, s"memory:${file.path}", new URLStreamHandler {
override def openConnection(url: URL): URLConnection = new URLConnection(url) {
- override def connect() { }
+ override def connect() = ()
override def getInputStream = file.input
}
})
}
-
- // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
- override def getResourceAsStream(name: String) = findAbstractFile(name) match {
- case null => super.getResourceAsStream(name)
- case file => file.input
- }
- // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
- override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match {
- case null => super.classBytes(name)
- case file => file.toByteArray
- }
- override def findClass(name: String): Class[_] = {
- val bytes = classBytes(name)
- if (bytes.length == 0)
- throw new ClassNotFoundException(name)
- else
- defineClass(name, bytes, 0, bytes.length, protectionDomain)
+ override protected def findResources(name: String): JEnumeration[URL] = findResource(name) match {
+ case null => JCollections.enumeration(JCollections.emptyList[URL]) //JCollections.emptyEnumeration[URL]
+ case url => JCollections.enumeration(JCollections.singleton(url))
}
lazy val protectionDomain = {
@@ -106,15 +96,13 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
throw new UnsupportedOperationException()
}
- override def getPackage(name: String): Package = {
- findAbstractDir(name) match {
- case null => super.getPackage(name)
- case file => packages.getOrElseUpdate(name, {
- val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
- ctor.setAccessible(true)
- ctor.newInstance(name, null, null, null, null, null, null, null, this)
- })
- }
+ override def getPackage(name: String): Package = findAbstractDir(name) match {
+ case null => super.getPackage(name)
+ case file => packages.getOrElseUpdate(name, {
+ val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
+ ctor.setAccessible(true)
+ ctor.newInstance(name, null, null, null, null, null, null, null, this)
+ })
}
override def getPackages(): Array[Package] =
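
The `findResources` override above adapts a single, possibly absent URL to the `java.util.Enumeration` that the `ClassLoader` contract requires. A self-contained sketch of that adaptation (the resource name below is only an example):

    import java.net.URL
    import java.util.{ Collections => JCollections, Enumeration => JEnumeration }

    object SingleResourceEnumeration {
      // null becomes an empty enumeration, a hit becomes a one-element enumeration.
      def enumerate(found: URL): JEnumeration[URL] = found match {
        case null => JCollections.enumeration(JCollections.emptyList[URL])
        case url  => JCollections.enumeration(JCollections.singleton(url))
      }

      def main(args: Array[String]): Unit = {
        val hit = getClass.getClassLoader.getResource("java/lang/Object.class") // may be null on some JREs
        val e = enumerate(hit)
        while (e.hasMoreElements) println(e.nextElement())
      }
    }
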
diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index 63ea6e2c49..41011f6c6b 100644
--- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -53,8 +53,10 @@ trait ScalaClassLoader extends JClassLoader {
}
/** An InputStream representing the given class name, or null if not found. */
- def classAsStream(className: String) =
- getResourceAsStream(className.replaceAll("""\.""", "/") + ".class")
+ def classAsStream(className: String) = getResourceAsStream {
+ if (className endsWith ".class") className
+ else s"${className.replace('.', '/')}.class" // classNameToPath
+ }
/** Run the main method of a class to be loaded by this classloader */
def run(objectName: String, arguments: Seq[String]) {
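
The reworked `classAsStream` leaves names that already end in ".class" untouched and otherwise converts dots to slashes before asking the class loader for the resource. A tiny sketch of that convention (the resource lookup is just an illustration):

    object ClassResourcePath {
      def classAsPath(className: String): String =
        if (className endsWith ".class") className
        else s"${className.replace('.', '/')}.class"

      def main(args: Array[String]): Unit = {
        println(classAsPath("scala.Option"))       // scala/Option.class
        println(classAsPath("scala/Option.class")) // already a path: left as is
        val in = getClass.getClassLoader.getResourceAsStream(classAsPath("scala.Option"))
        println(if (in == null) "not found" else "found")
        if (in != null) in.close()
      }
    }
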
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index a9a7c7780d..3a7a7626fb 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -7,13 +7,13 @@ import scala.collection.generic.Clearable
import scala.collection.mutable.{Set => MSet}
/**
- * A HashSet where the elements are stored weakly. Elements in this set are elligible for GC if no other
+ * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
* hard references are associated with them. Its primary use case is as a canonical reference
* identity holder (aka "hash-consing") via findEntryOrUpdate
*
* This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException
*
- * This set implmeentation is not in general thread safe without external concurrency control. However it behaves
+ * This set implementation is not in general thread safe without external concurrency control. However it behaves
* properly when GC concurrently collects elements in this set.
*/
final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] {
@@ -26,7 +26,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
/**
* queue of Entries that hold elements scheduled for GC
- * the removeStaleEntries() method works through the queue to remeove
+ * the removeStaleEntries() method works through the queue to remove
* stale entries from the table
*/
private[this] val queue = new ReferenceQueue[A]
@@ -62,7 +62,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
/**
- * find the bucket associated with an elements's hash code
+ * find the bucket associated with an element's hash code
*/
private[this] def bucketFor(hash: Int): Int = {
// spread the bits around to try to avoid accidental collisions using the
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index ac1159b2ac..bcefcc471f 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -48,14 +48,16 @@ object AbstractFile {
else null
/**
- * If the specified URL exists and is a readable zip or jar archive,
- * returns an abstract directory backed by it. Otherwise, returns
- * `null`.
+ * If the specified URL exists and is a regular file or a directory, returns an
+ * abstract regular file or an abstract directory, respectively, backed by it.
+ * Otherwise, returns `null`.
*/
- def getURL(url: URL): AbstractFile = {
- if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
- else ZipArchive fromURL url
- }
+ def getURL(url: URL): AbstractFile =
+ if (url.getProtocol == "file") {
+ val f = new java.io.File(url.getPath)
+ if (f.isDirectory) getDirectory(f)
+ else getFile(f)
+ } else null
def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
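
The new `getURL` accepts any file: URL and classifies it as a directory or a regular file instead of handling only zip and jar paths. A hedged sketch of that classification using plain java.io types (AbstractFile itself is not used here):

    import java.io.File
    import java.net.URL

    object UrlToLocalPath {
      def describe(url: URL): String =
        if (url.getProtocol == "file") {
          val f = new File(url.getPath)
          if (f.isDirectory) s"directory: $f"
          else if (f.isFile) s"file: $f"
          else s"missing: $f"
        } else "null (unsupported protocol)"

      def main(args: Array[String]): Unit = {
        println(describe(new File(".").toURI.toURL))
        println(describe(new URL("https://scala-lang.org/")))
      }
    }
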
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 45f38db745..1cb4f2fe6f 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -75,10 +75,10 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
}
/** Does this abstract file denote an existing file? */
- def create() { unsupported() }
+ def create(): Unit = unsupported()
/** Delete the underlying file or directory (recursively). */
- def delete() { unsupported() }
+ def delete(): Unit = unsupported()
/**
* Returns the abstract file in this abstract directory with the
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 8260189459..0c63acb86c 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -74,12 +74,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
def container = unsupported()
def absolute = unsupported()
- private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
- its flatMap { f =>
- if (f.isDirectory) walkIterator(f.iterator)
- else Iterator(f)
- }
- }
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
// have to keep this name for compat with sbt's compiler-interface
@@ -87,6 +81,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
override def underlyingSource = Some(self)
override def toString = self.path + "(" + path + ")"
}
+
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class DirEntry(path: String) extends Entry(path) {
val entries = mutable.HashMap[String, Entry]()
@@ -125,14 +120,15 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
}
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class FileZipArchive(file: JFile) extends ZipArchive(file) {
- def iterator: Iterator[Entry] = {
+ lazy val (root, allDirs) = {
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
val zipFile = try {
new ZipFile(file)
} catch {
case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
}
- val root = new DirEntry("/")
- val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+
val enum = zipFile.entries()
while (enum.hasMoreElements) {
@@ -150,11 +146,11 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) {
dir.entries(f.name) = f
}
}
-
- try root.iterator
- finally dirs.clear()
+ (root, dirs)
}
+ def iterator: Iterator[Entry] = root.iterator
+
def name = file.getName
def path = file.getPath
def input = File(file).inputStream()
@@ -244,11 +240,9 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) {
val manifest = new Manifest(input)
val iter = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_))
- while (iter.hasNext) {
- val zipEntry = iter.next()
+ for (zipEntry <- iter) {
val dir = getDir(dirs, zipEntry)
- if (zipEntry.isDirectory) dir
- else {
+ if (!zipEntry.isDirectory) {
class FileEntry() extends Entry(zipEntry.getName) {
override def lastModified = zipEntry.getTime()
override def input = resourceInputStream(path)
@@ -284,14 +278,14 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) {
private def resourceInputStream(path: String): InputStream = {
new FilterInputStream(null) {
override def read(): Int = {
- if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
+ if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path)
if(in == null) throw new RuntimeException(path + " not found")
- super.read();
+ super.read()
}
override def close(): Unit = {
- super.close();
- in = null;
+ super.close()
+ in = null
}
}
}
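
The FileZipArchive change above parses the archive once into a lazy val so that repeated `iterator` calls no longer reopen and rescan the zip. A self-contained sketch of that caching idea with java.util.zip (the constructor argument is an assumed path, and only entry names are cached):

    import java.util.zip.ZipFile
    import scala.collection.JavaConverters._
    import scala.collection.mutable

    final class CachedZipListing(path: String) {
      // Computed once, on first access, then reused by every iterator call.
      lazy val entriesByDir: Map[String, List[String]] = {
        val zip = new ZipFile(path)
        try {
          val dirs = mutable.Map[String, List[String]]().withDefaultValue(Nil)
          for (e <- zip.entries().asScala if !e.isDirectory) {
            val name = e.getName
            val dir  = name.substring(0, name.lastIndexOf('/') + 1)
            dirs(dir) = name :: dirs(dir)
          }
          dirs.toMap
        } finally zip.close()
      }
      def iterator: Iterator[String] = entriesByDir.valuesIterator.flatten
    }
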
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index 69ede42cc7..1eb6832b5b 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -47,7 +47,7 @@ trait Enclosures {
/** Tries to guess a position for the enclosing application.
* But that is simple, right? Just dereference `pos` of `macroApplication`? Not really.
- * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion.
+ * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion.
* Surprisingly, quite often we can do this by navigation the `enclosingMacros` stack.
*/
def enclosingPosition: Position
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index 720b754649..5fc0fd5078 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -13,7 +13,7 @@ trait Parsers {
/** Parses a string with a Scala expression into an abstract syntax tree.
* Only works for expressions, i.e. parsing a package declaration will fail.
- * @throws [[scala.reflect.macros.ParseException]]
+ * @throws scala.reflect.macros.ParseException
*/
def parse(code: String): Tree
}
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index d0dccb469d..bd608601dc 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -2,8 +2,6 @@ package scala
package reflect
package macros
-import scala.reflect.internal.{Mode => InternalMode}
-
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
@@ -72,7 +70,7 @@ trait Typers {
* `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false
* `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false
*
- * @throws [[scala.reflect.macros.TypecheckException]]
+ * @throws scala.reflect.macros.TypecheckException
*/
def typecheck(tree: Tree, mode: TypecheckMode = TERMmode, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
@@ -84,7 +82,7 @@ trait Typers {
* Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
* Unlike in `typecheck`, `silent` is true by default.
*
- * @throws [[scala.reflect.macros.TypecheckException]]
+ * @throws scala.reflect.macros.TypecheckException
*/
def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
@@ -96,7 +94,7 @@ trait Typers {
* Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
* Unlike in `typecheck`, `silent` is true by default.
*
- * @throws [[scala.reflect.macros.TypecheckException]]
+ * @throws scala.reflect.macros.TypecheckException
*/
def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 1eb67215bb..3b57169565 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -44,7 +44,7 @@ abstract class Universe extends scala.reflect.api.Universe {
* it is imperative that you either call `untypecheck` or do `changeOwner(tree, x, y)`.
*
* Since at the moment `untypecheck` has fundamental problem that can sometimes lead to tree corruption,
- * `changeOwner` becomes an indispensible tool in building 100% robust macros.
+ * `changeOwner` becomes an indispensable tool in building 100% robust macros.
* Future versions of the reflection API might obviate the need in taking care of
* these low-level details, but at the moment this is what we've got.
*/
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index b7f229b6e5..1c751fb93b 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -38,7 +38,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader)
- // overriden by ReflectGlobal
+ // overridden by ReflectGlobal
def rootClassLoader: ClassLoader = this.getClass.getClassLoader
trait JavaClassCompleter
@@ -1191,7 +1191,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive
* - top-level classes
* - Scala classes that were generated via jclassToScala
* - classes that have a class owner that has a corresponding Java class
- * @throws A `ClassNotFoundException` for all Scala classes not in one of these categories.
+ * @throws ClassNotFoundException for all Scala classes not in one of these categories.
*/
@throws(classOf[ClassNotFoundException])
def classToJava(clazz: ClassSymbol): jClass[_] = classCache.toJava(clazz) {
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index fe39e1f245..7848753e69 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -5,7 +5,7 @@ package runtime
import scala.reflect.internal.{TreeInfo, SomePhase}
import scala.reflect.internal.{SymbolTable => InternalSymbolTable}
import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable}
-import scala.reflect.api.{TreeCreator, TypeCreator, Universe}
+import scala.reflect.api.{TypeCreator, Universe}
/** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
*
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
index dcd262c288..1c0aa7cf6d 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -170,6 +170,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
this.dropSingletonType
this.abstractTypesToBounds
this.dropIllegalStarTypes
+ this.wildcardExtrapolation
this.IsDependentCollector
this.ApproximateDependentMap
this.wildcardToTypeVarMap
@@ -309,6 +310,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.QuasiquoteClass_api_unapply
definitions.ScalaSignatureAnnotation
definitions.ScalaLongSignatureAnnotation
+ definitions.MethodHandle
definitions.OptionClass
definitions.OptionModule
definitions.SomeClass
@@ -360,6 +362,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse =>
definitions.AnnotationClass
definitions.ClassfileAnnotationClass
definitions.StaticAnnotationClass
+ definitions.AnnotationRetentionAttr
+ definitions.AnnotationRetentionPolicyAttr
definitions.BridgeClass
definitions.ElidableMethodClass
definitions.ImplicitNotFoundClass
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index 02155578f8..092bbd711f 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -2,8 +2,6 @@ package scala
package reflect
package runtime
-import scala.reflect.internal.Flags._
-
/**
* This symbol table trait fills in the definitions so that class information is obtained by refection.
* It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index f5e16c6640..4f0c0253e9 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -2,8 +2,7 @@ package scala
package reflect
package runtime
-import scala.reflect.io.AbstractFile
-import scala.collection.{ immutable, mutable }
+import scala.collection.immutable
import scala.reflect.internal.Flags._
private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
@@ -40,7 +39,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
* Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number
* of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make
* sure that things stay deterministic). However, in case of symbols there's hope, because it's only during
- * initializaton that symbols are thread-unsafe. After everything's set up, symbols become immutable
+ * initialization that symbols are thread-unsafe. After everything's set up, symbols become immutable
* (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe.
*
* Note that by saying "symbols become immutable" I mean literally that. In a very common case of PackageClassSymbol's,
@@ -103,10 +102,10 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
*
* Just a volatile var is fine, because:
* 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized
- * that effecively guards `Symbol.initialize`), which means that there can't be update conflicts.
+ * that effectively guards `Symbol.initialize`), which means that there can't be update conflicts.
* 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone
- * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet)
- * or a no-op (if the symbol is already inited), and that is fine in both cases.
+ * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't initialized yet)
+ * or a no-op (if the symbol is already initialized), and that is fine in both cases.
*
* upd. It looks like we also need to keep track of a mask of initialized flags to make sure
* that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag).
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
index 5edc051461..586b8a5257 100644
--- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -11,12 +11,16 @@ private[reflect] trait ThreadLocalStorage {
trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
// TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
- val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+ // (we would need a version that uses weak keys)
+ private val values = java.util.Collections.synchronizedMap(new java.util.WeakHashMap[Thread, T]())
def get: T = {
if (values containsKey currentThread) values.get(currentThread)
else {
val value = initialValue
- values.putIfAbsent(currentThread, value)
+ // since the key is currentThread, and `values` is private, it
+ // would be impossible for a value to have been set after the
+ // above containsKey check. `putIfAbsent` is not necessary.
+ values.put(currentThread, value)
value
}
}
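
The ThreadLocalStorage change above swaps a ConcurrentHashMap for a synchronized WeakHashMap keyed by Thread, so dead threads no longer pin their cached values. A minimal sketch of the same pattern in isolation:

    import java.util.{ Collections, WeakHashMap }

    final class PerThread[T](initial: => T) {
      // Weak keys: an entry disappears once its thread is garbage collected.
      private val values = Collections.synchronizedMap(new WeakHashMap[Thread, T]())
      def get: T = {
        val t = Thread.currentThread()
        if (values.containsKey(t)) values.get(t)
        else {
          val v = initial
          // The key is the current thread, so no other thread can race us
          // on this entry; plain put suffices (same reasoning as the patch).
          values.put(t, v)
          v
        }
      }
      def set(v: T): Unit = values.put(Thread.currentThread(), v)
    }
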
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
index e66e4eff29..df49e6a2e4 100644
--- a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
@@ -30,7 +30,7 @@ class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends Par
|Failed to initialize compiler: %s not found.
|** Note that as of 2.8 scala does not assume use of the java classpath.
|** For the old behavior pass -usejavacp to scala, or if using a Settings
- |** object programatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
+ |** object programmatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
)
value
}
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 50c89f7442..4fd5768b79 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -19,6 +19,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader }
import ScalaClassLoader._
import scala.reflect.io.{ File, Directory }
import scala.tools.util._
+import io.AbstractFile
import scala.collection.generic.Clearable
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
@@ -75,6 +76,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def history = in.history
// classpath entries added via :cp
+ @deprecated("Use reset, replay or require to update class path", since = "2.11")
var addedClasspath: String = ""
/** A reverse list of commands to replay if the user requests a :replay */
@@ -124,22 +126,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
/** print a friendly help message */
- def helpCommand(line: String): Result = {
- if (line == "") helpSummary()
- else uniqueCommand(line) match {
- case Some(lc) => echo("\n" + lc.help)
- case _ => ambiguousError(line)
- }
+ def helpCommand(line: String): Result = line match {
+ case "" => helpSummary()
+ case CommandMatch(cmd) => echo(f"%n${cmd.help}")
+ case _ => ambiguousError(line)
}
private def helpSummary() = {
- val usageWidth = commands map (_.usageMsg.length) max
- val formatStr = "%-" + usageWidth + "s %s"
+ val usageWidth = commands map (_.usageMsg.length) max
+ val formatStr = s"%-${usageWidth}s %s"
- echo("All commands can be abbreviated, e.g. :he instead of :help.")
+ echo("All commands can be abbreviated, e.g., :he instead of :help.")
- commands foreach { cmd =>
- echo(formatStr.format(cmd.usageMsg, cmd.help))
- }
+ for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help))
}
private def ambiguousError(cmd: String): Result = {
matchingCommands(cmd) match {
@@ -148,14 +146,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
Result(keepRunning = true, None)
}
+ // this lets us add commands willy-nilly and only requires enough command to disambiguate
private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
- private def uniqueCommand(cmd: String): Option[LoopCommand] = {
- // this lets us add commands willy-nilly and only requires enough command to disambiguate
- matchingCommands(cmd) match {
- case List(x) => Some(x)
- // exact match OK even if otherwise appears ambiguous
- case xs => xs find (_.name == cmd)
- }
+ private object CommandMatch {
+ def unapply(name: String): Option[LoopCommand] =
+ matchingCommands(name) match {
+ case x :: Nil => Some(x)
+ case xs => xs find (_.name == name) // accept an exact match
+ }
}
/** Show the history */
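
The `CommandMatch` extractor above resolves an abbreviated command name to a unique match, while an exact name still wins even if it is also a prefix of other commands. A standalone sketch of that extractor with hypothetical command names:

    object PrefixCommand {
      val commands = List("help", "history", "load", "line") // hypothetical names

      object Match {
        def unapply(name: String): Option[String] =
          commands.filter(_ startsWith name) match {
            case cmd :: Nil => Some(cmd)
            case candidates => candidates.find(_ == name) // exact match wins
          }
      }

      def main(args: Array[String]): Unit = {
        println("he"   match { case Match(c) => c; case _ => "ambiguous or unknown" }) // help
        println("l"    match { case Match(c) => c; case _ => "ambiguous or unknown" }) // ambiguous
        println("line" match { case Match(c) => c; case _ => "ambiguous or unknown" }) // exact match
      }
    }
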
@@ -207,7 +205,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** Standard commands **/
lazy val standardCommands = List(
- cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
cmd("edit", "<id>|<line>", "edit history", editCommand),
cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
historyCommand,
@@ -220,11 +217,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
- nullary("replay", "reset execution and replay all previous commands", replay),
- nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
+ cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand),
+ cmd("require", "<path>", "add a jar to the classpath", require),
+ cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand),
cmd("save", "<path>", "save replayable session to a file", saveCommand),
shCommand,
- cmd("settings", "[+|-]<options>", "+enable/-disable flags, set compiler options", changeSettings),
+ cmd("settings", "<options>", "update compiler options, if possible; see reset", changeSettings),
nullary("silent", "disable/enable automatic printing of results", verbosity),
cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
@@ -304,57 +302,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
}
- private def changeSettings(args: String): Result = {
- def showSettings() = {
- for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
- }
- def updateSettings() = {
- // put aside +flag options
- val (pluses, rest) = (args split "\\s+").toList partition (_.startsWith("+"))
- val tmps = new Settings
- val (ok, leftover) = tmps.processArguments(rest, processAll = true)
- if (!ok) echo("Bad settings request.")
- else if (leftover.nonEmpty) echo("Unprocessed settings.")
- else {
- // boolean flags set-by-user on tmp copy should be off, not on
- val offs = tmps.userSetSettings filter (_.isInstanceOf[Settings#BooleanSetting])
- val (minuses, nonbools) = rest partition (arg => offs exists (_ respondsTo arg))
- // update non-flags
- settings.processArguments(nonbools, processAll = true)
- // also snag multi-value options for clearing, e.g. -Ylog: and -language:
- for {
- s <- settings.userSetSettings
- if s.isInstanceOf[Settings#MultiStringSetting] || s.isInstanceOf[Settings#PhasesSetting]
- if nonbools exists (arg => arg.head == '-' && arg.last == ':' && (s respondsTo arg.init))
- } s match {
- case c: Clearable => c.clear()
- case _ =>
- }
- def update(bs: Seq[String], name: String=>String, setter: Settings#Setting=>Unit) = {
- for (b <- bs)
- settings.lookupSetting(name(b)) match {
- case Some(s) =>
- if (s.isInstanceOf[Settings#BooleanSetting]) setter(s)
- else echo(s"Not a boolean flag: $b")
- case _ =>
- echo(s"Not an option: $b")
- }
- }
- update(minuses, identity, _.tryToSetFromPropertyValue("false")) // turn off
- update(pluses, "-" + _.drop(1), _.tryToSet(Nil)) // turn on
- }
- }
- if (args.isEmpty) showSettings() else updateSettings()
+ private def changeSettings(line: String): Result = {
+ def showSettings() = for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
+ if (line.isEmpty) showSettings() else { updateSettings(line) ; () }
+ }
+ private def updateSettings(line: String) = {
+ val (ok, rest) = settings.processArguments(words(line), processAll = false)
+ ok && rest.isEmpty
}
private def javapCommand(line: String): Result = {
if (javap == null)
- ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
+ s":javap unavailable, no tools.jar at $jdkHome. Set JDK_HOME."
else if (line == "")
":javap [-lcsvp] [path1 path2 ...]"
else
javap(words(line)) foreach { res =>
- if (res.isError) return "Failed: " + res.value
+ if (res.isError) return s"Failed: ${res.value}"
else res.show()
}
}
@@ -425,46 +389,56 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
true
}
+ // after process line, OK continue, ERR break, or EOF all done
+ object LineResults extends Enumeration {
+ type LineResult = Value
+ val EOF, ERR, OK = Value
+ }
+ import LineResults.LineResult
+
// return false if repl should exit
def processLine(line: String): Boolean = {
import scala.concurrent.duration._
- Await.ready(globalFuture, 60.seconds)
-
- if (line eq null) {
- // SI-4563: this means the console was properly interrupted (Ctrl+D usually)
- // so we display the output message (which by default ends with
- // a newline so as not to break the user's terminal)
- if (in.interactive) out.print(Properties.shellInterruptedString)
+ Await.ready(globalFuture, 10.minutes) // Long timeout here to avoid test failures under heavy load.
- false
- } else (command(line) match {
+ command(line) match {
case Result(false, _) => false
case Result(_, Some(line)) => addReplay(line) ; true
case _ => true
- })
+ }
}
private def readOneLine() = {
+ import scala.io.AnsiColor.{ MAGENTA, RESET }
out.flush()
- in readLine prompt
+ in readLine (
+ if (replProps.colorOk)
+ MAGENTA + prompt + RESET
+ else
+ prompt
+ )
}
/** The main read-eval-print loop for the repl. It calls
* command() for each line of input, and stops when
* command() returns false.
*/
- @tailrec final def loop() {
- if ( try processLine(readOneLine()) catch crashRecovery )
- loop()
+ @tailrec final def loop(): LineResult = {
+ import LineResults._
+ readOneLine() match {
+ case null => EOF
+ case line => if (try processLine(line) catch crashRecovery) loop() else ERR
+ }
}
/** interpret all lines from a specified file */
- def interpretAllFrom(file: File) {
+ def interpretAllFrom(file: File, verbose: Boolean = false) {
savingReader {
savingReplayStack {
file applyReader { reader =>
- in = SimpleReader(reader, out, interactive = false)
- echo("Loading " + file + "...")
+ in = if (verbose) new SimpleReader(reader, out, interactive = true) with EchoReader
+ else SimpleReader(reader, out, interactive = false)
+ echo(s"Loading $file...")
loop()
}
}
@@ -472,8 +446,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
/** create a new interpreter and replay the given commands */
- def replay() {
- reset()
+ def replayCommand(line: String): Unit = {
+ def run(destructive: Boolean): Unit = {
+ if (destructive) createInterpreter() else reset()
+ replay()
+ }
+ if (line.isEmpty) run(destructive = false)
+ else if (updateSettings(line)) run(destructive = true)
+ }
+ /** Announces as it replays. */
+ def replay(): Unit = {
if (replayCommandStack.isEmpty)
echo("Nothing to replay.")
else for (cmd <- replayCommands) {
@@ -482,21 +464,28 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
echo("")
}
}
- def resetCommand() {
- echo("Resetting interpreter state.")
- if (replayCommandStack.nonEmpty) {
- echo("Forgetting this session history:\n")
- replayCommands foreach echo
- echo("")
- replayCommandStack = Nil
+ /** `reset` the interpreter in an attempt to start fresh.
+ * Supplying settings creates a new compiler.
+ */
+ def resetCommand(line: String): Unit = {
+ def run(destructive: Boolean): Unit = {
+ echo("Resetting interpreter state.")
+ if (replayCommandStack.nonEmpty) {
+ echo("Forgetting this session history:\n")
+ replayCommands foreach echo
+ echo("")
+ replayCommandStack = Nil
+ }
+ if (intp.namedDefinedTerms.nonEmpty)
+ echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
+ if (intp.definedTypes.nonEmpty)
+ echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
+ if (destructive) createInterpreter() else reset()
}
- if (intp.namedDefinedTerms.nonEmpty)
- echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
- if (intp.definedTypes.nonEmpty)
- echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
-
- reset()
+ if (line.isEmpty) run(destructive = false)
+ else if (updateSettings(line)) run(destructive = true)
}
+ /** Resets without announcements. */
def reset() {
intp.reset()
unleashAndSetPhase()
@@ -604,13 +593,17 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
res
}
- def loadCommand(arg: String) = {
- var shouldReplay: Option[String] = None
- withFile(arg)(f => {
- interpretAllFrom(f)
- shouldReplay = Some(":load " + arg)
- })
- Result(keepRunning = true, shouldReplay)
+ def loadCommand(arg: String): Result = {
+ def run(file: String, verbose: Boolean) = withFile(file) { f =>
+ interpretAllFrom(f, verbose)
+ Result recording s":load $arg"
+ } getOrElse Result.default
+
+ words(arg) match {
+ case "-v" :: file :: Nil => run(file, verbose = true)
+ case file :: Nil => run(file, verbose = false)
+ case _ => echo("usage: :load -v file") ; Result.default
+ }
}
def saveCommand(filename: String): Result = (
@@ -619,17 +612,62 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else File(filename).printlnAll(replayCommands: _*)
)
+ @deprecated("Use reset, replay or require to update class path", since = "2.11")
def addClasspath(arg: String): Unit = {
val f = File(arg).normalize
if (f.exists) {
addedClasspath = ClassPath.join(addedClasspath, f.path)
- val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
- echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath))
- replay()
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClassPathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString))
}
else echo("The path '" + f + "' doesn't seem to exist.")
}
+ /** Adds jar file to the current classpath. Jar will only be added if it
+ * does not contain classes that already exist on the current classpath.
+ *
+ * Importantly, `require` adds jars to the classpath ''without'' resetting
+ * the state of the interpreter. This is in contrast to `replay` which can
+ * be used to add jars to the classpath and which creates a new instance of
+ * the interpreter and replays all interpreter expressions.
+ */
+ def require(arg: String): Unit = {
+ class InfoClassLoader extends java.lang.ClassLoader {
+ def classOf(arr: Array[Byte]): Class[_] =
+ super.defineClass(null, arr, 0, arr.length)
+ }
+
+ val f = File(arg).normalize
+
+ if (f.isDirectory) {
+ echo("Adding directories to the classpath is not supported. Add a jar instead.")
+ return
+ }
+
+ val jarFile = AbstractFile.getDirectory(new java.io.File(arg))
+
+ def flatten(f: AbstractFile): Iterator[AbstractFile] =
+ if (f.isClassContainer) f.iterator.flatMap(flatten)
+ else Iterator(f)
+
+ val entries = flatten(jarFile)
+ val cloader = new InfoClassLoader
+
+ def classNameOf(classFile: AbstractFile): String = cloader.classOf(classFile.toByteArray).getName
+ def alreadyDefined(clsName: String) = intp.classLoader.tryToLoadClass(clsName).isDefined
+ val exists = entries.filter(_.hasExtension("class")).map(classNameOf).exists(alreadyDefined)
+
+ if (!f.exists) echo(s"The path '$f' doesn't seem to exist.")
+ else if (exists) echo(s"The path '$f' cannot be loaded, because existing classpath entries conflict.") // TODO tell me which one
+ else {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ intp.addUrlsToClassPath(f.toURI.toURL)
+ echo("Added '%s' to classpath.".format(f.path, intp.global.classPath.asClassPathString))
+ repldbg("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, intp.global.classPath.asClassPathString))
+ }
+ }
+
def powerCmd(): Result = {
if (isReplPower) "Already in power mode."
else enablePowerMode(isDuringInit = false)
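
The `:require` command above refuses a jar whose classes are already loadable, so it can extend the classpath without resetting the interpreter. A rough, self-contained illustration of that conflict check; unlike the REPL, which reads class names from the bytecode via a throwaway loader, this sketch derives them from the entry paths, and the jar path is a placeholder:

    import java.util.jar.JarFile
    import scala.collection.JavaConverters._

    object JarConflictCheck {
      // Returns the class names in the jar that the given loader can already load.
      def conflicts(jarPath: String, loader: ClassLoader): List[String] = {
        val jar = new JarFile(jarPath)
        try {
          jar.entries().asScala
            .filter(e => !e.isDirectory && e.getName.endsWith(".class"))
            .map(_.getName.stripSuffix(".class").replace('/', '.'))
            .filter { name =>
              try { Class.forName(name, false, loader); true }
              catch { case _: Throwable => false }
            }
            .toList
        } finally jar.close()
      }

      def main(args: Array[String]): Unit =
        println(conflicts("/tmp/extra-lib.jar", getClass.getClassLoader))
    }
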
@@ -659,20 +697,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
/** Run one command submitted by the user. Two values are returned:
- * (1) whether to keep running, (2) the line to record for replay,
- * if any. */
+ * (1) whether to keep running, (2) the line to record for replay, if any.
+ */
def command(line: String): Result = {
- if (line startsWith ":") {
- val cmd = line.tail takeWhile (x => !x.isWhitespace)
- uniqueCommand(cmd) match {
- case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace))
- case _ => ambiguousError(cmd)
- }
- }
+ if (line startsWith ":") colonCommand(line.tail)
else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler
else Result(keepRunning = true, interpretStartingWith(line))
}
+ private val commandish = """(\S+)(?:\s+)?(.*)""".r
+
+ private def colonCommand(line: String): Result = line.trim match {
+ case "" => helpSummary()
+ case commandish(CommandMatch(cmd), rest) => cmd(rest)
+ case commandish(name, _) => ambiguousError(name)
+ case _ => echo("?")
+ }
+
private def readWhile(cond: String => Boolean) = {
Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
}
@@ -696,13 +737,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
val code = file match {
case Some(name) =>
- withFile(name)(f => {
+ withFile(name) { f =>
shouldReplay = Some(s":paste $arg")
val s = f.slurp.trim
if (s.isEmpty) echo(s"File contains no code: $f")
else echo(s"Pasting file $f...")
s
- }) getOrElse ""
+ } getOrElse ""
case None =>
echo("// Entering paste mode (ctrl-D to finish)\n")
val text = (readWhile(_ => true) mkString "\n").trim
@@ -831,7 +872,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
)
catch {
case ex @ (_: Exception | _: NoClassDefFoundError) =>
- echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.")
+ echo(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.")
SimpleReader()
}
}
@@ -858,6 +899,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case _ =>
}
}
+
+ // start an interpreter with the given settings
def process(settings: Settings): Boolean = savingContextLoader {
this.settings = settings
createInterpreter()
@@ -872,7 +915,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
loadFiles(settings)
printWelcome()
- try loop()
+ try loop() match {
+ case LineResults.EOF => out print Properties.shellInterruptedString
+ case _ =>
+ }
catch AbstractOrMissingHandler()
finally closeInterpreter()
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 6e30b73e0e..0347622cf4 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -15,12 +15,16 @@ import scala.concurrent.{ Future, ExecutionContext }
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
-import scala.tools.util.PathResolver
+import scala.tools.util.PathResolverFactory
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromReader, stringFromWriter, StackTraceOps, ClassPath, MergedClassPath }
+import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
+import scala.tools.nsc.backend.JavaPlatform
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+import java.net.URL
+import java.io.File
/** An interpreter for Scala code.
*
@@ -41,7 +45,7 @@ import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine
* all variables defined by that code. To extract the result of an
* interpreted line to show the user, a second "result object" is created
* which imports the variables exported by the above object and then
- * exports members called "$eval" and "$print". To accomodate user expressions
+ * exports members called "$eval" and "$print". To accommodate user expressions
* that read from variables or methods defined in previous statements, "import"
* statements are used.
*
@@ -82,9 +86,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private var _classLoader: util.AbstractFileClassLoader = null // active classloader
private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
+ private var _runtimeClassLoader: URLClassLoader = null // wrapper exposing addURL
+
def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
- else new PathResolver(settings).result.asURLs // the compiler's classpath
+ else PathResolverFactory.create(settings).resultAsURLs // the compiler's classpath
)
def settings = initialSettings
// Run the code body with the given boolean settings flipped to true.
@@ -237,6 +243,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
}
+ /**
+ * Adds all specified jars to the compile and runtime classpaths.
+ *
+ * @note Currently only supports jars, not directories.
+ * @param urls The list of items to add to the compile and runtime classpaths.
+ */
+ def addUrlsToClassPath(urls: URL*): Unit = {
+ new Run // force some initialization
+ urls.foreach(_runtimeClassLoader.addURL) // Add jars to runtime classloader
+ global.extendCompilerClassPath(urls: _*) // Add jars to compile-time classpath
+ }
+
/** Parent classloader. Overridable. */
protected def parentClassLoader: ClassLoader =
settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
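A hedged usage sketch for addUrlsToClassPath, assuming an already constructed IMain named intp; the jar path and the imported package are purely illustrative:

    val extra = new java.io.File("/tmp/extra.jar").toURI.toURL
    intp.addUrlsToClassPath(extra)               // extends both the compile-time and runtime classpaths
    intp.interpret("import com.example.Widget")  // classes from the added jar are now resolvable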
@@ -295,27 +313,43 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def originalPath(name: Name): String = typerOp path name
def originalPath(sym: Symbol): String = typerOp path sym
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
+
def translatePath(path: String) = {
val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
sym.toOption map flatPath
}
+
+ /** If path represents a class resource in the default package,
+ * see if the corresponding symbol has a class file that is a REPL artifact
+ * residing at a different resource path. Translate X.class to $line3/$read$$iw$$iw$X.class.
+ */
+ def translateSimpleResource(path: String): Option[String] = {
+ if (!(path contains '/') && (path endsWith ".class")) {
+ val name = path stripSuffix ".class"
+ val sym = if (name endsWith "$") symbolOfTerm(name.init) else symbolOfIdent(name)
+ def pathOf(s: String) = s"${s.replace('.', '/')}.class"
+ sym.toOption map (s => pathOf(flatPath(s)))
+ } else {
+ None
+ }
+ }
def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath
+ /** If unable to find a resource foo.class, try taking foo as a symbol in scope
+ * and use its java class name as a resource to load.
+ *
+ * $intp.classLoader classBytes "Bippy" or $intp.classLoader getResource "Bippy.class" just works.
+ */
private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
- /** Overridden here to try translating a simple name to the generated
- * class name if the original attempt fails. This method is used by
- * getResourceAsStream as well as findClass.
- */
- override protected def findAbstractFile(name: String): AbstractFile =
- super.findAbstractFile(name) match {
- case null if _initializeComplete => translatePath(name) map (super.findAbstractFile(_)) orNull
- case file => file
- }
+ override protected def findAbstractFile(name: String): AbstractFile = super.findAbstractFile(name) match {
+ case null if _initializeComplete => translateSimpleResource(name) map super.findAbstractFile orNull
+ case file => file
+ }
}
private def makeClassLoader(): util.AbstractFileClassLoader =
- new TranslatingClassLoader(parentClassLoader match {
- case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
+ new TranslatingClassLoader({
+ _runtimeClassLoader = new URLClassLoader(compilerClasspath, parentClassLoader)
+ _runtimeClassLoader
})
// Set the current Java "context" class loader to this interpreter's class loader
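An illustration of what translateSimpleResource above is intended to return, assuming a REPL session in which class X was compiled into $line3 (the wrapper names are illustrative):

    // translateSimpleResource("X.class")      == Some("$line3/$read$$iw$$iw$X.class")
    // translateSimpleResource("a/b/X.class")  == None   // only default-package resources are translated
    // translateSimpleResource("X.txt")        == None   // only .class resources are translated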
@@ -1174,6 +1208,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
finally isettings.unwrapStrings = saved
}
+ def withoutTruncating[A](body: => A): A = reporter withoutTruncating body
+
def symbolDefString(sym: Symbol) = {
TypeStrings.quieter(
exitingTyper(sym.defString),
@@ -1246,9 +1282,11 @@ object IMain {
def getProgram(statements: String*): String = null
- def getScriptEngine: ScriptEngine = new IMain(this, new Settings() {
- usemanifestcp.value = true
- })
+ def getScriptEngine: ScriptEngine = {
+ val settings = new Settings()
+ settings.usemanifestcp.value = true
+ new IMain(this, settings)
+ }
}
// The two name forms this is catching are the two sides of this assignment:
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
index 3cb6ba11c1..c80b94bf89 100644
--- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -9,7 +9,7 @@ package interpreter
import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
import scala.tools.nsc.util.ScalaClassLoader
-import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer }
+import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, StringWriter, Writer }
import java.util.{ Locale }
import java.util.concurrent.ConcurrentLinkedQueue
import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
@@ -18,39 +18,47 @@ import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
import scala.reflect.io.{ AbstractFile, Directory, File, Path }
import scala.io.Source
import scala.util.{ Try, Success, Failure }
-import scala.util.Properties.lineSeparator
+import scala.util.Properties.{ lineSeparator => EOL }
import scala.util.matching.Regex
-import scala.collection.JavaConverters
+import scala.collection.JavaConverters._
import scala.collection.generic.Clearable
import java.net.URL
import scala.language.reflectiveCalls
+import PartialFunction.{ cond => when }
import Javap._
+/** Javap command implementation. Supports platform tool for Java 6 or 7+.
+ * Adds a few options for REPL world, to show bodies of `App` classes and closures.
+ */
class JavapClass(
val loader: ScalaClassLoader,
val printWriter: PrintWriter,
intp: Option[IMain] = None
-) extends scala.tools.util.Javap {
+) extends Javap {
import JavapTool.ToolArgs
import JavapClass._
lazy val tool = JavapTool()
- /** Run the tool. Option args start with "-".
+ /** Run the tool. Option args start with "-", except that "-" itself
+ * denotes the last REPL result.
* The default options are "-protected -verbose".
* Byte data for filename args is retrieved with findBytes.
+ * @return results for invoking JpResult.show()
*/
def apply(args: Seq[String]): List[JpResult] = {
val (options, classes) = args partition (s => (s startsWith "-") && s.length > 1)
- val (flags, upgraded) = upgrade(options)
+ val (flags, upgraded) = upgrade(options)
import flags.{ app, fun, help, raw }
+
val targets = if (fun && !help) FunFinder(loader, intp).funs(classes) else classes
+
if (help || classes.isEmpty)
List(JpResult(JavapTool.helper(printWriter)))
else if (targets.isEmpty)
- List(JpResult("No anonfuns found."))
+ List(JpResult("No closures found."))
else
- tool(raw, upgraded)(targets map (klass => targeted(klass, app)))
+ tool(raw, upgraded)(targets map (targeted(_, app))) // JavapTool.apply
}
/** Cull our tool options. */
@@ -79,8 +87,10 @@ class JavapClass(
case s => s
}
val targetedBytes = if (app) findAppBody(req) else (path, findBytes(req))
- if (targetedBytes._2.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '$path'")
- targetedBytes
+ targetedBytes match {
+ case (_, bytes) if bytes.isEmpty => throw new FileNotFoundException(s"Could not find class bytes for '$path'")
+ case ok => ok
+ }
}
private def findAppBody(path: String): (String, Array[Byte]) = {
@@ -89,16 +99,12 @@ class JavapClass(
// assumes only the first match is of interest (because only one endpoint is generated).
def findNewStyle(bytes: Array[Byte]) = {
import scala.tools.asm.ClassReader
- import scala.tools.asm.tree.ClassNode
- import PartialFunction.cond
- import JavaConverters._
- val rdr = new ClassReader(bytes)
- val nod = new ClassNode
- rdr.accept(nod, 0)
//foo/Bar.delayedEndpoint$foo$Bar$1
val endpoint = "delayedEndpoint".r.unanchored
- def isEndPoint(s: String) = (s contains '$') && cond(s) { case endpoint() => true }
- nod.methods.asScala collectFirst { case m if isEndPoint(m.name) => m.name }
+ def isEndPoint(s: String) = (s contains '$') && when(s) { case endpoint() => true }
+ new ClassReader(bytes) withMethods { methods =>
+ methods collectFirst { case m if isEndPoint(m.name) => m.name }
+ }
}
// try new style, and add foo#delayedEndpoint$bar$1 to filter on the endpoint
def asNewStyle(bytes: Array[Byte]) = Some(bytes) filter (_.nonEmpty) flatMap { bs =>
@@ -122,8 +128,7 @@ class JavapClass(
def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
- /** Assume the string is a path and try to find the classfile
- * it represents.
+ /** Assume the string is a path and try to find the classfile it represents.
*/
def tryFile(path: String): Option[Array[Byte]] =
(Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
@@ -202,55 +207,67 @@ class JavapClass(
w
}
+ def filterLines(target: String, text: String): String = {
+ // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
+ // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+ val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
+ var filtering = false // true if in region matching filter
+ // turn filtering on/off given the pattern of interest
+ def filterStatus(line: String, pattern: String) = {
+ def isSpecialized(method: String) = (method startsWith pattern+"$") && (method endsWith "$sp")
+ def isAnonymized(method: String) = (pattern == "$anonfun") && (method startsWith "$anonfun$")
+ // cheap heuristic, todo maybe parse for the java sig.
+ // method sigs end in paren semi
+ def isAnyMethod = line endsWith ");"
+ // take the method name between the space char and left paren.
+ // accept exact match or something that looks like what we might be asking for.
+ def isOurMethod = {
+ val lparen = line lastIndexOf '('
+ val blank = line.lastIndexOf(' ', lparen)
+ if (blank < 0) false
+ else {
+ val method = line.substring(blank+1, lparen)
+ (method == pattern || isSpecialized(method) || isAnonymized(method))
+ }
+ }
+ filtering =
+ if (filtering) {
+ // next blank line terminates section
+ // in non-verbose mode, next line is next method, more or less
+ line.trim.nonEmpty && (!isAnyMethod || isOurMethod)
+ } else {
+ isAnyMethod && isOurMethod
+ }
+ filtering
+ }
+ // do we output this line?
+ def checkFilter(line: String) = filterOn map (filterStatus(line, _)) getOrElse true
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ for {
+ line <- Source.fromString(text).getLines()
+ if checkFilter(line)
+ } pw println line
+ pw.flush()
+ sw.toString
+ }
+
/** Create a Showable with output massage.
* @param raw show ugly repl names
* @param target attempt to filter output to show region of interest
* @param preamble other messages to output
*/
- def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
- // ReplStrippingWriter clips and scrubs on write(String)
- // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
- def show() =
- if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
- else writeLines()
- private def writeLines() {
- // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
- // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
- val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
- var filtering = false // true if in region matching filter
- // turn filtering on/off given the pattern of interest
- def filterStatus(line: String, pattern: String) = {
- // cheap heuristic, todo maybe parse for the java sig.
- // method sigs end in paren semi
- def isAnyMethod = line.endsWith(");")
- def isOurMethod = {
- val lparen = line.lastIndexOf('(')
- val blank = line.lastIndexOf(' ', lparen)
- if (blank < 0) false
- else {
- val method = line.substring(blank+1, lparen)
- (method == pattern || ((method startsWith pattern+"$") && (method endsWith "$sp")))
- }
- }
- filtering =
- if (filtering) {
- // next blank line terminates section
- // for -public, next line is next method, more or less
- line.trim.nonEmpty && !isAnyMethod
- } else {
- isAnyMethod && isOurMethod
- }
- filtering
- }
- // do we output this line?
- def checkFilter(line: String) = filterOn map (filterStatus(line, _)) getOrElse true
- for {
- line <- Source.fromString(preamble + written).getLines()
- if checkFilter(line)
- } printWriter write f"$line%n"
- printWriter.flush()
+ def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable =
+ new Showable {
+ private def writeLines() = filterLines(target, preamble + written)
+ val output = writeLines()
+
+ // ReplStrippingWriter clips and scrubs on write(String)
+ // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
+ def show() =
+ if (raw && intp.isDefined) intp.get withoutUnwrapping { printWriter.write(output, 0, output.length) }
+ else intp.get withoutTruncating(printWriter write output)
}
- }
}
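A rough illustration of the filtering heuristic in filterLines, assuming javap-like output and a target of Foo#bar; the sample lines are hypothetical:

    //   public void bar();   // signature matches the target: filtering turns on
    //     Code: ...          // kept: non-blank and not another method signature
    //                        // blank line: the region ends, filtering turns off
    //   public void baz();   // dropped: a different method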
class JavapTool6 extends JavapTool {
@@ -291,6 +308,7 @@ class JavapClass(
}
class JavapTool7 extends JavapTool {
+ import JavapTool._
type Task = {
def call(): Boolean // true = ok
//def run(args: Array[String]): Int // all args
@@ -322,19 +340,14 @@ class JavapClass(
/** All diagnostic messages.
* @param locale Locale for diagnostic messages, null by default.
*/
- def messages(implicit locale: Locale = null) = {
- import JavaConverters._
- diagnostics.asScala.map(_ getMessage locale).toList
- }
+ def messages(implicit locale: Locale = null) = diagnostics.asScala.map(_ getMessage locale).toList
+ // don't filter this message if raw, since the names are likely to differ
+ private val container = "Binary file .* contains .*".r
def reportable(raw: Boolean): String = {
- // don't filter this message if raw, since the names are likely to differ
- val container = "Binary file .* contains .*".r
- val m = if (raw) messages
- else messages filter (_ match { case container() => false case _ => true })
+ val m = if (raw) messages else messages filterNot (when(_) { case container() => true })
clear()
- if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
- else ""
+ if (m.nonEmpty) m mkString ("", EOL, EOL) else ""
}
}
val reporter = new JavaReporter
@@ -396,7 +409,6 @@ class JavapClass(
def task(options: Seq[String], classes: Seq[String], inputs: Seq[Input]): Task = {
//ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
//getTask(writer, fileManager, reporter, options.asJava, classes.asJava)
- import JavaConverters.asJavaIterableConverter
TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, classes.asJava)
.orFailed (throw new IllegalStateException)
}
@@ -476,7 +488,7 @@ class JavapClass(
object ToolArgs {
def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
case ((t,others), s) => s match {
- case "-fun" => (t copy (fun=true), others)
+ case "-fun" => (t copy (fun=true), others :+ "-private")
case "-app" => (t copy (app=true), others)
case "-help" => (t copy (help=true), others)
case "-raw" => (t copy (raw=true), others)
@@ -542,24 +554,26 @@ class JavapClass(
val DefaultOptions = List("-protected", "-verbose")
- def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
-
private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
- private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
+ def isAvailable = Seq(Env, Tool) exists (hasClass(loader, _))
/** Select the tool implementation for this platform. */
- def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
+ def apply() = if (hasClass(loader, Tool)) new JavapTool7 else new JavapTool6
}
}
object JavapClass {
+ import scala.tools.asm.ClassReader
+ import scala.tools.asm.tree.{ ClassNode, MethodNode }
+
def apply(
loader: ScalaClassLoader = ScalaClassLoader.appLoader,
printWriter: PrintWriter = new PrintWriter(System.out, true),
intp: Option[IMain] = None
) = new JavapClass(loader, printWriter, intp)
+ /** Match foo#bar, both groups are optional (may be null). */
val HashSplit = "([^#]+)?(?:#(.+)?)?".r
// We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
@@ -580,9 +594,9 @@ object JavapClass {
else (s take i, Some(s drop i+1))
}
}
- implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
+ implicit class ClassLoaderOps(val loader: ScalaClassLoader) extends AnyVal {
private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
- def parents: List[ClassLoader] = parentsOf(cl)
+ def parents: List[ClassLoader] = parentsOf(loader)
/* all file locations */
def locations = {
def alldirs = parents flatMap (_ match {
@@ -596,7 +610,7 @@ object JavapClass {
/* only the file location from which the given class is loaded */
def locate(k: String): Option[Path] = {
Try {
- val klass = try cl loadClass k catch {
+ val klass = try loader loadClass k catch {
case _: NoClassDefFoundError => null // let it snow
}
// cf ScalaClassLoader.originOfClass
@@ -608,44 +622,66 @@ object JavapClass {
}
}
/* would classBytes succeed with a nonempty array */
- def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
+ def resourceable(className: String): Boolean = loader.getResource(className.asClassResource) != null
+
+ /* class reader of class bytes */
+ def classReader(resource: String): ClassReader = new ClassReader(loader classBytes resource)
+ }
+ implicit class `class reader convenience`(val reader: ClassReader) extends AnyVal {
+ def withMethods[A](f: Seq[MethodNode] => A): A = {
+ val cls = new ClassNode
+ reader.accept(cls, 0)
+ f(cls.methods.asScala)
+ }
}
implicit class PathOps(val p: Path) extends AnyVal {
import scala.tools.nsc.io.Jar
def isJar = Jar isJarOrZip p
}
+ implicit class `fun with files`(val f: AbstractFile) extends AnyVal {
+ def descend(path: Seq[String]): Option[AbstractFile] = {
+ def lookup(f: AbstractFile, path: Seq[String]): Option[AbstractFile] = path match {
+ case p if p.isEmpty => Option(f)
+ case p => Option(f.lookupName(p.head, directory = true)) flatMap (lookup(_, p.tail))
+ }
+ lookup(f, path)
+ }
+ }
implicit class URLOps(val url: URL) extends AnyVal {
def isFile: Boolean = url.getProtocol == "file"
}
object FunFinder {
def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
}
+ // FunFinder.funs(ks) finds anonfuns
class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
+ // manglese for closure: typename, $anonfun or lambda, opt method, digits
+ val closure = """(.*)\$(\$anonfun|lambda)(?:\$+([^$]+))?\$(\d+)""".r
+
+ // manglese for closure
+ val cleese = "(?:anonfun|lambda)"
+
// class k, candidate f without prefix
- def isFunOfClass(k: String, f: String) = {
- val p = (s"${Regex quote k}\\$$+anonfun").r
- (p findPrefixOf f).nonEmpty
- }
+ def isFunOfClass(k: String, f: String) = (s"${Regex quote k}\\$$+$cleese".r findPrefixOf f).nonEmpty
+
// class k, candidate f without prefix, method m
- def isFunOfMethod(k: String, m: String, f: String) = {
- val p = (s"${Regex quote k}\\$$+anonfun\\$$${Regex quote m}\\$$").r
- (p findPrefixOf f).nonEmpty
- }
- def isFunOfTarget(k: String, m: Option[String], f: String) =
- if (m.isEmpty) isFunOfClass(k, f)
- else isFunOfMethod(k, m.get, f)
- def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
- for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
- }
- // path prefix p, class k, dir d
- def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
- val subdir = Path(p)
- for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
+ def isFunOfMethod(k: String, m: String, f: String) =
+ (s"${Regex quote k}\\$$+$cleese\\$$+${Regex quote m}\\$$".r findPrefixOf f).nonEmpty
+
+ def isFunOfTarget(target: Target, f: String) =
+ target.member map (isFunOfMethod(target.name, _, f)) getOrElse isFunOfClass(target.name, f)
+
+ def listFunsInAbsFile(target: Target)(d: AbstractFile) =
+ for (f <- d; if !f.isDirectory && isFunOfTarget(target, f.name)) yield f.name
+
+ def listFunsInDir(target: Target)(d: Directory) = {
+ val subdir = Path(target.prefix)
+ for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(target, f.name))
yield f.name
}
- // path prefix p, class k, jar file f
- def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
+
+ def listFunsInJar(target: Target)(f: File) = {
import java.util.jar.JarEntry
import scala.tools.nsc.io.Jar
def maybe(e: JarEntry) = {
@@ -654,78 +690,133 @@ object JavapClass {
if (parts.length < 2) ("", e.getName)
else (parts.init mkString "/", parts.last)
}
- if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
+ if (path == target.prefix && isFunOfTarget(target, name)) Some(name) else None
}
(new Jar(f) map maybe).flatten
}
def loadable(name: String) = loader resourceable name
- // translated class, optional member, opt member to filter on, whether it is repl output
- def translate(s: String): (String, Option[String], Option[String], Boolean) = {
+ case class Target(path: String, member: Option[String], filter: Option[String], isRepl: Boolean, isModule: Boolean) {
+ val splat = path split "\\."
+ val name = splat.last
+ val prefix = if (splat.length > 1) splat.init mkString "/" else ""
+ val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ val targetName = s"$name${ if (isModule) "$" else "" }"
+ }
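A small example of how the Target fields above decompose a dotted path; the class name is hypothetical:

    //   Target("com.acme.Foo", None, None, isRepl = false, isModule = false)
    //     name == "Foo", prefix == "com/acme", pkg == "com.acme", targetName == "Foo"
    //   with isModule = true, targetName would be "Foo$"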
+ // translated class, optional member, opt member to filter on, whether it is repl output and a module
+ def translate(s: String): Target = {
val (k0, m0) = s.splitHashMember
- val k = k0.asClassName
+ val isModule = k0 endsWith "$"
+ val k = (k0 stripSuffix "$").asClassName
val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
// class is either something replish or available to loader
// $line.$read$$etc$Foo#member
- ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
+ ((intp flatMap (_ translatePath k) filter (loadable) map (x => Target(x stripSuffix "$", member, filter, true, isModule)))
// s = "f" and $line.$read$$etc$#f is what we're after,
// ignoring any #member (except take # as filter on #apply)
- orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
- getOrElse ((k, member, filter, false)))
+ orElse (intp flatMap (_ translateEnclosingClass k) map (x => Target(x stripSuffix "$", Some(k), filter, true, isModule)))
+ getOrElse (Target(k, member, filter, false, isModule)))
}
/** Find the classnames of anonfuns associated with k,
* where k may be an available class or a symbol in scope.
*/
- def funsOf(k0: String): Seq[String] = {
+ def funsOf(selection: String): Seq[String] = {
// class is either something replish or available to loader
- val (k, member, filter, isReplish) = translate(k0)
- val splat = k split "\\."
- val name = splat.last
- val prefix = if (splat.length > 1) splat.init mkString "/" else ""
- val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ val target = translate(selection)
+
// reconstitute an anonfun with a package
// if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
def packaged(s: String) = {
- val p = if (pkg.isEmpty) s else s"$pkg.$s"
- val pm = filter map (p + "#" + _)
- pm getOrElse p
+ val p = if (target.pkg.isEmpty) s else s"${target.pkg}.$s"
+ target.filter map (p + "#" + _) getOrElse p
}
- // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
- val fs = if (isReplish) {
- def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
- if (p.isEmpty) Option(d)
- else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
- }
- outed(intp.get.replOutput.dir, splat.init) map { d =>
- listFunsInAbsFile(name, member, d) map packaged
- }
- } else {
- loader locate k map { w =>
- if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
- else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
- else Nil
+ // find closure classes in repl outdir or try asking the classloader where to look
+ val fs =
+ if (target.isRepl)
+ (intp.get.replOutput.dir descend target.splat.init) map { d =>
+ listFunsInAbsFile(target)(d) map (_.asClassName) map packaged
+ }
+ else
+ loader locate target.path map {
+ case d if d.isDirectory => listFunsInDir(target)(d.toDirectory) map packaged
+ case j if j.isJar => listFunsInJar(target)(j.toFile) map packaged
+ case _ => Nil
+ }
+ val res = fs map (_.to[Seq]) getOrElse Seq()
+ // on second thought, we don't care about lambda method classes, just the impl methods
+ val rev =
+ res flatMap {
+ case x @ closure(_, "lambda", _, _) => labdaMethod(x, target)
+ //target.member flatMap (_ => labdaMethod(x, target)) getOrElse s"${target.name}#$$anonfun"
+ case x => Some(x)
+ }
+ rev
+ }
+ // given C$lambda$$g$n for member g and n in 1..N, find the C.accessor$x
+ // and the C.$anonfun$x it forwards to.
+ def lambdaMethod(lambda: String, target: Target): Option[String] = {
+ import scala.tools.asm.ClassReader
+ import scala.tools.asm.Opcodes.INVOKESTATIC
+ import scala.tools.asm.tree.{ ClassNode, MethodInsnNode }
+ // the accessor methods invoked statically by the apply of the given closure class
+ def accesses(s: String): Seq[(String, String)] = {
+ val accessor = """accessor\$\d+""".r
+ loader classReader s withMethods { ms =>
+ ms filter (_.name == "apply") flatMap (_.instructions.toArray.collect {
+ case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && when(i.name) { case accessor(_*) => true } => (i.owner, i.name)
+ })
}
}
- fs match {
- case Some(xs) => xs.to[Seq] // maybe empty
- case None => Seq() // nothing found, e.g., junk input
+ // get the k.$anonfun for the accessor k.m
+ def anonOf(k: String, m: String): String = {
+ val res =
+ loader classReader k withMethods { ms =>
+ ms filter (_.name == m) flatMap (_.instructions.toArray.collect {
+ case i: MethodInsnNode if i.getOpcode == INVOKESTATIC && i.name.startsWith("$anonfun") => i.name
+ })
+ }
+ assert(res.size == 1)
+ res.head
+ }
+ // the lambdas invoke accessors that call the anonfuns of interest. Filter k on the k#$anonfuns.
+ val ack = accesses(lambda)
+ assert(ack.size == 1) // There can be only one.
+ ack.head match {
+ case (k, _) if target.isModule && !(k endsWith "$") => None
+ case (k, m) => Some(s"${k}#${anonOf(k, m)}")
}
}
- def funs(ks: Seq[String]) = ks flatMap funsOf _
+ /** Translate the supplied targets to patterns for anonfuns.
+ * Pattern is typename $ label [[$]$func] $n where label is $anonfun or lambda,
+ * and lambda includes the extra dollar, func is a method name, and n is an int.
+ * The typename for a nested class is dollar notation, Betty$Bippy.
+ *
+ * If C has anonfun closure classes, then use C$$anonfun$f$1 (various names, C# filters on apply).
+ * If C has lambda closure classes, then use C#$anonfun (special-cased by output filter).
+ */
+ def funs(ks: Seq[String]): Seq[String] = ks flatMap funsOf
}
}
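Hypothetical class names of the shape the closure pattern above is meant to match:

    //   "Foo$$anonfun$bar$1"   // classic anonfun closure class for method bar in Foo
    //   "Foo$lambda$$bar$1"    // delambdafy-style closure class, with the extra dollar after "lambda"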
+trait Javap {
+ def loader: ScalaClassLoader
+ def printWriter: PrintWriter
+ def apply(args: Seq[String]): List[Javap.JpResult]
+ def tryFile(path: String): Option[Array[Byte]]
+ def tryClass(path: String): Array[Byte]
+}
+
object Javap {
def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
def apply(path: String): Unit = apply(Seq(path))
def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
- trait Showable {
+ private[interpreter] trait Showable {
def show(): Unit
}
- sealed trait JpResult extends scala.tools.util.JpResult {
+ sealed trait JpResult {
type ResultType
def isError: Boolean
def value: ResultType
@@ -751,8 +842,13 @@ object Javap {
def isError = false
def show() = value.show() // output to tool's PrintWriter
}
- implicit class Lastly[A](val t: Try[A]) extends AnyVal {
- private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
- def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
- }
+}
+
+object NoJavap extends Javap {
+ import Javap._
+ def loader: ScalaClassLoader = getClass.getClassLoader
+ def printWriter: PrintWriter = new PrintWriter(System.err, true)
+ def apply(args: Seq[String]): List[JpResult] = Nil
+ def tryFile(path: String): Option[Array[Byte]] = None
+ def tryClass(path: String): Array[Byte] = Array()
}
diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 12d6ee5112..9f555aee14 100644
--- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -76,8 +76,11 @@ trait LoopCommands {
// the default result means "keep running, and don't record that line"
val default = Result(keepRunning = true, None)
+ // "keep running, and record this line"
+ def recording(line: String) = Result(keepRunning = true, Option(line))
+
// most commands do not want to micromanage the Result, but they might want
- // to print something to the console, so we accomodate Unit and String returns.
+ // to print something to the console, so we accommodate Unit and String returns.
implicit def resultFromUnit(x: Unit): Result = default
implicit def resultFromString(msg: String): Result = {
echoCommandMessage(msg)
@@ -85,4 +88,3 @@ trait LoopCommands {
}
}
}
-
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index a37cdc2ec8..bcba7b6dfd 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -102,6 +102,18 @@ trait MemberHandlers {
class GenericHandler(member: Tree) extends MemberHandler(member)
+ import scala.io.AnsiColor.{ BOLD, BLUE, GREEN, RESET }
+
+ def color(c: String, s: String) =
+ if (replProps.colorOk) string2code(BOLD) + string2code(c) + s + string2code(RESET)
+ else s
+
+ def colorName(s: String) =
+ color(BLUE, string2code(s))
+
+ def colorType(s: String) =
+ color(GREEN, string2code(s))
+
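Assuming scala.color is set, the helpers above wrap the rendered name and type in ANSI codes, each passed through string2code for use in the generated extraction code; schematically:

    //   colorName("x")    == string2code(BOLD) + string2code(BLUE)  + string2code("x")   + string2code(RESET)
    //   colorType("Int")  == string2code(BOLD) + string2code(GREEN) + string2code("Int") + string2code(RESET)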
class ValHandler(member: ValDef) extends MemberDefHandler(member) {
val maxStringElements = 1000 // no need to mkString billions of elements
override def definesValue = true
@@ -119,15 +131,20 @@ trait MemberHandlers {
if (replProps.vids) s"""" + f"@$${System.identityHashCode($path)}%8x" + """"
else ""
- """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
+ val nameString = colorName(prettyName) + vidString
+ val typeString = colorType(req typeOf name)
+ s""" + "$nameString: $typeString = " + $resultString"""
}
}
}
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
override def definesValue = flattensToEmpty(member.vparamss) // true if 0-arity
- override def resultExtractionCode(req: Request) =
- if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
+ override def resultExtractionCode(req: Request) = {
+ val nameString = colorName(name)
+ val typeString = colorType(req typeOf name)
+ if (mods.isPublic) s""" + "$nameString: $typeString\\n"""" else ""
+ }
}
abstract class MacroHandler(member: DefDef) extends MemberDefHandler(member) {
diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index f69a5b487d..8d8140b638 100644
--- a/src/repl/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -155,7 +155,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
}
object InternalInfo extends LowPriorityInternalInfo { }
- /** Now dealing with the problem of acidentally calling a method on Type
+ /** Now dealing with the problem of accidentally calling a method on Type
* when you're holding a Symbol and seeing the Symbol converted to the
* type of Symbol rather than the type of the thing represented by the
* symbol, by only implicitly installing one method, "?", and the rest
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index 36e6dbbccc..8c4faf7278 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -13,6 +13,9 @@ class ReplProps {
private def bool(name: String) = BooleanProp.keyExists(name)
private def int(name: String) = IntProp(name)
+ // This property is used in TypeDebugging. Let's recycle it.
+ val colorOk = bool("scala.color")
+
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
val trace = bool("scala.repl.trace")
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
index 88372334d6..e6f5a4089e 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -32,6 +32,24 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i
override def warning(pos: Position, msg: String): Unit = withoutTruncating(super.warning(pos, msg))
override def error(pos: Position, msg: String): Unit = withoutTruncating(super.error(pos, msg))
+ import scala.io.AnsiColor.{ RED, YELLOW, RESET }
+
+ def severityColor(severity: Severity): String = severity match {
+ case ERROR => RED
+ case WARNING => YELLOW
+ case INFO => RESET
+ }
+
+ override def print(pos: Position, msg: String, severity: Severity) {
+ val prefix = (
+ if (replProps.colorOk)
+ severityColor(severity) + clabel(severity) + RESET
+ else
+ clabel(severity)
+ )
+ printMessage(pos, prefix + msg)
+ }
+
override def printMessage(msg: String) {
// Avoiding deadlock if the compiler starts logging before
// the lazy val is complete.
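Assuming clabel yields the usual "error: " and "warning: " labels from ConsoleReporter, the colored prefix built above renders roughly as follows (ANSI escapes shown symbolically):

    //   <RED>error: <RESET>not found: value bippy
    //   <YELLOW>warning: <RESET>match may not be exhaustive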
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
index 43da5c6f12..1664546cab 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -28,5 +28,8 @@ trait ReplStrings {
def any2stringOf(x: Any, maxlen: Int) =
"scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
- def words(s: String) = (s.trim split "\\s+" filterNot (_ == "")).toList
+ // no escaped or nested quotes
+ private[this] val inquotes = """(['"])(.*?)\1""".r
+ def unquoted(s: String) = s match { case inquotes(_, w) => w ; case _ => s }
+ def words(s: String) = (s.trim split "\\s+" filterNot (_ == "") map unquoted).toList
}
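A brief illustration of the quote stripping above; the split on whitespace happens first, so quoting does not protect embedded spaces (inputs are hypothetical):

    //   words("""save "foo.txt"""")   // List(save, foo.txt)     -- quotes around a single token are stripped
    //   words("""say 'hi there'""")   // List(say, 'hi, there')  -- tokens are split before unquoting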
diff --git a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
index 6634dc6944..49b8433a8c 100644
--- a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -22,14 +22,19 @@ extends InteractiveReader
def reset() = ()
def redrawLine() = ()
- def readOneLine(prompt: String): String = {
- if (interactive) {
- out.print(prompt)
- out.flush()
- }
- in.readLine()
+
+ // InteractiveReader internals
+ protected def readOneLine(prompt: String): String = {
+ echo(prompt)
+ readOneLine()
+ }
+ protected def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader")
+
+ protected def readOneLine(): String = in.readLine()
+ protected def echo(s: String): Unit = if (interactive) {
+ out.print(s)
+ out.flush()
}
- def readOneKey(prompt: String) = sys.error("No char-based input in SimpleReader")
}
object SimpleReader {
@@ -39,3 +44,13 @@ object SimpleReader {
def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
new SimpleReader(in, out, interactive)
}
+
+// pretend we are a console for verbose purposes
+trait EchoReader extends SimpleReader {
+ // if there is more input, then maybe echo the prompt and the input
+ override def readOneLine(prompt: String) = {
+ val input = readOneLine()
+ if (input != null) echo(f"$prompt$input%n")
+ input
+ }
+}
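A hedged usage sketch: mixing EchoReader into a SimpleReader over piped input makes each prompt and the line read visible, roughly as if typed at a console (the construction shown is illustrative):

    //   val reader = new SimpleReader(bufferedIn, printWriter, interactive = true) with EchoReader
    //   reader.readLine("scala> ")    // also prints "scala> <line read from bufferedIn>"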
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index 079097d7a2..56f1e65376 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -11,6 +11,7 @@ import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ClassTag, classTag}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
import scala.util.control.Exception.catching
+import scala.util.Try
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
@@ -196,4 +197,14 @@ package object interpreter extends ReplConfig with ReplStrings {
}
}
}
+
+ /* debug assist
+ private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal {
+ import StringContext._, runtime.ScalaRunTime.stringOf
+ def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf)
+ } debug assist */
+ private[nsc] implicit class `try lastly`[A](val t: Try[A]) extends AnyVal {
+ private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
+ def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+ }
}
diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index 36a1405b11..034416e844 100644
--- a/src/scaladoc/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -543,7 +543,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Tests if a file exists and prints a warning in case it doesn't. Always
* returns the file, even if it doesn't exist.
*
- * @param file A file to test for existance.
+ * @param file A file to test for existence.
* @return The same file.
*/
private def existing(file: File): File = {
diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index 52a0c20a11..32a6ba0ce3 100644
--- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -18,14 +18,10 @@ class ScalaDoc {
val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString)
def process(args: Array[String]): Boolean = {
- var reporter: ConsoleReporter = null
+ var reporter: ScalaDocReporter = null
val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"),
msg => reporter.printMessage(msg))
- reporter = new ConsoleReporter(docSettings) {
- // need to do this so that the Global instance doesn't trash all the
- // symbols just because there was an error
- override def hasErrors = false
- }
+ reporter = new ScalaDocReporter(docSettings)
val command = new ScalaDoc.Command(args.toList, docSettings)
def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
@@ -50,12 +46,18 @@ class ScalaDoc {
}
finally reporter.printSummary()
- // not much point in returning !reporter.hasErrors when it has
- // been overridden with constant false.
- true
+ !reporter.reallyHasErrors
}
}
+class ScalaDocReporter(settings: Settings) extends ConsoleReporter(settings) {
+
+ // sometimes we need to lie so that the Global instance doesn't
+ // trash all the symbols just because there was an error
+ override def hasErrors = false
+ def reallyHasErrors = super.hasErrors
+}
+
object ScalaDoc extends ScalaDoc {
class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
override def cmdName = "scaladoc"
diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index 6dc3e5a62b..f03b848af6 100644
--- a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -15,13 +15,14 @@ import DocParser.Parsed
* right after parsing so it can read `DocDefs` from source code which would
* otherwise cause the compiler to go haywire.
*/
-class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
def this() = this(new Settings(Console println _))
// the usual global initialization
locally { new Run() }
+ override def forScaladoc = true
override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index 7cd8fa8e51..d31b877262 100755
--- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -345,12 +345,28 @@ trait CommentFactoryBase { this: MemberLookupBase =>
Map.empty[String, Body] ++ pairs
}
+ def linkedExceptions: Map[String, Body] = {
+ val m = allSymsOneTag(SimpleTagKey("throws"))
+
+ m.map { case (name,body) =>
+ val link = memberLookup(pos, name, site)
+ val newBody = body match {
+ case Body(List(Paragraph(Chain(content)))) =>
+ val descr = Text(" ") +: content
+ val entityLink = EntityLink(Monospace(Text(name)), link)
+ Body(List(Paragraph(Chain(entityLink +: descr))))
+ case _ => body
+ }
+ (name, newBody)
+ }
+ }
+
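What the rewriting above is meant to produce for a throws tag, assuming a comment containing "@throws java.io.IOException if the file is missing" (structure abbreviated):

    //   before: Body(Paragraph(Chain(Text("if the file is missing"), ...)))
    //   after:  Body(Paragraph(Chain(EntityLink(Monospace(Text("java.io.IOException")), <link>),
    //                                Text(" "), Text("if the file is missing"), ...)))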
val com = createComment (
body0 = Some(parseWikiAtSymbol(docBody.toString, pos, site)),
authors0 = allTags(SimpleTagKey("author")),
see0 = allTags(SimpleTagKey("see")),
result0 = oneTag(SimpleTagKey("return")),
- throws0 = allSymsOneTag(SimpleTagKey("throws")),
+ throws0 = linkedExceptions,
valueParams0 = allSymsOneTag(SimpleTagKey("param")),
typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
version0 = oneTag(SimpleTagKey("version")),
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 295bae5bef..3738e79ffe 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -228,6 +228,26 @@ abstract class HtmlPage extends Page { thisPage =>
</a>
</span>
+ def companionAndPackage(tpl: DocTemplateEntity): Elem =
+ <span class="morelinks">{
+ tpl.companion match {
+ case Some(companionTpl) =>
+ val objClassTrait =
+ if (companionTpl.isObject) s"object ${tpl.name}"
+ else if (companionTpl.isTrait) s"trait ${companionTpl.name}"
+ else s"class ${companionTpl.name}"
+ <div>
+ Related Docs:
+ <a href={relativeLinkTo(tpl.companion.get)} title="See companion">{objClassTrait}</a>
+ | {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")}
+ </div>
+ case None =>
+ <div>Related Doc:
+ {templateToHtml(tpl.inTemplate, s"package ${tpl.inTemplate.name}")}
+ </div>
+ }
+ }</span>
+
def memberToUrl(template: Entity, isSelf: Boolean = true): String = {
val (signature: Option[String], containingTemplate: TemplateEntity) = template match {
case dte: DocTemplateEntity if (!isSelf) => (Some(dte.signature), dte.inTemplate)
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 45cef88f7a..9994cac3b4 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -110,7 +110,9 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<img src={ relativeLinkTo(List(docEntityKindToBigImage(tpl), "lib")) }/>
}}
{ owner }
- <h1>{ displayName }</h1> { permalink(tpl) }
+ <h1>{ displayName }</h1>{
+ if (tpl.isPackage) NodeSeq.Empty else <h3>{companionAndPackage(tpl)}</h3>
+ }{ permalink(tpl) }
</div>
{ signature(tpl, isSelf = true) }
@@ -611,7 +613,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<dd>{
val exceptionsXml: List[NodeSeq] =
for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield
- <span class="cmt">{Text(name) ++ bodyToHtml(body)}</span>
+ <span class="cmt">{bodyToHtml(body)}</span>
exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
}</dd>
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
index 478f2e38ac..680ead7a59 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
@@ -25,7 +25,7 @@ $(document).ready(function()
$(".diagram-container").css("display", "block");
$(".diagram").each(function() {
- // store inital dimensions
+ // store initial dimensions
$(this).data("width", $("svg", $(this)).width());
$(this).data("height", $("svg", $(this)).height());
// store unscaled clone of SVG element
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index 6d94452f3a..6eee280267 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -397,6 +397,18 @@ div.members > ol > li:last-child {
margin-bottom: 5px;
}
+#definition .morelinks {
+ text-align: right;
+ position: absolute;
+ top: 40px;
+ right: 10px;
+ width: 450px;
+}
+
+#definition .morelinks a {
+ color: #EBEBEB;
+}
+
#template .members li .permalink {
position: absolute;
top: 5px;
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index 6932f01e9a..7fe8903c76 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -117,7 +117,7 @@ trait MemberEntity extends Entity {
def toRoot: List[MemberEntity]
/** The templates in which this member has been declared. The first element of the list is the template that contains
- * the currently active declaration of this member, subsequent elements are declarations that have been overriden. If
+ * the currently active declaration of this member, subsequent elements are declarations that have been overridden. If
* the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All
* elements of this list are in the linearization of `inTemplate`. */
def inDefinitionTemplates: List[TemplateEntity]
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 2993c4c4b9..cc2c0f890d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -313,7 +313,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* Subclass cache */
private lazy val subClassesCache = (
- if (sym == AnyRefClass) null
+ if (sym == AnyRefClass || sym == AnyClass) null
else mutable.ListBuffer[DocTemplateEntity]()
)
def registerSubClass(sc: DocTemplateEntity): Unit = {
@@ -753,8 +753,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
})
}
else if (bSym.isConstructor)
- if (conversion.isDefined)
- None // don't list constructors inherted by implicit conversion
+ if (conversion.isDefined || (bSym.enclClass.isAbstract && (bSym.enclClass.isSealed || bSym.enclClass.isFinal)))
+ // don't list constructors inherited by implicit conversion
+ // and don't list constructors of abstract sealed types (they cannot be accessed anyway)
+ None
else
Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor {
override def isConstructor = true
diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
index 70423cc7dc..fa3e8ff5cb 100644
--- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
@@ -182,14 +182,16 @@ abstract class ScaladocModelTest extends DirectTest {
}
}
- def countLinks(c: Comment, p: EntityLink => Boolean) = {
- def countLinks(body: Any): Int = body match {
+ def countLinks(c: Comment, p: EntityLink => Boolean): Int = countLinksInBody(c.body, p)
+
+ def countLinksInBody(body: Body, p: EntityLink => Boolean): Int = {
+ def countLinks(b: Any): Int = b match {
case el: EntityLink if p(el) => 1
case s: Seq[_] => s.toList.map(countLinks(_)).sum
case p: Product => p.productIterator.toList.map(countLinks(_)).sum
case _ => 0
}
- countLinks(c.body)
+ countLinks(body)
}
def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = {
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index c375a5bac4..de9c30b8af 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -9,7 +9,7 @@
package scala.tools.scalap
import scala.collection.mutable
-import mutable.{ Buffer, ListBuffer }
+import mutable.ListBuffer
object Arguments {
case class Parser(optionPrefix: Char) {
@@ -47,7 +47,7 @@ object Arguments {
}
def parseBinding(str: String, separator: Char): (String, String) = (str indexOf separator) match {
- case -1 => argumentError("missing '" + separator + "' in binding '" + str + "'") ; ("", "")
+ case -1 => argumentError(s"missing '$separator' in binding '$str'") ; ("", "")
case idx => ((str take idx).trim, (str drop (idx + 1)).trim)
}
@@ -71,7 +71,7 @@ object Arguments {
i += 1
} else if (optionalArgs contains args(i)) {
if ((i + 1) == args.length) {
- argumentError("missing argument for '" + args(i) + "'")
+ argumentError(s"missing argument for '${args(i)}'")
i += 1
} else {
res.addArgument(args(i), args(i + 1))
@@ -79,11 +79,11 @@ object Arguments {
}
} else if (optionalBindings contains args(i)) {
if ((i + 1) == args.length) {
- argumentError("missing argument for '" + args(i) + "'")
+ argumentError(s"missing argument for '${args(i)}'")
i += 1
} else {
res.addBinding(args(i),
- parseBinding(args(i + 1), optionalBindings(args(i))));
+ parseBinding(args(i + 1), optionalBindings(args(i))))
i += 2
}
} else {
@@ -92,23 +92,23 @@ object Arguments {
while ((i == j) && iter.hasNext) {
val prefix = iter.next
if (args(i) startsWith prefix) {
- res.addPrefixed(prefix, args(i).substring(prefix.length()).trim());
+ res.addPrefixed(prefix, args(i).substring(prefix.length()).trim())
i += 1
}
}
if (i == j) {
- val iter = prefixedBindings.keysIterator;
+ val iter = prefixedBindings.keysIterator
while ((i == j) && iter.hasNext) {
val prefix = iter.next
if (args(i) startsWith prefix) {
val arg = args(i).substring(prefix.length()).trim()
i = i + 1
res.addBinding(prefix,
- parseBinding(arg, prefixedBindings(prefix)));
+ parseBinding(arg, prefixedBindings(prefix)))
}
}
if (i == j) {
- argumentError("unknown option '" + args(i) + "'")
+ argumentError(s"unknown option '${args(i)}'")
i = i + 1
}
}
@@ -119,7 +119,7 @@ object Arguments {
def parse(options: String*)(args: Array[String]): Arguments = {
val parser = new Parser('-')
- options foreach (parser withOption _)
+ options foreach parser.withOption
parser parse args
}
}
@@ -142,7 +142,7 @@ class Arguments {
if (key.length > 0)
bindings.getOrElseUpdate(tag, new mutable.HashMap)(key) = value
- def addBinding(tag: String, binding: Tuple2[String, String]): Unit =
+ def addBinding(tag: String, binding: (String, String)): Unit =
addBinding(tag, binding._1, binding._2)
def addOther(arg: String): Unit = others += arg
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index c72f416a89..7c554d196c 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -10,11 +10,16 @@ package tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
import scala.reflect.NameTransformer
-import scalax.rules.scalasig._
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
-import scala.tools.util.PathResolver
-import ClassPath.DefaultJavaContext
+import scala.tools.nsc.Settings
+import scala.tools.nsc.classpath.AggregateFlatClassPath
+import scala.tools.nsc.classpath.FlatClassPathFactory
import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.settings.ClassPathRepresentationType
+import scala.tools.nsc.util.ClassFileLookup
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.util.JavaClassPath
+import scala.tools.util.PathResolverFactory
+import scalax.rules.scalasig._
/**The main object used to execute scalap on the command-line.
*
@@ -42,12 +47,12 @@ class Main {
*
* @param clazz the class file to be processed.
*/
- def processJavaClassFile(clazz: Classfile) {
+ def processJavaClassFile(clazz: Classfile): Unit = {
// construct a new output stream writer
val out = new OutputStreamWriter(Console.out)
val writer = new JavaWriter(clazz, out)
// print the class
- writer.printClass
+ writer.printClass()
out.flush()
}
@@ -60,21 +65,20 @@ class Main {
syms.head.parent match {
// Partial match
- case Some(p) if (p.name != "<empty>") => {
+ case Some(p) if p.name != "<empty>" =>
val path = p.path
if (!isPackageObject) {
- stream.print("package ");
- stream.print(path);
+ stream.print("package ")
+ stream.print(path)
stream.print("\n")
} else {
val i = path.lastIndexOf(".")
if (i > 0) {
- stream.print("package ");
+ stream.print("package ")
stream.print(path.substring(0, i))
stream.print("\n")
}
}
- }
case _ =>
}
// Print classes
@@ -96,7 +100,7 @@ class Main {
/** Executes scalap with the given arguments and classpath for the
* class denoted by `classname`.
*/
- def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = {
+ def process(args: Arguments, path: ClassFileLookup[AbstractFile])(classname: String): Unit = {
// find the classfile
val encName = classname match {
case "scala.AnyRef" => "java.lang.Object"
@@ -106,92 +110,115 @@ class Main {
// we can afford allocations because this is not a performance critical code
classname.split('.').map(NameTransformer.encode).mkString(".")
}
- val cls = path.findClass(encName)
- if (cls.isDefined && cls.get.binary.isDefined) {
- val cfile = cls.get.binary.get
- if (verbose) {
- Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + cfile.path)
- }
- val bytes = cfile.toByteArray
- if (isScalaFile(bytes)) {
- Console.println(decompileScala(bytes, isPackageObjectFile(encName)))
- } else {
- // construct a reader for the classfile content
- val reader = new ByteArrayReader(cfile.toByteArray)
- // parse the classfile
- val clazz = new Classfile(reader)
- processJavaClassFile(clazz)
- }
- // if the class corresponds to the artificial class scala.Any.
- // (see member list in class scala.tool.nsc.symtab.Definitions)
- }
- else
- Console.println("class/object " + classname + " not found.")
- }
- object EmptyClasspath extends ClassPath[AbstractFile] {
- /**
- * The short name of the package (without prefix)
- */
- def name = ""
- def asURLs = Nil
- def asClasspathString = ""
-
- val context = DefaultJavaContext
- val classes = IndexedSeq()
- val packages = IndexedSeq()
- val sourcepaths = IndexedSeq()
+ path.findClassFile(encName) match {
+ case Some(classFile) =>
+ if (verbose) {
+ Console.println(Console.BOLD + "FILENAME" + Console.RESET + " = " + classFile.path)
+ }
+ val bytes = classFile.toByteArray
+ if (isScalaFile(bytes)) {
+ Console.println(decompileScala(bytes, isPackageObjectFile(encName)))
+ } else {
+ // construct a reader for the classfile content
+ val reader = new ByteArrayReader(classFile.toByteArray)
+ // parse the classfile
+ val clazz = new Classfile(reader)
+ processJavaClassFile(clazz)
+ }
+      // if the class corresponds to the artificial class scala.Any
+      // (see member list in class scala.tools.nsc.symtab.Definitions)
+      case _ =>
+        Console.println(s"class/object $classname not found.")
+ }
}
}
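A minimal sketch (not part of the patch) of what the reworked `process` now expects from a caller, using only names that already appear in this file; `scala.Option` is just an example target:

    val settings = new Settings()
    settings.classpath.value = "."                            // same SI-6669 default used in createClassPath below
    val lookup = PathResolverFactory.create(settings).result  // yields a ClassFileLookup[AbstractFile]
    val parsed = Arguments.Parser('-').parse(Array.empty[String])
    new Main().process(parsed, lookup)("scala.Option")        // decompiles scala.Option to the console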
object Main extends Main {
+
+ private object opts {
+ val cp = "-cp"
+ val help = "-help"
+ val classpath = "-classpath"
+ val showPrivateDefs = "-private"
+ val verbose = "-verbose"
+ val version = "-version"
+
+ val classPathImplType = "-YclasspathImpl"
+ val disableFlatClassPathCaching = "-YdisableFlatCpCaching"
+ val logClassPath = "-Ylog-classpath"
+ }
+
/** Prints usage information for scalap. */
- def usage() {
- Console println """
+ def usage(): Unit = {
+ Console println s"""
|Usage: scalap {<option>} <name>
|where <name> is a fully-qualified class name or <package_name>.package for package objects
|and <option> is
- | -private print private definitions
- | -verbose print out additional information
- | -version print out the version number of scalap
- | -help display this usage message
- | -classpath <path> specify where to find user class files
- | -cp <path> specify where to find user class files
+ | ${opts.showPrivateDefs} print private definitions
+ | ${opts.verbose} print out additional information
+ | ${opts.version} print out the version number of scalap
+ | ${opts.help} display this usage message
+ | ${opts.classpath} <path> specify where to find user class files
+ | ${opts.cp} <path> specify where to find user class files
""".stripMargin.trim
}
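For example, the options above compose like this (an illustrative call, equivalent to running `scalap -private -verbose scala.Option` from a shell):

    Main.main(Array("-private", "-verbose", "scala.Option"))  // prints scala.Option including private members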
- def main(args: Array[String]) {
- // print usage information if there is no command-line argument
- if (args.isEmpty)
- return usage()
-
- val arguments = Arguments.Parser('-')
- .withOption("-private")
- .withOption("-verbose")
- .withOption("-version")
- .withOption("-help")
- .withOptionalArg("-classpath")
- .withOptionalArg("-cp")
- .parse(args);
-
- if (arguments contains "-version")
- Console.println(versionMsg)
- if (arguments contains "-help")
- usage()
-
- verbose = arguments contains "-verbose"
- printPrivates = arguments contains "-private"
- // construct a custom class path
- val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
- val path = cparg match {
- case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
+ def main(args: Array[String]): Unit =
+ // print usage information if there is no command-line argument
+ if (args.isEmpty) usage()
+ else {
+ val arguments = parseArguments(args)
+
+ if (arguments contains opts.version)
+ Console.println(versionMsg)
+ if (arguments contains opts.help)
+ usage()
+
+ verbose = arguments contains opts.verbose
+ printPrivates = arguments contains opts.showPrivateDefs
+ // construct a custom class path
+ val cpArg = List(opts.classpath, opts.cp) map arguments.getArgument reduceLeft (_ orElse _)
+
+ val settings = new Settings()
+
+ arguments getArgument opts.classPathImplType foreach settings.YclasspathImpl.tryToSetFromPropertyValue
+ settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching
+ settings.Ylogcp.value = arguments contains opts.logClassPath
+
+ val path = createClassPath(cpArg, settings)
+
+ // print the classpath if output is verbose
+ if (verbose)
+ Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString)
+
+ // process all given classes
+ arguments.getOthers foreach process(arguments, path)
}
- // print the classpath if output is verbose
- if (verbose)
- Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path)
- // process all given classes
- arguments.getOthers foreach process(arguments, path)
+ private def parseArguments(args: Array[String]) =
+ Arguments.Parser('-')
+ .withOption(opts.showPrivateDefs)
+ .withOption(opts.verbose)
+ .withOption(opts.version)
+ .withOption(opts.help)
+ .withOptionalArg(opts.classpath)
+ .withOptionalArg(opts.cp)
+ // TODO three temporary, hidden options to be able to test different classpath representations
+ .withOptionalArg(opts.classPathImplType)
+ .withOption(opts.disableFlatClassPathCaching)
+ .withOption(opts.logClassPath)
+ .parse(args)
+
+ private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match {
+ case Some(cp) => settings.YclasspathImpl.value match {
+ case ClassPathRepresentationType.Flat =>
+ AggregateFlatClassPath(new FlatClassPathFactory(settings).classesInExpandedPath(cp))
+ case ClassPathRepresentationType.Recursive =>
+ new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
+ }
+ case _ =>
+ settings.classpath.value = "." // include '.' in the default classpath SI-6669
+ PathResolverFactory.create(settings).result
}
}
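Finally, a sketch (not part of the patch) of how a tester might exercise the three hidden classpath options; it assumes the optional argument is passed as the following token and that the valid -YclasspathImpl choices are the strings behind ClassPathRepresentationType.Flat / Recursive (presumably "flat" and "recursive"):

    Main.main(Array(
      "-YclasspathImpl", "flat",     // assumed spelling of ClassPathRepresentationType.Flat
      "-YdisableFlatCpCaching",
      "-Ylog-classpath",
      "-cp", "build/quick/classes",  // hypothetical classpath
      "scala.Option"))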