-rw-r--r--.gitignore25
-rwxr-xr-xbuild.xml242
-rw-r--r--dbuild-meta.json61
-rw-r--r--src/actors/scala/actors/Scheduler.scala1
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala2
-rw-r--r--src/build/dbuild-meta-json-gen.scala11
-rw-r--r--src/build/maven/maven-deploy.xml1
-rw-r--r--src/build/maven/scala-partest-pom.xml62
-rw-r--r--src/build/pack.xml5
-rw-r--r--src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala2
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Validators.scala10
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Context.scala1
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Synthetics.scala66
-rw-r--r--src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala2
-rw-r--r--src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala3
-rw-r--r--src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala2
-rw-r--r--src/compiler/scala/reflect/macros/util/Helpers.scala7
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala20
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Settings.scala2
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl68
-rw-r--r--src/compiler/scala/tools/cmd/CommandLine.scala6
-rw-r--r--src/compiler/scala/tools/cmd/CommandLineParser.scala10
-rw-r--r--src/compiler/scala/tools/cmd/Opt.scala8
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala22
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala107
-rw-r--r--src/compiler/scala/tools/cmd/gen/Codegen.scala6
-rw-r--r--src/compiler/scala/tools/cmd/package.scala12
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala6
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala55
-rw-r--r--src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala30
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala188
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/Platform.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala34
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala7
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala6
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala328
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala8
-rw-r--r--src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala21
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala14
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala5
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala55
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala174
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala91
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala26
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala1068
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala28
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala27
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala49
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala82
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala37
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala674
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala15
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala86
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala27
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala9
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala36
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala184
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala64
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala19
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala9
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala471
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala108
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala13
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala520
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala74
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala9
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala3
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala15
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala94
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala21
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala2
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala4
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala1
-rw-r--r--src/eclipse/interactive/.classpath4
-rw-r--r--src/eclipse/partest/.classpath6
-rw-r--r--src/eclipse/partest/.project10
-rw-r--r--src/eclipse/scala-compiler/.classpath8
-rw-r--r--src/eclipse/scaladoc/.classpath6
-rw-r--r--src/eclipse/test-junit/.classpath1
-rw-r--r--src/intellij/README15
-rw-r--r--src/intellij/compiler.iml.SAMPLE5
-rwxr-xr-xsrc/intellij/diff.sh8
-rw-r--r--src/intellij/library.iml.SAMPLE2
-rw-r--r--src/intellij/manual.iml.SAMPLE3
-rw-r--r--src/intellij/parser-combinators.iml.SAMPLE22
-rw-r--r--src/intellij/partest.iml.SAMPLE12
-rw-r--r--src/intellij/reflect.iml.SAMPLE2
-rw-r--r--src/intellij/repl.iml.SAMPLE2
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE42
-rw-r--r--src/intellij/scala.iml.SAMPLE4
-rw-r--r--src/intellij/scaladoc.iml.SAMPLE3
-rwxr-xr-xsrc/intellij/setup.sh23
-rw-r--r--src/intellij/test.iml.SAMPLE3
-rw-r--r--src/intellij/xml.iml.SAMPLE22
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala9
-rw-r--r--src/library/scala/Boolean.scala42
-rw-r--r--src/library/scala/Byte.scala336
-rw-r--r--src/library/scala/Char.scala336
-rw-r--r--src/library/scala/Double.scala325
-rw-r--r--src/library/scala/Float.scala330
-rw-r--r--src/library/scala/Int.scala336
-rw-r--r--src/library/scala/Long.scala336
-rw-r--r--src/library/scala/Short.scala336
-rw-r--r--src/library/scala/StringContext.scala3
-rw-r--r--src/library/scala/Unit.scala9
-rw-r--r--src/library/scala/annotation/compileTimeOnly.scala22
-rw-r--r--src/library/scala/collection/immutable/List.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala1
-rw-r--r--src/library/scala/concurrent/Lock.scala1
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala40
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala2
-rw-r--r--src/library/scala/sys/process/package.scala2
-rw-r--r--src/library/scala/util/Properties.scala7
-rw-r--r--src/library/scala/util/matching/Regex.scala38
-rw-r--r--src/partest-extras/scala/tools/partest/ASMConverters.scala (renamed from src/partest/scala/tools/partest/ASMConverters.scala)0
-rw-r--r--src/partest-extras/scala/tools/partest/AsmNode.scala (renamed from src/partest/scala/tools/partest/AsmNode.scala)7
-rw-r--r--src/partest-extras/scala/tools/partest/BytecodeTest.scala (renamed from src/partest/scala/tools/partest/BytecodeTest.scala)19
-rw-r--r--src/partest-extras/scala/tools/partest/JavapTest.scala (renamed from src/partest/scala/tools/partest/JavapTest.scala)0
-rw-r--r--src/partest-extras/scala/tools/partest/ReplTest.scala (renamed from src/partest/scala/tools/partest/ReplTest.scala)5
-rw-r--r--src/partest-extras/scala/tools/partest/SigTest.scala (renamed from src/partest/scala/tools/partest/SigTest.scala)0
-rw-r--r--src/partest-extras/scala/tools/partest/Util.scala52
-rw-r--r--src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala (renamed from src/partest/scala/tools/partest/instrumented/Instrumentation.scala)0
-rw-r--r--src/partest-extras/scala/tools/partest/instrumented/Profiler.java82
-rw-r--r--src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java49
-rw-r--r--src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF (renamed from src/partest/scala/tools/partest/javaagent/MANIFEST.MF)0
-rw-r--r--src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java (renamed from src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java)4
-rw-r--r--src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java25
-rw-r--r--src/partest/README31
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala60
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala128
-rw-r--r--src/partest/scala/tools/partest/IcodeTest.scala43
-rw-r--r--src/partest/scala/tools/partest/MemoryTest.scala38
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala28
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala207
-rw-r--r--src/partest/scala/tools/partest/SecurityTest.scala19
-rw-r--r--src/partest/scala/tools/partest/StoreReporterDirectTest.scala15
-rw-r--r--src/partest/scala/tools/partest/TestKinds.scala66
-rw-r--r--src/partest/scala/tools/partest/TestState.scala65
-rw-r--r--src/partest/scala/tools/partest/TestUtil.scala38
-rw-r--r--src/partest/scala/tools/partest/antlib.xml4
-rw-r--r--src/partest/scala/tools/partest/instrumented/Profiler.java82
-rw-r--r--src/partest/scala/tools/partest/javaagent/ASMTransformer.java49
-rw-r--r--src/partest/scala/tools/partest/javaagent/ProfilingAgent.java25
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala30
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala189
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunner.scala224
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala54
-rw-r--r--src/partest/scala/tools/partest/nest/DirectCompiler.scala105
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala165
-rw-r--r--src/partest/scala/tools/partest/nest/NestRunner.scala15
-rw-r--r--src/partest/scala/tools/partest/nest/NestUI.scala182
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala88
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala99
-rw-r--r--src/partest/scala/tools/partest/nest/Runner.scala894
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala85
-rw-r--r--src/partest/scala/tools/partest/nest/StreamCapture.scala53
-rw-r--r--src/partest/scala/tools/partest/package.scala241
-rw-r--r--src/partest/scala/tools/partest/utils/Properties.scala18
-rw-r--r--src/reflect/scala/reflect/api/Exprs.scala5
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala12
-rw-r--r--src/reflect/scala/reflect/api/Names.scala4
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala6
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala8
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala36
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala113
-rw-r--r--src/reflect/scala/reflect/internal/Depth.scala28
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Kinds.scala22
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala2
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala3
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala20
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala52
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala20
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala24
-rw-r--r--src/reflect/scala/reflect/internal/TypeDebugging.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala183
-rw-r--r--src/reflect/scala/reflect/internal/Variance.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala6
-rw-r--r--src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala32
-rw-r--r--src/reflect/scala/reflect/internal/annotations/package.scala6
-rw-r--r--src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala13
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala4
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala50
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeComparers.scala10
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala60
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala19
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Collections.scala36
-rw-r--r--src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala4
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/TriState.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/WeakHashSet.scala31
-rw-r--r--src/reflect/scala/reflect/internal/util/package.scala2
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala6
-rw-r--r--src/reflect/scala/reflect/macros/Context.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Synthetics.scala107
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala21
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectSetup.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala6
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala11
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedTypes.scala3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ExprTyper.scala26
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala148
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Naming.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Power.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplVals.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Results.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/package.scala4
-rw-r--r--src/scalacheck/org/scalacheck/Arbitrary.scala447
-rw-r--r--src/scalacheck/org/scalacheck/Arg.scala20
-rw-r--r--src/scalacheck/org/scalacheck/Commands.scala148
-rw-r--r--src/scalacheck/org/scalacheck/ConsoleReporter.scala52
-rw-r--r--src/scalacheck/org/scalacheck/Gen.scala542
-rw-r--r--src/scalacheck/org/scalacheck/Pretty.scala127
-rw-r--r--src/scalacheck/org/scalacheck/Prop.scala818
-rw-r--r--src/scalacheck/org/scalacheck/Properties.scala96
-rw-r--r--src/scalacheck/org/scalacheck/ScalaCheckFramework.scala92
-rw-r--r--src/scalacheck/org/scalacheck/Shrink.scala208
-rw-r--r--src/scalacheck/org/scalacheck/Test.scala392
-rw-r--r--src/scalacheck/org/scalacheck/util/Buildable.scala63
-rw-r--r--src/scalacheck/org/scalacheck/util/CmdLineParser.scala101
-rw-r--r--src/scalacheck/org/scalacheck/util/FreqMap.scala65
-rw-r--r--src/scalacheck/org/scalacheck/util/StdRand.scala12
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala6
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala7
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala9
-rw-r--r--src/scalap/scala/tools/scalap/Arguments.scala1
-rw-r--r--src/scalap/scala/tools/scalap/ByteArrayReader.scala13
-rw-r--r--src/scalap/scala/tools/scalap/Classfile.scala2
-rw-r--r--src/scalap/scala/tools/scalap/Classfiles.scala2
-rw-r--r--src/scalap/scala/tools/scalap/CodeWriter.scala8
-rw-r--r--src/scalap/scala/tools/scalap/Decode.scala30
-rw-r--r--src/scalap/scala/tools/scalap/JavaWriter.scala4
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala18
-rw-r--r--src/scalap/scala/tools/scalap/MetaParser.scala7
-rw-r--r--src/scalap/scala/tools/scalap/Properties.scala3
-rw-r--r--src/scalap/scala/tools/scalap/rules/Memoisable.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala)23
-rw-r--r--src/scalap/scala/tools/scalap/rules/Result.scala69
-rw-r--r--src/scalap/scala/tools/scalap/rules/Rule.scala172
-rw-r--r--src/scalap/scala/tools/scalap/rules/Rules.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/Rules.scala)62
-rw-r--r--src/scalap/scala/tools/scalap/rules/SeqRule.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala)57
-rw-r--r--src/scalap/scala/tools/scalap/rules/package.scala6
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/ClassFileParser.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala)93
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/Flags.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala)7
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/ScalaSig.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala)83
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/ScalaSigPrinter.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala)51
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/SourceFileAttributeParser.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala)12
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/Symbol.scala70
-rw-r--r--src/scalap/scala/tools/scalap/scalasig/Type.scala22
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Result.scala72
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Rule.scala177
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala73
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala25
-rw-r--r--src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala19
-rwxr-xr-xsrc/xml/scala/xml/Elem.scala1
-rw-r--r--starr.number1
-rwxr-xr-xtest/build-partest.xml8
-rw-r--r--test/files/ant/README42
-rw-r--r--test/files/ant/fsc001-build.check14
-rw-r--r--test/files/ant/fsc001-build.xml26
-rw-r--r--test/files/ant/fsc001.scala7
-rw-r--r--test/files/ant/fsc002-build.check14
-rw-r--r--test/files/ant/fsc002-build.xml28
-rw-r--r--test/files/ant/fsc002.scala6
-rw-r--r--test/files/ant/fsc003-build.check14
-rw-r--r--test/files/ant/fsc003-build.xml25
-rw-r--r--test/files/ant/fsc003.scala7
-rw-r--r--test/files/ant/imported.xml150
-rw-r--r--test/files/ant/scalac001-build.check14
-rw-r--r--test/files/ant/scalac001-build.xml26
-rw-r--r--test/files/ant/scalac001.scala6
-rw-r--r--test/files/ant/scalac002-build.check14
-rw-r--r--test/files/ant/scalac002-build.xml28
-rw-r--r--test/files/ant/scalac002.scala7
-rw-r--r--test/files/ant/scalac003-build.check14
-rw-r--r--test/files/ant/scalac003-build.xml25
-rw-r--r--test/files/ant/scalac003.scala7
-rw-r--r--test/files/ant/scalac004-build.check24
-rw-r--r--test/files/ant/scalac004-build.xml26
-rw-r--r--test/files/ant/scalac004.scala11
-rw-r--r--test/files/ant/scaladoc-build.check15
-rw-r--r--test/files/ant/scaladoc-build.xml26
-rw-r--r--test/files/ant/scaladoc.scala7
-rw-r--r--test/files/jvm/opt_value_class.check2
-rw-r--r--test/files/jvm/opt_value_class/Value_1.scala28
-rw-r--r--test/files/jvm/opt_value_class/test.scala16
-rw-r--r--test/files/lib/scalacheck.jar.desired.sha11
-rw-r--r--test/files/neg/compile-time-only-a.check49
-rw-r--r--test/files/neg/compile-time-only-a.scala57
-rw-r--r--test/files/neg/compile-time-only-b.check7
-rw-r--r--test/files/neg/compile-time-only-b.scala15
-rw-r--r--test/files/neg/javac-error.check10
-rw-r--r--test/files/neg/macro-abort.check4
-rw-r--r--test/files/neg/macro-abort/Macros_1.scala9
-rw-r--r--test/files/neg/macro-abort/Test_2.scala3
-rw-r--r--test/files/neg/macro-exception.check7
-rw-r--r--test/files/neg/macro-exception/Macros_1.scala9
-rw-r--r--test/files/neg/macro-exception/Test_2.scala3
-rw-r--r--test/files/neg/macro-invalidusage-presuper.check2
-rw-r--r--test/files/neg/quasiquotes-syntax-error-position.check32
-rw-r--r--test/files/neg/quasiquotes-syntax-error-position.scala15
-rw-r--r--test/files/neg/t1980.check12
-rw-r--r--test/files/neg/t1980.flags1
-rw-r--r--test/files/neg/t1980.scala9
-rw-r--r--test/files/neg/t2796.check5
-rw-r--r--test/files/neg/t2796.flags2
-rw-r--r--test/files/neg/t2796.scala3
-rw-r--r--test/files/neg/t4425.check13
-rw-r--r--test/files/neg/t4425.scala10
-rw-r--r--test/files/neg/t4425b.check61
-rw-r--r--test/files/neg/t4425b.scala38
-rw-r--r--test/files/neg/t5903a.check7
-rw-r--r--test/files/neg/t5903a/Macros_1.scala28
-rw-r--r--test/files/neg/t5903a/Test_2.scala6
-rw-r--r--test/files/neg/t5903b.check9
-rw-r--r--test/files/neg/t5903b/Macros_1.scala23
-rw-r--r--test/files/neg/t5903b/Test_2.scala6
-rw-r--r--test/files/neg/t5903c.check7
-rw-r--r--test/files/neg/t5903c/Macros_1.scala26
-rw-r--r--test/files/neg/t5903c/Test_2.scala6
-rw-r--r--test/files/neg/t5903d.check7
-rw-r--r--test/files/neg/t5903d/Macros_1.scala23
-rw-r--r--test/files/neg/t5903d/Test_2.scala6
-rw-r--r--test/files/neg/t5903e.check4
-rw-r--r--test/files/neg/t5903e/Macros_1.scala25
-rw-r--r--test/files/neg/t5903e/Test_2.scala6
-rw-r--r--test/files/neg/t6289.check10
-rw-r--r--test/files/neg/t6289.flags (renamed from test/files/neg/javac-error.flags)0
-rw-r--r--test/files/neg/t6289/J.java (renamed from test/files/neg/javac-error/J.java)0
-rw-r--r--test/files/neg/t6289/SUT_5.scala (renamed from test/files/neg/javac-error/SUT_5.scala)0
-rw-r--r--test/files/neg/t6675.check2
-rw-r--r--test/files/neg/t7020.check19
-rw-r--r--test/files/neg/t7020.flags1
-rw-r--r--test/files/neg/t7020.scala30
-rw-r--r--test/files/neg/t7214neg.check7
-rw-r--r--test/files/neg/t7214neg.scala57
-rw-r--r--test/files/neg/t7325.check10
-rw-r--r--test/files/neg/t7501.check7
-rw-r--r--test/files/neg/t7501/t7501_1.scala12
-rw-r--r--test/files/neg/t7501/t7501_2.scala5
-rw-r--r--test/files/neg/t7694b.check7
-rw-r--r--test/files/neg/t7715.check13
-rw-r--r--test/files/neg/t7715.scala18
-rw-r--r--test/files/neg/t7721.check21
-rw-r--r--test/files/neg/t7721.flags1
-rw-r--r--test/files/neg/t7721.scala140
-rw-r--r--test/files/neg/t7752.check27
-rw-r--r--test/files/neg/t7752.scala26
-rw-r--r--test/files/neg/t7756a.check7
-rw-r--r--test/files/neg/t7756a.scala11
-rw-r--r--test/files/neg/t7756b.check6
-rw-r--r--test/files/neg/t7756b.flags1
-rw-r--r--test/files/neg/t7756b.scala5
-rw-r--r--test/files/neg/t7757a.check4
-rw-r--r--test/files/neg/t7757a.scala1
-rw-r--r--test/files/neg/t7757b.check4
-rw-r--r--test/files/neg/t7757b.scala2
-rw-r--r--test/files/neg/t997.check7
-rw-r--r--test/files/pos/annotated-treecopy/Impls_Macros_1.scala2
-rw-r--r--test/files/pos/erasure-nsquared.scala35
-rw-r--r--test/files/pos/extractor-types.scala30
-rw-r--r--test/files/pos/optmatch.scala33
-rw-r--r--test/files/pos/overloaded-unapply.scala8
-rw-r--r--test/files/pos/patmat-extract-tparam.scala13
-rw-r--r--test/files/pos/t6797.scala4
-rw-r--r--test/files/pos/t7014/ThreadSafety.java9
-rw-r--r--test/files/pos/t7014/ThreadSafetyLevel.java8
-rw-r--r--test/files/pos/t7014/t7014.scala4
-rw-r--r--test/files/pos/t7486-named.scala8
-rw-r--r--test/files/pos/t7486.scala (renamed from test/pending/pos/t7486.scala)0
-rw-r--r--test/files/pos/t7690.scala17
-rw-r--r--test/files/pos/t7694.scala40
-rw-r--r--test/files/pos/t7716.scala16
-rw-r--r--test/files/pos/t7785.scala34
-rw-r--r--test/files/pos/t942/Amount_1.java5
-rw-r--r--test/files/pos/t942/Test_2.scala3
-rwxr-xr-xtest/files/presentation/doc/doc.scala4
-rw-r--r--test/files/run/analyzerPlugins.scala6
-rw-r--r--test/files/run/deprecate-early-type-defs.check3
-rw-r--r--test/files/run/deprecate-early-type-defs.flags1
-rw-r--r--test/files/run/deprecate-early-type-defs.scala1
-rw-r--r--test/files/run/interpolation.flags1
-rw-r--r--test/files/run/interpolationArgs.check4
-rw-r--r--test/files/run/interpolationArgs.flags1
-rw-r--r--test/files/run/interpolationMultiline1.flags1
-rw-r--r--test/files/run/interpolationMultiline2.flags1
-rw-r--r--test/files/run/macro-auto-duplicate.check1
-rw-r--r--test/files/run/macro-auto-duplicate/Macros_1.scala17
-rw-r--r--test/files/run/macro-auto-duplicate/Test_2.scala (renamed from test/files/run/macro-toplevel/Test_2.scala)3
-rw-r--r--test/files/run/macro-duplicate/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-expand-unapply-b.check2
-rw-r--r--test/files/run/macro-expand-unapply-b.flags1
-rw-r--r--test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala37
-rw-r--r--test/files/run/macro-expand-unapply-b/Test_2.scala8
-rw-r--r--test/files/run/macro-toplevel-companion-a.flags1
-rw-r--r--test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala14
-rw-r--r--test/files/run/macro-toplevel-companion-a/Test_2.scala8
-rw-r--r--test/files/run/macro-toplevel-companion-b.check4
-rw-r--r--test/files/run/macro-toplevel-companion-b.flags1
-rw-r--r--test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala15
-rw-r--r--test/files/run/macro-toplevel-companion-b/Test_2.scala11
-rw-r--r--test/files/run/macro-toplevel-companion-c.check3
-rw-r--r--test/files/run/macro-toplevel-companion-c.flags1
-rw-r--r--test/files/run/macro-toplevel-companion-c.scala51
-rw-r--r--test/files/run/macro-toplevel.check2
-rw-r--r--test/files/run/macro-toplevel/Macros_1.scala15
-rw-r--r--test/files/run/matchonseq.scala10
-rw-r--r--test/files/run/name-based-patmat.check10
-rw-r--r--test/files/run/name-based-patmat.scala75
-rw-r--r--test/files/run/patmat-behavior-2.check24
-rw-r--r--test/files/run/patmat-behavior-2.scala50
-rw-r--r--test/files/run/patmat-behavior.check90
-rw-r--r--test/files/run/patmat-behavior.scala95
-rw-r--r--test/files/run/patmat-bind-typed.check1
-rw-r--r--test/files/run/patmat-bind-typed.scala8
-rw-r--r--test/files/run/reflection-magicsymbols-invoke.check2
-rw-r--r--test/files/run/repl-javap-app.check9
-rw-r--r--test/files/run/repl-javap-outdir-funs/run-repl_7.scala13
-rw-r--r--test/files/run/repl-trim-stack-trace.scala33
-rw-r--r--test/files/run/stream_length.check1
-rw-r--r--test/files/run/string-extractor.check9
-rw-r--r--test/files/run/string-extractor.scala60
-rw-r--r--test/files/run/t5903a.check1
-rw-r--r--test/files/run/t5903a.flags1
-rw-r--r--test/files/run/t5903a/Macros_1.scala28
-rw-r--r--test/files/run/t5903a/Test_2.scala6
-rw-r--r--test/files/run/t5903b.check1
-rw-r--r--test/files/run/t5903b.flags1
-rw-r--r--test/files/run/t5903b/Macros_1.scala25
-rw-r--r--test/files/run/t5903b/Test_2.scala6
-rw-r--r--test/files/run/t5903c.check1
-rw-r--r--test/files/run/t5903c.flags1
-rw-r--r--test/files/run/t5903c/Macros_1.scala23
-rw-r--r--test/files/run/t5903c/Test_2.scala6
-rw-r--r--test/files/run/t5903d.check1
-rw-r--r--test/files/run/t5903d.flags1
-rw-r--r--test/files/run/t5903d/Macros_1.scala25
-rw-r--r--test/files/run/t5903d/Test_2.scala6
-rw-r--r--test/files/run/t5923a/Macros_1.scala42
-rw-r--r--test/files/run/t5923c.check1
-rw-r--r--test/files/run/t5923c/Macros_1.scala39
-rw-r--r--test/files/run/t5923c/Test_2.scala12
-rw-r--r--test/files/run/t5923d.check (renamed from test/files/run/macro-toplevel-companion-a.check)0
-rw-r--r--test/files/run/t5923d/Macros_1.scala9
-rw-r--r--test/files/run/t5923d/Test_2.scala7
-rw-r--r--test/files/run/t6331b.scala2
-rw-r--r--test/files/run/t6392b.check2
-rw-r--r--test/files/run/t6507.check26
-rw-r--r--test/files/run/t6507.scala14
-rw-r--r--test/files/run/t6989.check24
-rw-r--r--test/files/run/t7214.scala2
-rw-r--r--test/files/run/t7265.scala27
-rw-r--r--test/files/run/t7331a.check2
-rw-r--r--test/files/run/t7331a.scala10
-rw-r--r--test/files/run/t7331b.check3
-rw-r--r--test/files/run/t7331b.scala11
-rw-r--r--test/files/run/t7331c.check3
-rw-r--r--test/files/run/t7331c.scala11
-rw-r--r--test/files/run/t7407.check1
-rw-r--r--test/files/run/t7407.flags1
-rw-r--r--test/files/run/t7407.scala11
-rw-r--r--test/files/run/t7407b.check2
-rw-r--r--test/files/run/t7407b.flags1
-rw-r--r--test/files/run/t7407b.scala20
-rw-r--r--test/files/run/t7510.check0
-rw-r--r--test/files/run/t7510/Ann_1.java4
-rw-r--r--test/files/run/t7510/Test_2.scala9
-rw-r--r--test/files/run/t7715.check3
-rw-r--r--test/files/run/t7715.scala24
-rw-r--r--test/files/run/t7763.scala20
-rw-r--r--test/files/run/tailcalls.check39
-rw-r--r--test/files/run/toolbox_current_run_compiles.check2
-rw-r--r--test/files/run/toolbox_current_run_compiles.scala28
-rw-r--r--test/files/run/value-class-extractor-2.check8
-rw-r--r--test/files/run/value-class-extractor-2.scala108
-rw-r--r--test/files/run/value-class-extractor-seq.check3
-rw-r--r--test/files/run/value-class-extractor-seq.scala59
-rw-r--r--test/files/run/value-class-extractor.check9
-rw-r--r--test/files/run/value-class-extractor.scala91
-rw-r--r--test/files/scalacheck/CheckCollections.scala59
-rw-r--r--test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala2
-rw-r--r--test/files/scalacheck/quasiquotes/ErrorProps.scala6
-rw-r--r--test/files/scalap/abstractClass.check2
-rw-r--r--test/files/scalap/abstractMethod.check6
-rw-r--r--test/files/scalap/caseClass.check32
-rw-r--r--test/files/scalap/caseObject.check16
-rw-r--r--test/files/scalap/cbnParam.check2
-rw-r--r--test/files/scalap/classPrivate.check8
-rw-r--r--test/files/scalap/classWithExistential.check2
-rw-r--r--test/files/scalap/classWithSelfAnnotation.check4
-rw-r--r--test/files/scalap/covariantParam.check2
-rw-r--r--test/files/scalap/defaultParameter.check2
-rw-r--r--test/files/scalap/implicitParam.check2
-rw-r--r--test/files/scalap/packageObject.check2
-rw-r--r--test/files/scalap/paramClauses.check2
-rw-r--r--test/files/scalap/paramNames.check2
-rw-r--r--test/files/scalap/sequenceParam.check2
-rw-r--r--test/files/scalap/simpleClass.check2
-rw-r--r--test/files/scalap/traitObject.check6
-rw-r--r--test/files/scalap/typeAnnotations.check4
-rw-r--r--test/files/scalap/valAndVar.check4
-rw-r--r--test/files/scalap/wildcardType.check2
-rw-r--r--test/junit/scala/reflect/io/ZipArchiveTest.scala37
-rw-r--r--test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala67
-rw-r--r--test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala89
-rw-r--r--test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala47
-rw-r--r--test/junit/scala/tools/testing/AssertThrowsTest.scala34
-rw-r--r--test/junit/scala/tools/testing/AssertUtil.scala19
-rw-r--r--test/junit/scala/util/matching/regextract-char.scala58
-rw-r--r--test/junit/scala/util/t7265.scala (renamed from test/pending/junit/scala/util/t7265.scala)13
-rwxr-xr-xtest/partest92
-rw-r--r--test/pending/neg/t6680a.scala13
-rw-r--r--test/pending/neg/t6680b.check6
-rw-r--r--test/pending/neg/t6680b.scala10
-rw-r--r--test/pending/neg/t6680c.scala17
-rw-r--r--test/pending/pos/t7778/Foo_1.java6
-rw-r--r--test/pending/pos/t7778/Test_2.scala3
-rw-r--r--test/pending/run/t7733.check1
-rw-r--r--test/pending/run/t7733/Separate_1.scala5
-rw-r--r--test/pending/run/t7733/Test_2.scala9
-rwxr-xr-xtools/partest-ack158
-rw-r--r--versions.properties7
545 files changed, 8725 insertions, 13216 deletions
diff --git a/.gitignore b/.gitignore
index e60505f663..f90835d970 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,10 +9,21 @@
# see also test/files/.gitignore
#
-*.jar
-*~
+#
+# JARs aren't checked in, they are fetched by Ant / pull_binary_libs.sh
+#
+# We could be more concise with /lib/**/*.jar but that assumes
+# a late-model git.
+#
+/lib/ant/*.jar
+/lib/*.jar
+/test/files/codelib/*.jar
+/test/files/lib/*.jar
+/test/files/speclib/instrumented.jar
+/tools/*.jar
-build.properties
+# Developer specific Ant properties
+/build.properties
# target directories for ant build
/build/
@@ -33,11 +44,5 @@ build.properties
/.idea
/.settings
-# bak files produced by ./cleanup-commit
-*.bak
-
# Standard symbolic link to build/quick/bin
-qbin
-
-# Mac specific, but that is common enough a dev platform to warrant inclusion.
-.DS_Store
+/qbin
diff --git a/build.xml b/build.xml
index c399fcaf15..c65a3531ee 100755
--- a/build.xml
+++ b/build.xml
@@ -1,6 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="sabbus" default="build" xmlns:artifact="urn:maven-artifact-ant">
+<project name="sabbus" default="build"
+ xmlns:artifact="urn:maven-artifact-ant"
+ xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
<include file="test/build-partest.xml" as="partest"/>
<description>
@@ -56,6 +58,8 @@ TODO:
<target name="build-opt" description="Optimized version of build."> <optimized name="build"/></target>
<target name="test-opt" description="Optimized version of test."> <optimized name="test"/></target>
+ <target name="test-core-opt" description="Optimized version of test.core."> <optimized name="test.core"/></target>
+ <target name="test-stab-opt" description="Optimized version of test.stability."> <optimized name="test.stability"/></target>
<target name="dist-opt" description="Optimized version of dist."> <optimized name="dist"/></target>
<target name="partialdist-opt" description="Optimized version of partialdist."> <optimized name="partialdist"/></target>
<target name="fastdist-opt" description="Optimized version of fastdist."> <optimized name="fastdist"/></target>
@@ -116,18 +120,18 @@ TODO:
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
- <!-- read starr.version -->
- <property file="${basedir}/starr.number"/>
+ <!-- read versions.properties -->
+ <property file="${basedir}/versions.properties"/>
<!-- Sets location of pre-compiled libraries -->
<property name="library.starr.jar" value="${lib.dir}/scala-library.jar"/>
<property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
<property name="compiler.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
<property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
- <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
<!-- Sets location of build folders -->
<property name="build.dir" value="${basedir}/build"/>
+ <property name="build-deps.dir" value="${build.dir}/deps"/>
<property name="build-libs.dir" value="${build.dir}/libs"/>
<property name="build-asm.dir" value="${build.dir}/asm"/>
<property name="build-forkjoin.dir" value="${build-libs.dir}"/>
@@ -150,7 +154,6 @@ TODO:
<property name="dists.dir" value="${basedir}/dists"/>
<property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
- <property name="partest.version.number" value="0.9.3"/>
<property name="jline.version" value="2.11"/>
@@ -192,6 +195,18 @@ TODO:
</touch>
</target>
+ <macrodef name="copy-deps" description="Copy a file set based on maven dependency resolution to a directory. Currently used by the IntelliJ config files.">
+ <attribute name="fileset.prefix"></attribute>
+ <attribute name="out"></attribute>
+ <sequential>
+ <delete dir="${build-deps.dir}/@{out}" includes="*.jar"/>
+ <copy todir="${build-deps.dir}/@{out}">
+ <fileset refid="@{fileset.prefix}.fileset" />
+ <mapper type="flatten" />
+ </copy>
+ </sequential>
+ </macrodef>
+
<target name="init" depends="boot">
<!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
<taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib-ant.dir}/ant-contrib.jar"/>
@@ -218,6 +233,7 @@ TODO:
<artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
<dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
</artifact:dependencies>
+ <copy-deps fileset.prefix="junit" out="junit"/>
<!-- Pax runner -->
<property name="pax.exam.version" value="2.5.0"/>
@@ -234,20 +250,25 @@ TODO:
</artifact:dependencies>
- <artifact:dependencies pathId="partest.extras.classpath" filesetId="partest.extras.fileset" versionsId="partest.extras.versions">
- <dependency groupId="com.googlecode.java-diff-utils" artifactId="diffutils" version="1.3.0"/>
- <dependency groupId="org.scala-tools.testing" artifactId="test-interface" version="0.5" />
+ <artifact:dependencies pathId="partest.classpath" filesetId="partest.fileset" versionsId="partest.versions">
+ <!-- to facilitate building and publishing partest locally -->
+ <localRepository path="${user.home}/.m2/repository"/>
+ <!-- so we don't have to wait for artifacts to synch to maven central: -->
+ <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-partest_${scala.binary.version}" version="${partest.version.number}" />
</artifact:dependencies>
+ <copy-deps fileset.prefix="partest" out="partest"/>
<artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.deps.fileset" versionsId="repl.deps.versions">
<dependency groupId="jline" artifactId="jline" version="${jline.version}"/>
</artifact:dependencies>
+ <copy-deps fileset.prefix="repl.deps" out="repl"/>
<!-- BND support -->
<typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
<!-- Download STARR via maven if `starr.use.released` is set,
- and `starr.version` is specified (see the starr.number properties file).
+ and `starr.version` is specified (see the versions.properties properties file).
Want to slow down STARR changes, using only released versions. -->
<if><isset property="starr.use.released"/><then>
<echo message="Using Scala ${starr.version} for STARR."/>
@@ -364,7 +385,7 @@ TODO:
</then></if>
<!-- Allow this to be overridden simply -->
- <property name="sbt.latest.version" value="0.12.2"/>
+ <property name="sbt.latest.version" value="0.12.4"/>
<property name="sbt.src.dir" value="${build-sbt.dir}/${sbt.latest.version}/src"/>
<property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
@@ -379,7 +400,7 @@ TODO:
<property name="scalac.args" value=""/>
<property name="javac.args" value=""/>
- <property name="scalac.args.always" value="" />
+ <property name="scalac.args.always" value="-feature" />
<property name="scalac.args.optimise" value=""/> <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
<property name="scalac.args.locker" value="${scalac.args.all}"/>
@@ -464,6 +485,8 @@ TODO:
<if><isset property="locker.skip"/><then>
<echo message="Using STARR to build the quick stage (skipping locker)."/>
<path id="locker.compiler.path" refid="starr.compiler.path"/>
+ <!-- this is cheating, but should be close enough: -->
+ <path id="locker.compiler.build.path" refid="starr.compiler.path"/>
<property name="locker.locked" value="locker skipped"/></then>
<else>
<path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
@@ -477,7 +500,7 @@ TODO:
There must be a variable of the shape @{stage}.@{project}.build.path
for all @{stage} in locker, quick, strap
and all @{project} in library, reflect, compiler
- when stage is quick, @{project} also includes: actors, parser-combinators, xml, repl, swing, plugins, scalacheck, interactive, scaladoc, partest, scalap
+ when stage is quick, @{project} also includes: actors, parser-combinators, xml, repl, swing, plugins, scalacheck, interactive, scaladoc, scalap
-->
<!-- LOCKER -->
@@ -497,11 +520,14 @@ TODO:
<pathelement location="${build-locker.dir}/classes/reflect"/>
</path>
+ <if><not><isset property="locker.skip"/></not><then>
<path id="locker.compiler.build.path">
<path refid="locker.reflect.build.path"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
<path refid="asm.classpath"/>
</path>
+ </then></if>
+ <!-- else, locker.compiler.build.path is set above -->
<!-- QUICK -->
<path id="quick.library.build.path">
@@ -548,37 +574,36 @@ TODO:
</path>
<path id="quick.plugins.build.path">
- <path refid="quick.compiler.build.path"/>
+ <!-- plugins are run by locker compiler during quick stage,
+ so must compile against the same classes the locker was compiled to
+ -->
+ <path refid="locker.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
</path>
- <path id="quick.scalacheck.build.path">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/actors"/>
- <pathelement location="${build-quick.dir}/classes/parser-combinators"/>
- <pathelement location="${build-quick.dir}/classes/scalacheck"/>
- <path refid="partest.extras.classpath"/>
- </path>
-
<path id="quick.scalap.build.path">
<path refid="quick.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
</path>
- <path id="quick.partest.build.path">
- <path refid="quick.xml.build.path"/>
- <path refid="quick.scalap.build.path"/>
- <path refid="partest.extras.classpath"/>
+ <path id="quick.partest-extras.build.path">
+ <path refid="asm.classpath"/>
+ <path refid="partest.classpath"/>
<pathelement location="${build-quick.dir}/classes/repl"/>
- <pathelement location="${scalacheck.jar}"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
+ <!-- for the java dependency: Profiler.java -->
+ <pathelement location="${build-quick.dir}/classes/partest-extras"/>
+ </path>
+
+ <path id="quick.partest-javaagent.build.path">
+ <path refid="partest.classpath"/>
+ <path refid="asm.classpath"/>
</path>
<path id="quick.scaladoc.build.path">
<path refid="quick.xml.build.path"/>
<path refid="quick.compiler.build.path"/>
<path refid="quick.parser-combinators.build.path"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
+ <path refid="partest.classpath"/>
<pathelement location="${build-quick.dir}/classes/scaladoc"/>
</path>
@@ -642,21 +667,11 @@ TODO:
<path id="pack.swing.files"> <fileset dir="${build-quick.dir}/classes/swing"/> </path>
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
<path id="pack.plugins.files"> <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
- <path id="pack.scalacheck.files"> <fileset dir="${build-quick.dir}/classes/scalacheck"/> </path>
<path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
<fileset file="${src.dir}/scalap/decoder.properties"/> </path>
- <path id="pack.partest.files">
- <fileset dir="${build-quick.dir}/classes/partest">
- <exclude name="scala/tools/partest/javaagent/**"/>
- </fileset>
- </path>
-
- <path id="pack.partest-javaagent.files">
- <fileset dir="${build-quick.dir}/classes/partest">
- <include name="scala/tools/partest/javaagent/**"/>
- </fileset>
- </path>
+ <path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
+ <path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
<!-- STRAP -->
<path id="strap.library.build.path">
@@ -683,11 +698,10 @@ TODO:
<pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+ <!-- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/> -->
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${ant.jar}"/>
- <path refid="partest.extras.classpath"/>
<path refid="aux.libs"/>
</path>
@@ -704,14 +718,52 @@ TODO:
<pathelement location="${build.dir}/manmaker/classes"/>
</path>
- <path id="partest.classpath">
- <path refid="pack.compiler.path"/>
+ <!--
+ This is the classpath used to run partest, which is what it uses to run the compiler and find other required jars.
+ "What's on the compiler's compilation path when compiling partest tests," you ask?
+ Why, the compiler we're testing, of course, and partest with all its dependencies.
+ -->
+ <path id="partest.compilation.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+
+ <!-- to test a quick build without packing, replace the above pathelements with: (may need a little tweaking)
+ <path refid="quick.bin.tool.path">
+ <path refid="quick.interactive.build.path">
+ -->
+
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
- <path refid="partest.extras.classpath"/>
+ <!-- TODO: move scalap out of repo -->
+
+ <!--
+ include partest and its run-time dependencies,
+ but filter out the compiler we just built, as that's what we want to test!
+ TODO: mark partest's compiler dependencies as provided when publishing to maven,
+ so that we don't have to filter them out here...
+ -->
+ <restrict>
+ <path refid="partest.classpath"/>
+ <rsel:not><rsel:or>
+ <rsel:name name="scala-library*.jar"/>
+ <rsel:name name="scala-reflect*.jar"/>
+ <rsel:name name="scala-compiler*.jar"/>
+ <rsel:name name="scala-actors*.jar"/>
+ <rsel:name name="scala-scalap*.jar"/>
+ <!-- <rsel:name name="scala-parser-combinators*.jar"/>
+ <rsel:name name="scala-xml*.jar"/> -->
+ </rsel:or></rsel:not>
+ </restrict>
+
+ <!-- partest classes specific to the core compiler build -->
+ <pathelement location="${build-pack.dir}/lib/scala-partest-extras.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-partest-javaagent.jar"/>
+
+ <!-- sneaky extras used in tests -->
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
</path>
<!-- obsolete? -->
@@ -751,10 +803,6 @@ TODO:
<path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
<!-- TODO: consolidate *.includes -->
- <patternset id="partest.includes">
- <include name="**/*.xml"/>
- </patternset>
-
<patternset id="lib.includes">
<include name="**/*.tmpl"/>
<include name="**/*.xml"/>
@@ -986,7 +1034,7 @@ TODO:
<attribute name="args" default=""/> <!-- additional args -->
<attribute name="includes" default="comp.includes"/>
<attribute name="java-excludes" default=""/>
- <attribute name="version" default=""/> <!-- non-empty for partest and scaladoc: use @{version}.version.number in property file-->
+ <attribute name="version" default=""/> <!-- non-empty for scaladoc: use @{version}.version.number in property file-->
<sequential>
<staged-uptodate stage="@{stage}" project="@{project}">
@@ -1193,16 +1241,10 @@ TODO:
<target name="quick.repl" depends="quick.comp">
<staged-build with="locker" stage="quick" project="repl"/> </target>
- <target name="quick.scalacheck" depends="quick.actors, quick.parser-combinators, quick.lib">
- <staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/> </target>
-
<target name="quick.scalap" depends="quick.repl">
<staged-build with="locker" stage="quick" project="scalap"/> </target>
- <target name="quick.partest" depends="quick.scalap, quick.xml, quick.repl, asm.done">
- <staged-build with="locker" stage="quick" project="partest" version="partest"/> </target>
-
- <target name="quick.scaladoc" depends="quick.comp, quick.partest, quick.parser-combinators">
+ <target name="quick.scaladoc" depends="quick.comp, quick.parser-combinators">
<staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
<target name="quick.interactive" depends="quick.comp, quick.scaladoc">
@@ -1232,13 +1274,7 @@ TODO:
<!-- might split off library part into its own ant target -->
<mkdir dir="${build-quick.dir}/classes/continuations-library"/>
- <!-- TODO: must build with quick to avoid
- [quick.plugins] error: java.lang.NoClassDefFoundError: scala/tools/nsc/transform/patmat/PatternMatching
- [quick.plugins] at scala.tools.selectivecps.SelectiveCPSTransform.newTransformer(SelectiveCPSTransform.scala:29)
-
- WHY OH WHY!? scala/tools/nsc/transform/patmat/PatternMatching should be on locker.compiler.path
- -->
- <staged-scalac with="quick" stage="quick" project="plugins"
+ <staged-scalac with="locker" stage="quick" project="plugins"
srcdir="continuations/library" destproject="continuations-library"
args="-Xplugin-require:continuations -P:continuations:enable -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins"/>
@@ -1247,7 +1283,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalacheck, quick.scalap, quick.interactive, quick.xml, quick.parser-combinators, quick.swing, quick.plugins, quick.partest, quick.scaladoc">
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalap, quick.interactive, quick.xml, quick.parser-combinators, quick.swing, quick.plugins, quick.scaladoc">
<staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
@@ -1293,31 +1329,17 @@ TODO:
</target>
<target name="pack.plugins" depends="quick.plugins"> <staged-pack project="plugins" targetdir="misc/scala-devel/plugins" targetjar="continuations.jar"/> </target>
- <target name="pack.scalacheck" depends="quick.scalacheck"> <staged-pack project="scalacheck" targetjar="scalacheck.jar"/> </target>
-
- <target name="pack.partest" depends="quick.partest">
- <staged-pack project="partest"/>
- <!-- TODO the manifest should influence actuality of this target -->
- <staged-pack project="partest-javaagent" manifest="${src.dir}/partest/scala/tools/partest/javaagent/MANIFEST.MF"/>
- </target>
<target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
- <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.xml, pack.swing, pack.parser-combinators">
+ <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.plugins, pack.reflect, pack.scalap, pack.xml, pack.swing, pack.parser-combinators">
<staged-bin stage="pack"/>
</target>
<!-- depend on quick.done so quick.bin is run when pack.done is -->
<target name="pack.done" depends="quick.done, pack.bin">
<!-- copy dependencies to build/pack/lib, it only takes a second so don't bother with uptodate checks -->
- <copy todir="${build-pack.dir}/lib">
- <resources refid="partest.extras.fileset"/>
- <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
- from="${partest.extras.versions}" to="flatten"/>
- </copy>
-
<taskdef resource="scala/tools/ant/antlib.xml" classpathref="docs.compiler.path"/>
- <taskdef resource="scala/tools/partest/antlib.xml" classpathref="partest.classpath"/>
</target>
@@ -1538,7 +1560,8 @@ TODO:
<target name="test.junit" depends="test.junit.comp">
<stopwatch name="test.junit.timer"/>
<mkdir dir="${test.junit.classes}"/>
- <junit fork="yes" haltonfailure="yes" showoutput="yes" printsummary="on">
+ <echo message="Note: details of failed tests will be output to ${build-junit.dir}"/>
+ <junit fork="yes" haltonfailure="yes" printsummary="on">
<classpath refid="test.junit.compiler.build.path"/>
<batchtest fork="yes" todir="${build-junit.dir}">
<fileset dir="${test.junit.classes}">
@@ -1551,30 +1574,50 @@ TODO:
</target>
<!-- See test/build-partest.xml for the macro(s) being used here. -->
+ <target name="partest.task" depends="init">
+ <!-- note the classpathref! this is the classpath used to run partest,
+ so it must have the new compiler.... -->
+ <taskdef
+ classpathref="partest.compilation.path"
+ resource="scala/tools/partest/antlib.xml"/>
+
+ <!-- compile compiler-specific parts of partest -->
+ <staged-build with="starr" stage="quick" project="partest-extras" />
+ <staged-build with="starr" stage="quick" project="partest-javaagent" />
+ <staged-pack project="partest-extras"/>
+ <staged-pack project="partest-javaagent" manifest="${src.dir}/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF"/>
+ </target>
- <target name="test.suite" depends="pack.done">
- <testSuite/>
+ <target name="test.suite.init" depends="pack.done, partest.task">
+ <!-- read by test/partest to determine classpath used to run partest -->
+ <propertyfile file = "build/pack/partest.properties">
+ <entry key = "partest.classpath" value="${toString:partest.compilation.path}"/>
+ </propertyfile>
</target>
- <target name="test.suite.color" depends="pack.done">
- <testSuite colors="8"/>
+ <target name="test.suite" depends="test.suite.init">
+ <testSuite kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
</target>
- <target name="test.run" depends="pack.done">
+ <target name="test.suite.color" depends="test.suite.init">
+ <testSuite colors="8" kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
+ </target>
+
+ <target name="test.run" depends="test.suite.init">
<testSuite kinds="run jvm"/>
</target>
- <target name="test.continuations.suite" depends="pack.done">
+ <target name="test.continuations.suite" depends="test.suite.init">
<testSuite kinds="continuations-neg continuations-run"
scalacOpts="${scalac.args.optimise} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
/>
</target>
- <target name="test.scaladoc" depends="pack.done">
+ <target name="test.scaladoc" depends="test.suite.init">
<testSuite kinds="run scalacheck" srcdir="scaladoc"/>
</target>
- <target name="test.interactive" depends="pack.done">
+ <target name="test.interactive" depends="test.suite.init">
<testSuite kinds="presentation"/>
</target>
@@ -1755,12 +1798,6 @@ TODO:
</staged-docs>
</target>
- <target name="docs.partest" depends="docs.start">
- <staged-docs project="partest" title="Scala Parallel Testing Framework">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
<target name="docs.continuations-plugin" depends="docs.start">
<staged-docs project="continuations-plugin" dir="continuations/plugin" title="Delimited Continuations Compiler Plugin">
<include name="**/*.scala"/>
@@ -1804,7 +1841,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.xml, docs.parser-combinators, docs.scalap, docs.partest, docs.continuations-plugin"/>
+ <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.xml, docs.parser-combinators, docs.scalap, docs.continuations-plugin"/>
<!-- ===========================================================================
DISTRIBUTION
@@ -1832,19 +1869,11 @@ TODO:
<mkdir dir="${dist.dir}/lib"/>
<copy toDir="${dist.dir}/lib">
<fileset dir="${build-pack.dir}/lib">
- <include name="scalacheck.jar"/>
- <include name="scala-partest.jar"/>
<include name="scalap.jar"/>
</fileset>
</copy>
<copy todir="${dist.dir}/lib">
- <resources refid="partest.extras.fileset"/>
- <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
- from="${partest.extras.versions}" to="flatten"/>
- </copy>
-
- <copy todir="${dist.dir}/lib">
<resources refid="repl.deps.fileset"/>
<mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
from="${repl.deps.versions}" to="flatten"/>
@@ -1919,7 +1948,6 @@ TODO:
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-parser-combinators-src.jar" basedir="${src.dir}/parser-combinators"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar" basedir="${src.dir}/partest"/>
</target>
<target name="dist.partial" depends="dist.base">
diff --git a/dbuild-meta.json b/dbuild-meta.json
index 705eeeb6b6..3987afa395 100644
--- a/dbuild-meta.json
+++ b/dbuild-meta.json
@@ -126,67 +126,6 @@
"artifacts": [
{
"extension": "jar",
- "name": "scalacheck",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-actors",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-parser-combinators",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scalacheck",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
- "name": "scala-partest",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-compiler",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scalap",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-xml",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scalacheck",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scala-partest",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
"name": "scaladoc",
"organization": "org.scala-lang"
}
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index 5b5b4a946d..67c8e5cd10 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -9,7 +9,6 @@
package scala.actors
-import java.util.concurrent._
import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolScheduler, ThreadPoolConfig}
/**
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index ad78ff784c..75e36b2738 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -67,7 +67,7 @@ object TcpService {
timeout =>
try {
val to = timeout.toInt
- Debug.info("Using socket timeout $to")
+ Debug.info(s"Using socket timeout $to")
Some(to)
} catch {
case e: NumberFormatException =>
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
index 42214dd191..73eee8ac3a 100644
--- a/src/build/dbuild-meta-json-gen.scala
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -1,6 +1,6 @@
// use this script to generate dbuild-meta.json
// make sure the version is specified correctly,
-// update the dependency structura and
+// update the dependency structure and
// check out distributed-build and run `sbt console`:
// TODO: also generate build.xml and eclipse config from a similar data-structure
@@ -40,15 +40,6 @@ val meta =
Seq(ProjectRef("scala-parser-combinators", "org.scala-lang")),
Seq(ProjectRef("scala-library", "org.scala-lang"))),
- Project("scalacheck", "org.scala-lang",
- Seq(ProjectRef("scalacheck", "org.scala-lang")),
- Seq(ProjectRef("scala-library", "org.scala-lang"), ProjectRef("scala-actors", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
-
- Project("scala-partest", "org.scala-lang",
- Seq(ProjectRef("scala-partest", "org.scala-lang")),
- Seq(ProjectRef("scala-compiler", "org.scala-lang"), // TODO: refine to scala-repl
- ProjectRef("scalap", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scalacheck", "org.scala-lang"))),
-
Project("scaladoc", "org.scala-lang",
Seq(ProjectRef("scaladoc", "org.scala-lang")),
Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 84a12066f5..f52a7888ce 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -110,7 +110,6 @@
<deploy-one name="scala-library" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-xml" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-parser-combinators" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-partest" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-reflect" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-swing" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scalap" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
deleted file mode 100644
index ac05f242d5..0000000000
--- a/src/build/maven/scala-partest-pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-partest</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Parallel Test Framework</name>
- <description>testing framework for the Scala compiler.</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 6b6579ce12..fa030300ac 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -158,7 +158,6 @@ MAIN DISTRIBUTION PACKAGING
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
<mvn-copy-lib mvn.artifact.name="scala-actors"/>
- <mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -210,10 +209,6 @@ MAIN DISTRIBUTION PACKAGING
basedir="${build-docs.dir}/scalap">
<include name="**/*"/>
</jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
- basedir="${build-docs.dir}/partest">
- <include name="**/*"/>
- </jar>
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
index 32c6da8007..2e82e34bd9 100644
--- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -2,7 +2,6 @@ package scala.reflect.macros
package compiler
import scala.tools.nsc.Global
-import scala.reflect.macros.contexts.Context
abstract class DefaultMacroCompiler extends Resolvers
with Validators
@@ -11,7 +10,6 @@ abstract class DefaultMacroCompiler extends Resolvers
import global._
val typer: global.analyzer.Typer
- private implicit val context0 = typer.context
val context = typer.context
val macroDdef: DefDef
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
index 60cfc94a23..8d396a56d8 100644
--- a/src/compiler/scala/reflect/macros/compiler/Validators.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -11,8 +11,6 @@ trait Validators {
import global._
import analyzer._
import definitions._
- import treeInfo._
- import typer.infer._
def validateMacroImplRef() = {
sanityCheck()
@@ -59,7 +57,7 @@ trait Validators {
checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
- val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
+ val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, maxLubDepth)
val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
boundsOk match {
case SilentResultValue(true) => // do nothing, success
@@ -83,7 +81,11 @@ trait Validators {
// Technically this can be just an alias to MethodType, but promoting it to a first-class entity
// provides better encapsulation and convenient syntax for pattern matching.
- private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type)
+ private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type) {
+ private def tparams_s = if (tparams.isEmpty) "" else tparams.map(_.defString).mkString("[", ", ", "]")
+ private def paramss_s = paramss map (ps => ps.map(s => s"${s.name}: ${s.tpe_*}").mkString("(", ", ", ")")) mkString ""
+ override def toString = "MacroImplSig(" + tparams_s + paramss_s + ret + ")"
+ }
/** An actual macro implementation signature extracted from a macro implementation method.
*
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala
index bd1d7d5248..1355a839d9 100644
--- a/src/compiler/scala/reflect/macros/contexts/Context.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Context.scala
@@ -14,7 +14,6 @@ abstract class Context extends scala.reflect.macros.Context
with Parsers
with Evals
with ExprUtils
- with Synthetics
with Traces {
val universe: Global
diff --git a/src/compiler/scala/reflect/macros/contexts/Synthetics.scala b/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
deleted file mode 100644
index ada16a8113..0000000000
--- a/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.reflect.macros
-package contexts
-
-import scala.reflect.internal.Flags._
-import scala.reflect.internal.util.BatchSourceFile
-import scala.reflect.io.VirtualFile
-
-trait Synthetics {
- self: Context =>
-
- import global._
- import mirror.wrapMissing
-
- // getClassIfDefined and getModuleIfDefined cannot be used here
- // because they don't work for stuff declared in the empty package
- // (as specified in SLS, code inside non-empty packages cannot see
- // declarations from the empty package, so compiler internals
- // default to ignoring contents of the empty package)
- // to the contrast, staticModule and staticClass are designed
- // to be a part of the reflection API and, therefore, they
- // correctly resolve all names
- private def topLevelSymbol(name: Name): Symbol = wrapMissing {
- if (name.isTermName) mirror.staticModule(name.toString)
- else mirror.staticClass(name.toString)
- }
-
- def topLevelDef(name: Name): Tree =
- enclosingRun.units.toList.map(_.body).flatMap {
- // it's okay to check `stat.symbol` here, because currently macros expand strictly after namer
- // which means that by the earliest time one can call this method all top-level definitions will have already been entered
- case PackageDef(_, stats) => stats filter (stat => stat.symbol != NoSymbol && stat.symbol == topLevelSymbol(name))
- case _ => Nil // should never happen, but better be safe than sorry
- }.headOption getOrElse EmptyTree
-
- def topLevelRef(name: Name): Tree = {
- if (topLevelDef(name).nonEmpty) gen.mkUnattributedRef(name)
- else EmptyTree
- }
-
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: universe.ImplDef): RefTree =
- introduceTopLevel(packagePrototype, List(definition)).head
-
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: universe.ImplDef*): List[RefTree] =
- introduceTopLevel(packagePrototype, definitions.toList)
-
- private def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: List[universe.ImplDef]): List[RefTree] = {
- val code @ PackageDef(pid, _) = implicitly[PackageSpec[T]].mkPackageDef(packagePrototype, definitions)
- universe.currentRun.compileLate(code)
- definitions map (definition => Select(pid, definition.name))
- }
-
- protected def mkPackageDef(name: String, stats: List[Tree]) = gen.mkPackageDef(name, stats)
-
- protected def mkPackageDef(name: TermName, stats: List[Tree]) = gen.mkPackageDef(name.toString, stats)
-
- protected def mkPackageDef(tree: RefTree, stats: List[Tree]) = PackageDef(tree, stats)
-
- protected def mkPackageDef(sym: Symbol, stats: List[Tree]) = {
- assert(sym hasFlag PACKAGE, s"expected a package or package class symbol, found: $sym")
- gen.mkPackageDef(sym.fullName.toString, stats)
- }
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
index 3ef11fad9d..450cb4d9ea 100644
--- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
@@ -10,8 +10,6 @@ trait JavaReflectionRuntimes {
trait JavaReflectionResolvers {
self: MacroRuntimeResolver =>
- import global._
-
def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
val implClass = Class.forName(className, true, classLoader)
val implMeths = implClass.getDeclaredMethods.find(_.getName == methName)
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
index 0f89163803..ffdbe11151 100644
--- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -1,11 +1,8 @@
package scala.reflect.macros
package runtime
-import scala.collection.mutable.{Map => MutableMap}
import scala.reflect.internal.Flags._
import scala.reflect.runtime.ReflectionUtils
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.util.AbstractFileClassLoader
trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes {
self: scala.tools.nsc.typechecker.Analyzer =>
diff --git a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
index 1999e525ff..50f64310f8 100644
--- a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
@@ -9,8 +9,6 @@ trait ScalaReflectionRuntimes {
trait ScalaReflectionResolvers {
self: MacroRuntimeResolver =>
- import global._
-
def resolveScalaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
val macroMirror: ru.JavaMirror = ru.runtimeMirror(classLoader)
val implContainerSym = macroMirror.classSymbol(Class.forName(className, true, classLoader))
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index 9b7680717e..f12582a3a1 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -23,7 +23,7 @@ trait Helpers {
* or to streamline creation of the list of macro arguments.
*/
def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
- val treeInfo.MacroImplReference(isBundle, owner, macroImpl, _) = macroImplRef
+ val treeInfo.MacroImplReference(isBundle, _, macroImpl, _) = macroImplRef
val paramss = macroImpl.paramss
if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
val rc =
@@ -44,11 +44,6 @@ trait Helpers {
if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
}
- private def dealiasAndRewrap(tp: Type)(fn: Type => Type): Type = {
- if (isRepeatedParamType(tp)) scalaRepeatedType(fn(tp.typeArgs.head.dealias))
- else fn(tp.dealias)
- }
-
/** Increases metalevel of the type, i.e. transforms:
* * T to c.Expr[T]
*
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 7578def687..7610df67dc 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -8,6 +8,7 @@ trait Reshape {
import global._
import definitions._
+ import treeInfo.Unapplied
/**
* Rolls back certain changes that were introduced during typechecking of the reifee.
@@ -65,22 +66,9 @@ trait Reshape {
case block @ Block(stats, expr) =>
val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
Block(stats1, expr).copyAttrs(block)
- case unapply @ UnApply(fun, args) =>
- def extractExtractor(tree: Tree): Tree = {
- val Apply(fun, args) = tree
- args match {
- case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
- val Select(extractor, flavor) = fun
- assert(flavor == nme.unapply || flavor == nme.unapplySeq)
- extractor
- case _ =>
- extractExtractor(fun)
- }
- }
-
+ case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
if (reifyDebug) println("unapplying unapply: " + tree)
- val fun1 = extractExtractor(fun)
- Apply(fun1, args).copyAttrs(unapply)
+ Apply(fun, args).copyAttrs(unapply)
case _ =>
tree
}
@@ -256,7 +244,7 @@ trait Reshape {
val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
- ValDef(mods2, name1.toTermName, tpt0, extractRhs(rhs0))
+ ValDef(mods2, name1, tpt0, extractRhs(rhs0))
}
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index 4cbc03d8d4..a86af73fe3 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -93,7 +93,7 @@ class Settings {
case _ => false
}
- override lazy val hashCode: Int = Seq(
+ override lazy val hashCode: Int = Seq[Any](
gBf,
uncheckedBf,
classpathBf,
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index a3a95ffd37..1288eb0b7c 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -22,13 +22,74 @@ if "%~1"=="-toolcp" (
goto another_param
)
-set _LINE_PARAMS=%1
+rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments
+set _JAVA_PARAMS=
+
+if [%1]==[] goto param_afterloop
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Drop="42"
+ set _JAVA_PARAMS=%_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
+
:param_loop
shift
+
if [%1]==[] goto param_afterloop
-set _LINE_PARAMS=%_LINE_PARAMS% %1
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Drop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
goto param_loop
:param_afterloop
+
if "%OS%" NEQ "Windows_NT" (
echo "Warning, your version of Windows is not supported. Attempting to start scala anyway."
)
@@ -51,6 +112,9 @@ rem We use the value of the JAVA_OPTS environment variable if defined
set _JAVA_OPTS=%JAVA_OPTS%
if not defined _JAVA_OPTS set _JAVA_OPTS=@javaflags@
+rem We append _JAVA_PARAMS java arguments to JAVA_OPTS if necessary
+if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
+
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index e44752eb6e..781cc564cb 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -24,13 +24,13 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
val Terminator = "--"
val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
- def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+ def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption)
def errorFn(msg: String) = println(msg)
/** argMap is option -> argument (or "" if it is a unary argument)
* residualArgs are what is left after removing the options and their args.
*/
- lazy val (argMap, residualArgs) = {
+ lazy val (argMap, residualArgs): (Map[String, String], List[String]) = {
val residualBuffer = new ListBuffer[String]
def loop(args: List[String]): Map[String, String] = {
@@ -72,7 +72,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
if (x2 == Terminator) mapForUnary(x1) ++ residual(xs)
else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail)
- else if (isBinaryOption(x1)) Map(x1 -> x2) ++ loop(xs)
+ else if (isBinaryOption(x1)) Map(fromOpt(x1) -> x2) ++ loop(xs)
else if (isUnknown(x1)) loop(args.tail)
else residual(List(x1)) ++ loop(args.tail)
}
diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala
index ef55178594..6132eff557 100644
--- a/src/compiler/scala/tools/cmd/CommandLineParser.scala
+++ b/src/compiler/scala/tools/cmd/CommandLineParser.scala
@@ -40,16 +40,16 @@ object CommandLineParser {
// parse `in` for an argument, return it and the remainder of the input (or an error message)
// (argument may be in single/double quotes, taking escaping into account, quotes are stripped)
private def argument(in: String): Either[String, (String, String)] = in match {
- case DoubleQuoted(arg, rest) => Right(arg, rest)
- case SingleQuoted(arg, rest) => Right(arg, rest)
- case Word(arg, rest) => Right(arg, rest)
- case _ => Left("Illegal argument: "+ in)
+ case DoubleQuoted(arg, rest) => Right((arg, rest))
+ case SingleQuoted(arg, rest) => Right((arg, rest))
+ case Word(arg, rest) => Right((arg, rest))
+ case _ => Left(s"Illegal argument: $in")
}
// parse a list of whitespace-separated arguments (ignoring whitespace in quoted arguments)
@tailrec private def commandLine(in: String, accum: List[String] = Nil): Either[String, (List[String], String)] = {
val trimmed = in.trim
- if (trimmed.isEmpty) Right(accum.reverse, "")
+ if (trimmed.isEmpty) Right((accum.reverse, ""))
else argument(trimmed) match {
case Right((arg, next)) =>
(next span Character.isWhitespace) match {
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
index 2c193128f1..df3d0c4462 100644
--- a/src/compiler/scala/tools/cmd/Opt.scala
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -26,10 +26,10 @@ object Opt {
trait Implicit {
def name: String
def programInfo: Info
- protected def opt = toOpt(name)
+ protected def opt = fromOpt(name)
def --? : Boolean // --opt is set
- def --> (body: => Unit): Unit // if --opt is set, execute body
+ def --> (body: => Unit): Boolean // if --opt is set, execute body
def --| : Option[String] // --opt <arg: String> is optional, result is Option[String]
def --^[T: FromString] : Option[T] // --opt <arg: T> is optional, result is Option[T]
@@ -51,7 +51,7 @@ object Opt {
import options._
def --? = { addUnary(opt) ; false }
- def --> (body: => Unit) = { addUnary(opt) }
+ def --> (body: => Unit) = { addUnary(opt) ; false }
def --| = { addBinary(opt) ; None }
def --^[T: FromString] = { addBinary(opt) ; None }
@@ -65,7 +65,7 @@ object Opt {
class Instance(val programInfo: Info, val parsed: CommandLine, val name: String) extends Implicit with Error {
def --? = parsed isSet opt
- def --> (body: => Unit) = if (parsed isSet opt) body
+ def --> (body: => Unit) = { val isSet = parsed isSet opt ; if (isSet) body ; isSet }
def --| = parsed get opt
def --^[T: FromString] = {
val fs = implicitly[FromString[T]]
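
A minimal sketch of the revised --> contract in Opt.scala: the body still runs only when the option was given, but the result now reports whether it was set. The classes below are hypothetical stand-ins, not the compiler's cmd framework.

object OptArrowSketch {
  // hypothetical stand-in for the parsed command line
  final class Parsed(set: Set[String]) {
    def isSet(opt: String): Boolean = set(opt)
  }

  // hypothetical stand-in for Opt.Instance, reduced to the --> method
  final class Flag(parsed: Parsed, opt: String) {
    def -->(body: => Unit): Boolean = {
      val isSet = parsed isSet opt
      if (isSet) body
      isSet
    }
  }

  def main(args: Array[String]): Unit = {
    val parsed = new Parsed(Set("verbose"))
    val ran    = new Flag(parsed, "verbose") --> println("verbose mode enabled")
    println(ran)                                                        // true: body ran
    println(new Flag(parsed, "quiet") --> sys.error("never evaluated")) // false: body skipped
  }
}
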
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index ec2a414065..62b6c893cf 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -23,13 +23,13 @@ trait Reference extends Spec {
def helpMsg = options.helpMsg
def propertyArgs: List[String] = Nil
- def isUnaryOption(s: String) = unary contains toOpt(s)
- def isBinaryOption(s: String) = binary contains toOpt(s)
- def isExpandOption(s: String) = expansionMap contains toOpt(s)
+ def isUnaryOption(s: String) = unary contains fromOpt(s)
+ def isBinaryOption(s: String) = binary contains fromOpt(s)
+ def isExpandOption(s: String) = expansionMap contains fromOpt(s)
- def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
+ def expandArg(arg: String): List[String] = expansionMap.getOrElse(fromOpt(arg), List(arg))
- protected def help(str: => String) = addHelp(() => str)
+ protected def help(str: => String): Unit = addHelp(() => str)
type ThisCommandLine <: CommandLine
@@ -53,20 +53,20 @@ object Reference {
def helpFormatStr = " %-" + longestArg + "s %s"
def defaultFormatStr = (" " * (longestArg + 7)) + "%s"
- def addUnary(s: String) = _unary +:= s
- def addBinary(s: String) = _binary +:= s
+ def addUnary(s: String): Unit = _unary +:= s
+ def addBinary(s: String): Unit = _binary +:= s
def addExpand(opt: String, expanded: List[String]) =
_expand += (opt -> expanded)
- def mapHelp(g: String => String) = {
+ def mapHelp(g: String => String): Unit = {
val idx = _help.length - 1
val f = _help(idx)
_help(idx) = () => g(f())
}
- def addHelp(f: () => String) = _help += f
+ def addHelp(f: () => String): Unit = _help += f
def addHelpAlias(f: () => String) = mapHelp { s =>
val str = "alias for '%s'" format f()
def noHelp = (helpFormatStr.format("", "")).length == s.length
@@ -74,13 +74,13 @@ object Reference {
s + str2
}
- def addHelpDefault(f: () => String) = mapHelp { s =>
+ def addHelpDefault(f: () => String): Unit = mapHelp { s =>
val str = "(default: %s)" format f()
if (s.length + str.length < MaxLine) s + " " + str
else defaultFormatStr.format(s, str)
}
- def addHelpEnvDefault(name: String) = mapHelp { s =>
+ def addHelpEnvDefault(name: String): Unit = mapHelp { s =>
val line1 = "%s (default: %s)".format(s, name)
val envNow = envOrNone(name) map ("'" + _ + "'") getOrElse "unset"
val line2 = defaultFormatStr.format("Currently " + envNow)
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 7e01afac2b..842851b4f6 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -6,24 +6,23 @@
package scala.tools.cmd
package gen
-/** Code generation of the AnyVal types and their companions.
- */
+/** Code generation of the AnyVal types and their companions. */
trait AnyValReps {
self: AnyVals =>
- sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
+ sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String)
+ extends AnyValRep(name,repr,javaEquiv) {
case class Op(op : String, doc : String)
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
- """implicit def @javaequiv@2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
+ s"implicit def @javaequiv@2${to.javaEquiv}(x: @name@): ${to.name} = x.to${to.name}"
)
}
- def coercionCommentExtra = ""
- def coercionComment = """
-/** Language mandated coercions from @name@ to "wider" types.%s
- */""".format(coercionCommentExtra)
+ def coercionComment =
+"""/** Language mandated coercions from @name@ to "wider" types. */
+import scala.language.implicitConversions"""
def implicitCoercions: List[String] = {
val coercions = this match {
@@ -41,12 +40,8 @@ trait AnyValReps {
def isCardinal: Boolean = isIntegerType(this)
def unaryOps = {
val ops = List(
- Op("+", "/**\n" +
- " * Returns this value, unmodified.\n" +
- " */"),
- Op("-", "/**\n" +
- " * Returns the negation of this value.\n" +
- " */"))
+ Op("+", "/** Returns this value, unmodified. */"),
+ Op("-", "/** Returns the negation of this value. */"))
if(isCardinal)
Op("~", "/**\n" +
@@ -95,7 +90,7 @@ trait AnyValReps {
" */"))
else Nil
- def shiftOps =
+ def shiftOps =
if (isCardinal)
List(
Op("<<", "/**\n" +
@@ -127,20 +122,20 @@ trait AnyValReps {
" */"))
else Nil
- def comparisonOps = List(
- Op("==", "/**\n * Returns `true` if this value is equal to x, `false` otherwise.\n */"),
- Op("!=", "/**\n * Returns `true` if this value is not equal to x, `false` otherwise.\n */"),
- Op("<", "/**\n * Returns `true` if this value is less than x, `false` otherwise.\n */"),
- Op("<=", "/**\n * Returns `true` if this value is less than or equal to x, `false` otherwise.\n */"),
- Op(">", "/**\n * Returns `true` if this value is greater than x, `false` otherwise.\n */"),
- Op(">=", "/**\n * Returns `true` if this value is greater than or equal to x, `false` otherwise.\n */"))
+ def comparisonOps = List(
+ Op("==", "/** Returns `true` if this value is equal to x, `false` otherwise. */"),
+ Op("!=", "/** Returns `true` if this value is not equal to x, `false` otherwise. */"),
+ Op("<", "/** Returns `true` if this value is less than x, `false` otherwise. */"),
+ Op("<=", "/** Returns `true` if this value is less than or equal to x, `false` otherwise. */"),
+ Op(">", "/** Returns `true` if this value is greater than x, `false` otherwise. */"),
+ Op(">=", "/** Returns `true` if this value is greater than or equal to x, `false` otherwise. */"))
def otherOps = List(
- Op("+", "/**\n * Returns the sum of this value and `x`.\n */"),
- Op("-", "/**\n * Returns the difference of this value and `x`.\n */"),
- Op("*", "/**\n * Returns the product of this value and `x`.\n */"),
- Op("/", "/**\n * Returns the quotient of this value and `x`.\n */"),
- Op("%", "/**\n * Returns the remainder of the division of this value by `x`.\n */"))
+ Op("+", "/** Returns the sum of this value and `x`. */"),
+ Op("-", "/** Returns the difference of this value and `x`. */"),
+ Op("*", "/** Returns the product of this value and `x`. */"),
+ Op("/", "/** Returns the quotient of this value and `x`. */"),
+ Op("%", "/** Returns the remainder of the division of this value by `x`. */"))
// Given two numeric value types S and T , the operation type of S and T is defined as follows:
// If both S and T are subrange types then the operation type of S and T is Int.
@@ -278,8 +273,7 @@ trait AnyValReps {
}
trait AnyValTemplates {
- def headerTemplate = ("""
-/* __ *\
+ def headerTemplate = """/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
@@ -287,12 +281,13 @@ trait AnyValTemplates {
** |/ **
\* */
-%s
-package scala
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
-import scala.language.implicitConversions
+package scala
-""".trim.format(timestampString) + "\n\n")
+"""
def classDocTemplate = ("""
/** `@name@`@representation@ (equivalent to Java's `@javaequiv@` primitive type) is a
@@ -304,8 +299,6 @@ import scala.language.implicitConversions
*/
""".trim + "\n")
- def timestampString = "// DO NOT EDIT, CHANGES WILL BE LOST.\n"
-
def allCompanions = """
/** Transform a value type into a boxed reference type.
*@boxRunTimeDoc@
@@ -324,20 +317,17 @@ def box(x: @name@): @boxed@ = @boxImpl@
*/
def unbox(x: java.lang.Object): @name@ = @unboxImpl@
-/** The String representation of the scala.@name@ companion object.
- */
+/** The String representation of the scala.@name@ companion object. */
override def toString = "object scala.@name@"
"""
def nonUnitCompanions = "" // todo
def cardinalCompanion = """
-/** The smallest value representable as a @name@.
- */
+/** The smallest value representable as a @name@. */
final val MinValue = @boxed@.MIN_VALUE
-/** The largest value representable as a @name@.
- */
+/** The largest value representable as a @name@. */
final val MaxValue = @boxed@.MAX_VALUE
"""
@@ -372,18 +362,16 @@ class AnyVals extends AnyValReps with AnyValTemplates {
object D extends AnyValNum("Double", Some("64-bit IEEE-754 floating point number"), "double")
object Z extends AnyValRep("Boolean", None, "boolean") {
def classLines = """
-/**
- * Negates a Boolean expression.
- *
- * - `!a` results in `false` if and only if `a` evaluates to `true` and
- * - `!a` results in `true` if and only if `a` evaluates to `false`.
- *
- * @return the negated expression
- */
+/** Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
def unary_! : Boolean
-/**
- * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
*
* `a == b` returns `true` if and only if
* - `a` and `b` are `true` or
@@ -400,8 +388,7 @@ def ==(x: Boolean): Boolean
*/
def !=(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a || b` returns `true` if and only if
* - `a` is `true` or
@@ -414,8 +401,7 @@ def !=(x: Boolean): Boolean
*/
def ||(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a && b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -430,8 +416,7 @@ def &&(x: Boolean): Boolean
// def ||(x: => Boolean): Boolean
// def &&(x: => Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a | b` returns `true` if and only if
* - `a` is `true` or
@@ -442,8 +427,7 @@ def &&(x: Boolean): Boolean
*/
def |(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a & b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -452,8 +436,7 @@ def |(x: Boolean): Boolean
*/
def &(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
*
* `a ^ b` returns `true` if and only if
* - `a` is `true` and `b` is `false` or
@@ -499,5 +482,3 @@ override def getClass(): Class[Boolean] = null
def make() = values map (x => (x.name, x.make()))
}
-
-object AnyVals extends AnyVals { }
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index b49322ab4a..c3aa527ef2 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -6,11 +6,9 @@
package scala.tools.cmd
package gen
-import scala.language.postfixOps
-
class Codegen(args: List[String]) extends {
val parsed = CodegenSpec(args: _*)
-} with CodegenSpec with Instance { }
+} with CodegenSpec with Instance
object Codegen {
def echo(msg: String) = Console println msg
@@ -31,7 +29,7 @@ object Codegen {
val av = new AnyVals { }
av.make() foreach { case (name, code ) =>
- val file = out / (name + ".scala") toFile;
+ val file = (out / (name + ".scala")).toFile
echo("Writing: " + file)
file writeAll code
}
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index 7d67fa738b..9754becf10 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -13,19 +13,19 @@ package object cmd {
implicit def implicitConversions = scala.language.implicitConversions
implicit def postfixOps = scala.language.postfixOps
- private[cmd] def debug(msg: String) = println(msg)
+ private[cmd] def debug(msg: String): Unit = println(msg)
def runAndExit(body: => Unit): Nothing = {
body
sys.exit(0)
}
- def toOpt(s: String) = if (s startsWith "--") s else "--" + s
- def fromOpt(s: String) = s stripPrefix "--"
- def toArgs(line: String) = CommandLineParser tokenize line
- def fromArgs(args: List[String]) = args mkString " "
+ def toOpt(s: String): String = if (s startsWith "--") s else "--" + s
+ def fromOpt(s: String): String = s stripPrefix "--"
+ def toArgs(line: String): List[String] = CommandLineParser tokenize line
+ def fromArgs(args: List[String]): String = args mkString " "
- def stripQuotes(s: String) = {
+ def stripQuotes(s: String): String = {
def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
if (List('"', '\'') exists isQuotedBy) s.tail.init else s
}
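
With toOpt/fromOpt above and the CommandLine and Reference changes earlier in the patch, option keys are normalized to their dash-free form before being stored. A standalone sketch, using a hypothetical argMap, of why lookups then succeed regardless of how the name was written:

object OptKeySketch {
  // the two helpers from the cmd package object above
  def toOpt(s: String): String   = if (s startsWith "--") s else "--" + s
  def fromOpt(s: String): String = s stripPrefix "--"

  def main(args: Array[String]): Unit = {
    // hypothetical parsed options, stored under normalized (dash-free) keys
    val argMap = Map(fromOpt("--grep") -> "interpolation", fromOpt("verbose") -> "true")
    println(argMap get fromOpt("--grep")) // Some(interpolation)
    println(argMap get fromOpt("grep"))   // Some(interpolation)
    println(toOpt("grep"))                // --grep, the form shown back to the user
  }
}
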
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index b52e6fdf57..f7437e4e6c 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -57,8 +57,8 @@ trait CompilationUnits { self: Global =>
// SBT compatibility (SI-6875)
//
// imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
- // Main contains a call to a macro, which calls c.introduceTopLevel to define a mock for Foo
- // c.introduceTopLevel creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
+ // Main contains a call to a macro, which calls compileLate to define a mock for Foo
+ // compileLate creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
// and macro expansion instantiates FooMock. the stage is now set. let's see what happens next.
//
// without this workaround in scalac or without being patched itself, sbt will think that
@@ -91,7 +91,7 @@ trait CompilationUnits { self: Global =>
debuglog(s"removing synthetic $sym from $self")
map -= sym
}
- def get(sym: Symbol): Option[Tree] = logResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
+ def get(sym: Symbol): Option[Tree] = debuglogResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
map get sym
}
def keys: Iterable[Symbol] = map.keys
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 20cb1dab5b..3f2d759a6d 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -51,7 +51,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
- def rootLoader: LazyType = platform.rootLoader
+ def rootLoader: LazyType = new loaders.PackageLoader(classPath)
override def toString = "compiler mirror"
}
@@ -83,12 +83,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// platform specific elements
- type ThisPlatform = Platform { val global: Global.this.type }
+ protected class GlobalPlatform extends {
+ val global: Global.this.type = Global.this
+ val settings: Settings = Global.this.settings
+ } with JavaPlatform
- lazy val platform: ThisPlatform =
- new { val global: Global.this.type = Global.this } with JavaPlatform
+ type ThisPlatform = JavaPlatform { val global: Global.this.type }
+ lazy val platform: ThisPlatform = new GlobalPlatform
- type PlatformClassPath = ClassPath[platform.BinaryRepr]
+ type PlatformClassPath = ClassPath[AbstractFile]
type OptClassPath = Option[PlatformClassPath]
def classPath: PlatformClassPath = platform.classPath
@@ -217,12 +220,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
- override def inform(msg: String) = reporter.echo(msg)
- override def globalError(msg: String) = reporter.error(NoPosition, msg)
- override def warning(msg: String) =
- if (settings.fatalWarnings) globalError(msg)
- else reporter.warning(NoPosition, msg)
+ def error(msg: String) = globalError(msg)
+
+ override def inform(msg: String) = inform(NoPosition, msg)
+ override def globalError(msg: String) = globalError(NoPosition, msg)
+ override def warning(msg: String) = warning(NoPosition, msg)
+
+ def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
+ def warning(pos: Position, msg: String) = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)
+ def inform(pos: Position, msg: String) = reporter.echo(pos, msg)
// Getting in front of Predef's asserts to supplement with more info.
// This has the happy side effect of masking the one argument forms
@@ -255,25 +261,25 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.debug)
body
}
+
+ override protected def isDeveloper = settings.developer || super.isDeveloper
+
/** This is for WARNINGS which should reach the ears of scala developers
* whenever they occur, but are not useful for normal users. They should
* be precise, explanatory, and infrequent. Please don't use this as a
* logging mechanism. !!! is prefixed to all messages issued via this route
* to make them visually distinct.
*/
- @inline final override def devWarning(msg: => String) {
- if (settings.developer || settings.debug)
- warning("!!! " + msg)
+ @inline final override def devWarning(msg: => String): Unit = devWarning(NoPosition, msg)
+ @inline final def devWarning(pos: Position, msg: => String) {
+ def pos_s = if (pos eq NoPosition) "" else s" [@ $pos]"
+ if (isDeveloper)
+ warning(pos, "!!! " + msg)
else
- log("!!! " + msg) // such warnings always at least logged
+ log(s"!!!$pos_s $msg") // such warnings always at least logged
}
- private def elapsedMessage(msg: String, start: Long) =
- msg + " in " + (currentTime - start) + "ms"
-
def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
- def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]")
- def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
@@ -357,9 +363,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
getSourceFile(f)
}
- lazy val loaders = new SymbolLoaders {
+ lazy val loaders = new {
val global: Global.this.type = Global.this
- }
+ val platform: Global.this.platform.type = Global.this.platform
+ } with GlobalSymbolLoaders
/** Returns the mirror that loaded given symbol */
def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
@@ -914,7 +921,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
- val getName: ClassPath[platform.BinaryRepr] => String = (_.name)
+ val getName: ClassPath[AbstractFile] => String = (_.name)
def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
def invalidateOrRemove(root: ClassSymbol) = {
allEntries match {
@@ -1108,7 +1115,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
"symbol owners" -> ownerChainString(sym),
"call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
)
- ("\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
+ ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
}
catch { case _: Exception | _: TypeError => errorMessage }
}
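
The Global.scala hunk above reroutes inform, globalError and warning through position-taking overloads, with -Xfatal-warnings upgrading warnings to errors at a single point. A simplified, self-contained sketch of that shape; the Position alias and reporter stub are placeholders, not the real nsc APIs.

object ReportingSketch {
  type Position = Option[Int]               // toy stand-in for compiler positions
  val NoPosition: Position = None

  var fatalWarnings = false                 // models settings.fatalWarnings

  // stub reporter, not the real scala.tools.nsc.reporters.Reporter
  object reporter {
    def error(pos: Position, msg: String): Unit   = println(s"error: $msg")
    def warning(pos: Position, msg: String): Unit = println(s"warning: $msg")
  }

  // no-position overloads delegate to the position-taking ones
  def globalError(msg: String): Unit = globalError(NoPosition, msg)
  def warning(msg: String): Unit     = warning(NoPosition, msg)

  def globalError(pos: Position, msg: String): Unit = reporter.error(pos, msg)
  def warning(pos: Position, msg: String): Unit =
    if (fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)

  def main(args: Array[String]): Unit = {
    warning("unused import")  // warning: unused import
    fatalWarnings = true
    warning("unused import")  // error: unused import, upgraded by fatal warnings
  }
}
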
diff --git a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala
new file mode 100644
index 0000000000..6921548230
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools
+package nsc
+
+/**
+ * Symbol loaders implementation that wires dependencies using Global.
+ */
+abstract class GlobalSymbolLoaders extends symtab.SymbolLoaders {
+ val global: Global
+ val symbolTable: global.type = global
+ val platform: symbolTable.platform.type
+ import global._
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = {
+ def lookup = sym.info.member(name)
+ // if loading during initialization of `definitions` typerPhase is not yet set.
+ // in that case we simply load the member at the current phase
+ if (currentRun.typerPhase eq null)
+ lookup
+ else
+ enteringTyper { lookup }
+ }
+
+ protected def compileLate(srcfile: io.AbstractFile): Unit =
+ currentRun.compileLate(srcfile)
+}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index b17de9b9d5..d7a32c3be0 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -83,6 +83,7 @@ trait TreeDSL {
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
+ def INT_- (other: Tree) = fn(target, getMember(IntClass, nme.MINUS), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
@@ -187,7 +188,7 @@ trait TreeDSL {
def vparamss: List[List[ValDef]]
type ResultTreeType = DefDef
- def mkTree(rhs: Tree): DefDef = DefDef(mods, name, tparams, vparamss, tpt, rhs)
+ def mkTree(rhs: Tree): DefDef = DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
}
class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 641ab9c279..381ffb1ed9 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -66,10 +66,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
*/
def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
- assert(
- mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
- ((mmap(vparamss)(_.symbol), sym))
- )
+ assert(mforall(vparamss)(_.symbol.owner == sym), (mmap(vparamss)(_.symbol), sym))
ClassDef(sym,
gen.mkTemplate(sym.info.parents map TypeTree,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index eb924a811b..e5101a27a8 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -334,22 +334,27 @@ self =>
def parseStartRule: () => Tree
- /** This is the general parse entry point.
- */
- def parse(): Tree = {
- val t = parseStartRule()
+ def parseRule[T](rule: this.type => T): T = {
+ val t = rule(this)
accept(EOF)
t
}
+ /** This is the general parse entry point.
+ */
+ def parse(): Tree = parseRule(_.parseStartRule())
+
+ /** This is alternative entry point for repl, script runner, toolbox and quasiquotes.
+ */
+ def parseStats(): List[Tree] = parseRule(_.templateStats())
+
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
* swaddled in Trees until the AST is equivalent to the one returned
* by compilationUnit().
*/
def scriptBody(): Tree = {
- val stmts = templateStats()
- accept(EOF)
+ val stmts = parseStats()
def mainModuleName = newTermName(settings.script.value)
/* If there is only a single object template in the file and it has a
@@ -563,8 +568,8 @@ self =>
and
}
- def expectedMsg(token: Int): String =
- token2string(token) + " expected but " +token2string(in.token) + " found."
+ def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
+ def expectedMsg(token: Int): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
def accept(token: Int): Int = {
@@ -627,6 +632,8 @@ self =>
def isAnnotation: Boolean = in.token == AT
+ def isCaseDefStart: Boolean = in.token == CASE
+
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -1137,32 +1144,70 @@ self =>
})
}
- private def interpolatedString(inPattern: Boolean): Tree = atPos(in.offset) {
- val start = in.offset
- val interpolator = in.name
+ /** Handle placeholder syntax.
+ * If evaluating the tree produces placeholders, then make it a function.
+ */
+ private def withPlaceholders(tree: =>Tree, isAny: Boolean): Tree = {
+ val savedPlaceholderParams = placeholderParams
+ placeholderParams = List()
+ var res = tree
+ if (placeholderParams.nonEmpty && !isWildcard(res)) {
+ res = atPos(res.pos)(Function(placeholderParams.reverse, res))
+ if (isAny) placeholderParams foreach (_.tpt match {
+ case tpt @ TypeTree() => tpt setType definitions.AnyTpe
+ case _ => // some ascription
+ })
+ placeholderParams = List()
+ }
+ placeholderParams = placeholderParams ::: savedPlaceholderParams
+ res
+ }
- val partsBuf = new ListBuffer[Tree]
- val exprBuf = new ListBuffer[Tree]
+ /** Consume a USCORE and create a fresh synthetic placeholder param. */
+ private def freshPlaceholder(): Tree = {
+ val start = in.offset
+ val pname = freshName("x$")
in.nextToken()
- while (in.token == STRINGPART) {
- partsBuf += literal()
- exprBuf += (
- if (inPattern) dropAnyBraces(pattern())
- else in.token match {
- case IDENTIFIER => atPos(in.offset)(Ident(ident()))
- case LBRACE => expr()
- case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
- case _ => syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected", skipIt = true)(EmptyTree)
- }
- )
- }
- if (in.token == STRINGLIT) partsBuf += literal()
+ val id = atPos(start)(Ident(pname))
+ val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
+ placeholderParams = param :: placeholderParams
+ id
+ }
+
+ private def interpolatedString(inPattern: Boolean): Tree = {
+ def errpolation() = syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected",
+ skipIt = true)(EmptyTree)
+ // Like Swiss cheese, with holes
+ def stringCheese: Tree = atPos(in.offset) {
+ val start = in.offset
+ val interpolator = in.name
- val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
- val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
- t2 setPos t2.pos.makeTransparent
- val t3 = Select(t2, interpolator) setPos t2.pos
- atPos(start) { Apply(t3, exprBuf.toList) }
+ val partsBuf = new ListBuffer[Tree]
+ val exprBuf = new ListBuffer[Tree]
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ partsBuf += literal()
+ exprBuf += (
+ if (inPattern) dropAnyBraces(pattern())
+ else in.token match {
+ case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+ //case USCORE => freshPlaceholder() // ifonly etapolation
+ case LBRACE => expr() // dropAnyBraces(expr0(Local))
+ case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+ case _ => errpolation()
+ }
+ )
+ }
+ if (in.token == STRINGLIT) partsBuf += literal()
+
+ val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+ val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+ t2 setPos t2.pos.makeTransparent
+ val t3 = Select(t2, interpolator) setPos t2.pos
+ atPos(start) { Apply(t3, exprBuf.toList) }
+ }
+ if (inPattern) stringCheese
+ else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
}
/* ------------- NEW LINES ------------------------------------------------- */
@@ -1260,18 +1305,7 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = {
- val savedPlaceholderParams = placeholderParams
- placeholderParams = List()
- var res = expr0(location)
- if (!placeholderParams.isEmpty && !isWildcard(res)) {
- res = atPos(res.pos){ Function(placeholderParams.reverse, res) }
- placeholderParams = List()
- }
- placeholderParams = placeholderParams ::: savedPlaceholderParams
- res
- }
-
+ def expr(location: Int): Tree = withPlaceholders(expr0(location), isAny = false)
def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
@@ -1298,7 +1332,7 @@ self =>
in.nextToken()
if (in.token != LBRACE) catchFromExpr()
else inBracesOrNil {
- if (in.token == CASE) caseClauses()
+ if (isCaseDefStart) caseClauses()
else catchFromExpr()
}
}
@@ -1520,13 +1554,7 @@ self =>
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
path(thisOK = true, typeOK = false)
case USCORE =>
- val start = in.offset
- val pname = freshName("x$")
- in.nextToken()
- val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ gen.mkSyntheticParam(pname.toTermName) }
- placeholderParams = param :: placeholderParams
- id
+ freshPlaceholder()
case LPAREN =>
atPos(in.offset)(makeParens(commaSeparated(expr())))
case LBRACE =>
@@ -1613,7 +1641,7 @@ self =>
*/
def blockExpr(): Tree = atPos(in.offset) {
inBraces {
- if (in.token == CASE) Match(EmptyTree, caseClauses())
+ if (isCaseDefStart) Match(EmptyTree, caseClauses())
else block()
}
}
@@ -2542,7 +2570,7 @@ self =>
}
expr()
}
- DefDef(newmods, name, tparams, vparamss, restype, rhs)
+ DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs)
}
signalParseProgress(result.pos)
result
@@ -2605,7 +2633,7 @@ self =>
case EQUALS =>
in.nextToken()
TypeDef(mods, name, tparams, typ())
- case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+ case t if t == SUPERTYPE || t == SUBTYPE || t == COMMA || t == RBRACE || isStatSep(t) =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
@@ -2665,7 +2693,7 @@ self =>
syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", skipIt = false)
classContextBounds = List()
}
- val constrAnnots = constructorAnnotations()
+ val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
@@ -2772,9 +2800,10 @@ self =>
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
case tdef @ TypeDef(mods, name, tparams, rhs) =>
+ deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
case stat if !stat.isEmpty =>
- syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", skipIt = false)
+ syntaxError(stat.pos, "only concrete field definitions allowed in early object initialization section", skipIt = false)
List()
case _ => List()
}
@@ -2906,27 +2935,14 @@ self =>
stats.toList
}
- /** Informal - for the repl and other direct parser accessors.
- */
- def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
- case Nil => EmptyTree.asList
- case stats => stats
- }
-
/** {{{
- * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
- * TemplateStat ::= Import
- * | Annotations Modifiers Def
- * | Annotations Modifiers Dcl
- * | Expr1
- * | super ArgumentExprs {ArgumentExprs}
- * |
+ * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStats
* }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
var self: ValDef = emptyValDef
- val stats = new ListBuffer[Tree]
+ var firstOpt: Option[Tree] = None
if (isExprIntro) {
in.flushDoc
val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
@@ -2943,10 +2959,25 @@ self =>
}
in.nextToken()
} else {
- stats += first
+ firstOpt = Some(first)
acceptStatSepOpt()
}
}
+ (self, firstOpt ++: templateStats())
+ }
+
+ /** {{{
+ * TemplateStats ::= TemplateStat {semi TemplateStat}
+ * TemplateStat ::= Import
+ * | Annotations Modifiers Def
+ * | Annotations Modifiers Dcl
+ * | Expr1
+ * | super ArgumentExprs {ArgumentExprs}
+ * |
+ * }}}
+ */
+ def templateStats(): List[Tree] = {
+ val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
if (in.token == IMPORT) {
in.flushDoc
@@ -2961,7 +2992,14 @@ self =>
}
acceptStatSepOpt()
}
- (self, stats.toList)
+ stats.toList
+ }
+
+ /** Informal - for the repl and other direct parser accessors.
+ */
+ def templateStatsCompat(): List[Tree] = templateStats() match {
+ case Nil => EmptyTree.asList
+ case stats => stats
}
/** {{{
@@ -3026,14 +3064,14 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && in.token != CASE) {
+ while (!isStatSeqEnd && !isCaseDefStart) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSep()
}
else if (isExprIntro) {
stats += statement(InBlock)
- if (in.token != RBRACE && in.token != CASE) acceptStatSep()
+ if (in.token != RBRACE && !isCaseDefStart) acceptStatSep()
}
else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 2a8412b105..6957f85689 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -741,6 +741,10 @@ trait Scanners extends ScannersCommon {
finishStringPart()
nextRawChar()
next.token = LBRACE
+ } else if (ch == '_') {
+ finishStringPart()
+ nextRawChar()
+ next.token = USCORE
} else if (Character.isUnicodeIdentifierStart(ch)) {
finishStringPart()
do {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 666f19851d..ed694023d7 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -39,7 +39,7 @@ abstract class TreeBuilder {
* x becomes x @ _
* x: T becomes x @ (_: T)
*/
- private object patvarTransformer extends Transformer {
+ object patvarTransformer extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index c5fc12e3ec..32b5a98b98 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -11,33 +11,22 @@ import util.{ClassPath,MergedClassPath,DeltaClassPath}
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
+ val global: Global
+ override val symbolTable: global.type = global
import global._
import definitions._
- type BinaryRepr = AbstractFile
+ private var currentClassPath: Option[MergedClassPath[AbstractFile]] = None
- private var currentClassPath: Option[MergedClassPath[BinaryRepr]] = None
-
- def classPath: ClassPath[BinaryRepr] = {
+ def classPath: ClassPath[AbstractFile] = {
if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
currentClassPath.get
}
/** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]) =
currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
- def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
- // [Martin] Why do we need a cast here?
- // The problem is that we cannot specify at this point that global.platform should be of type JavaPlatform.
- // So we cannot infer that global.platform.BinaryRepr is AbstractFile.
- // Ideally, we should be able to write at the top of the JavaPlatform trait:
- // val global: Global { val platform: JavaPlatform }
- // import global._
- // Right now, this does nothing because the concrete definition of platform in Global
- // replaces the tighter abstract definition here. If we had DOT typing rules, the two
- // types would be conjoined and everything would work out. Yet another reason to push for DOT.
-
private def classEmitPhase =
if (settings.isBCodeActive) genBCode
else genASM
@@ -66,10 +55,7 @@ trait JavaPlatform extends Platform {
(sym isNonBottomSubClass BoxedBooleanClass)
}
- def newClassLoader(bin: AbstractFile): loaders.SymbolLoader =
- new loaders.ClassfileLoader(bin)
-
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = true
+ def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
def needCompile(bin: AbstractFile, src: AbstractFile) =
src.lastModified >= bin.lastModified
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index e2b22c06d7..3bca16635b 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -12,20 +12,18 @@ import io.AbstractFile
/** The platform dependent pieces of Global.
*/
trait Platform {
- val global: Global
- import global._
+ val symbolTable: symtab.SymbolTable
+ import symbolTable._
/** The binary classfile representation type */
- type BinaryRepr
+ @deprecated("BinaryRepr is not an abstract type anymore. It's an alias that points at AbstractFile. It'll be removed before Scala 2.11 is released.", "2.11.0-M5")
+ type BinaryRepr = AbstractFile
/** The compiler classpath. */
- def classPath: ClassPath[BinaryRepr]
-
- /** The root symbol loader. */
- def rootLoader: LazyType
+ def classPath: ClassPath[AbstractFile]
/** Update classpath with a substitution that maps entries to entries */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]])
+ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]])
/** Any platform-specific phases. */
def platformPhases: List[SubComponent]
@@ -36,16 +34,13 @@ trait Platform {
/** The various ways a boxed primitive might materialize at runtime. */
def isMaybeBoxed(sym: Symbol): Boolean
- /** Create a new class loader to load class file `bin` */
- def newClassLoader(bin: BinaryRepr): loaders.SymbolLoader
-
/**
* Tells whether a class should be loaded and entered into the package
* scope. On .NET, this method returns `false` for all synthetic classes
* (anonymous classes, implementation classes, module classes), their
* symtab is encoded in the pickle of another class.
*/
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean
+ def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean
/**
* Tells whether a class with both a binary and a source representation
@@ -53,6 +48,6 @@ trait Platform {
* on the JVM similar to javac, i.e. if the source file is newer than the classfile,
* a re-compile is triggered. On .NET by contrast classfiles always take precedence.
*/
- def needCompile(bin: BinaryRepr, src: AbstractFile): Boolean
+ def needCompile(bin: AbstractFile, src: AbstractFile): Boolean
}
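The `val symbolTable: symtab.SymbolTable` member here, together with `override val symbolTable: global.type = global` in JavaPlatform, is a singleton-type tie: the platform only depends on a symbol table, yet the type records that it is the very same instance as `global`. A minimal sketch with stand-in types (`SymbolTableLike`, `GlobalLike`, and friends are invented, not compiler API):

object SingletonTieSketch {
  class SymbolTableLike { def name = "shared symbol table" }
  class GlobalLike extends SymbolTableLike

  trait PlatformLike {
    val symbolTable: SymbolTableLike                 // all the platform needs to know about
  }

  trait JavaPlatformLike extends PlatformLike {
    val global: GlobalLike
    override val symbolTable: global.type = global   // identity is recorded in the type
  }

  def main(args: Array[String]): Unit = {
    val g = new GlobalLike
    // early definitions ensure `global` is set before the trait initializer runs
    val platform = new { val global: GlobalLike = g } with JavaPlatformLike
    println(platform.symbolTable eq platform.global)  // true
    println(platform.symbolTable.name)
  }
}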
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index e6f21fc1e3..843299398b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -72,7 +72,7 @@ abstract class GenICode extends SubComponent {
* it is the host class; otherwise the symbol's owner.
*/
def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
- case NoSymbol => log(s"Rejecting $selector as host class for $sym") ; sym.owner
+ case NoSymbol => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
case _ => selector.typeSymbol
}
@@ -739,7 +739,7 @@ abstract class GenICode extends SubComponent {
resolveForwardLabel(ctx.defdef, ctx, sym)
ctx.labels.get(sym) match {
case Some(l) =>
- log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
+ debuglog("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
l
case _ =>
abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
@@ -845,7 +845,7 @@ abstract class GenICode extends SubComponent {
val sym = tree.symbol
generatedType = toTypeKind(sym.info)
val hostClass = findHostClass(qualifier.tpe, sym)
- log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
def genLoadQualUnlessElidable: Context =
@@ -1026,7 +1026,7 @@ abstract class GenICode extends SubComponent {
* type Null is holding a null.
*/
private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
- log(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
+ debuglog(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
// Don't need to adapt null to unit because we'll just drop it anyway. Don't
// need to adapt to Object or AnyRef because the JVM is happy with
@@ -1046,7 +1046,7 @@ abstract class GenICode extends SubComponent {
private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
// An awful lot of bugs explode here - let's leave ourselves more clues.
// A typical example is an overloaded type assigned after typer.
- log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+ debuglog(s"GenICode#adapt($from, $to, $ctx, $pos)")
def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
@@ -1479,26 +1479,18 @@ abstract class GenICode extends SubComponent {
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
- val equalsMethod =
+ val equalsMethod: Symbol = {
if (!settings.optimise) {
- def default = platform.externalEquals
- platform match {
- case x: JavaPlatform =>
- import x._
- if (l.tpe <:< BoxedNumberClass.tpe) {
- if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
- else externalEqualsNumObject
- }
- else default
-
- case _ => default
- }
- }
- else {
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+ else platform.externalEqualsNumObject
+ } else platform.externalEquals
+ } else {
ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
getMember(ScalaRunTimeModule, nme.inlinedEquals)
}
+ }
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
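With the BinaryRepr abstraction gone, the backend can ask the platform for the numeric-aware equality helpers directly instead of pattern-matching on JavaPlatform. A standalone sketch of the selection logic itself (stand-in types and string results; the `externalEquals*` names are used as labels here, not calls into the real runtime):

object EqualsDispatchSketch {
  sealed trait StaticType
  case object BoxedNumber    extends StaticType
  case object BoxedCharacter extends StaticType
  case object OtherRef       extends StaticType

  // pick a helper from the static types of the operands, as the inlined code above does
  def helperFor(l: StaticType, r: StaticType): String = (l, r) match {
    case (BoxedNumber, BoxedNumber)    => "externalEqualsNumNum"
    case (BoxedNumber, BoxedCharacter) => "externalEqualsNumChar"
    case (BoxedNumber, _)              => "externalEqualsNumObject"
    case _                             => "externalEquals"
  }

  def main(args: Array[String]): Unit = {
    println(helperFor(BoxedNumber, BoxedCharacter)) // externalEqualsNumChar
    println(helperFor(OtherRef, BoxedNumber))       // externalEquals
  }
}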
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 3f2141782a..b9eb8f8aac 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -10,6 +10,7 @@ package icode
import java.io.PrintWriter
import analysis.{ Liveness, ReachingDefinitions }
import scala.tools.nsc.symtab.classfile.ICodeReader
+import scala.reflect.io.AbstractFile
/** Glue together ICode parts.
*
@@ -108,6 +109,12 @@ abstract class ICodes extends AnyRef
object icodeReader extends ICodeReader {
lazy val global: ICodes.this.global.type = ICodes.this.global
+ import global._
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+ global.loaders.lookupMemberAtTyperPhaseIfPossible(sym, name)
+ lazy val symbolTable: global.type = global
+ lazy val loaders: global.loaders.type = global.loaders
+ def classPath: util.ClassPath[AbstractFile] = ICodes.this.global.platform.classPath
}
/** A phase which works on icode. */
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
index a7f43eefed..817546b0f1 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -36,7 +36,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
* it is the host class; otherwise the symbol's owner.
*/
def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
- case NoSymbol => log(s"Rejecting $selector as host class for $sym") ; sym.owner
+ case NoSymbol => debuglog(s"Rejecting $selector as host class for $sym") ; sym.owner
case _ => selector.typeSymbol
}
@@ -330,7 +330,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
val sym = tree.symbol
generatedType = symInfoTK(sym)
val hostClass = findHostClass(qualifier.tpe, sym)
- log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ debuglog(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
def genLoadQualUnlessElidable() { if (!qualSafeToElide) { genLoadQualifier(tree) } }
@@ -1187,22 +1187,12 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
}
if (mustUseAnyComparator) {
- val equalsMethod = {
-
- def default = platform.externalEquals
-
- platform match {
- case x: JavaPlatform =>
- import x._
- if (l.tpe <:< BoxedNumberClass.tpe) {
- if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
- else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
- else externalEqualsNumObject
- }
- else default
-
- case _ => default
- }
+ val equalsMethod: Symbol = {
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) platform.externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) platform.externalEqualsNumChar
+ else platform.externalEqualsNumObject
+ } else platform.externalEquals
}
genLoad(l, ObjectReference)
genLoad(r, ObjectReference)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 4cb2f514ec..3947db2dd4 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1122,13 +1122,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
- debuglog("No forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
else if (conflictingNames(m.name))
- log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ log(s"No forwarder for $m due to conflict with " + linkedClass.info.member(m.name))
else if (m.hasAccessBoundary)
log(s"No forwarder for non-public member $m")
else {
- log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ debuglog(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
addForwarder(isRemoteClass, jclass, moduleClass, m)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index e55a3baed0..193100474c 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -17,13 +17,25 @@ import scala.tools.asm
/*
* Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
*
- * `BCodePhase.apply(CompilationUnit)` is invoked by some external force and that sets in motion:
- * - visiting each ClassDef contained in that CompilationUnit
- * - lowering the ClassDef into:
+ * Three pipelines are at work, each taking work items from a queue dedicated to that pipeline:
+ *
+ * (There's another pipeline so to speak, the one that populates queue-1 by traversing a CompilationUnit until ClassDefs are found,
+ * but the "interesting" pipelines are the ones described below)
+ *
+ * (1) In the first queue, an item consists of a ClassDef along with its arrival position.
+ * This position is needed at the time classfiles are serialized to disk,
+ * so as to emit classfiles in the same order CleanUp handed them over.
+ * As a result, two runs of the compiler on the same files produce jars that are identical on a byte basis.
+ * See `ant test.stability`
+ *
+ * (2) The second queue contains items where a ClassDef has been lowered into:
* (a) an optional mirror class,
* (b) a plain class, and
* (c) an optional bean class.
- * - each of the ClassNodes above is lowered into a byte-array (ie into a classfile) and serialized.
+ *
+ * (3) The third queue contains items ready for serialization.
+ * It's a priority queue that follows the original arrival order,
+ * so as to emit identical jars on repeated compilation of the same sources.
*
* Plain, mirror, and bean classes are built respectively by PlainClassBuilder, JMirrorBuilder, and JBeanInfoBuilder.
*
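A compact, runnable sketch of the queue/poison-pill scheme described above (a single stand-in Item type rather than the real Item1/Item2/Item3, and everything sequential): each stage drains its input until it sees the sentinel, and the final priority queue restores arrival order.

import java.util.{Comparator, LinkedList, PriorityQueue}

object PipelineSketch {
  case class Item(arrivalPos: Int, payload: String) {
    def isPoison = arrivalPos == Int.MaxValue
  }
  private val poison = Item(Int.MaxValue, null)

  def main(args: Array[String]): Unit = {
    val q1 = new LinkedList[Item]
    val q3 = new PriorityQueue[Item](16, new Comparator[Item] {
      override def compare(a: Item, b: Item) =
        if (a.arrivalPos < b.arrivalPos) -1 else if (a.arrivalPos == b.arrivalPos) 0 else 1
    })

    // stage 1: feed q1 (out of order on purpose), then terminate it with the sentinel
    List(Item(2, "c"), Item(0, "a"), Item(1, "b")).foreach(i => q1.add(i))
    q1.add(poison)

    // stage 2: drain q1 into q3, forwarding the poison pill
    var item = q1.poll
    while (!item.isPoison) { q3.add(item); item = q1.poll }
    q3.add(poison)

    // stage 3: "serialize" in arrival order until the poison pill arrives
    var out = q3.poll
    while (!out.isPoison) { println(s"${out.arrivalPos}: ${out.payload}"); out = q3.poll }
  }
}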
@@ -50,75 +62,192 @@ abstract class GenBCode extends BCodeSyncAndTry {
private var mirrorCodeGen : JMirrorBuilder = null
private var beanInfoCodeGen : JBeanInfoBuilder = null
- private var needsOutFolder = false // whether getOutFolder(claszSymbol) should be invoked for each claszSymbol
+ /* ---------------- q1 ---------------- */
+
+ case class Item1(arrivalPos: Int, cd: ClassDef, cunit: CompilationUnit) {
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+ private val poison1 = Item1(Int.MaxValue, null, null)
+ private val q1 = new java.util.LinkedList[Item1]
+
+ /* ---------------- q2 ---------------- */
- val caseInsensitively = mutable.Map.empty[String, Symbol]
+ case class Item2(arrivalPos: Int,
+ mirror: asm.tree.ClassNode,
+ plain: asm.tree.ClassNode,
+ bean: asm.tree.ClassNode,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+
+ private val poison2 = Item2(Int.MaxValue, null, null, null, null)
+ private val q2 = new _root_.java.util.LinkedList[Item2]
+
+ /* ---------------- q3 ---------------- */
/*
- * Checks for duplicate internal names case-insensitively,
- * builds ASM ClassNodes for mirror, plain, and bean classes.
+ * An item of queue-3 (the last queue before serializing to disk) contains three of these
+ * (one for each of mirror, plain, and bean classes).
*
+ * @param jclassName internal name of the class
+ * @param jclassBytes bytecode emitted for the class SubItem3 represents
*/
- def visit(arrivalPos: Int, cd: ClassDef, cunit: CompilationUnit) {
- val claszSymbol = cd.symbol
-
- // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739
- val lowercaseJavaClassName = claszSymbol.javaClassName.toLowerCase
- caseInsensitively.get(lowercaseJavaClassName) match {
- case None =>
- caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
- case Some(dupClassSym) =>
- cunit.warning(
- claszSymbol.pos,
- s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
- "Such classes will overwrite one another on case-insensitive filesystems."
- )
+ case class SubItem3(
+ jclassName: String,
+ jclassBytes: Array[Byte]
+ )
+
+ case class Item3(arrivalPos: Int,
+ mirror: SubItem3,
+ plain: SubItem3,
+ bean: SubItem3,
+ outFolder: scala.tools.nsc.io.AbstractFile) {
+
+ def isPoison = { arrivalPos == Int.MaxValue }
+ }
+ private val i3comparator = new java.util.Comparator[Item3] {
+ override def compare(a: Item3, b: Item3) = {
+ if (a.arrivalPos < b.arrivalPos) -1
+ else if (a.arrivalPos == b.arrivalPos) 0
+ else 1
}
+ }
+ private val poison3 = Item3(Int.MaxValue, null, null, null, null)
+ private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator)
+
+ /*
+ * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, and places them on queue-2
+ */
+ class Worker1(needsOutFolder: Boolean) {
+
+ val caseInsensitively = mutable.Map.empty[String, Symbol]
- // -------------- mirror class, if needed --------------
- val mirrorC =
- if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) {
- if (claszSymbol.companionClass == NoSymbol) {
- mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
- } else {
- log(s"No mirror class for module with linked class: ${claszSymbol.fullName}");
- null
+ def run() {
+ while (true) {
+ val item = q1.poll
+ if (item.isPoison) {
+ q2 add poison2
+ return
}
- } else null
-
- // -------------- "plain" class --------------
- val pcb = new PlainClassBuilder(cunit)
- pcb.genPlainClass(cd)
- val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName, cunit) else null;
- val plainC = pcb.cnode
-
- // -------------- bean info class, if needed --------------
- val beanC =
- if (claszSymbol hasAnnotation BeanInfoAttr) {
- beanInfoCodeGen.genBeanInfoClass(
- claszSymbol, cunit,
- fieldSymbols(claszSymbol),
- methodSymbols(cd)
- )
- } else null
-
- // ----------- serialize classfiles to disk
-
- def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = {
- val cw = new CClassWriter(extraProc)
- cn.accept(cw)
- cw.toByteArray
+ else {
+ try { visit(item) }
+ catch {
+ case ex: Throwable =>
+ ex.printStackTrace()
+ error(s"Error while emitting ${item.cunit.source}\n${ex.getMessage}")
+ }
+ }
+ }
}
- if (mirrorC != null) {
- sendToDisk(mirrorC.name, getByteArray(mirrorC), outF)
+ /*
+ * Checks for duplicate internal names case-insensitively,
+ * builds ASM ClassNodes for mirror, plain, and bean classes;
+ * enqueues them in queue-2.
+ *
+ */
+ def visit(item: Item1) {
+ val Item1(arrivalPos, cd, cunit) = item
+ val claszSymbol = cd.symbol
+
+ // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739
+ val lowercaseJavaClassName = claszSymbol.javaClassName.toLowerCase
+ caseInsensitively.get(lowercaseJavaClassName) match {
+ case None =>
+ caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
+ case Some(dupClassSym) =>
+ item.cunit.warning(
+ claszSymbol.pos,
+ s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
+ "Such classes will overwrite one another on case-insensitive filesystems."
+ )
+ }
+
+ // -------------- mirror class, if needed --------------
+ val mirrorC =
+ if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) {
+ if (claszSymbol.companionClass == NoSymbol) {
+ mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
+ } else {
+ log(s"No mirror class for module with linked class: ${claszSymbol.fullName}")
+ null
+ }
+ } else null
+
+ // -------------- "plain" class --------------
+ val pcb = new PlainClassBuilder(cunit)
+ pcb.genPlainClass(cd)
+ val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName, cunit) else null;
+ val plainC = pcb.cnode
+
+ // -------------- bean info class, if needed --------------
+ val beanC =
+ if (claszSymbol hasAnnotation BeanInfoAttr) {
+ beanInfoCodeGen.genBeanInfoClass(
+ claszSymbol, cunit,
+ fieldSymbols(claszSymbol),
+ methodSymbols(cd)
+ )
+ } else null
+
+ // ----------- hand over to pipeline-2
+
+ val item2 =
+ Item2(arrivalPos,
+ mirrorC, plainC, beanC,
+ outF)
+
+ q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done.
+
+ } // end of method visit(Item1)
+
+ } // end of class BCodePhase.Worker1
+
+ /*
+ * Pipeline that takes ClassNodes from queue-2. The unit of work depends on the optimization level:
+ *
+ * (a) no optimization, which involves:
+ * - converting the plain ClassNode to a byte array and placing it on queue-3
+ */
+ class Worker2 {
+
+ def run() {
+ while (true) {
+ val item = q2.poll
+ if (item.isPoison) {
+ q3 add poison3
+ return
+ }
+ else {
+ try { addToQ3(item) }
+ catch {
+ case ex: Throwable =>
+ ex.printStackTrace()
+ error(s"Error while emitting ${item.plain.name}\n${ex.getMessage}")
+ }
+ }
+ }
}
- sendToDisk(plainC.name, getByteArray(plainC), outF)
- if (beanC != null) {
- sendToDisk(beanC.name, getByteArray(beanC), outF)
+
+ private def addToQ3(item: Item2) {
+
+ def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = {
+ val cw = new CClassWriter(extraProc)
+ cn.accept(cw)
+ cw.toByteArray
+ }
+
+ val Item2(arrivalPos, mirror, plain, bean, outFolder) = item
+
+ val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror))
+ val plainC = SubItem3(plain.name, getByteArray(plain))
+ val beanC = if (bean == null) null else SubItem3(bean.name, getByteArray(bean))
+
+ q3 add Item3(arrivalPos, mirrorC, plainC, beanC, outFolder)
+
}
- } // end of method visit()
+ } // end of class BCodePhase.Worker2
var arrivalPos = 0
@@ -144,15 +273,12 @@ abstract class GenBCode extends BCodeSyncAndTry {
mirrorCodeGen = new JMirrorBuilder
beanInfoCodeGen = new JBeanInfoBuilder
- needsOutFolder = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
-
- super.run()
+ val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+ buildAndSendToDisk(needsOutfileForSymbol)
// closing output files.
bytecodeWriter.close()
- caseInsensitively.clear()
-
/* TODO Bytecode can be verified (now that all classfiles have been written to disk)
*
* (1) asm.util.CheckAdapter.verify()
@@ -170,17 +296,69 @@ abstract class GenBCode extends BCodeSyncAndTry {
clearBCodeTypes()
}
- def sendToDisk(jclassName: String, jclassBytes: Array[Byte], outFolder: _root_.scala.tools.nsc.io.AbstractFile) {
- try {
- val outFile =
- if (outFolder == null) null
- else getFileForClassfile(outFolder, jclassName, ".class")
- bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
+ /*
+ * Sequentially:
+ * (a) place all ClassDefs in queue-1
+ * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2
+ * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3
+ * (d) serialize to disk by draining queue-3.
+ */
+ private def buildAndSendToDisk(needsOutFolder: Boolean) {
+
+ feedPipeline1()
+ (new Worker1(needsOutFolder)).run()
+ (new Worker2).run()
+ drainQ3()
+
+ }
+
+ /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */
+ private def feedPipeline1() {
+ super.run()
+ q1 add poison1
+ }
+
+ /* Pipeline that writes classfile representations to disk. */
+ private def drainQ3() {
+
+ def sendToDisk(cfr: SubItem3, outFolder: scala.tools.nsc.io.AbstractFile) {
+ if (cfr != null){
+ val SubItem3(jclassName, jclassBytes) = cfr
+ try {
+ val outFile =
+ if (outFolder == null) null
+ else getFileForClassfile(outFolder, jclassName, ".class")
+ bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
+ }
+ catch {
+ case e: FileConflictException =>
+ error(s"error writing $jclassName: ${e.getMessage}")
+ }
+ }
}
- catch {
- case e: FileConflictException =>
- error(s"error writing $jclassName: ${e.getMessage}")
+
+ var moreComing = true
+ // `expected` denotes the arrivalPos whose Item3 should be serialized next
+ var expected = 0
+
+ while (moreComing) {
+ val incoming = q3.poll
+ moreComing = !incoming.isPoison
+ if (moreComing) {
+ val item = incoming
+ val outFolder = item.outFolder
+ sendToDisk(item.mirror, outFolder)
+ sendToDisk(item.plain, outFolder)
+ sendToDisk(item.bean, outFolder)
+ expected += 1
+ }
}
+
+ // we're done
+ assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1")
+ assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2")
+ assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3")
+
}
override def apply(cunit: CompilationUnit): Unit = {
@@ -190,7 +368,7 @@ abstract class GenBCode extends BCodeSyncAndTry {
case EmptyTree => ()
case PackageDef(_, stats) => stats foreach gen
case cd: ClassDef =>
- visit(arrivalPos, cd, cunit)
+ q1 add Item1(arrivalPos, cd, cunit)
arrivalPos += 1
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 483bff6467..7511da8b00 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -159,7 +159,7 @@ abstract class DeadCodeElimination extends SubComponent {
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() | CHECK_CAST(_) =>
moveToWorkList()
case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 56191cc981..09095879bf 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -78,10 +78,10 @@ abstract class Inliners extends SubComponent {
assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
", most likely this reveals the TFA at fault (receiver and callee don't match).")
if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
+ else sym.overridingSymbol(clazz) orElse (
+ if (sym.owner.isTrait) sym
+ else lookup(clazz.superClass)
+ )
}
if (needsLookup) {
val concreteMethod = lookup(clazz)
diff --git a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala b/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
deleted file mode 100644
index 98c3d27202..0000000000
--- a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.util.concurrent._
-
-class DaemonThreadFactory extends ThreadFactory {
- def newThread(r: Runnable): Thread = {
- val thread = new Thread(r)
- thread setDaemon true
- thread
- }
-}
-
-object DaemonThreadFactory {
- def newPool() = Executors.newCachedThreadPool(new DaemonThreadFactory)
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 0b2db115fb..5f2f90c284 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -5,8 +5,6 @@
package scala.tools.nsc
-import java.util.concurrent.{ Future, Callable }
-import java.util.{ Timer, TimerTask }
import scala.language.implicitConversions
package object io {
@@ -29,16 +27,4 @@ package object io {
type JFile = java.io.File
implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m)
- private lazy val daemonThreadPool = DaemonThreadFactory.newPool()
-
- def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
- def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
- def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
-
- def newThread(f: Thread => Unit)(body: => Unit): Thread = {
- val thread = new Thread(runnable(body))
- f(thread)
- thread.start
- thread
- }
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 2a799acbc7..239ecb4f8a 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -510,7 +510,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (inInterface) mods1 |= Flags.DEFERRED
List {
atPos(pos) {
- DefDef(mods1, name, tparams, List(vparams), rtpt, body)
+ DefDef(mods1, name.toTermName, tparams, List(vparams), rtpt, body)
}
}
} else {
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index 4e4efef607..4b9e056df3 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -12,9 +12,10 @@ import scala.tools.nsc.io.AbstractFile
* This class should be used whenever file dependencies and recompile sets
* are managed automatically.
*/
-abstract class BrowsingLoaders extends SymbolLoaders {
- import global._
+abstract class BrowsingLoaders extends GlobalSymbolLoaders {
+ val global: Global
+ import global._
import syntaxAnalyzer.{OutlineParser, MalformedInput}
/** In browse mode, it can happen that an encountered symbol is already
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index fd85bbb169..6f27eb8660 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -20,8 +20,23 @@ import scala.reflect.io.{ AbstractFile, NoAbstractFile }
* @version 1.0
*/
abstract class SymbolLoaders {
- val global: Global
- import global._
+ val symbolTable: symtab.SymbolTable {
+ def settings: Settings
+ }
+ val platform: backend.Platform {
+ val symbolTable: SymbolLoaders.this.symbolTable.type
+ }
+ import symbolTable._
+ /**
+ * Required by ClassfileParser. Check documentation in that class for details.
+ */
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
+ /**
+ * Should forward to `Run.compileLate`. The more principled fix would be to
+ * determine why this functionality is needed and extract it into a separate
+ * interface.
+ */
+ protected def compileLate(srcfile: AbstractFile): Unit
import SymbolLoadersStats._
protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
@@ -75,14 +90,14 @@ abstract class SymbolLoaders {
name+"\none of them needs to be removed from classpath"
)
else if (settings.termConflict.value == "package") {
- global.warning(
+ warning(
"Resolving package/object name conflict in favor of package " +
preExisting.fullName + ". The object will be inaccessible."
)
root.info.decls.unlink(preExisting)
}
else {
- global.warning(
+ warning(
"Resolving package/object name conflict in favor of object " +
preExisting.fullName + ". The package will be inaccessible."
)
@@ -139,17 +154,17 @@ abstract class SymbolLoaders {
/** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
*/
- def initializeFromClassPath(owner: Symbol, classRep: ClassPath[platform.BinaryRepr]#ClassRep) {
+ def initializeFromClassPath(owner: Symbol, classRep: ClassPath[AbstractFile]#ClassRep) {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path)
- global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+ enterToplevelsFromSource(owner, classRep.name, src)
case (None, Some(src)) =>
if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path)
- global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+ enterToplevelsFromSource(owner, classRep.name, src)
case (Some(bin), _) =>
- global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin))
+ enterClassAndModule(owner, classRep.name, new ClassfileLoader(bin))
}
}
@@ -221,7 +236,7 @@ abstract class SymbolLoaders {
/**
* Load contents of a package
*/
- class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader with FlagAgnosticCompleter {
+ class PackageLoader(classpath: ClassPath[AbstractFile]) extends SymbolLoader with FlagAgnosticCompleter {
protected def description = "package loader "+ classpath.name
protected def doComplete(root: Symbol) {
@@ -245,8 +260,24 @@ abstract class SymbolLoaders {
class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- } with ClassfileParser
+ val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable
+ } with ClassfileParser {
+ override protected type ThisConstantPool = ConstantPool
+ override protected def newConstantPool: ThisConstantPool = new ConstantPool
+ override protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+ SymbolLoaders.this.lookupMemberAtTyperPhaseIfPossible(sym, name)
+ /*
+ * The type alias and the cast (where the alias is used) are needed due to the problem described
+ * in SI-7585. In this particular case, we need to make sure that the symbol
+ * table used by the symbol loaders is exactly the same as the one used by classfileParser.
+ * If you look at the path-dependent types involved here, everything should work out, but
+ * due to the issue described in SI-7585 the type-checker cannot tie the knot here.
+ *
+ */
+ private type SymbolLoadersRefined = SymbolLoaders { val symbolTable: classfileParser.symbolTable.type }
+ val loaders = SymbolLoaders.this.asInstanceOf[SymbolLoadersRefined]
+ val classPath = platform.classPath
+ }
protected def description = "class file "+ classfile.toString
@@ -272,7 +303,7 @@ abstract class SymbolLoaders {
protected def description = "source file "+ srcfile.toString
override def fromSource = true
override def sourcefile = Some(srcfile)
- protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
+ protected def doComplete(root: Symbol): Unit = compileLate(srcfile)
}
object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter {
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index e4e3862bcd..2b96961291 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -17,6 +17,7 @@ import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
import scala.tools.nsc.io.AbstractFile
+import util.ClassPath
/** This abstract class implements a class file parser.
*
@@ -24,18 +25,40 @@ import scala.tools.nsc.io.AbstractFile
* @version 1.0
*/
abstract class ClassfileParser {
- val global: Global
- import global._
+ val symbolTable: SymbolTable {
+ def settings: Settings
+ }
+ val loaders: SymbolLoaders {
+ val symbolTable: ClassfileParser.this.symbolTable.type
+ }
+
+ import symbolTable._
+ /**
+ * If the typer phase is defined, perform member lookup of a symbol
+ * `sym` at the typer phase. This method results from refactoring. The
+ * original author of the logic that uses the typer phase didn't explain
+ * why we need to force infos at that phase specifically; it only mentioned
+ * that ClassfileParser can be called late (e.g. at the flatten phase) and
+ * that we need to make sure we handle such situations properly.
+ */
+ protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol
+
+ /** The compiler classpath. */
+ def classPath: ClassPath[AbstractFile]
+
import definitions._
import scala.reflect.internal.ClassfileConstants._
import Flags._
+ protected type ThisConstantPool <: ConstantPool
+ protected def newConstantPool: ThisConstantPool
+
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
protected var instanceScope: Scope = _ // the scope of all instance definitions
protected var staticScope: Scope = _ // the scope of all static definitions
- protected var pool: ConstantPool = _ // the classfile's constant pool
+ protected var pool: ThisConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
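The `ThisConstantPool` bound and the `newConstantPool` factory form a standard abstract-type-member pattern: the base parser manipulates the pool only through the bound, while ICodeReader (further down) refines the type to regain its extra members. A minimal sketch with invented names (`ParserBase`, `Pool`, and so on are not the real classfile parser):

object ConstantPoolFactorySketch {
  abstract class ParserBase {
    protected type ThisPool <: Pool
    protected def newPool: ThisPool

    protected class Pool { def size: Int = 0 }

    protected var pool: ThisPool = _
    def parse(): Unit = { pool = newPool }   // the base class never names the concrete pool type
  }

  class ExtendedParser extends ParserBase {
    override protected type ThisPool = ExtendedPool
    override protected def newPool = new ExtendedPool

    protected class ExtendedPool extends Pool {
      def extra: String = "member reachable only through the refined ThisPool"
    }

    def useExtra: String = { parse(); pool.extra }
  }

  def main(args: Array[String]): Unit =
    println(new ExtendedParser().useExtra)
}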
@@ -50,8 +73,7 @@ abstract class ClassfileParser {
def srcfile = srcfile0
- private def optimized = global.settings.optimise.value
- private def currentIsTopLevel = !(currentClass.decodedName containsChar '$')
+ private def optimized = settings.optimise.value
// u1, u2, and u4 are what these data types are called in the JVM spec.
// They are an unsigned byte, unsigned char, and unsigned int respectively.
@@ -70,7 +92,7 @@ abstract class ClassfileParser {
private def readType() = pool getType u2
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
- val global: ClassfileParser.this.global.type = ClassfileParser.this.global
+ val symbolTable: ClassfileParser.this.symbolTable.type = ClassfileParser.this.symbolTable
}
private def handleMissing(e: MissingRequirementError) = {
@@ -119,7 +141,7 @@ abstract class ClassfileParser {
this.isScala = false
parseHeader()
- this.pool = new ConstantPool
+ this.pool = newConstantPool
parseClass()
}
}
@@ -134,11 +156,14 @@ abstract class ClassfileParser {
abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
}
- class ConstantPool {
- private val len = u2
- private val starts = new Array[Int](len)
- private val values = new Array[AnyRef](len)
- private val internalized = new Array[Name](len)
+ /**
+ * The constructor of this class should not be called directly; use `newConstantPool` instead.
+ */
+ protected class ConstantPool {
+ protected val len = u2
+ protected val starts = new Array[Int](len)
+ protected val values = new Array[AnyRef](len)
+ protected val internalized = new Array[Name](len)
{ var i = 1
while (i < starts.length) {
@@ -212,76 +237,13 @@ abstract class ClassfileParser {
getExternalName((in getChar start).toInt)
}
- /** Return the symbol of the class member at `index`.
- * The following special cases exist:
- * - If the member refers to special `MODULE$` static field, return
- * the symbol of the corresponding module.
- * - If the member is a field, and is not found with the given name,
- * another try is made by appending `nme.LOCAL_SUFFIX_STRING`
- * - If no symbol is found in the right tpe, a new try is made in the
- * companion class, in case the owner is an implementation class.
- */
- def getMemberSymbol(index: Int, static: Boolean): Symbol = {
- if (index <= 0 || len <= index) errorBadIndex(index)
- var f = values(index).asInstanceOf[Symbol]
- if (f eq null) {
- val start = starts(index)
- val first = in.buf(start).toInt
- if (first != CONSTANT_FIELDREF &&
- first != CONSTANT_METHODREF &&
- first != CONSTANT_INTFMETHODREF) errorBadTag(start)
- val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
- debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
- val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
- debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
-
- forceMangledName(tpe0.typeSymbol.name, module = false)
- val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
- if (name == nme.MODULE_INSTANCE_FIELD) {
- val index = in.getChar(start + 1).toInt
- val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
- //assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name dropRight 1, module = true)
- if (f == NoSymbol)
- f = rootMirror.getModuleByName(name dropRight 1)
- } else {
- val origName = nme.unexpandedName(name)
- val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
- f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
- if (f == NoSymbol)
- f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
- if (f == NoSymbol) {
- // if it's an impl class, try to find it's static member inside the class
- if (ownerTpe.typeSymbol.isImplClass) {
- f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
- } else {
- log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
- f = tpe match {
- case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
- case _ => owner.newVariable(name.toTermName, owner.pos)
- }
- f setInfo tpe
- log("created fake member " + f.fullName)
- }
- }
- }
- assert(f != NoSymbol,
- s"could not find $name: $tpe in $ownerTpe" + (
- if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
- )
- )
- values(index) = f
- }
- f
- }
-
/** Return a name and a type at the given index. If the type is a method
* type, a dummy symbol is created in `ownerTpe`, which is used as the
* owner of its value parameters. This might lead to inconsistencies,
* if a symbol of the given name already exists, and has a different
* type.
*/
- private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
+ protected def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
if (index <= 0 || len <= index) errorBadIndex(index)
values(index) match {
case p: ((Name @unchecked, Type @unchecked)) => p
@@ -381,37 +343,16 @@ abstract class ClassfileParser {
}
/** Throws an exception signaling a bad constant index. */
- private def errorBadIndex(index: Int) =
+ protected def errorBadIndex(index: Int) =
abort(s"bad constant pool index: $index at pos: ${in.bp}")
/** Throws an exception signaling a bad tag at given address. */
- private def errorBadTag(start: Int) =
- abort("bad constant pool tag ${in.buf(start)} at byte $start")
- }
-
- /** Try to force the chain of enclosing classes for the given name. Otherwise
- * flatten would not lift classes that were not referenced in the source code.
- */
- def forceMangledName(name: Name, module: Boolean): Symbol = {
- val parts = name.decode.toString.split(Array('.', '$'))
- var sym: Symbol = rootMirror.RootClass
-
- // was "at flatten.prev"
- enteringFlatten {
- for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
- val sym1 = enteringIcode {
- sym.linkedClassOfClass.info
- sym.info.decl(part.encode)
- }//.suchThat(module == _.isModule)
-
- sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
- }
- }
- sym
+ protected def errorBadTag(start: Int) =
+ abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
}
private def loadClassSymbol(name: Name): Symbol = {
- val file = global.classPath findSourceFile ("" +name) getOrElse {
+ val file = classPath findSourceFile ("" +name) getOrElse {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
@@ -419,7 +360,7 @@ abstract class ClassfileParser {
warning(s"Class $name not found - continuing with a stub.")
return NoSymbol.newClass(name.toTypeName)
}
- val completer = new global.loaders.ClassfileLoader(file)
+ val completer = new loaders.ClassfileLoader(file)
var owner: Symbol = rootMirror.RootClass
var sym: Symbol = NoSymbol
var ss: Name = null
@@ -718,14 +659,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0, tp)
- logResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
+ debuglogResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
}
// isMonomorphicType is false if the info is incomplete, as it usually is here
// so have to check unsafeTypeParams.isEmpty before worrying about raw type case below,
// or we'll create a boatload of needless existentials.
else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
// raw type - existentially quantify all type parameters
- else logResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
+ else debuglogResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
@@ -933,9 +874,14 @@ abstract class ClassfileParser {
case ENUM_TAG =>
val t = pool.getType(index)
val n = readName()
- val s = t.typeSymbol.companionModule.info.decls.lookup(n)
- assert(s != NoSymbol, t)
- Some(LiteralAnnotArg(Constant(s)))
+ val module = t.typeSymbol.companionModule
+ val s = module.info.decls.lookup(n)
+ if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s)))
+ else {
+ warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""")
+ None
+ }
+
case ARRAY_TAG =>
val arr = new ArrayBuffer[ClassfileAnnotArg]()
var hasError = false
@@ -1065,7 +1011,7 @@ abstract class ClassfileParser {
def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
def jflags = entry.jflags
- val completer = new global.loaders.ClassfileLoader(file)
+ val completer = new loaders.ClassfileLoader(file)
val name = entry.originalName
val sflags = jflags.toScalaFlags
val owner = ownerForFlags(jflags)
@@ -1073,7 +1019,7 @@ abstract class ClassfileParser {
val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
- innerModule.moduleClass setInfo global.loaders.moduleClassLoader
+ innerModule.moduleClass setInfo loaders.moduleClassLoader
List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
scope enter innerClass
@@ -1094,7 +1040,7 @@ abstract class ClassfileParser {
for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
- val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+ val file = classPath.findSourceFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
enterClassAndModule(entry, file)
@@ -1179,19 +1125,15 @@ abstract class ClassfileParser {
case Some(entry) => innerSymbol(entry)
case _ => NoSymbol
}
- // if loading during initialization of `definitions` typerPhase is not yet set.
- // in that case we simply load the member at the current phase
- @inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
- if (currentRun.typerPhase eq null) body else enteringTyper(body)
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
val enclosing = entry.enclosing
def getMember = (
if (enclosing == clazz) entry.scope lookup name
- else enclosing.info member name
+ else lookupMemberAtTyperPhaseIfPossible(enclosing, name)
)
- enteringTyperIfPossible(getMember)
+ getMember
/* There used to be an assertion that this result is not NoSymbol; changing it to an error
* revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
* in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 01a117895f..f704d8ac89 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -20,6 +20,8 @@ import scala.reflect.internal.JavaAccFlags
*/
abstract class ICodeReader extends ClassfileParser {
val global: Global
+ val symbolTable: global.type
+ val loaders: global.loaders.type
import global._
import icodes._
@@ -28,6 +30,95 @@ abstract class ICodeReader extends ClassfileParser {
var method: IMethod = NoIMethod // the current IMethod
var isScalaModule = false
+ override protected type ThisConstantPool = ICodeConstantPool
+ override protected def newConstantPool = new ICodeConstantPool
+
+ /** Try to force the chain of enclosing classes for the given name. Otherwise
+ * flatten would not lift classes that were not referenced in the source code.
+ */
+ def forceMangledName(name: Name, module: Boolean): Symbol = {
+ val parts = name.decode.toString.split(Array('.', '$'))
+ var sym: Symbol = rootMirror.RootClass
+
+ // was "at flatten.prev"
+ enteringFlatten {
+ for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
+ val sym1 = enteringIcode {
+ sym.linkedClassOfClass.info
+ sym.info.decl(part.encode)
+ }//.suchThat(module == _.isModule)
+
+ sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
+ }
+ }
+ sym
+ }
+
+ protected class ICodeConstantPool extends ConstantPool {
+ /** Return the symbol of the class member at `index`.
+ * The following special cases exist:
+ * - If the member refers to special `MODULE$` static field, return
+ * the symbol of the corresponding module.
+ * - If the member is a field, and is not found with the given name,
+ * another try is made by appending `nme.LOCAL_SUFFIX_STRING`
+ * - If no symbol is found in the right tpe, a new try is made in the
+ * companion class, in case the owner is an implementation class.
+ */
+ def getMemberSymbol(index: Int, static: Boolean): Symbol = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var f = values(index).asInstanceOf[Symbol]
+ if (f eq null) {
+ val start = starts(index)
+ val first = in.buf(start).toInt
+ if (first != CONSTANT_FIELDREF &&
+ first != CONSTANT_METHODREF &&
+ first != CONSTANT_INTFMETHODREF) errorBadTag(start)
+ val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
+ debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
+ val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
+ debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
+
+ forceMangledName(tpe0.typeSymbol.name, module = false)
+ val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
+ if (name == nme.MODULE_INSTANCE_FIELD) {
+ val index = in.getChar(start + 1).toInt
+ val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
+ //assert(name.endsWith("$"), "Not a module class: " + name)
+ f = forceMangledName(name dropRight 1, module = true)
+ if (f == NoSymbol)
+ f = rootMirror.getModuleByName(name dropRight 1)
+ } else {
+ val origName = nme.unexpandedName(name)
+ val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
+ f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
+ if (f == NoSymbol)
+ f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
+ if (f == NoSymbol) {
+ // if it's an impl class, try to find its static member inside the class
+ if (ownerTpe.typeSymbol.isImplClass) {
+ f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
+ } else {
+ log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
+ f = tpe match {
+ case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
+ case _ => owner.newVariable(name.toTermName, owner.pos)
+ }
+ f setInfo tpe
+ log("created fake member " + f.fullName)
+ }
+ }
+ }
+ assert(f != NoSymbol,
+ s"could not find $name: $tpe in $ownerTpe" + (
+ if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
+ )
+ )
+ values(index) = f
+ }
+ f
+ }
+ }
+
/** Read back bytecode for the given class symbol. It returns
* two IClass objects, one for static members and one
* for non-static members.
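The fallback chain in getMemberSymbol above (exact name, then name plus nme.LOCAL_SUFFIX_STRING, then the companion/implementation class, and finally a synthesized stub) can be pictured with plain Option chaining. A minimal, self-contained sketch, assuming hypothetical Map-based scopes instead of the compiler's Symbol API:

object MemberLookupSketch {
  // Hypothetical stand-ins for compiler scopes: member name -> type signature.
  def resolve(members: Map[String, String],
              companion: Map[String, String],
              name: String,
              localSuffix: String): Option[(String, String)] = {
    def in(scope: Map[String, String], n: String) = scope.get(n).map(n -> _)
    in(members, name)                               // exact name first
      .orElse(in(members, name + localSuffix))      // field hidden behind an accessor
      .orElse(in(companion, name))                  // last resort: companion / impl class
  }

  def main(args: Array[String]): Unit = {
    val members   = Map("x " -> "Int")              // local fields carry a trailing-space suffix in scalac
    val companion = Map("y" -> "String")
    println(resolve(members, companion, "x", " "))  // Some((x ,Int))   via the suffix fallback
    println(resolve(members, companion, "y", " "))  // Some((y,String)) via the companion fallback
  }
}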
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 9bad29097c..9ac1ce1b9c 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -88,12 +88,17 @@ abstract class Pickler extends SubComponent {
/** Returns usually symbol's owner, but picks classfile root instead
* for existentially bound variables that have a non-local owner.
* Question: Should this be done for refinement class symbols as well?
+ *
+ * Note: tree pickling also finds its way here; e.g. in SI-7501 the pickling
+ * of trees in annotation arguments considers the parameter symbol of a method
+ * called in such a tree as "local". The condition `sym.isValueParameter` was
+ * added to fix that bug, but there may be a better way.
*/
private def localizedOwner(sym: Symbol) =
if (isLocal(sym) && !isRootSym(sym) && !isLocal(sym.owner))
// don't use a class as the localized owner for type parameters that are not owned by a class: those are not instantiated by asSeenFrom
// however, they would suddenly be considered by asSeenFrom if their localized owner became a class (causing the crashes of #4079, #2741)
- (if(sym.isTypeParameter && !sym.owner.isClass) nonClassRoot
+ (if ((sym.isTypeParameter || sym.isValueParameter) && !sym.owner.isClass) nonClassRoot
else root)
else sym.owner
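For context on the SI-7501 note above: annotation arguments that are not constants are kept as trees and pickled with the annotated symbol, which is how parameter symbols inside such trees reach localizedOwner in the first place. A hedged sketch of that shape (a hypothetical annotation, not the SI-7501 test case):

// Non-constant annotation argument: the argument tree is pickled as-is.
class keepTree(arg: Any) extends scala.annotation.StaticAnnotation

@keepTree(List(1, 2, 3).map(x => x + 1)) // the lambda's parameter lives inside a pickled tree
class Annotated

object PicklingSketch {
  def main(args: Array[String]): Unit = println(new Annotated)
}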
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 0dcf4d00b7..d9d08dde1e 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -367,29 +367,3 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
}
}
-/*
- val ensureNoEscapes = new TypeTraverser {
- def ensureNoEscape(sym: Symbol) {
- if (sym.hasFlag(PRIVATE)) {
- var o = currentOwner;
- while (o != NoSymbol && o != sym.owner && !o.isLocal && !o.hasFlag(PRIVATE))
- o = o.owner
- if (o == sym.owner) sym.makeNotPrivate(base);
- }
- }
- def traverse(t: Type): TypeTraverser = {
- t match {
- case TypeRef(qual, sym, args) =>
- ensureNoEscape(sym)
- mapOver(t)
- case ClassInfoType(parents, decls, clazz) =>
- parents foreach { p => traverse; () }
- traverse(t.typeOfThis)
- case _ =>
- mapOver(t)
- }
- this
- }
- }
-
-*/
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 7dfa7cdf8d..cbe4f69d25 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -28,13 +28,14 @@ abstract class Constructors extends Transform with ast.TreeDSL {
class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
- def transformClassTemplate(impl: Template): Template = {
- val clazz = impl.symbol.owner // the transformed class
- val stats = impl.body // the transformed template body
- val localTyper = typer.atOwner(impl, clazz)
+ /*
+ * Inspect for obvious out-of-order initialization; concrete, eager vals or vars, declared in this class,
+ * for which a reference to the member precedes its definition.
+ */
+ private def checkUninitializedReads(cd: ClassDef) {
+ val stats = cd.impl.body
+ val clazz = cd.symbol
- // Inspect for obvious out-of-order initialization; concrete, eager vals or vars,
- // declared in this class, for which a reference to the member precedes its definition.
def checkableForInit(sym: Symbol) = (
(sym ne null)
&& (sym.isVal || sym.isVar)
@@ -43,8 +44,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val uninitializedVals = mutable.Set[Symbol](
stats collect { case vd: ValDef if checkableForInit(vd.symbol) => vd.symbol.accessedOrSelf }: _*
)
- if (uninitializedVals.nonEmpty)
- log("Checking constructor for init order issues among: " + uninitializedVals.map(_.name).mkString(", "))
+ if (uninitializedVals.size > 1)
+ log("Checking constructor for init order issues among: " + uninitializedVals.toList.map(_.name.toString.trim).distinct.sorted.mkString(", "))
for (stat <- stats) {
// Checking the qualifier symbol is necessary to prevent a selection on
@@ -68,531 +69,652 @@ abstract class Constructors extends Transform with ast.TreeDSL {
case t => check(t) // constructor body statement
}
}
- val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
- val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
- case class ConstrInfo(
- constr: DefDef, // The primary constructor
- constrParams: List[Symbol], // ... and its parameters
- constrBody: Block // ... and its body
- )
- // decompose primary constructor into the three entities above.
- val constrInfo: ConstrInfo = {
- stats find (_.symbol.isPrimaryConstructor) match {
- case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
- ConstrInfo(ddef, vparams map (_.symbol), rhs)
- case x =>
- // AnyVal constructor is OK
- assert(clazz eq AnyValClass, "no constructor in template: impl = " + impl)
- return impl
- }
+ } // end of checkUninitializedReads()
+
+ override def transform(tree: Tree): Tree = {
+ tree match {
+ case cd @ ClassDef(mods0, name0, tparams0, impl0) if !cd.symbol.isInterface && !isPrimitiveValueClass(cd.symbol) =>
+ if(cd.symbol eq AnyValClass) {
+ cd
+ }
+ else {
+ checkUninitializedReads(cd)
+ val tplTransformer = new TemplateTransformer(unit, impl0)
+ treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed)
+ }
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+ } // ConstructorTransformer
+
+ /*
+ * Summary
+ * -------
+ *
+ * The following gets elided unless they're actually needed:
+ * (a) parameter-accessor fields for non-val, non-var, constructor-param-symbols, as well as
+ * (b) outer accessors of a final class which don't override anything.
+ *
+ *
+ * Gory details
+ * ------------
+ *
+ * The constructors phase elides
+ *
+ * (a) parameter-accessor fields for non-val, non-var, constructor-param-symbols
+ * provided they're only accessed within the primary constructor;
+ *
+ * as well as
+ *
+ * (b) outer accessors directly owned by the class of interest,
+ * provided that class is final, they don't override anything, and moreover they aren't accessed anywhere.
+ * An outer accessor is backed by a param-accessor field.
+ * If an outer-accessor can be elided then its supporting field can be elided as well.
+ *
+ * Once the potential candidates for elision are known (as described above) it remains to visit
+ * those program locations where they might be accessed, and only those.
+ *
+ * What trees can be visited at this point?
+ * To recap, by the time the constructors phase runs, local definitions have been hoisted out of their original owner.
+ * Moreover, by the time elision is about to happen, the `intoConstructors` rewriting
+ * of template-level statements has taken place (the resulting trees can be found in `constrStatBuf`).
+ *
+ * That means:
+ *
+ * - nested classes are to be found in `defBuf`
+ *
+ * - value and method definitions are also in `defBuf` and none of them contains local methods or classes.
+ *
+ * - auxiliary constructors are to be found in `auxConstructorBuf`
+ *
+ * Coming back to the question which trees may contain accesses:
+ *
+ * (c) regarding parameter-accessor fields, all candidates in (a) are necessarily private-local,
+ * and thus may only be accessed from value or method definitions owned by the current class
+ * (ie there's no point drilling down into nested classes).
+ *
+ * (d) regarding candidates in (b), they are accessible from all places listed in (c) and in addition
+ * from nested classes (nested at any number of levels).
+ *
+ * In all cases, we're done with traversing as soon as all candidates have been ruled out.
+ *
+ * Finally, the whole affair of eliding is avoided for DelayedInit subclasses,
+ * given that for them usually nothing gets elided anyway.
+ * That's a consequence of re-locating the post-super-call statements from their original location
+ * (the primary constructor) into a dedicated synthetic method that an anon-closure may invoke, as required by DelayedInit.
+ *
+ */
+ private trait OmittablesHelper { self: TemplateTransformer =>
+
+ /*
+ * Initially populated with all elision candidates.
+ * Trees are traversed, and those candidates are removed which are actually needed.
+ * After that, `omittables` doesn't shrink anymore: each symbol it contains can be unlinked from clazz.info.decls.
+ */
+ val omittables = mutable.Set.empty[Symbol]
+
+ def populateOmittables() {
+
+ omittables.clear()
+
+ if(isDelayedInitSubclass) {
+ return
}
- import constrInfo._
- // The parameter accessor fields which are members of the class
- val paramAccessors = clazz.constrParamAccessors
+ def isParamCandidateForElision(sym: Symbol) = (sym.isParamAccessor && sym.isPrivateLocal)
+ def isOuterCandidateForElision(sym: Symbol) = (sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol)
+
+ val paramCandidatesForElision: Set[ /*Field*/ Symbol] = (clazz.info.decls.toSet filter isParamCandidateForElision)
+ val outerCandidatesForElision: Set[ /*Method*/ Symbol] = (clazz.info.decls.toSet filter isOuterCandidateForElision)
- // The constructor parameter corresponding to an accessor
- def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName)
+ omittables ++= paramCandidatesForElision
+ omittables ++= outerCandidatesForElision
- // The constructor parameter with given name. This means the parameter
- // has given name, or starts with given name, and continues with a `$` afterwards.
- def parameterNamed(name: Name): Symbol = {
- def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
+ val bodyOfOuterAccessor: Map[Symbol, DefDef] =
+ defBuf collect { case dd: DefDef if outerCandidatesForElision(dd.symbol) => dd.symbol -> dd } toMap
- (constrParams filter matchesName) match {
- case Nil => abort(name + " not in " + constrParams)
- case p :: _ => p
+ // no point traversing further once omittables is empty, all candidates ruled out already.
+ object detectUsages extends Traverser {
+ private def markUsage(sym: Symbol) {
+ omittables -= debuglogResult("omittables -= ")(sym)
+ // recursive call to mark as needed the field supporting the outer-accessor-method.
+ bodyOfOuterAccessor get sym foreach (this traverse _.rhs)
}
+ override def traverse(tree: Tree): Unit = if (omittables.nonEmpty) {
+ def sym = tree.symbol
+ tree match {
+ // don't mark as "needed" the field supporting this outer-accessor, ie not just yet.
+ case _: DefDef if outerCandidatesForElision(sym) => ()
+ case _: Select if omittables(sym) => markUsage(sym) ; super.traverse(tree)
+ case _ => super.traverse(tree)
+ }
+ }
+ def walk(xs: Seq[Tree]) = xs.iterator foreach traverse
}
+ if (omittables.nonEmpty) {
+ detectUsages walk defBuf
+ detectUsages walk auxConstructorBuf
+ }
+ }
+ def mustbeKept(sym: Symbol) = !omittables(sym)
+
+ } // OmittablesHelper
+
+ /*
+ * TemplateTransformer rewrites DelayedInit subclasses.
+ * The list of statements that will end up in the primary constructor can be split into:
+ *
+ * (a) up to and including the super-constructor call.
+ * These statements can occur only in the (bytecode-level) primary constructor.
+ *
+ * (b) remaining statements
+ *
+ * The purpose of DelayedInit is to leave (b) out of the primary constructor and have its execution "delayed".
+ *
+ * The rewriting to achieve "delayed initialization" involves:
+ * (c) an additional, synthetic, public method encapsulating (b)
+ * (d) an additional, synthetic closure whose argless apply() just invokes (c)
+ * (e) after executing the statements in (a),
+ * the primary constructor instantiates (d) and passes it as argument
+ * to a `delayedInit()` invocation on the current instance.
+ * In turn, `delayedInit()` is a method defined as abstract in the `DelayedInit` trait
+ * so that it can be overridden (for an example see `scala.App`)
+ *
+ * The following helper methods prepare Trees as part of this rewriting:
+ *
+ * (f) `delayedEndpointDef()` prepares (c).
+ * A transformer, `constrStatTransformer`, is used to re-locate statements (b) from template-level
+ * to become statements in method (c). The main task here is re-formulating accesses to params
+ * of the primary constructors (to recap, (c) has zero-params) in terms of param-accessor fields.
+ * In a Delayed-Init subclass, each class-constructor gets a param-accessor field because `mustbeKept()` forces it.
+ *
+ * (g) `delayedInitClosure()` prepares (d)
+ *
+ * (h) `delayedInitCall()` prepares the `delayedInit()` invocation referred to in (e)
+ *
+ * Both (c) and (d) are added to the Template returned by `transformClassTemplate()`
+ *
+ * A note of historic interest: Previously the rewriting for DelayedInit would include in the closure body
+ * all of the delayed initialization sequence, which in turn required:
+ * - reformulating "accesses-on-this" into "accesses-on-outer", and
+ * - adding public getters and setters.
+ *
+ * @param stats the statements in (b) above
+ *
+ * @return the DefDef for (c) above
+ *
+ * */
+ private trait DelayedInitHelper { self: TemplateTransformer =>
+
+ private def delayedEndpointDef(stats: List[Tree]): DefDef = {
+
+ val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
+ val methodSym = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
+ methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
+
+ // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago.
+ val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym)
+ val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) }
+
+ delayedDD.asInstanceOf[DefDef]
+ }
- var usesSpecializedField: Boolean = false
-
- // A transformer for expressions that go into the constructor
- val intoConstructorTransformer = new Transformer {
- def isParamRef(sym: Symbol) =
- sym.isParamAccessor &&
- sym.owner == clazz &&
- !(clazz isSubClass DelayedInitClass) &&
- !(sym.isGetter && sym.accessed.isVariable) &&
- !sym.isSetter
- private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
- override def transform(tree: Tree): Tree = tree match {
- case Apply(Select(This(_), _), List()) =>
- // references to parameter accessor methods of own class become references to parameters
- // outer accessors become references to $outer parameter
- if (isParamRef(tree.symbol) && !possiblySpecialized(tree.symbol))
- gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
- else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
- gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
- else
- super.transform(tree)
- case Select(This(_), _) if (isParamRef(tree.symbol) && !possiblySpecialized(tree.symbol)) =>
- // references to parameter accessor field of own class become references to parameters
- gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
- case Select(_, _) =>
- if (specializeTypes.specializedTypeVars(tree.symbol).nonEmpty)
- usesSpecializedField = true
- super.transform(tree)
- case _ =>
- super.transform(tree)
+ private def delayedInitClosure(delayedEndPointSym: MethodSymbol): ClassDef = {
+ val satelliteClass = localTyper.typed {
+ atPos(impl.pos) {
+ val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
+ val closureParents = List(AbstractFunctionClass(0).tpe)
+
+ closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
+
+ val outerField: TermSymbol = (
+ closureClass
+ newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
+ setInfoAndEnter clazz.tpe
+ )
+ val applyMethod: MethodSymbol = (
+ closureClass
+ newMethod(nme.apply, impl.pos, FINAL)
+ setInfoAndEnter MethodType(Nil, ObjectTpe)
+ )
+ val outerFieldDef = ValDef(outerField)
+ val closureClassTyper = localTyper.atOwner(closureClass)
+ val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
+
+ def applyMethodStat =
+ applyMethodTyper.typed {
+ atPos(impl.pos) {
+ val receiver = Select(This(closureClass), outerField)
+ Apply(Select(receiver, delayedEndPointSym), Nil)
+ }
+ }
+
+ val applyMethodDef = DefDef(
+ sym = applyMethod,
+ vparamss = ListOfNil,
+ rhs = Block(applyMethodStat, gen.mkAttributedRef(BoxedUnit_UNIT)))
+
+ ClassDef(
+ sym = closureClass,
+ constrMods = Modifiers(0),
+ vparamss = List(List(outerFieldDef)),
+ body = applyMethodDef :: Nil,
+ superPos = impl.pos)
}
}
- // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
- def intoConstructor(oldowner: Symbol, tree: Tree) =
- intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
+ satelliteClass.asInstanceOf[ClassDef]
+ }
- // Should tree be moved in front of super constructor call?
- def canBeMoved(tree: Tree) = tree match {
- case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
- case _ => false
- }
+ private def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
+ gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+ }
+
+ def rewriteDelayedInit() {
+ /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
+ * but excluding it includes too much. The constructor sequence being mimicked
+ * needs to be reproduced with total fidelity.
+ *
+ * See test case files/run/bug4680.scala, the output of which is wrong in many
+ * particulars.
+ */
+ val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
- // Create an assignment to class field `to` with rhs `from`
- def mkAssign(to: Symbol, from: Tree): Tree =
- localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
-
- // Create code to copy parameter to parameter accessor field.
- // If parameter is $outer, check that it is not null so that we NPE
- // here instead of at some unknown future $outer access.
- def copyParam(to: Symbol, from: Symbol): Tree = {
- import CODE._
- val result = mkAssign(to, Ident(from))
-
- if (from.name != nme.OUTER ||
- from.tpe.typeSymbol.isPrimitiveValueClass) result
- else localTyper.typedPos(to.pos) {
- // `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
- IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
+ if (needsDelayedInit) {
+ val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
+ defBuf += delayedHook
+ val hookCallerClass = {
+ // transform to make the closure-class' default constructor assign the outer instance to its param-accessor field.
+ val drillDown = new ConstructorTransformer(unit)
+ drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
}
+ defBuf += hookCallerClass
+ remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
}
+ }
- // The list of definitions that go into class
- val defBuf = new ListBuffer[Tree]
-
- // The auxiliary constructors, separate from the defBuf since they should
- // follow the primary constructor
- val auxConstructorBuf = new ListBuffer[Tree]
-
- // The list of statements that go into constructor after and including the superclass constructor call
- val constrStatBuf = new ListBuffer[Tree]
+ } // DelayedInitHelper
- // The list of early initializer statements that go into constructor before the superclass constructor call
- val constrPrefixBuf = new ListBuffer[Tree]
+ private trait GuardianOfCtorStmts { self: TemplateTransformer =>
- // The early initialized field definitions of the class (these are the class members)
- val presupers = treeInfo.preSuperFields(stats)
+ /* Return a single list of statements, merging the generic class constructor with the
+ * specialized stats. The original statements are retyped in the current class, and
+ * assignments to generic fields that have a corresponding specialized assignment in
+ * `specializedStats` are replaced by the specialized assignment.
+ */
+ private def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
+ val specBuf = new ListBuffer[Tree]
+ specBuf ++= specializedStats
- // generate code to copy pre-initialized fields
- for (stat <- constrBody.stats) {
- constrStatBuf += stat
- stat match {
- case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
- // stat is the constructor-local definition of the field value
- val fields = presupers filter (_.getterName == name)
- assert(fields.length == 1)
- val to = fields.head.symbol
- if (!to.tpe.isInstanceOf[ConstantType])
- constrStatBuf += mkAssign(to, Ident(stat.symbol))
- case _ =>
+ def specializedAssignFor(sym: Symbol): Option[Tree] =
+ specializedStats find {
+ case Assign(sel @ Select(This(_), _), _) =>
+ sel.symbol.isSpecialized && (nme.unspecializedName(sel.symbol.getterName) == sym.getterName)
+ case _ => false
}
- }
- // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
- for (stat <- stats) stat match {
- case DefDef(_,_,_,_,_,rhs) =>
- // methods with constant result type get literals as their body
- // all methods except the primary constructor go into template
- stat.symbol.tpe match {
- case MethodType(List(), tp @ ConstantType(c)) =>
- defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
- case _ =>
- if (stat.symbol.isPrimaryConstructor) ()
- else if (stat.symbol.isConstructor) auxConstructorBuf += stat
- else defBuf += stat
- }
- case ValDef(_, _, _, rhs) =>
- // val defs with constant right-hand sides are eliminated.
- // for all other val defs, an empty valdef goes into the template and
- // the initializer goes as an assignment into the constructor
- // if the val def is an early initialized or a parameter accessor, it goes
- // before the superclass constructor call, otherwise it goes after.
- // Lazy vals don't get the assignment in the constructor.
- if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
- if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs)
- (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
- stat.symbol, rhs1)
- }
- defBuf += deriveValDef(stat)(_ => EmptyTree)
+ /* Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
+ * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
+ * variable, but after specialization this is a concrete primitive type, so it would
+ * be an error to pass it to array_update(.., .., Object).
+ */
+ def rewriteArrayUpdate(tree: Tree): Tree = {
+ val adapter = new Transformer {
+ override def transform(t: Tree): Tree = t match {
+ case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
+ localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
+ case _ => super.transform(t)
}
- case ClassDef(_, _, _, _) =>
- // classes are treated recursively, and left in the template
- defBuf += new ConstructorTransformer(unit).transform(stat)
- case _ =>
- // all other statements go into the constructor
- constrStatBuf += intoConstructor(impl.symbol, stat)
+ }
+ adapter.transform(tree)
}
- // ----------- avoid making parameter-accessor fields for symbols accessed only within the primary constructor --------------
+ log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
+ val res = for (s <- originalStats; stat = s.duplicate) yield {
+ log("merge: looking at " + stat)
+ val stat1 = stat match {
+ case Assign(sel @ Select(This(_), field), _) =>
+ specializedAssignFor(sel.symbol).getOrElse(stat)
+ case _ => stat
+ }
+ if (stat1 ne stat) {
+ log("replaced " + stat + " with " + stat1)
+ specBuf -= stat1
+ }
- // A sorted set of symbols that are known to be accessed outside the primary constructor.
- val ord = Ordering.fromLessThan[Symbol](_ isLess _)
- val accessedSyms = mutable.TreeSet.empty[Symbol](ord)
+ if (stat1 eq stat) {
+ assert(ctorParams(genericClazz).length == constrInfo.constrParams.length)
+ // this is just to make private fields public
+ (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrInfo.constrParams, null, true))(stat1)
+
+ val stat2 = rewriteArrayUpdate(stat1)
+ // statements coming from the original class need retyping in the current context
+ debuglog("retyping " + stat2)
+
+ val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
+ d.retyped(localTyper.context1.asInstanceOf[d.Context],
+ stat2,
+ genericClazz,
+ clazz,
+ Map.empty)
+ } else
+ stat1
+ }
+ if (specBuf.nonEmpty)
+ println("residual specialized constructor statements: " + specBuf)
+ res
+ }
- // a list of outer accessor symbols and their bodies
- var outerAccessors: List[(Symbol, Tree)] = List()
+ /* Add an 'if' around the statements coming after the super constructor. This
+ * guard is necessary if the code uses specialized fields. A specialized field is
+ * initialized in the subclass constructor, but the accessors are (already) overridden
+ * and pointing to the (empty) fields. To fix this, a class with specialized fields
+ * will not run its constructor statements if the instance is specialized. The specialized
+ * subclass includes a copy of those constructor statements, and runs them. To flag that a class
+ * has specialized fields, and their initialization should be deferred to the subclass, method
+ * 'specInstance$' is added in phase specialize.
+ */
+ def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
+ // // split the statements in presuper and postsuper
+ // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
+ // if (postfix.nonEmpty) {
+ // prefix = prefix :+ postfix.head
+ // postfix = postfix.tail
+ // }
+
+ if (shouldGuard && usesSpecializedField && stats.nonEmpty) {
+ // save them for duplication in the specialized subclass
+ guardedCtorStats(clazz) = stats
+ ctorParams(clazz) = constrInfo.constrParams
+
+ val tree =
+ If(
+ Apply(
+ CODE.NOT (
+ Apply(gen.mkAttributedRef(specializedFlag), List())),
+ List()),
+ Block(stats, Literal(Constant(()))),
+ EmptyTree)
+
+ List(localTyper.typed(tree))
+ }
+ else if (clazz.hasFlag(SPECIALIZED)) {
+ // add initialization from its generic class constructor
+ val genericName = nme.unspecializedName(clazz.name)
+ val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
+ assert(genericClazz != NoSymbol, clazz)
+
+ guardedCtorStats.get(genericClazz) match {
+ case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
+ case None => stats
+ }
+ } else stats
+ }
- val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
+ } // GuardianOfCtorStmts
+
+ private class TemplateTransformer(val unit: CompilationUnit, val impl: Template)
+ extends Transformer
+ with DelayedInitHelper
+ with OmittablesHelper
+ with GuardianOfCtorStmts {
+
+ val clazz = impl.symbol.owner // the transformed class
+ val stats = impl.body // the transformed template body
+ val localTyper = typer.atOwner(impl, clazz)
+
+ val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
+ val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
+
+ val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
+
+ case class ConstrInfo(
+ constr: DefDef, // The primary constructor
+ constrParams: List[Symbol], // ... and its parameters
+ constrBody: Block // ... and its body
+ )
+ // decompose primary constructor into the three entities above.
+ val constrInfo: ConstrInfo = {
+ val ddef = (stats find (_.symbol.isPrimaryConstructor))
+ ddef match {
+ case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
+ ConstrInfo(ddef, vparams map (_.symbol), rhs)
+ case x =>
+ abort("no constructor in template: impl = " + impl)
+ }
+ }
+ import constrInfo._
- // Could symbol's definition be omitted, provided it is not accessed?
- // This is the case if the symbol is defined in the current class, and
- // ( the symbol is an object private parameter accessor field, or
- // the symbol is an outer accessor of a final class which does not override another outer accessor. )
- def maybeOmittable(sym: Symbol) = sym.owner == clazz && (
- sym.isParamAccessor && sym.isPrivateLocal ||
- sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol &&
- !isDelayedInitSubclass
- )
+ // The parameter accessor fields which are members of the class
+ val paramAccessors = clazz.constrParamAccessors
- // Is symbol known to be accessed outside of the primary constructor,
- // or is it a symbol whose definition cannot be omitted anyway?
- def mustbeKept(sym: Symbol) = isDelayedInitSubclass || !maybeOmittable(sym) || (accessedSyms contains sym)
+ // The constructor parameter corresponding to an accessor
+ def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName)
- // A traverser to set accessedSyms and outerAccessors
- val accessTraverser = new Traverser {
- override def traverse(tree: Tree) = {
- tree match {
- case DefDef(_, _, _, _, _, body)
- if (tree.symbol.isOuterAccessor && tree.symbol.owner == clazz && clazz.isEffectivelyFinal) =>
- debuglog("outerAccessors += " + tree.symbol.fullName)
- outerAccessors ::= ((tree.symbol, body))
- case Select(_, _) =>
- if (!mustbeKept(tree.symbol)) {
- debuglog("accessedSyms += " + tree.symbol.fullName)
- accessedSyms += tree.symbol
- }
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
- }
- }
- }
+ // The constructor parameter with given name. This means the parameter
+ // has given name, or starts with given name, and continues with a `$` afterwards.
+ def parameterNamed(name: Name): Symbol = {
+ def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
- // first traverse all definitions except outeraccesors
- // (outeraccessors are avoided in accessTraverser)
- for (stat <- defBuf.iterator ++ auxConstructorBuf.iterator)
- accessTraverser.traverse(stat)
-
- // then traverse all bodies of outeraccessors which are accessed themselves
- // note: this relies on the fact that an outer accessor never calls another
- // outer accessor in the same class.
- for ((accSym, accBody) <- outerAccessors)
- if (mustbeKept(accSym)) accessTraverser.traverse(accBody)
-
- // Initialize all parameters fields that must be kept.
- val paramInits = paramAccessors filter mustbeKept map { acc =>
- // Check for conflicting symbol amongst parents: see bug #1960.
- // It would be better to mangle the constructor parameter name since
- // it can only be used internally, but I think we need more robust name
- // mangling before we introduce more of it.
- val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
- if (conflict ne NoSymbol)
- unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
-
- copyParam(acc, parameter(acc))
+ (constrParams filter matchesName) match {
+ case Nil => abort(name + " not in " + constrParams)
+ case p :: _ => p
}
+ }
- /* Return a single list of statements, merging the generic class constructor with the
- * specialized stats. The original statements are retyped in the current class, and
- * assignments to generic fields that have a corresponding specialized assignment in
- * `specializedStats` are replaced by the specialized assignment.
+ /*
+ * `usesSpecializedField` makes a difference in deciding whether constructor-statements
+ * should be guarded in a `shouldGuard` class, ie in a class that's the generic super-class of
+ * one or more specialized sub-classes.
+ *
+ * Given that `usesSpecializedField` isn't read for any other purpose than the one described above,
+ * we skip setting `usesSpecializedField` in case the current class isn't `shouldGuard` to start with.
+ * That way, trips to a map in `specializeTypes` are saved.
+ */
+ var usesSpecializedField: Boolean = false
+
+ // A transformer for expressions that go into the constructor
+ private class IntoCtorTransformer extends Transformer {
+
+ private def isParamRef(sym: Symbol) = (sym.isParamAccessor && sym.owner == clazz)
+
+ // Terminology: a stationary location is never written after being read.
+ private def isStationaryParamRef(sym: Symbol) = (
+ isParamRef(sym) &&
+ !(sym.isGetter && sym.accessed.isVariable) &&
+ !sym.isSetter
+ )
+
+ private def possiblySpecialized(s: Symbol) = specializeTypes.specializedTypeVars(s).nonEmpty
+
+ /*
+ * whether `sym` denotes a param-accessor (ie a field) that fulfills all of:
+ * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and
+ * (b) isn't subject to specialization. We might be processing statements for:
+ * (b.1) the constructor in the generic (super-)class; or
+ * (b.2) the constructor in the specialized (sub-)class.
+ * (c) isn't part of a DelayedInit subclass.
*/
- def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
- val specBuf = new ListBuffer[Tree]
- specBuf ++= specializedStats
-
- def specializedAssignFor(sym: Symbol): Option[Tree] =
- specializedStats find {
- case Assign(sel @ Select(This(_), _), _) =>
- sel.symbol.isSpecialized && (nme.unspecializedName(sel.symbol.getterName) == sym.getterName)
- case _ => false
- }
+ private def canBeSupplanted(sym: Symbol) = (!isDelayedInitSubclass && isStationaryParamRef(sym) && !possiblySpecialized(sym))
+
+ override def transform(tree: Tree): Tree = tree match {
+
+ case Apply(Select(This(_), _), List()) =>
+ // references to parameter accessor methods of own class become references to parameters
+ // outer accessors become references to $outer parameter
+ if (canBeSupplanted(tree.symbol))
+ gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos
+ else if (tree.symbol.outerSource == clazz && !clazz.isImplClass)
+ gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos
+ else
+ super.transform(tree)
- /* Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
- * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
- * variable, but after specialization this is a concrete primitive type, so it would
- * be an error to pass it to array_update(.., .., Object).
- */
- def rewriteArrayUpdate(tree: Tree): Tree = {
- val adapter = new Transformer {
- override def transform(t: Tree): Tree = t match {
- case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
- localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
- case _ => super.transform(t)
- }
- }
- adapter.transform(tree)
- }
+ case Select(This(_), _) if canBeSupplanted(tree.symbol) =>
+ // references to parameter accessor field of own class become references to parameters
+ gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos
- log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
- val res = for (s <- originalStats; stat = s.duplicate) yield {
- log("merge: looking at " + stat)
- val stat1 = stat match {
- case Assign(sel @ Select(This(_), field), _) =>
- specializedAssignFor(sel.symbol).getOrElse(stat)
- case _ => stat
- }
- if (stat1 ne stat) {
- log("replaced " + stat + " with " + stat1)
- specBuf -= stat1
+ case Select(_, _) if shouldGuard => // reasoning behind this guard in the docu of `usesSpecializedField`
+ if (possiblySpecialized(tree.symbol)) {
+ usesSpecializedField = true
}
+ super.transform(tree)
- if (stat1 eq stat) {
- assert(ctorParams(genericClazz).length == constrParams.length)
- // this is just to make private fields public
- (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), constrParams, null, true))(stat1)
-
- val stat2 = rewriteArrayUpdate(stat1)
- // statements coming from the original class need retyping in the current context
- debuglog("retyping " + stat2)
-
- val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
- d.retyped(localTyper.context1.asInstanceOf[d.Context],
- stat2,
- genericClazz,
- clazz,
- Map.empty)
- } else
- stat1
- }
- if (specBuf.nonEmpty)
- println("residual specialized constructor statements: " + specBuf)
- res
+ case _ =>
+ super.transform(tree)
}
- /* Add an 'if' around the statements coming after the super constructor. This
- * guard is necessary if the code uses specialized fields. A specialized field is
- * initialized in the subclass constructor, but the accessors are (already) overridden
- * and pointing to the (empty) fields. To fix this, a class with specialized fields
- * will not run its constructor statements if the instance is specialized. The specialized
- * subclass includes a copy of those constructor statements, and runs them. To flag that a class
- * has specialized fields, and their initialization should be deferred to the subclass, method
- * 'specInstance$' is added in phase specialize.
- */
- def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
- // // split the statements in presuper and postsuper
- // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
- // if (postfix.nonEmpty) {
- // prefix = prefix :+ postfix.head
- // postfix = postfix.tail
- // }
-
- if (usesSpecializedField && shouldGuard && stats.nonEmpty) {
- // save them for duplication in the specialized subclass
- guardedCtorStats(clazz) = stats
- ctorParams(clazz) = constrParams
-
- val tree =
- If(
- Apply(
- CODE.NOT (
- Apply(gen.mkAttributedRef(specializedFlag), List())),
- List()),
- Block(stats, Literal(Constant(()))),
- EmptyTree)
-
- List(localTyper.typed(tree))
- }
- else if (clazz.hasFlag(SPECIALIZED)) {
- // add initialization from its generic class constructor
- val genericName = nme.unspecializedName(clazz.name)
- val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
- assert(genericClazz != NoSymbol, clazz)
-
- guardedCtorStats.get(genericClazz) match {
- case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
- case None => stats
- }
- } else stats
- }
+ }
- /*
- * Translation scheme for DelayedInit
- * ----------------------------------
- *
- * Before returning, transformClassTemplate() rewrites DelayedInit subclasses.
- * The list of statements that will end up in the primary constructor can be split into:
- *
- * (a) up to and including the super-constructor call.
- * These statements can occur only in the (bytecode-level) primary constructor.
- *
- * (b) remaining statements
- *
- * The purpose of DelayedInit is leaving (b) out of the primary constructor and have their execution "delayed".
- *
- * The rewriting to achieve "delayed initialization" involves:
- * (c) an additional, synthetic, public method encapsulating (b)
- * (d) an additional, synthetic closure whose argless apply() just invokes (c)
- * (e) after executing the statements in (a),
- * the primary constructor instantiates (d) and passes it as argument
- * to a `delayedInit()` invocation on the current instance.
- * In turn, `delayedInit()` is a method defined as abstract in the `DelayedInit` trait
- * so that it can be overridden (for an example see `scala.App`)
- *
- * The following helper methods prepare Trees as part of this rewriting:
- *
- * (f) `delayedEndpointDef()` prepares (c).
- * A transformer, `constrStatTransformer`, is used to re-locate statements (b) from template-level
- * to become statements in method (c). The main task here is re-formulating accesses to params
- * of the primary constructors (to recap, (c) has zero-params) in terms of param-accessor fields.
- * In a Delayed-Init subclass, each class-constructor gets a param-accessor field because `mustbeKept()` forces it.
- *
- * (g) `delayedInitClosure()` prepares (d)
- *
- * (h) `delayedInitCall()` prepares the `delayedInit()` invocation referred to in (e)
- *
- * Both (c) and (d) are added to the Template returned by `transformClassTemplate()`
- *
- * A note of historic interest: Previously the rewriting for DelayedInit would include in the closure body
- * all of the delayed initialization sequence, which in turn required:
- * - reformulating "accesses-on-this" into "accesses-on-outer", and
- * - adding public getters and setters.
- *
- * @param stats the statements in (b) above
- *
- * @return the DefDef for (c) above
- *
- * */
- def delayedEndpointDef(stats: List[Tree]): DefDef = {
+ private val intoConstructorTransformer = new IntoCtorTransformer
- val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
- val methodSym = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
- methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
+ // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
+ def intoConstructor(oldowner: Symbol, tree: Tree) =
+ intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
- // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago.
- val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym)
- val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) }
+ // Should tree be moved in front of super constructor call?
+ def canBeMoved(tree: Tree) = tree match {
+ case ValDef(mods, _, _, _) => (mods hasFlag PRESUPER | PARAMACCESSOR)
+ case _ => false
+ }
- delayedDD.asInstanceOf[DefDef]
+ // Create an assignment to class field `to` with rhs `from`
+ def mkAssign(to: Symbol, from: Tree): Tree =
+ localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
+
+ // Create code to copy parameter to parameter accessor field.
+ // If parameter is $outer, check that it is not null so that we NPE
+ // here instead of at some unknown future $outer access.
+ def copyParam(to: Symbol, from: Symbol): Tree = {
+ import CODE._
+ val result = mkAssign(to, Ident(from))
+
+ if (from.name != nme.OUTER ||
+ from.tpe.typeSymbol.isPrimitiveValueClass) result
+ else localTyper.typedPos(to.pos) {
+ // `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
+ IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
}
+ }
- /* @see overview at `delayedEndpointDef()` of the translation scheme for DelayedInit */
- def delayedInitClosure(delayedEndPointSym: MethodSymbol): ClassDef = {
- val satelliteClass = localTyper.typed {
- atPos(impl.pos) {
- val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
- val closureParents = List(AbstractFunctionClass(0).tpe)
-
- closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
-
- val outerField: TermSymbol = (
- closureClass
- newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
- setInfoAndEnter clazz.tpe
- )
- val applyMethod: MethodSymbol = (
- closureClass
- newMethod(nme.apply, impl.pos, FINAL)
- setInfoAndEnter MethodType(Nil, ObjectTpe)
- )
- val outerFieldDef = ValDef(outerField)
- val closureClassTyper = localTyper.atOwner(closureClass)
- val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
-
- def applyMethodStat =
- applyMethodTyper.typed {
- atPos(impl.pos) {
- val receiver = Select(This(closureClass), outerField)
- Apply(Select(receiver, delayedEndPointSym), Nil)
- }
- }
+ // The list of definitions that go into class
+ val defBuf = new ListBuffer[Tree]
+
+ // The auxiliary constructors, separate from the defBuf since they should
+ // follow the primary constructor
+ val auxConstructorBuf = new ListBuffer[Tree]
+
+ // The list of statements that go into constructor after and including the superclass constructor call
+ val constrStatBuf = new ListBuffer[Tree]
+
+ // The list of early initializer statements that go into constructor before the superclass constructor call
+ val constrPrefixBuf = new ListBuffer[Tree]
+
+ // The early initialized field definitions of the class (these are the class members)
+ val presupers = treeInfo.preSuperFields(stats)
+
+ // generate code to copy pre-initialized fields
+ for (stat <- constrBody.stats) {
+ constrStatBuf += stat
+ stat match {
+ case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
+ // stat is the constructor-local definition of the field value
+ val fields = presupers filter (_.getterName == name)
+ assert(fields.length == 1)
+ val to = fields.head.symbol
+ if (!to.tpe.isInstanceOf[ConstantType])
+ constrStatBuf += mkAssign(to, Ident(stat.symbol))
+ case _ =>
+ }
+ }
- val applyMethodDef = DefDef(
- sym = applyMethod,
- vparamss = ListOfNil,
- rhs = Block(applyMethodStat, gen.mkAttributedRef(BoxedUnit_UNIT)))
-
- ClassDef(
- sym = closureClass,
- constrMods = Modifiers(0),
- vparamss = List(List(outerFieldDef)),
- body = applyMethodDef :: Nil,
- superPos = impl.pos)
+ // Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
+ for (stat <- stats) stat match {
+ case DefDef(_,_,_,_,_,rhs) =>
+ // methods with constant result type get literals as their body
+ // all methods except the primary constructor go into template
+ stat.symbol.tpe match {
+ case MethodType(List(), tp @ ConstantType(c)) =>
+ defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
+ case _ =>
+ if (stat.symbol.isPrimaryConstructor) ()
+ else if (stat.symbol.isConstructor) auxConstructorBuf += stat
+ else defBuf += stat
+ }
+ case ValDef(_, _, _, rhs) =>
+ // val defs with constant right-hand sides are eliminated.
+ // for all other val defs, an empty valdef goes into the template and
+ // the initializer goes as an assignment into the constructor
+ // if the val def is an early initialized or a parameter accessor, it goes
+ // before the superclass constructor call, otherwise it goes after.
+ // Lazy vals don't get the assignment in the constructor.
+ if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
+ if (rhs != EmptyTree && !stat.symbol.isLazy) {
+ val rhs1 = intoConstructor(stat.symbol, rhs)
+ (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
+ stat.symbol, rhs1)
}
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
+ case ClassDef(_, _, _, _) =>
+ // classes are treated recursively, and left in the template
+ defBuf += new ConstructorTransformer(unit).transform(stat)
+ case _ =>
+ // all other statements go into the constructor
+ constrStatBuf += intoConstructor(impl.symbol, stat)
+ }
- satelliteClass.asInstanceOf[ClassDef]
- }
+ populateOmittables()
- /* @see overview at `delayedEndpointDef()` of the translation scheme for DelayedInit */
- def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
- gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
- }
+ // Initialize all parameter fields that must be kept.
+ val paramInits = paramAccessors filter mustbeKept map { acc =>
+ // Check for conflicting symbol amongst parents: see bug #1960.
+ // It would be better to mangle the constructor parameter name since
+ // it can only be used internally, but I think we need more robust name
+ // mangling before we introduce more of it.
+ val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
+ if (conflict ne NoSymbol)
+ unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
- /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
- def splitAtSuper(stats: List[Tree]) = {
- def isConstr(tree: Tree): Boolean = tree match {
- case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
- case _ => (tree.symbol ne null) && tree.symbol.isConstructor
- }
- val (pre, rest0) = stats span (!isConstr(_))
- val (supercalls, rest) = rest0 span (isConstr(_))
- (pre ::: supercalls, rest)
- }
+ copyParam(acc, parameter(acc))
+ }
- val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
- var remainingConstrStats = remainingConstrStats0
+ /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
+ def splitAtSuper(stats: List[Tree]) = {
+ def isConstr(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
+ case _ => (tree.symbol ne null) && tree.symbol.isConstructor
+ }
+ val (pre, rest0) = stats span (!isConstr(_))
+ val (supercalls, rest) = rest0 span (isConstr(_))
+ (pre ::: supercalls, rest)
+ }
- /* XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
- * but excluding it includes too much. The constructor sequence being mimicked
- * needs to be reproduced with total fidelity.
- *
- * See test case files/run/bug4680.scala, the output of which is wrong in many
- * particulars.
- */
- val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
+ val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
+ var remainingConstrStats = remainingConstrStats0
- if (needsDelayedInit) {
- val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
- defBuf += delayedHook
- val hookCallerClass = {
- // transform to make the closure-class' default constructor assign the the outer instance to its param-accessor field.
- val drillDown = new ConstructorTransformer(unit)
- drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
- }
- defBuf += hookCallerClass
- remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
- }
+ rewriteDelayedInit()
- // Assemble final constructor
- defBuf += deriveDefDef(constr)(_ =>
- treeCopy.Block(
- constrBody,
- paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
- guardSpecializedInitializer(remainingConstrStats),
- constrBody.expr))
+ // Assemble final constructor
+ defBuf += deriveDefDef(constr)(_ =>
+ treeCopy.Block(
+ constrBody,
+ paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
+ guardSpecializedInitializer(remainingConstrStats),
+ constrBody.expr))
- // Followed by any auxiliary constructors
- defBuf ++= auxConstructorBuf
+ // Followed by any auxiliary constructors
+ defBuf ++= auxConstructorBuf
- // Unlink all fields that can be dropped from class scope
- for (sym <- clazz.info.decls ; if !mustbeKept(sym))
- clazz.info.decls unlink sym
+ // Unlink all fields that can be dropped from class scope
+ for (sym <- clazz.info.decls ; if !mustbeKept(sym))
+ clazz.info.decls unlink sym
- // Eliminate all field definitions that can be dropped from template
- deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
- } // transformClassTemplate
+ // Eliminate all field definitions that can be dropped from template
+ val transformed: Template = deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
- override def transform(tree: Tree): Tree = {
- tree match {
- case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isPrimitiveValueClass(tree.symbol) =>
- deriveClassDef(tree)(transformClassTemplate)
- case _ =>
- super.transform(tree)
- }
- }
+ } // TemplateTransformer
- } // ConstructorTransformer
}
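The DelayedInitHelper comments above split the constructor statements into (a) everything up to the super call and (b) the rest, which ends up in a synthetic endpoint handed to delayedInit(). From user code the contract looks roughly like the sketch below; this illustrates the DelayedInit trait's behaviour, not the generated trees:

// Sketch: how a DelayedInit subclass observes the rewriting described above.
class Lifecycle extends DelayedInit {
  // Receives the deferred constructor statements, i.e. part (b), as a thunk.
  def delayedInit(body: => Unit): Unit = {
    println("before deferred init")
    body
    println("after deferred init")
  }
}

class Component extends Lifecycle {
  println("this statement is executed via delayedInit, not in the bytecode constructor")
}

object DelayedInitDemo {
  def main(args: Array[String]): Unit = new Component
}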
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 0f65b11e9b..c74fc620ca 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -525,12 +525,12 @@ abstract class Erasure extends AddInterfaces
private def isDifferentErasedValueType(tpe: Type, other: Type) =
isErasedValueType(tpe) && (tpe ne other)
- private def isPrimitiveValueMember(sym: Symbol) =
- sym != NoSymbol && isPrimitiveValueClass(sym.owner)
+ private def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
@inline private def box(tree: Tree, target: => String): Tree = {
val result = box1(tree)
- log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
+ if (tree.tpe =:= UnitTpe) ()
+ else log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
result
}
@@ -572,7 +572,7 @@ abstract class Erasure extends AddInterfaces
private def unbox(tree: Tree, pt: Type): Tree = {
val result = unbox1(tree, pt)
- log(s"unboxing ${tree.summaryString}: ${tree.tpe} with pt=$pt as type ${result.tpe}")
+ log(s"unboxing ${tree.shortClass}: ${tree.tpe} as a ${result.tpe}")
result
}
@@ -595,7 +595,6 @@ abstract class Erasure extends AddInterfaces
val tree1 = pt match {
case ErasedValueType(tref) =>
val clazz = tref.sym
- log("not boxed: "+tree)
lazy val underlying = underlyingOfValueClass(clazz)
val tree0 =
if (tree.tpe.typeSymbol == NullClass &&
@@ -623,8 +622,18 @@ abstract class Erasure extends AddInterfaces
/** Generate a synthetic cast operation from tree.tpe to pt.
* @pre pt eq pt.normalize
*/
- private def cast(tree: Tree, pt: Type): Tree = logResult(s"cast($tree, $pt)") {
- if (pt.typeSymbol == UnitClass) {
+ private def cast(tree: Tree, pt: Type): Tree = {
+ if ((tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
+ def word = (
+ if (tree.tpe <:< pt) "upcast"
+ else if (pt <:< tree.tpe) "downcast"
+ else if (pt weak_<:< tree.tpe) "coerce"
+ else if (tree.tpe weak_<:< pt) "widen"
+ else "cast"
+ )
+ log(s"erasure ${word}s from ${tree.tpe} to $pt")
+ }
+ if (pt =:= UnitTpe) {
// See SI-4731 for one example of how this occurs.
log("Attempted to cast to Unit: " + tree)
tree.duplicate setType pt
@@ -681,7 +690,7 @@ abstract class Erasure extends AddInterfaces
private def adaptMember(tree: Tree): Tree = {
//Console.println("adaptMember: " + tree);
tree match {
- case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
+ case Apply(ta @ TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
@@ -706,7 +715,8 @@ abstract class Erasure extends AddInterfaces
// }
typed(untyped)
}
- } else tree
+ } else treeCopy.Apply(tree, treeCopy.TypeApply(ta, treeCopy.Select(sel, qual1, name), List(targ)), List())
+
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_isInstanceOf =>
targ.tpe match {
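The reworked cast above logs whether erasure performs an upcast, downcast, coercion, widening, or plain cast. The same vocabulary can be demonstrated over runtime Class values; a hedged sketch (weak conformance, i.e. "coerce"/"widen", is left out since it has no java.lang.Class counterpart):

object CastWords {
  def word(from: Class[_], to: Class[_]): String =
    if (to.isAssignableFrom(from)) "upcast"
    else if (from.isAssignableFrom(to)) "downcast"
    else "cast"

  def main(args: Array[String]): Unit = {
    println(word(classOf[String], classOf[AnyRef]))         // upcast
    println(word(classOf[AnyRef], classOf[String]))         // downcast
    println(word(classOf[String], classOf[java.lang.Integer])) // cast
  }
}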
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index 44d39de205..e31211d321 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -24,8 +24,8 @@ abstract class Flatten extends InfoTransform {
val old = (scope lookupUnshadowedEntries sym.name).toList
old foreach (scope unlink _)
scope enter sym
- log(s"lifted ${sym.fullLocationString}" + ( if (old.isEmpty) "" else s" after unlinking $old from scope." ))
- old
+ def old_s = old map (_.sym) mkString ", "
+ debuglog(s"In scope of ${sym.owner}, unlinked $old_s and entered $sym")
}
private def liftClass(sym: Symbol) {
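The reworded log line above concerns flatten lifting nested classes into the scope of the enclosing package, possibly unlinking entries they shadow. What lifting means at the JVM level shows up in the mangled class names; a small sketch:

object FlattenSketch {
  class Outer { class Inner }
  def main(args: Array[String]): Unit = {
    val o = new Outer
    // After flatten, Inner is a top-level JVM class with a mangled ("flattened") name.
    println((new o.Inner).getClass.getName) // e.g. FlattenSketch$Outer$Inner
  }
}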
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index ce495ca8ca..515fa66cfa 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -489,7 +489,7 @@ abstract class LambdaLift extends InfoTransform {
treeCopy.Assign(tree, qual, rhs)
case Ident(name) =>
val tree1 =
- if (sym != NoSymbol && sym.isTerm && !sym.isLabel)
+ if (sym.isTerm && !sym.isLabel)
if (sym.isMethod)
atPos(tree.pos)(memberRef(sym))
else if (sym.isLocal && !isSameOwnerEnclosure(sym))
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 1c44e86aca..3ec4d16bf5 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -734,10 +734,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym
}
- if (sym ne NoSymbol)
- sym
- else
- createBitmap
+ sym orElse createBitmap
}
def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
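The Mixin change above swaps an explicit NoSymbol check for `orElse`, which evaluates its by-name argument only when the receiver is the sentinel. A tiny standalone sketch of that idiom, using hypothetical Sym/NoSym types rather than the compiler's symbol hierarchy:

sealed abstract class Sym {
  // By-name alternative: only computed when `this` is the sentinel.
  def orElse(alt: => Sym): Sym = if (this eq NoSym) alt else this
}
case object NoSym extends Sym
final case class Named(name: String) extends Sym

object OrElseDemo {
  def createBitmap: Sym = { println("createBitmap evaluated"); Named("bitmap$0") }
  def main(args: Array[String]): Unit = {
    println(Named("existing") orElse createBitmap) // alternative never evaluated
    println(NoSym orElse createBitmap)             // sentinel: createBitmap runs lazily
  }
}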
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e2ce2743f7..16c803e2e8 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -64,7 +64,6 @@ abstract class UnCurry extends InfoTransform
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var needTryLift = false
- private var inPattern = false
private var inConstructorFlag = 0L
private val byNameArgs = mutable.HashSet[Tree]()
private val noApply = mutable.HashSet[Tree]()
@@ -79,12 +78,6 @@ abstract class UnCurry extends InfoTransform
@inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
f(newMembers.remove(owner).getOrElse(Nil).toList)
- @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
- inPattern = value
- try body
- finally inPattern = !value
- }
-
private def newFunction0(body: Tree): Tree = {
val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
@@ -119,16 +112,6 @@ abstract class UnCurry extends InfoTransform
&& (isByName(tree.symbol))
)
- /** Uncurry a type of a tree node.
- * This function is sensitive to whether or not we are in a pattern -- when in a pattern
- * additional parameter sections of a case class are skipped.
- */
- def uncurryTreeType(tp: Type): Type = tp match {
- case MethodType(params, MethodType(params1, restpe)) if inPattern =>
- uncurryTreeType(MethodType(params, restpe))
- case _ =>
- uncurry(tp)
- }
// ------- Handling non-local returns -------------------------------------------------
@@ -327,7 +310,7 @@ abstract class UnCurry extends InfoTransform
}
else {
def mkArray = mkArrayValue(args drop (formals.length - 1), varargsElemType)
- if (isJava || inPattern) mkArray
+ if (isJava) mkArray
else if (args.isEmpty) gen.mkNil // avoid needlessly double-wrapping an empty argument list
else arrayToSequence(mkArray, varargsElemType)
}
@@ -474,10 +457,10 @@ abstract class UnCurry extends InfoTransform
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = withInPattern(value = false)(transform(fn))
+ val fn1 = transform(fn)
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, localTyper.expectedPatternTypes(fn, args))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -510,7 +493,7 @@ abstract class UnCurry extends InfoTransform
else super.transform(tree)
case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(value = true)(transform(pat))
+ val pat1 = transform(pat)
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case fun @ Function(_, _) =>
@@ -532,7 +515,7 @@ abstract class UnCurry extends InfoTransform
}
)
assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
- result setType uncurryTreeType(result.tpe)
+ result modifyType uncurry
}
def postTransform(tree: Tree): Tree = exitingUncurry {
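The UnCurry hunks above delete the inPattern flag together with its withInPattern wrapper. For reference, that wrapper followed the usual save-and-restore pattern for a dynamically scoped flag; a self-contained sketch (restoring the saved value rather than negating, which is the slightly safer variant):

object InPatternDemo {
  private var inPattern = false

  @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
    val saved = inPattern
    inPattern = value
    try body
    finally inPattern = saved
  }

  def main(args: Array[String]): Unit = {
    println(inPattern)                              // false
    withInPattern(value = true)(println(inPattern)) // true inside the dynamic extent
    println(inPattern)                              // false again
  }
}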
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 069484ff65..45aa1106f0 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -13,7 +13,6 @@ import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
import scala.reflect.internal.util.HashSet
-
trait Logic extends Debugging {
import PatternMatchingStats._
@@ -494,7 +493,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
- import global.definitions.{AnyClass, UnitClass}
+ import global.definitions._
// all our variables range over types
@@ -549,7 +548,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
def tp: Type
def wideTp: Type
- def isAny = wideTp.typeSymbol == AnyClass
+ def isAny = wideTp =:= AnyTpe
def isValue: Boolean //= tp.isStable
// note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
@@ -564,6 +563,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// (At least conceptually: `true` is an instance of class `Boolean`)
private def widenToClass(tp: Type): Type =
if (tp.typeSymbol.isClass) tp
+ else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp)
else tp.baseType(tp.baseClasses.head)
object TypeConst extends TypeConstExtractor {
@@ -606,7 +606,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (tp.isInstanceOf[SingletonType]) tp
else p match {
case Literal(c) =>
- if (c.tpe.typeSymbol == UnitClass) c.tpe
+ if (c.tpe =:= UnitTpe) c.tpe
else ConstantType(c)
case Ident(_) if p.symbol.isStable =>
// for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
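The Const comment above relies on hash consing: because every Const is interned through a factory cache, reference equality (eq) is a valid identity test and repeated type equality checks can be skipped. A hedged sketch of that mechanism with invented names (Const here is a plain wrapper, not the compiler's class):

import scala.collection.mutable

final class Const private (val tp: String) {
  override def toString = s"Const($tp)"
}
object Const {
  private val cache = mutable.HashMap.empty[String, Const]
  def apply(tp: String): Const = cache.getOrElseUpdate(tp, new Const(tp))
}

object HashConsDemo {
  def main(args: Array[String]): Unit = {
    val a = Const("Int")
    val b = Const("Int")
    println(a eq b) // true: both requests resolve to the same interned instance
  }
}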
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index f089c8f5a5..8feb87210e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -21,7 +21,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// unfortunately this is not true in general:
// SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
- val tpValue = tp.typeSymbol.isPrimitiveValueClass
+ val tpValue = isPrimitiveValueType(tp)
// pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
// (and the subtype is respectively a value type or not a value type)
@@ -59,17 +59,20 @@ trait TreeAndTypeAnalysis extends Debugging {
debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
None
case sym =>
- val subclasses = (
- sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- debug.patmat("enum sealed -- subclasses: "+ ((sym, subclasses)))
+ sym.sealedDescendants.toList
+ sortBy (_.sealedSortName)
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ )
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
+
+ Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
// valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- val validSubTypes = (subclasses flatMap {sym =>
+ subclasses flatMap { sym =>
// have to filter out children which cannot match: see ticket #3683 for an example
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
@@ -81,9 +84,8 @@ trait TreeAndTypeAnalysis extends Debugging {
// debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
- })
- debug.patmat("enum sealed "+ ((tp, tpApprox)) + " as "+ validSubTypes)
- Some(validSubTypes)
+ }
+ })
}
// approximate a type to the static type that is fully checkable at run time,
@@ -104,10 +106,7 @@ trait TreeAndTypeAnalysis extends Debugging {
mapOver(tp)
}
}
-
- val res = typeArgsToWildcardsExceptArray(tp)
- debug.patmat("checkable "+((tp, res)))
- res
+ debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp))
}
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
@@ -136,20 +135,17 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
var currId = 0
}
case class Test(prop: Prop, treeMaker: TreeMaker) {
- // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ // private val reusedBy = new mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
// reusedBy += later
later.reuses = Some(this)
}
-
val id = { Test.currId += 1; Test.currId}
- override def toString =
- "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ override def toString = s"T${id}C($prop)"
}
-
class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
override def uniqueNonNullProp(p: Tree): Prop = True
}
@@ -158,9 +154,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
class TreeMakersToProps(val root: Symbol) {
prepareNewAnalysis() // reset hash consing for Var and Const
- private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
- private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
- private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+ private[this] val uniqueEqualityProps = new mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new mutable.HashMap[(Tree, Type), Eq]
def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
@@ -222,7 +218,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
// so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ t.isInstanceOf[Ident] && t.symbol.exists && pointsToBound(f)
}
val (boundFrom, boundTo) = boundSubst.unzip
val (unboundFrom, unboundTo) = unboundSubst.unzip
@@ -624,9 +620,9 @@ trait MatchAnalysis extends MatchApproximation {
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
- private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
- private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
- private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+ private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = ctor.safeOwner
+ private lazy val caseFieldAccs = cls.caseFieldAccessors
def addField(symbol: Symbol, assign: VariableAssignment) {
// SI-7669 Only register this field if this class contains it.
@@ -686,8 +682,7 @@ trait MatchAnalysis extends MatchApproximation {
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
case _ => NoExample
}
- debug.patmat("described as: "+ res)
- res
+ debug.patmatResult("described as")(res)
}
override def toString = toCounterExample().toString
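Several hunks above replace a `val res = ...; debug.patmat(...); res` sequence with `debug.patmatResult(msg)(res)`. The helper is presumably a log-and-return combinator in the spirit of logResult; a minimal sketch of that shape (the real one lives in the compiler's debug facilities):

object LogResult {
  def patmatResult[T](msg: String)(result: T): T = {
    println(s"$msg: $result") // log the value...
    result                    // ...and hand it back unchanged
  }

  def main(args: Array[String]): Unit = {
    val subclasses = patmatResult("enum sealed, subclasses")(List("Some", "None"))
    println(subclasses.size)
  }
}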
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 1e4c56529c..cf74f0fb11 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -62,45 +62,44 @@ trait MatchCodeGen extends Interface {
def codegen: AbsCodegen
abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+
+ // Right now this blindly calls drop on the result of the unapplySeq
+ // unless it verifiably has no drop method (this is the case in particular
+ // with Array.) You should not actually have to write a method called drop
+ // for name-based matching, but this was an expedient route for the basics.
+ def drop(tgt: Tree)(n: Int): Tree = {
+ def callDirect = fn(tgt, nme.drop, LIT(n))
+ def callRuntime = Apply(REF(traversableDropMethod), tgt :: LIT(n) :: Nil)
+ def needsRuntime = (tgt.tpe ne null) && (typeOfMemberNamedDrop(tgt.tpe) == NoType)
+
+ if (needsRuntime) callRuntime else callDirect
+ }
+
+ // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder)
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant(()))
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
+ def mkZero(tp: Type): Tree = gen.mkConstantZero(tp) match {
+ case Constant(null) => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ case const => Literal(const)
}
}
}
trait PureMatchMonadInterface extends MatchMonadInterface {
val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
import CODE._
def _match(n: Name): SelectStart = matchStrategy DOT n
- private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ // TODO: error message
+ private lazy val oneType = typer.typedOperator(_match(vpmName.one)).tpe
+ override def pureType(tp: Type): Type = firstParamType(appliedType(oneType, tp :: Nil))
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
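The drop comment above describes a two-way split: call a drop member when the receiver's type has one, otherwise go through a generic runtime helper. A rough illustration under that assumption (runtimeDrop is an invented stand-in for traversableDropMethod):

object DropFallback {
  // hypothetical runtime helper for receivers without a usable drop member
  def runtimeDrop[A](xs: Array[A], n: Int): Seq[A] = xs.toSeq.drop(n)

  def main(args: Array[String]): Unit = {
    println(List(1, 2, 3, 4).drop(2))          // member call path
    println(runtimeDrop(Array(1, 2, 3, 4), 2)) // runtime helper path
  }
}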
@@ -132,13 +131,7 @@ trait MatchCodeGen extends Interface {
}
}
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with MatchMonadInterface {
override def codegen: AbsCodegen = optimizedCodegen
// when we know we're targeting Option, do some inlining the optimizer won't do
@@ -154,9 +147,8 @@ trait MatchCodeGen extends Interface {
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = newSynthCaseLabel("matchEnd")
val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- matchEnd setInfo MethodType(List(matchRes), restpe)
+ val matchEnd = newSynthCaseLabel("matchEnd") setInfo MethodType(List(matchRes), restpe)
def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
var _currCase = newCaseSym
@@ -168,23 +160,22 @@ trait MatchCodeGen extends Interface {
LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
}
-
// must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
val catchAllDef = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ val scrutRef = scrutSym.fold(EmptyTree: Tree)(REF) // for alternatives
LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
} toList // at most 1 element
// scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ val scrutDef = scrutSym.fold(List[Tree]())(sym => (VAL(sym) === scrut) :: Nil) // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
Block(
scrutDef ++ caseDefs ++ catchAllDef,
LabelDef(matchEnd, List(matchRes), REF(matchRes))
@@ -206,15 +197,14 @@ trait MatchCodeGen extends Interface {
// next: MatchMonad[U]
// returns MatchMonad[U]
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
+ val prevSym = freshSym(prev.pos, prev.tpe, "o")
BLOCK(
VAL(prevSym) === prev,
// must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ ifThenElseZero(
+ NOT(prevSym DOT vpmName.isEmpty),
+ Substitution(b, prevSym DOT vpmName.get)(next)
+ )
)
}
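The rewritten flatMap above emits the name-based isEmpty/get protocol rather than looking those members up on an inferred monad type. Roughly, the generated code has this shape (sketched here with Option standing in for the extractor result; names are illustrative):

object FlatMapShape {
  def flatMapShape[A, B](prev: Option[A], zero: => B)(next: A => B): B = {
    val o = prev                // VAL(prevSym) === prev
    if (!o.isEmpty) next(o.get) // NOT(prevSym DOT isEmpty) guarding the substituted continuation
    else zero                   // ifThenElseZero's fallback
  }

  def main(args: Array[String]): Unit = {
    println(flatMapShape(Some(21), 0)(_ * 2))          // 42
    println(flatMapShape(Option.empty[Int], 0)(_ * 2)) // 0
  }
}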
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
new file mode 100644
index 0000000000..0d08120e43
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
@@ -0,0 +1,37 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+/** Segregating this super hacky CPS code. */
+trait MatchCps {
+ self: PatternMatching =>
+
+ import global._
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private object CpsSymbols {
+ private def cpsSymbol(name: String) = rootMirror.getClassIfDefined(s"scala.util.continuations.$name")
+
+ val MarkerCPSAdaptPlus = cpsSymbol("cpsPlus")
+ val MarkerCPSAdaptMinus = cpsSymbol("cpsMinus")
+ val MarkerCPSSynth = cpsSymbol("cpsSynth")
+ val MarkerCPSTypes = cpsSymbol("cpsParam")
+ val stripTriggerCPSAnns = Set[Symbol](MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ val strippedCPSAnns = stripTriggerCPSAnns + MarkerCPSTypes
+
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ def removeCPSFromPt(pt: Type): Type = (
+ if (MarkerCPSAdaptPlus.exists && (stripTriggerCPSAnns exists pt.hasAnnotation))
+ pt filterAnnotations (ann => !(strippedCPSAnns exists ann.matches))
+ else
+ pt
+ )
+ }
+ def removeCPSFromPt(pt: Type): Type = CpsSymbols removeCPSFromPt pt
+}
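The new removeCPSFromPt only strips annotations when one of the trigger markers is present on the expected type. The same shape, with annotations modeled as plain strings rather than AnnotationInfos (a sketch, not the compiler logic verbatim):

object StripCps {
  private val stripTrigger = Set("cpsSynth", "cpsMinus", "cpsPlus")
  private val stripped     = stripTrigger + "cpsParam"

  def removeCps(annotations: List[String]): List[String] =
    if (annotations exists stripTrigger) annotations filterNot stripped
    else annotations

  def main(args: Array[String]): Unit = {
    println(removeCps(List("cpsPlus", "cpsParam", "deprecated"))) // List(deprecated)
    println(removeCps(List("deprecated")))                        // unchanged
  }
}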
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index 9854e4ef62..ec45789687 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -210,7 +210,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
// }
//// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ trait SwitchEmission extends TreeMakers with MatchMonadInterface {
import treeInfo.isGuardedCase
abstract class SwitchMaker {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index fcee142932..75335f7920 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -12,86 +12,165 @@ import scala.reflect.internal.util.Statistics
/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
*/
-trait MatchTranslation { self: PatternMatching =>
+trait MatchTranslation {
+ self: PatternMatching =>
+
import PatternMatchingStats._
import global._
import definitions._
import global.analyzer.{ErrorUtils, formalTypes}
+ import treeInfo.{ WildcardStarArg, Unapplied, isStar, unbind }
+ import CODE._
+
+ // Always map repeated params to sequences
+ private def setVarInfo(sym: Symbol, info: Type) =
+ sym setInfo debug.patmatResult(s"changing ${sym.defString} to")(repeatedToSeq(info))
+
+ private def hasSym(t: Tree) = t.symbol != null && t.symbol != NoSymbol
- trait MatchTranslator extends TreeMakers {
+ trait MatchTranslator extends TreeMakers with TreeMakerWarnings {
import typer.context
- // Why is it so difficult to say "here's a name and a context, give me any
- // matching symbol in scope" ? I am sure this code is wrong, but attempts to
- // use the scopes of the contexts in the enclosing context chain discover
- // nothing. How to associate a name with a symbol would be a wonderful
- // linkage for which to establish a canonical acquisition mechanism.
- def matchingSymbolInScope(pat: Tree): Symbol = {
- def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
- case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
- case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
- case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
- case _ => NoSymbol
- }
- pat match {
- case Bind(name, _) =>
- context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
- res orElse declarationOfName(ctx.owner.rawInfo, name))
- case _ => NoSymbol
+ object SymbolBound {
+ def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+ case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr)
+ case _ => None
}
}
- // Issue better warnings than "unreachable code" when people mis-use
- // variable patterns thinking they bind to existing identifiers.
- //
- // Possible TODO: more deeply nested variable patterns, like
- // case (a, b) => 1 ; case (c, d) => 2
- // However this is a pain (at least the way I'm going about it)
- // and I have to think these detailed errors are primarily useful
- // for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(cases: List[CaseDef]) {
- // A string describing the first variable pattern
- var vpat: String = null
- // Using an iterator so we can recognize the last case
- val it = cases.iterator
-
- def addendum(pat: Tree) = {
- matchingSymbolInScope(pat) match {
- case NoSymbol => ""
- case sym =>
- val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
- s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match {
+ case SymbolBound(sym, expr) => BoundTree(setVarInfo(sym, pt), expr)
+ case _ => BoundTree(setVarInfo(freshSym(tree.pos, prefix = "p"), pt), tree)
+ }
+
+ final case class BoundTree(binder: Symbol, tree: Tree) {
+ private lazy val extractor = ExtractorCall(tree)
+
+ def pos = tree.pos
+ def tpe = binder.info.dealiasWiden // the type of the variable bound to the pattern
+ def pt = unbound match {
+ case Star(tpt) => this glbWith seqType(tpt.tpe)
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
+ def repeatedType = unbound match {
+ case Star(tpt) => tpt.tpe
+ case _ => NoType
+ }
+ def glbWith(other: Type) = glb(tpe :: other :: Nil).normalize
+
+ object SymbolAndTypeBound {
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ case SymbolBound(sym, SymbolAndTypeBound(_, tpe)) => Some(sym -> tpe)
+ case TypeBound(tpe) => Some(binder -> tpe)
+ case _ => None
}
}
- while (it.hasNext) {
- val cdef = it.next()
- // If a default case has been seen, then every succeeding case is unreachable.
- if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
- // If this is a default case and more cases follow, warn about this one so
- // we have a reason to mention its pattern variable name and any corresponding
- // symbol in scope. Errors will follow from the remaining cases, at least
- // once we make the above warning an error.
- else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
- val vpatName = cdef.pat match {
- case Bind(name, _) => s" '$name'"
- case _ => ""
- }
- vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ object TypeBound {
+ def unapply(tree: Tree): Option[Type] = unbind(tree) match {
+ case Typed(Ident(_), _) if tree.tpe != null => Some(tree.tpe)
+ case _ => None
}
}
+
+ private def rebindTo(pattern: Tree) = BoundTree(binder, pattern)
+ private def step(treeMakers: TreeMaker*)(subpatterns: BoundTree*): TranslationStep = TranslationStep(treeMakers.toList, subpatterns.toList)
+
+ private def bindingStep(sub: Symbol, subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern))
+ private def equalityTestStep() = step(EqualityTestTreeMaker(binder, tree, pos))()
+ private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, glbWith(subPt))(pos))()
+ private def alternativesStep(alts: List[Tree]) = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))()
+ private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate())
+ private def noStep() = step()()
+
+ private def unsupportedPatternMsg = sm"""
+ |unsupported pattern: ${tree.shortClass} / $this (this is a scalac bug.)
+ |""".trim
+
+ // example check: List[Int] <:< ::[Int]
+ private def extractorStep(): TranslationStep = {
+ import extractor.{ paramType, treeMaker }
+ if (!extractor.isTyped)
+ ErrorUtils.issueNormalTypeError(tree, "Could not typecheck extractor call: "+ extractor)(context)
+
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ lazy val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true)
+ // check whether typetest implies binder is not null,
+ // even though the eventual null check will be on typeTest.nextBinder
+ // it'll be equal to binder casted to paramType anyway (and the type test is on binder)
+ def extraction: TreeMaker = treeMaker(typeTest.nextBinder, typeTest impliesBinderNonNull binder, pos)
+
+ // paramType = the type expected by the unapply
+ // TODO: paramType may contain unbound type params (run/t2800, run/t3530)
+ val makers = (
+ // Statically conforms to paramType
+ if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil
+ else typeTest :: extraction :: Nil
+ )
+ step(makers: _*)(extractor.subBoundTrees: _*)
+ }
+
+ // Summary of translation cases. I moved the excerpts from the specification further below so all
+ // the logic can be seen at once.
+ //
+ // [1] skip wildcard trees -- no point in checking them
+ // [2] extractor and constructor patterns
+ // [3] replace subpatBinder by patBinder, as if the Bind was not there.
+ // It must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type,
+ // this is not guaranteed until we cast
+ // [4] typed patterns - a typed pattern never has any subtrees
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ // [5] literal and stable id patterns
+ // [6] pattern alternatives
+ // [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
+ // don't fail here though (or should we?)
+ def nextStep(): TranslationStep = tree match {
+ case WildcardPattern() => noStep()
+ case _: UnApply | _: Apply => extractorStep()
+ case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
+ case TypeBound(tpe) => typeTestStep(binder, tpe)
+ case SymbolBound(sym, expr) => bindingStep(sym, expr)
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep()
+ case Alternative(alts) => alternativesStep(alts)
+ case _ => context.unit.error(pos, unsupportedPatternMsg) ; noStep()
+ }
+ def translate(): List[TreeMaker] = nextStep() merge (_.translate())
+
+ private def setInfo(paramType: Type): Boolean = {
+ devWarning(s"resetting info of $this to $paramType")
+ setVarInfo(binder, paramType)
+ true
+ }
+ // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having
+ // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary
+ // because apparently patBinder may have an unfortunate type (.decls don't have the case field
+ // accessors) TODO: get to the bottom of this -- I assume it happens when type checking
+ // infers a weird type for an unapply call. By going back to the parameterType for the
+ // extractor call we get a saner type, so let's just do that for now.
+ def ensureConformsTo(paramType: Type): Boolean = (
+ (tpe =:= paramType)
+ || (tpe <:< paramType) && setInfo(paramType)
+ )
+
+ private def concreteType = tpe.bounds.hi
+ private def unbound = unbind(tree)
+ private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
+ private def at_s = unbound match {
+ case WildcardPattern() => ""
+ case pat => s" @ $pat"
+ }
+ override def toString = s"${binder.name}: $tpe_s$at_s"
}
- // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
- private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
- private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
- private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
- private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
- private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
- private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
- private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ final case class TranslationStep(makers: List[TreeMaker], subpatterns: List[BoundTree]) {
+ def merge(f: BoundTree => List[TreeMaker]): List[TreeMaker] = makers ::: (subpatterns flatMap f)
+ override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")")
+ }
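TranslationStep.merge is what turns the per-node steps produced by nextStep() into a flat list of tree makers. A toy version with invented Step/Pat names, to show the left-to-right flattening (this is not the compiler's class, just its recursion scheme):

final case class Step[M, P](makers: List[M], subpatterns: List[P]) {
  def merge(f: P => List[M]): List[M] = makers ::: subpatterns.flatMap(f)
}

object StepDemo {
  final case class Pat(label: String, subs: List[Pat] = Nil) // hypothetical pattern shape

  def translate(p: Pat): List[String] =
    Step(List(s"maker(${p.label})"), p.subs).merge(translate)

  def main(args: Array[String]): Unit =
    // prints List(maker(Some), maker(Tuple2), maker(x), maker(y))
    println(translate(Pat("Some", List(Pat("Tuple2", List(Pat("x"), Pat("y")))))))
}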
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -107,10 +186,8 @@ trait MatchTranslation { self: PatternMatching =>
val Match(selector, cases) = match_
val (nonSyntheticCases, defaultOverride) = cases match {
- case init :+ last if treeInfo isSyntheticDefaultCase last =>
- (init, Some(((scrut: Tree) => last.body)))
- case _ =>
- (cases, None)
+ case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((scrut: Tree) => last.body)))
+ case _ => (cases, None)
}
checkMatchVariablePatterns(nonSyntheticCases)
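The `init :+ last` pattern above peels off a trailing synthetic default case in a single match. A self-contained illustration of that list pattern (splitDefault and the "default" marker are invented for the example):

object InitLast {
  def splitDefault(cases: List[String]): (List[String], Option[String]) = cases match {
    case init :+ last if last == "default" => (init, Some(last))
    case _                                 => (cases, None)
  }

  def main(args: Array[String]): Unit = {
    println(splitDefault(List("c1", "c2", "default"))) // (List(c1, c2),Some(default))
    println(splitDefault(List("c1", "c2")))            // (List(c1, c2),None)
  }
}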
@@ -127,18 +204,11 @@ trait MatchTranslation { self: PatternMatching =>
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
- val origPt = match_.tpe
// when one of the internal cps-type-state annotations is present, strip all CPS annotations
- // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
- // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
- val ptUnCPS =
- if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
- removeCPSAdaptAnnotations(origPt)
- else origPt
-
+ val origPt = removeCPSFromPt(match_.tpe)
// relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
// pt is the skolemized version
- val pt = repeatedToSeq(ptUnCPS)
+ val pt = repeatedToSeq(origPt)
// val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -183,7 +253,7 @@ trait MatchTranslation { self: PatternMatching =>
CaseDef(
Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing up?
EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(REF(exSym))))
)
})
}
@@ -191,8 +261,6 @@ trait MatchTranslation { self: PatternMatching =>
typer.typedCases(catches, ThrowableTpe, WildcardType)
}
-
-
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
* 2) the combination of the extractor calls using `flatMap`.
@@ -221,166 +289,12 @@ trait MatchTranslation { self: PatternMatching =>
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+ val CaseDef(pattern, guard, body) = caseDef
+ translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
}
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- debug.patmat("translateExtractorPattern checking parameter type: "+ ((patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType)))
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
- debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
- b setInfo tp
- }
-
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
- val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
- if (patBinder.info.widen <:< extractor.paramType) {
- // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
- // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
- // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
- // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
- /* TODO: uncomment when `settings.developer` and `devWarning` become available
- if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
- devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
- */
- (Nil, patBinder setInfo extractor.paramType, false)
- } else {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
- // check whether typetest implies patBinder is not null,
- // even though the eventual null check will be on patBinderOrCasted
- // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
- (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
- }
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that pattern's result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
- case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
- /* A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
- The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
- and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
- The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
- A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
- noFurtherSubPats()
- }
-
- /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- */
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
- /* A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- */
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /* 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- */
- case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
-
- case _ =>
- typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
+ def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate()
def translateGuard(guard: Tree): List[TreeMaker] =
if (guard == EmptyTree) Nil
@@ -395,28 +309,87 @@ trait MatchTranslation { self: PatternMatching =>
def translateBody(body: Tree, matchPt: Type): TreeMaker =
BodyTreeMaker(body, matchPt)
+ // Some notes from the specification
+
+ /*A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected
+ types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the
+ instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the
+ expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn)
+ where each element pattern pi matches the corresponding value vi.
+ A special case arises when c’s formal parameter types end in a repeated parameter.
+ This is further discussed in (§8.1.9).
+ **/
+
+ /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ */
+
+ /* A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ */
+
+ /* 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ */
+
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ // TODO: check unargs == args
+ def apply(tree: Tree): ExtractorCall = tree match {
+ case UnApply(unfun, args) => new ExtractorCallRegular(unfun, args) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class
+ }
}
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
+ abstract class ExtractorCall {
+ def fun: Tree
+ def args: List[Tree]
- // everything okay, captain?
- def isTyped : Boolean
+ val nbSubPats = args.length
+ val starLength = if (hasStar) 1 else 0
+ val nonStarLength = args.length - starLength
+ // everything okay, captain?
+ def isTyped: Boolean
def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ private def hasStar = nbSubPats > 0 && isStar(args.last)
+ private def isNonEmptySeq = nbSubPats > 0 && isSeq
+
+ /** This is special cased so that a single pattern will accept any extractor
+ * result, even if it's a tuple (SI-6675)
+ */
+ def isSingle = nbSubPats == 1 && !isSeq
// to which type should the previous binder be casted?
def paramType : Type
+ protected def rawSubPatTypes: List[Type]
+ protected def resultType: Type
+
/** Create the TreeMaker that embodies this extractor call
*
* `binder` has been casted to `paramType` if necessary
@@ -427,79 +400,91 @@ trait MatchTranslation { self: PatternMatching =>
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
+ // must set infos to `subPatTypes`, which are provided by extractor's result,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ def subPatBinders = subBoundTrees map (_.binder)
+ lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree
// never store these in local variables (for PreserveSubPatBinders)
- lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
- case (b, PatternBoundToUnderscore()) => b
- }.toSet
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
- if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
- else formalTypes(formalsWithRepeated, nbSubPats)
- } else rawSubPatTypes
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
+
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ private def nonStarSubPatTypes = formalTypes(rawInit :+ repeatedType, nonStarLength)
+
+ def subPatTypes: List[Type] = (
+ if (rawSubPatTypes.isEmpty || !isSeq) rawSubPatTypes
+ else if (hasStar) nonStarSubPatTypes :+ sequenceType
+ else nonStarSubPatTypes
+ )
+
+ private def rawGet = typeOfMemberNamedGetOrSelf(resultType)
+ private def emptySub = rawSubPatTypes.isEmpty
+ private def rawInit = rawSubPatTypes dropRight 1
+ protected def sequenceType = typeOfLastSelectorOrSelf(rawGet)
+ protected def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
+ protected def repeatedType = scalaRepeatedType(elementType)
+
+ // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected def firstIndexingBinder = rawSubPatTypes.length - 1
+ protected def lastIndexingBinder = nbSubPats - 1 - starLength
+ protected def expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+
+ private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
+ private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder))
+ private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder))(expectedLength) :: Nil
+
+ // codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1)
protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
+ // the trees that select the subpatterns on the extractor's result,
+ // referenced by `binder`
protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
+ def lastTrees: List[Tree] = (
+ if (!hasStar) Nil
+ else if (expectedLength == 0) seqTree(binder) :: Nil
+ else genDrop(binder, expectedLength)
+ )
// this error-condition has already been checked by checkStarPatOK:
// if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+
+ // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ // [3] the last one -- if the last subpattern is a sequence wildcard:
+ // drop the prefix (indexed by the refs on the preceding line), return the remainder
+ ( productElemsToN(binder, firstIndexingBinder)
+ ++ genTake(binder, expectedLength)
+ ++ lastTrees
+ ).toList
}
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
+ if (isNonEmptySeq) subPatRefsSeq(binder) else productElemsToN(binder, nbSubPats)
+
+ private def compareInts(t1: Tree, t2: Tree) =
+ gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- checkedLength map { expectedLength => import CODE._
+ checkedLength map { expectedLength =>
// `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+ // ...if binder has a lengthCompare method, otherwise
+ // `scala.math.signum(binder.length - expectedLength)`
+ def checkExpectedLength = sequenceType member nme.lengthCompare match {
+ case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength))
+ case lencmp => (seqTree(binder) DOT lencmp)(LIT(expectedLength))
+ }
// the comparison to perform
// when the last subpattern is a wildcard-star the expectedLength is but a lower bound
// (otherwise equality is required)
def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
+ if (hasStar) _ INT_>= _
+ else _ INT_== _
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
(seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
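// For the hypothetical example above (expectedLength = 2, star present, so `>=`),
// the emitted guard is roughly:
//   binder._2 != null && binder._2.lengthCompare(2) >= 0
// and, when the sequence type lacks a `lengthCompare` member, the assumed fallback is
//   binder._2 != null && scala.math.signum(binder._2.length - 2) >= 0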
@@ -507,33 +492,29 @@ trait MatchTranslation { self: PatternMatching =>
def checkedLength: Option[Int] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
+ if (!isSeq || expectedLength < starLength) None
else Some(expectedLength)
-
}
// TODO: to be called when there's a def unapplyProd(x: T): U
// U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
// for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ class ExtractorCallProd(val fun: Tree, val args: List[Tree]) extends ExtractorCall {
// TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
/*override def equals(x$1: Any): Boolean = ...
val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
*/
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+
private def constructorTp = fun.tpe
def isTyped = fun.isTyped
// to which type should the previous binder be casted?
def paramType = constructorTp.finalResultType
+ def resultType = fun.tpe.finalResultType
+
+ def isSeq = isVarArgTypes(rawSubPatTypes)
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
/** Create the TreeMaker that embodies this extractor call
@@ -547,34 +528,36 @@ trait MatchTranslation { self: PatternMatching =>
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
// make an exception for classes under the scala package as they should be well-behaved,
// to optimize matching on List
- val mutableBinders =
+ val mutableBinders = (
if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
(paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
+ )
// checks binder ne null before chaining to the next extractor
ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors = binder.caseFieldAccessors
if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ override def toString() = s"ExtractorCallProd($fun:${fun.tpe} / ${fun.symbol} / args=$args)"
}
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall {
+ val Unapplied(fun) = extractorCallIncludingDummy
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
+ def tpe = fun.tpe
+ def paramType = firstParamType(tpe)
def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
+ def isTyped = (tpe ne NoType) && fun.isTyped && (resultInMonad ne ErrorType)
+ def isSeq = fun.symbol.name == nme.unapplySeq
+ def isBool = resultType =:= BooleanTpe
/** Create the TreeMaker that embodies this extractor call
*
@@ -587,49 +570,56 @@ trait MatchTranslation { self: PatternMatching =>
* Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
*/
def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ // the extractor call (applied to the binder bound by the flatMap corresponding
+ // to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
+ // wrong when isSeq, and resultInMonad should always be correct since it comes
+ // directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad))
+
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
+ subPatBinders,
+ subPatRefs(binder),
+ isBool,
+ checkedLength,
+ patBinderOrCasted,
+ ignoredSubPatBinders
+ )
}
override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
+ if (firstIndexingBinder == 0) REF(binder)
else super.seqTree(binder)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ if (isSingle) REF(binder) :: Nil // special case for extractors
else super.subPatRefs(binder)
protected def spliceApply(binder: Symbol): Tree = {
object splice extends Transformer {
override def transform(t: Tree) = t match {
case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
- case _ => super.transform(t)
+ treeCopy.Apply(t, x, (REF(binder) setPos i.pos) :: Nil)
+ case _ =>
+ super.transform(t)
}
}
- splice.transform(extractorCallIncludingDummy)
+ splice transform extractorCallIncludingDummy
}
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitTpe
- else matchMonadResult(resultType)
- }
+ // what's the extractor's result type in the monad? It is the type of its nullary member `get`.
+ protected lazy val resultInMonad: Type = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(!isSeq && nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
+ protected lazy val rawSubPatTypes = (
+ if (isBool) Nil
+ else if (isSingle) resultInMonad :: Nil // don't go looking for selectors if we only expect one pattern
+ else typesOfSelectorsOrSelf(resultInMonad)
+ )
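+ // Sketch for hypothetical extractors (assuming the name-based helpers behave as
+ // their names suggest):
+ //   def unapply(x: X): Boolean                            -> resultInMonad = Unit,   rawSubPatTypes = Nil
+ //   def unapply(x: X): Option[T]      (one sub-pattern)   -> resultInMonad = T,      rawSubPatTypes = T :: Nil
+ //   def unapply(x: X): Option[(A, B)] (two sub-patterns)  -> resultInMonad = (A, B), rawSubPatTypes = List(A, B)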
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ override def toString() = s"ExtractorCallRegular($fun: $tpe / ${fun.symbol})"
}
/** A conservative approximation of which patterns do not discern anything.
@@ -638,10 +628,9 @@ trait MatchTranslation { self: PatternMatching =>
object WildcardPattern {
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
case Star(WildcardPattern()) => true
case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case Alternative(ps) => ps forall unapply
case EmptyTree => true
case _ => false
}
@@ -651,7 +640,7 @@ trait MatchTranslation { self: PatternMatching =>
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Alternative(ps) => ps forall unapply
case Typed(PatternBoundToUnderscore(), _) => true
case _ => false
}
@@ -659,9 +648,8 @@ trait MatchTranslation { self: PatternMatching =>
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
+ case t@Bind(n, p) if t.hasExistingSymbol => Some((t.symbol, p)) // pos/t2429 does not satisfy these conditions
+ case _ => None
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index baccdcf544..942aa80c34 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -201,6 +201,16 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def extraStoredBinders: Set[Symbol] = Set()
+ debug.patmat(s"""
+ |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
+ | $subPatBinders
+ | $subPatRefs
+ | $extractorReturnsBoolean
+ | $checkedLength
+ | $prevBinder
+ | $ignoredSubPatBinders
+ |}""".stripMargin)
+
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
case Some(cond) =>
@@ -426,7 +436,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case _ if testedBinder.info.widen <:< expectedTp =>
// if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
// since the types conform, no further checking is required
- if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ if (isPrimitiveValueType(expectedTp)) tru
// have to test outer and non-null only when it's a reference type
else if (expectedTp <:< AnyRefTpe) {
// do non-null check first to ensure we won't select outer on null
@@ -587,9 +597,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain)))
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
+ d.symbol.moduleClass andAlso (_.owner = currentOwner)
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)))
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
new file mode 100644
index 0000000000..a7d7680db1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -0,0 +1,86 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+trait MatchWarnings {
+ self: PatternMatching =>
+
+ import global._
+
+ trait TreeMakerWarnings {
+ self: MatchTranslator =>
+
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope"? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+ // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ private def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
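+ // A minimal sketch of the mistake this targets (hypothetical user code):
+ //   val limit = 10
+ //   n match {
+ //     case limit => "matches anything, binds a fresh limit"
+ //     case _     => "unreachable"
+ //   }
+ // whereas a backquoted pattern, case `limit` =>, compares against the value in scope,
+ // as the addendum advises.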
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next()
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index 63834ae51e..a4944caa2b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -40,10 +40,12 @@ trait PatternMatching extends Transform with TypingTransformers
with MatchTranslation
with MatchTreeMaking
with MatchCodeGen
+ with MatchCps
with ScalaLogic
with Solving
with MatchAnalysis
- with MatchOptimization {
+ with MatchOptimization
+ with MatchWarnings {
import global._
val phaseName: String = "patmat"
@@ -94,12 +96,17 @@ trait Debugging {
// TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
object debug {
val printPatmat = global.settings.Ypatmatdebug.value
- @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ @inline final def patmat(s: => String) = if (printPatmat) Console.err.println(s)
+ @inline final def patmatResult[T](s: => String)(result: T): T = {
+ if (printPatmat) Console.err.println(s + ": " + result)
+ result
+ }
}
}
trait Interface extends ast.TreeDSL {
- import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import global._
+ import definitions._
import analyzer.Typer
// 2.10/2.11 compatibility
@@ -166,6 +173,10 @@ trait Interface extends ast.TreeDSL {
trait MatchMonadInterface {
val typer: Typer
val matchOwner = typer.context.owner
+ def pureType(tp: Type): Type = tp
+
+ // Extracting from the monad: tp == { def get: T }, result == T
+ def matchMonadResult(tp: Type) = typeOfMemberNamedGet(tp)
def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
def reportMissingCases(pos: Position, counterExamples: List[String]) = {
@@ -175,16 +186,6 @@ trait Interface extends ast.TreeDSL {
typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
}
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 3c7dc79636..114bcba5df 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -202,15 +202,16 @@ trait Solving extends Logic {
withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
case _ =>
// partition symbols according to whether they appear in positive and/or negative literals
- val pos = new mutable.HashSet[Sym]()
- val neg = new mutable.HashSet[Sym]()
+ // SI-7020 Linked- for deterministic counter examples.
+ val pos = new mutable.LinkedHashSet[Sym]()
+ val neg = new mutable.LinkedHashSet[Sym]()
f.foreach{_.foreach{ lit =>
if (lit.pos) pos += lit.sym else neg += lit.sym
}}
// appearing in both positive and negative
- val impures = pos intersect neg
+ val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
// appearing only in either positive/negative positions
- val pures = (pos ++ neg) -- impures
+ val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures
if (pures nonEmpty) {
val pureSym = pures.head
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 31a31df764..0eae17612d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -246,8 +246,8 @@ trait Checkable {
uncheckedOk(P0) || (P0.widen match {
case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
case RefinedType(_, decls) if !decls.isEmpty => false
- case p =>
- new CheckabilityChecker(AnyTpe, p) isCheckable
+ case RefinedType(parents, _) => parents forall isCheckable
+ case p => new CheckabilityChecker(AnyTpe, p) isCheckable
})
)
@@ -273,9 +273,13 @@ trait Checkable {
// Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
case RefinedType(_, decls) if !decls.isEmpty =>
getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+ case RefinedType(parents, _) =>
+ parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy))
case _ =>
val checker = new CheckabilityChecker(X, P)
- log(checker.summaryString)
+ if (checker.result == RuntimeCheckable)
+ log(checker.summaryString)
+
if (checker.neverMatches) {
val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)"
getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $P$addendum")
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 81f5545695..1f4d5cbac2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -517,6 +517,9 @@ trait ContextErrors {
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
+ def WrongShapeExtractorExpansion(fun: Tree) =
+ NormalTypeError(fun, "extractor macros can only expand into extractor calls")
+
def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
@@ -593,7 +596,12 @@ trait ContextErrors {
}
def CaseClassConstructorError(tree: Tree) = {
- issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ val baseMessage = tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method"
+ val addendum = directUnapplyMember(tree.symbol.info) match {
+ case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list"
+ case _ => ""
+ }
+ issueNormalTypeError(tree, baseMessage + addendum)
setError(tree)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 60641d6752..8d42bf94f3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -269,7 +269,7 @@ trait Contexts { self: Analyzer =>
/** The next enclosing context (potentially `this`) that is owned by a class or method */
def enclClassOrMethod: Context =
- if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+ if (!owner.exists || owner.isClass || owner.isMethod) this
else outer.enclClassOrMethod
/** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
@@ -653,13 +653,8 @@ trait Contexts { self: Analyzer =>
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
- def accessWithinLinked(ab: Symbol) = {
- val linked = ab.linkedClassOfClass
- // don't have access if there is no linked class
- // (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
- // since `accessWithin(NoSymbol) == true` whatever the symbol)
- (linked ne NoSymbol) && accessWithin(linked)
- }
+ // don't have access if there is no linked class (so exclude linkedClass=NoSymbol)
+ def accessWithinLinked(ab: Symbol) = ab.linkedClassOfClass.fold(false)(accessWithin)
/* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
@@ -957,7 +952,7 @@ trait Contexts { self: Analyzer =>
// 2) sym.owner is inherited by the correct package object class
// We try to establish 1) by inspecting the owners directly, and then we try
// to rule out 2), and only if both those fail do we resort to looking in the info.
- !sym.isPackage && (sym.owner ne NoSymbol) && (
+ !sym.isPackage && sym.owner.exists && (
if (sym.owner.isPackageObjectClass)
sym.owner.owner == pkgClass
else
@@ -1031,7 +1026,7 @@ trait Contexts { self: Analyzer =>
(scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) =
- logResult(s"!!! lookup overloaded")(owner.newOverloaded(pre, entries map (_.sym)))
+ logResult(s"overloaded symbol in $pre")(owner.newOverloaded(pre, entries map (_.sym)))
// Constructor lookup should only look in the decls of the enclosing class
// not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745)
@@ -1194,7 +1189,7 @@ trait Contexts { self: Analyzer =>
override final def imports = impInfo :: super.imports
override final def firstImport = Some(impInfo)
override final def isRootImport = !tree.pos.isDefined
- override final def toString = s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
+ override final def toString = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
}
/** A buffer for warnings and errors that are accumulated during speculative type checking. */
@@ -1340,6 +1335,7 @@ trait Contexts { self: Analyzer =>
}
object ContextMode {
+ import scala.language.implicitConversions
private implicit def liftIntBitsToContextState(bits: Int): ContextMode = apply(bits)
def apply(bits: Int): ContextMode = new ContextMode(bits)
final val NOmode: ContextMode = 0
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 0a2628b482..396f3407f3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -74,22 +74,19 @@ abstract class Duplicators extends Analyzer {
override def mapOver(tpe: Type): Type = tpe match {
case TypeRef(NoPrefix, sym, args) if sym.isTypeParameterOrSkolem =>
- var sym1 = context.scope.lookup(sym.name)
- if (sym1 eq NoSymbol) {
- // try harder (look in outer scopes)
- // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
- BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
- case SilentResultValue(t) =>
- sym1 = t.symbol
- debuglog("fixed by trying harder: "+((sym, sym1, context)))
- case _ =>
- }
- }
-// assert(sym1 ne NoSymbol, tpe)
- if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
- debuglog("fixing " + sym + " -> " + sym1)
+ val sym1 = (
+ context.scope lookup sym.name orElse {
+ // try harder (look in outer scopes)
+ // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but
+ // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
+ BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol)
+ } filter (_ ne sym)
+ )
+ if (sym1.exists) {
+ debuglog(s"fixing $sym -> $sym1")
typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
- } else super.mapOver(tpe)
+ }
+ else super.mapOver(tpe)
case TypeRef(pre, sym, args) =>
val newsym = updateSym(sym)
@@ -157,7 +154,7 @@ abstract class Duplicators extends Analyzer {
case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
invalidSyms(vdef.symbol) = vdef
- val newowner = if (owner != NoSymbol) owner else context.owner
+ val newowner = owner orElse context.owner
val newsym = vdef.symbol.cloneSymbol(newowner)
newsym.setInfo(fixType(vdef.symbol.info))
vdef.symbol = newsym
@@ -362,12 +359,11 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol.safeOwner == AnyClass)
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
- }
+
val ntree = castType(tree, pt)
- val res = super.typed(ntree, mode, pt)
- res
+ super.typed(ntree, mode, pt)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 100112fec1..3a6b25f1cd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -60,6 +60,8 @@ trait Implicits {
* @return A search result
*/
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = {
+ // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the
+ // work is performed, than at the point where it presently exists.
val shouldPrint = printTypings && !context.undetparams.isEmpty
val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null
val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null
@@ -642,8 +644,7 @@ trait Implicits {
if (tvars.nonEmpty)
typingLog("solve", ptLine("tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
- val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
- upper = false, lubDepth(List(itree2.tpe, pt)))
+ val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt), upper = false, lubDepth(itree2.tpe :: pt :: Nil))
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
@@ -813,7 +814,7 @@ trait Implicits {
if (search.isDivergent && countdown > 0) {
countdown -= 1
implicitSym = i.sym
- log("discarding divergent implicit ${implicitSym} during implicit search")
+ log(s"discarding divergent implicit $implicitSym during implicit search")
SearchFailure
} else search
}
@@ -1335,12 +1336,18 @@ trait Implicits {
}
}
if (result.isSuccess && isView) {
+ def maybeInvalidConversionError(msg: String) {
+ // We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError"
+ // which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690.
+ if (context.ambiguousErrors)
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
+ }
if (isInvalidConversionTarget(pt)) {
- context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, "the result type of an implicit conversion must be more specific than AnyRef"))
+ maybeInvalidConversionError("the result type of an implicit conversion must be more specific than AnyRef")
result = SearchFailure
}
else if (isInvalidConversionSource(pt)) {
- context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, "an expression of type Null is ineligible for implicit conversion"))
+ maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion")
result = SearchFailure
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 06892053fa..03f680525c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -9,6 +9,7 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.util.control.ControlThrowable
import symtab.Flags._
+import scala.reflect.internal.Depth
/** This trait contains methods related to type parameter inference.
*
@@ -21,22 +22,32 @@ trait Infer extends Checkable {
import global._
import definitions._
import typeDebug.ptBlock
+ import typeDebug.str.parentheses
import typingStack.{ printTyping }
/** The formal parameter types corresponding to `formals`.
* If `formals` has a repeated last parameter, a list of
- * (nargs - params.length + 1) copies of its type is returned.
- * By-name types are replaced with their underlying type.
+ * (numArgs - numFormals + 1) copies of its type is appended
+ * to the other formals. By-name types are replaced with their
+ * underlying type.
*
* @param removeByName allows keeping ByName parameters. Used in NamesDefaults.
* @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
*/
- def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
- val formals1 = if (removeByName) formals mapConserve dropByName else formals
- if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.dealiasWiden.typeArgs.head
- formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
- } else formals1
+ def formalTypes(formals: List[Type], numArgs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
+ val numFormals = formals.length
+ val formals1 = if (removeByName) formals mapConserve dropByName else formals
+ val expandLast = (
+ (removeRepeated || numFormals != numArgs)
+ && isVarArgTypes(formals1)
+ )
+ def lastType = formals1.last.dealiasWiden.typeArgs.head
+ def expanded(n: Int) = (1 to n).toList map (_ => lastType)
+
+ if (expandLast)
+ formals1.init ::: expanded(numArgs - numFormals + 1)
+ else
+ formals1
}
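// Worked example (hypothetical): formals = List(Int, String*), numArgs = 4
//   => expandLast holds, lastType = String, numArgs - numFormals + 1 = 3 copies appended
//   => List(Int, String, String, String)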
/** Sorts the alternatives according to the given comparison function.
@@ -67,96 +78,6 @@ trait Infer extends Checkable {
override def complete(sym: Symbol) = ()
}
- /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
- * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
- * unapply[Seq] call is assumed to have result type `resTp`.
- *
- * `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
- *
- * @param nbSubPats The number of arguments to the extractor pattern
- * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
- * bind patterns, is a Tuple pattern, in which case it is the number of
- * elements. Used to issue warnings about binding a `TupleN` to a single value.
- * @throws TypeError when the unapply[Seq] definition is ill-typed
- * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
- *
- * This is the spec currently implemented -- TODO: update it.
- *
- * 8.1.8 ExtractorPatterns
- *
- * An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
- * However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
- *
- * An `unapply` method with result type `R` in an object `x` matches the
- * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
- * - `n = 0` and `R =:= Boolean`, or
- * - `n = 1` and `R <:< Option[T]`, for some type `T`.
- * The argument pattern `p1` is typed in turn with expected type `T`.
- * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
- * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
- * typed with expected types `T_1, ..., T_n`.
- *
- * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
- * if it takes exactly one argument and its result type is of the form `Option[S]`,
- * where either:
- * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
- * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
- *
- * The argument patterns `p_1, ..., p_n` are typed with expected types
- * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
- *
- */
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
- unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
- val isUnapplySeq = unappSym.name == nme.unapplySeq
- val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
-
- def seqToRepeatedChecked(tp: Type) = {
- val toRepeated = seqToRepeated(tp)
- if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
- else toRepeated
- }
-
- // empty list --> error, otherwise length == 1
- lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
- // empty list --> not a ProductN, otherwise product element types
- def productArgs = getProductArgs(optionArgs.head)
-
- val formals =
- // convert Seq[T] to the special repeated argument type
- // so below we can use formalTypes to expand formals to correspond to the number of actuals
- if (isUnapplySeq) {
- if (optionArgs.nonEmpty)
- productArgs match {
- case Nil => List(seqToRepeatedChecked(optionArgs.head))
- case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
- }
- else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
- } else {
- if (booleanExtractor && nbSubPats == 0) Nil
- else if (optionArgs.nonEmpty)
- if (nbSubPats == 1) {
- val productArity = productArgs.size
- if (productArity > 1 && productArity != effectiveNbSubPats && settings.lint)
- global.currentUnit.warning(pos,
- s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
- optionArgs
- }
- // TODO: update spec to reflect we allow any ProductN, not just TupleN
- else productArgs
- else
- throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
- }
-
- // for unapplySeq, replace last vararg by as many instances as required by nbSubPats
- val formalsExpanded =
- if (isUnapplySeq && formals.nonEmpty) formalTypes(formals, nbSubPats)
- else formals
-
- if (formalsExpanded.lengthCompare(nbSubPats) != 0) (null, null)
- else (formals, formalsExpanded)
- }
-
/** A fresh type variable with given type parameter as origin.
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
@@ -213,34 +134,17 @@ trait Infer extends Checkable {
* @param upper When `true` search for max solution else min.
* @throws NoInstance
*/
- def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Variance], upper: Boolean, depth: Int): List[Type] = {
-
- if (tvars.nonEmpty) {
- def tp_s = (tparams, tvars).zipped map { case (tp, tv) => s"${tp.name}/$tv" } mkString ","
- printTyping(s"solving for $tp_s")
- }
-
- if (!solve(tvars, tparams, variances, upper, depth)) {
- // no panic, it's good enough to just guess a solution, we'll find out
- // later whether it works. *ZAP* @M danger, Will Robinson! this means
- // that you should never trust inferred type arguments!
- //
- // Need to call checkBounds on the args/typars or type1 on the tree
- // for the expression that results from type inference see e.g., #2421:
- // implicit search had been ignoring this caveat
- // throw new DeferredNoInstance(() =>
- // "no solution exists for constraints"+(tvars map boundsString))
- }
- for (tvar <- tvars ; if tvar.constr.inst == tvar) {
- if (tvar.origin.typeSymbol.info eq ErrorType)
- // this can happen if during solving a cyclic type parameter
- // such as T <: T gets completed. See #360
- tvar.constr.inst = ErrorType
- else
- abort(tvar.origin+" at "+tvar.origin.typeSymbol.owner)
+ def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): List[Type] = {
+ if (tvars.isEmpty) Nil else {
+ printTyping("solving for " + parentheses((tparams, tvars).zipped map ((p, tv) => s"${p.name}: $tv")))
+ // !!! What should be done with the return value of "solve", which is at present ignored?
+ // The historical commentary says "no panic, it's good enough to just guess a solution,
+ // we'll find out later whether it works", meaning don't issue an error here when types
+ // don't conform to bounds. That means you can never trust the results of implicit search.
+ // For an example where this was not being heeded, SI-2421.
+ solve(tvars, tparams, variances, upper, depth)
+ tvars map instantiate
}
- tvars map instantiate
}
def skipImplicit(tp: Type) = tp match {
@@ -255,7 +159,10 @@ trait Infer extends Checkable {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
- case PolyType(_, restpe) => logResult(s"Normalizing $tp in infer")(normalize(restpe))
+ case PolyType(_, restpe) =>
+ logResult(sm"""|Normalizing PolyType in infer:
+ | was: $restpe
+ | now""")(normalize(restpe))
case mt @ MethodType(_, restpe) if mt.isImplicit => normalize(restpe)
case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => functionType(mt.paramTypes, normalize(restpe))
case NullaryMethodType(restpe) => normalize(restpe)
@@ -635,10 +542,7 @@ trait Infer extends Checkable {
"argument expression's type is not compatible with formal parameter type" + foundReqMsg(tp1, pt1))
}
}
- val targs = solvedTypes(
- tvars, tparams, tparams map varianceInTypes(formals),
- upper = false, lubDepth(formals) max lubDepth(argtpes)
- )
+ val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes))
// Can warn about inferring Any/AnyVal as long as they don't appear
// explicitly anywhere amongst the formal, argument, result, or expected type.
def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass)))
@@ -700,7 +604,7 @@ trait Infer extends Checkable {
tp nonPrivateMember nme.apply match {
case NoSymbol => tp
case sym if !sym.isOverloaded && sym.isPublic => OverloadedType(tp, sym.alternatives)
- case sym => OverloadedType(tp, sym filter (_.isPublic) alternatives)
+ case sym => OverloadedType(tp, sym.filter(_.isPublic).alternatives)
}
}
@@ -1111,7 +1015,10 @@ trait Infer extends Checkable {
val variances =
if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
else undetparams map varianceInTypes(ctorTp.paramTypes)
- val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(List(resTp, pt)))
+
+ // Note: this is the only place where solvedTypes (or, indirectly, solve) is called
+ // with upper = true.
+ val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(resTp :: pt :: Nil))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
// TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
@@ -1180,7 +1087,7 @@ trait Infer extends Checkable {
val tvars1 = tvars map (_.cloneInternal)
// Note: right now it's not clear that solving is complete, or how it can be made complete!
// So we should come back to this and investigate.
- solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false)
+ solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false, Depth.AnyDepth)
}
// this is quite nasty: it destructively changes the info of the syms of e.g., method type params
@@ -1190,6 +1097,15 @@ trait Infer extends Checkable {
val tparam = tvar.origin.typeSymbol
val TypeBounds(lo0, hi0) = tparam.info.bounds
val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+ val enclCase = context.enclosingCaseDef
+ def enclCase_s = enclCase.toString.replaceAll("\\n", " ").take(60)
+
+ if (enclCase.savedTypeBounds.nonEmpty) log(
+ sm"""|instantiateTypeVar with nonEmpty saved type bounds {
+ | enclosing $enclCase_s
+ | saved ${enclCase.savedTypeBounds}
+ | tparam ${tparam.shortSymbolClass} ${tparam.defString}
+ |}""")
if (lo1 <:< hi1) {
if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
@@ -1197,7 +1113,7 @@ trait Infer extends Checkable {
else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
else {
- context.enclosingCaseDef pushTypeBounds tparam
+ enclCase pushTypeBounds tparam
tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 6b9537e27d..b3675d6a82 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -589,18 +589,23 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
/** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
* @see MacroExpander
*/
- def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, pt: Type) = {
+ def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = {
object expander extends TermMacroExpander(APPLY_ROLE, typer, expandee, mode, pt) {
override def onSuccess(expanded: Tree) = {
// prematurely annotate the tree with a macro expansion attachment
// so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
linkExpandeeAndExpanded(expandee, expanded)
- var expectedTpe = expandee.tpe
- if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
+ // approximation is necessary for whitebox macros to guide type inference
+ // read more in the comments for onDelayed below
+ def approximate(tp: Type) = {
+ val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol }
+ deriveTypeWithWildcards(undetparams)(tp)
+ }
+ val macroPtApprox = approximate(if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe)
// `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
// therefore we need to re-enable the conversions back temporarily
- if (macroDebugVerbose) println(s"typecheck #1 (against expectedTpe = $expectedTpe): $expanded")
- val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, expectedTpe))
+ if (macroDebugVerbose) println(s"typecheck #1 (against macroPtApprox = $macroPtApprox): $expanded")
+ val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, macroPtApprox))
if (expanded1.isErrorTyped) {
if (macroDebugVerbose) println(s"typecheck #1 has failed: ${typer.context.reportBuffer.errors}")
expanded1
@@ -612,6 +617,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
}
}
override def onDelayed(delayed: Tree) = {
+ // =========== THE SITUATION ===========
+ //
// If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
// then there are two possible situations we're in:
// 1) We're in POLYmode, when the typer tests the waters wrt type inference
@@ -627,12 +634,43 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
// the undetermined type params. Therefore we need to do something ourselves or otherwise this
// expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
// is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
- // but sometimes, if the inferencer lacks information, it will be forced to approximate. This prevents
- // an important class of macros, fundep materializers, from working, which I perceive is a problem we need to solve.
- // For details see SI-7470.
+ // but sometimes, if the inferencer lacks information, it will be forced to approximate.
+ //
+ // =========== THE PROBLEM ===========
+ //
+ // Consider the following example (thanks, Miles!):
+ //
+ // Iso represents an isomorphism between two datatypes:
+ // 1) An arbitrary one (e.g. a random case class)
+ // 2) A uniform representation for all datatypes (e.g. an HList)
+ //
+ // trait Iso[T, U] {
+ // def to(t : T) : U
+ // def from(u : U) : T
+ // }
+ // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
+ //
+ // case class Foo(i: Int, s: String, b: Boolean)
+ // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+ // foo(Foo(23, "foo", true))
+ //
+ // In the snippet above, even though we know that there's a fundep going from T to U
+ // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+ // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
+ // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
+ //
+ // =========== THE SOLUTION ===========
+ //
+ // To give materializers a chance to say their word before vanilla inference kicks in,
+ // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo)
+ // and then trigger macro expansion with the undetermined type parameters still there.
+ // Thanks to that the materializer can take a look at what's going on and react accordingly.
val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
- if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
- else delayed
+ if (shouldInstantiate) {
+ forced += delayed
+ typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), pt, keepNothings = false)
+ macroExpandApply(typer, delayed, mode, pt)
+ } else delayed
}
}
expander(expandee)
@@ -750,10 +788,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
+ private val forced = perRunCaches.newWeakSet[Tree]
private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]()
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
- delayed.get(expandee).getOrElse {
+ if (forced(expandee)) scala.collection.mutable.Set[Int]()
+ else delayed.getOrElse(expandee, {
val calculated = scala.collection.mutable.Set[Symbol]()
expandee foreach (sub => {
def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
@@ -762,7 +802,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
})
macroLogVerbose("calculateUndetparams: %s".format(calculated))
calculated map (_.id)
- }
+ })
private val undetparams = perRunCaches.newSet[Int]()
def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
undetparams ++= newUndets map (_.id)
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 546186479f..3a5845c8ca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -25,7 +25,7 @@ trait MethodSynthesis {
type TT[T] = ru.TypeTag[T]
type CT[T] = ClassTag[T]
- def ValOrDefDef(sym: Symbol, body: Tree) =
+ def newValOrDefDef(sym: Symbol, body: Tree) =
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
@@ -67,7 +67,7 @@ trait MethodSynthesis {
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
- localTyper typed ValOrDefDef(method, f(method))
+ localTyper typed newValOrDefDef(method, f(method))
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
val name1 = name.toTermName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index cac6bd2ef2..454f913412 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -291,10 +291,13 @@ trait Namers extends MethodSynthesis {
}
private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
- sym.name.toTermName match {
+ if (isPastTyper) sym.name.toTermName match {
case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
case _ =>
- log("[+symbol] " + sym.debugLocationString)
+ tree match {
+ case md: DefDef => log("[+symbol] " + sym.debugLocationString)
+ case _ =>
+ }
}
tree.symbol = sym
sym
@@ -390,9 +393,7 @@ trait Namers extends MethodSynthesis {
* has been defined in a separate file.
*/
private def validateCompanionDefs(tree: ImplDef) {
- val sym = tree.symbol
- if (sym eq NoSymbol) return
-
+ val sym = tree.symbol orElse { return }
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name
@@ -1417,14 +1418,6 @@ trait Namers extends MethodSynthesis {
annCtx.setReportErrors()
// need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
AnnotationInfo lazily {
- if (typer.context ne ctx)
- log(sm"""|The var `typer.context` in ${Namer.this} was mutated before the annotation ${ann} was forced.
- |
- |current value = ${typer.context}
- |original value = $ctx
- |
- |This confirms the hypothesis for the cause of SI-7603. If you see this message, please comment on that ticket.""")
-
enteringTyper(newTyper(annCtx) typedAnnotation ann)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 8e9933f734..dea4c46e79 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -162,7 +162,7 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = ARTIFACT) setInfo qual.tpe
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe)
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
// it stays in Vegas: SI-5720, SI-5727
@@ -289,9 +289,10 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo {
+ val tp = if (byName) functionType(Nil, argTpe) else argTpe
+ uncheckedBounds(tp)
+ }
Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
new file mode 100644
index 0000000000..8bf9ce49be
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -0,0 +1,471 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package typechecker
+
+import scala.collection.mutable
+import symtab.Flags
+import Mode._
+
+ /**
+ *
+ * A pattern match such as
+ *
+ * x match { case Foo(a, b) => ...}
+ *
+ * might match an instance of any of the following definitions of Foo.
+ * Note the analogous treatment between case classes and unapplies.
+ *
+ * case class Foo(xs: Int*)
+ * case class Foo(a: Int, xs: Int*)
+ * case class Foo(a: Int, b: Int)
+ * case class Foo(a: Int, b: Int, xs: Int*)
+ *
+ * object Foo { def unapplySeq(x: Any): Option[Seq[Int]] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Seq[Int])] }
+ * object Foo { def unapply(x: Any): Option[(Int, Int)] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] }
+ */
+
+trait PatternTypers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ private object FixedAndRepeatedTypes {
+ def unapply(types: List[Type]) = types match {
+ case init :+ last if isRepeatedParamType(last) => Some((init, dropRepeated(last)))
+ case _ => Some((types, NoType))
+ }
+ }
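+ // Sketch: FixedAndRepeatedTypes(List(Int, String, Boolean*)) yields (List(Int, String), Boolean),
+ // while a list with no repeated last type yields (theListItself, NoType) -- assuming
+ // dropRepeated unwraps T* to T.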
+
+ // when true:
+ // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
+ // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
+ // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
+ protected def newPatternMatching = true // presently overridden in the presentation compiler
+
+ trait PatternTyper {
+ self: Typer =>
+
+ import TyperErrorGen._
+ import infer._
+
+ private def unit = context.unit
+
+ // If the tree's symbol's type does not define an extractor, maybe the tree's type does.
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call
+ // (rather than something that looks like a constructor call.) (for now, this only happens
+ // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+ // more commonplace)
+ private def hasUnapplyMember(tpe: Type): Boolean = reallyExists(unapplyMember(tpe))
+ private def hasUnapplyMember(sym: Symbol): Boolean = hasUnapplyMember(sym.tpe_*)
+ private def hasUnapplyMember(fun: Tree): Boolean = hasUnapplyMember(fun.symbol) || hasUnapplyMember(fun.tpe)
+
+ // ad-hoc overloading resolution to deal with unapplies and case class constructors
+ // If some but not all alternatives survive filtering the tree's symbol with `p`,
+ // then update the tree's symbol and type to exclude the filtered out alternatives.
+ private def inPlaceAdHocOverloadingResolution(fun: Tree)(p: Symbol => Boolean): Tree = fun.symbol filter p match {
+ case sym if sym.exists && (sym ne fun.symbol) => fun setSymbol sym modifyType (tp => filterOverloadedAlts(tp)(p))
+ case _ => fun
+ }
+ private def filterOverloadedAlts(tpe: Type)(p: Symbol => Boolean): Type = tpe match {
+ case OverloadedType(pre, alts) => overloadedType(pre, alts filter p)
+ case tp => tp
+ }
+
+ def typedConstructorPattern(fun0: Tree, pt: Type) = {
+ // Do some ad-hoc overloading resolution and update the tree's symbol and type
+ // do not update the symbol if the tree's symbol's type does not define an unapply member
+ // (e.g. since it's some method that returns an object with an unapply member)
+ val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+ def caseClass = fun.tpe.typeSymbol.linkedClassOfClass
+
+ // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
+ // A case class with 23+ params has no unapply method.
+ // A case class constructor may be overloaded with unapply methods in the companion.
+ if (caseClass.isCase && !unapplyMember(fun.tpe).isOverloaded)
+ convertToCaseConstructor(fun, caseClass, pt)
+ else if (hasUnapplyMember(fun))
+ fun
+ else
+ CaseClassConstructorError(fun)
+ }
+
+ def expectedPatternTypes(fun: Tree, args: List[Tree]): List[Type] =
+ newExtractorShape(fun, args).expectedPatternTypes
+
+ def typedPatternArgs(fun: Tree, args: List[Tree], mode: Mode): List[Tree] =
+ typedArgsForFormals(args, newExtractorShape(fun, args).formals, mode)
+
+ def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = {
+ def typedArgWithFormal(arg: Tree, pt: Type) = {
+ val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode
+ typedArg(arg, mode, newMode, dropByName(pt))
+ }
+ val FixedAndRepeatedTypes(fixed, elem) = formals
+ val front = (args, fixed).zipped map typedArgWithFormal
+ def rest = context withinStarPatterns (args drop front.length map (typedArgWithFormal(_, elem)))
+
+ elem match {
+ case NoType => front
+ case _ => front ::: rest
+ }
+ }
+
+ private def boundedArrayType(bound: Type): Type = {
+ val tparam = context.owner freshExistential "" setInfo (TypeBounds upper bound)
+ newExistentialType(tparam :: Nil, arrayType(tparam.tpe_*))
+ }
+
+ protected def typedStarInPattern(tree: Tree, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val exprTyped = typed(expr, mode)
+ val baseClass = exprTyped.tpe.typeSymbol match {
+ case ArrayClass => ArrayClass
+ case _ => SeqClass
+ }
+ val starType = baseClass match {
+ case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt)
+ case ArrayClass => boundedArrayType(pt)
+ case _ => seqType(pt)
+ }
+ val exprAdapted = adapt(exprTyped, mode, starType)
+ exprAdapted.tpe baseType baseClass match {
+ case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+ case _ => setError(tree)
+ }
+ }
+
+ protected def typedInPattern(tree: Typed, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val tptTyped = typedType(tpt, mode)
+ val tpe = tptTyped.tpe
+ val exprTyped = typed(expr, mode, tpe.deconst)
+ val extractor = extractorForUncheckedType(tpt.pos, tpe)
+
+ val canRemedy = tpe match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy)
+ val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType
+
+ extractor match {
+ case EmptyTree => treeTyped
+ case _ => wrapClassTagUnapply(treeTyped, extractor, tpe)
+ }
+ }
+
+ def newExtractorShape(tree: Tree): ExtractorShape = tree match {
+ case Apply(fun, args) => ExtractorShape(fun, args)
+ case UnApply(fun, args) => ExtractorShape(fun, args)
+ }
+ def newExtractorShape(fun: Tree, args: List[Tree]): ExtractorShape = ExtractorShape(fun, args)
+
+ case class CaseClassInfo(clazz: Symbol, classType: Type) {
+ def constructor = clazz.primaryConstructor
+ def constructorType = classType.prefix memberType clazz memberType constructor
+ def paramTypes = constructorType.paramTypes
+ def accessors = clazz.caseFieldAccessors
+ def accessorTypes = accessors map (m => (classType memberType m).finalResultType)
+ // def inverted = MethodType(clazz :: Nil, tupleType(accessorTypes))
+ }
+ object NoCaseClassInfo extends CaseClassInfo(NoSymbol, NoType) {
+ override def toString = "NoCaseClassInfo"
+ }
+
+ case class UnapplyMethodInfo(unapply: Symbol, tpe: Type) {
+ def name = unapply.name
+ def isUnapplySeq = name == nme.unapplySeq
+ def unapplyType = tpe memberType method
+ def resultType = tpe.finalResultType
+ def method = unapplyMember(tpe)
+ def paramType = firstParamType(unapplyType)
+ def rawGet = if (isBool) UnitTpe else typeOfMemberNamedGetOrSelf(resultType)
+ def rawTypes = if (isBool) Nil else typesOfSelectorsOrSelf(rawGet)
+ def rawArity = rawTypes.size
+ def isBool = resultType =:= BooleanTpe // aka "Tuple0" or "Option[Unit]"
+ def isNothing = rawGet =:= NothingTpe
+ def isCase = method.isCase
+ }
+
+ object NoUnapplyMethodInfo extends UnapplyMethodInfo(NoSymbol, NoType) {
+ override def toString = "NoUnapplyMethodInfo"
+ }
+
+ case class ExtractorShape(fun: Tree, args: List[Tree]) {
+ def pos = fun.pos
+ private def symbol = fun.symbol
+ private def tpe = fun.tpe
+
+ val ccInfo = tpe.typeSymbol.linkedClassOfClass match {
+ case clazz if clazz.isCase => CaseClassInfo(clazz, tpe)
+ case _ => NoCaseClassInfo
+ }
+ val exInfo = UnapplyMethodInfo(symbol, tpe)
+ import exInfo.{ rawGet, rawTypes, isUnapplySeq }
+
+ override def toString = s"ExtractorShape($fun, $args)"
+
+ def unapplyMethod = exInfo.method
+ def unapplyType = exInfo.unapplyType
+ def unapplyParamType = exInfo.paramType
+ def caseClass = ccInfo.clazz
+ def enclClass = symbol.enclClass
+
+ // TODO - merge these. The difference between these two methods is that expectedPatternTypes
+ // expands the list of types so it is the same length as the number of patterns, whereas formals
+ // leaves the varargs type unexpanded.
+ def formals = (
+ if (isUnapplySeq) productTypes :+ varargsType
+ else if (elementArity == 0) productTypes
+ else if (isSingle) squishIntoOne()
+ else wrongArity(patternFixedArity)
+ )
+ def expectedPatternTypes = elementArity match {
+ case 0 => productTypes
+ case _ if elementArity > 0 && isUnapplySeq => productTypes ::: elementTypes
+ case _ if productArity > 1 && patternFixedArity == 1 => squishIntoOne()
+ case _ => wrongArity(patternFixedArity)
+ }
+
+ def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
+
+ private def hasBogusExtractor = directUnapplyMember(tpe).exists && !unapplyMethod.exists
+ private def expectedArity = "" + productArity + ( if (isUnapplySeq) "+" else "")
+ private def wrongArityMsg(n: Int) = (
+ if (hasBogusExtractor) s"$enclClass does not define a valid extractor method"
+ else s"wrong number of patterns for $enclClass offering $rawTypes_s: expected $expectedArity, found $n"
+ )
+ private def rawTypes_s = rawTypes match {
+ case Nil => "()"
+ case tp :: Nil => "" + tp
+ case tps => tps.mkString("(", ", ", ")")
+ }
+
+ private def err(msg: String) = { unit.error(pos, msg) ; throw new TypeError(msg) }
+ private def wrongArity(n: Int) = err(wrongArityMsg(n))
+
+ def squishIntoOne() = {
+ if (settings.lint)
+ unit.warning(pos, s"$enclClass expects $expectedArity patterns to hold $rawGet but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+
+ rawGet :: Nil
+ }
+ // elementArity is the number of non-sequence patterns minus the
+ // number of non-sequence product elements returned by the extractor.
+ // If it is zero, there is a perfect match between those parts, and
+ // if there is a wildcard star it will match any sequence.
+ // If it is positive, there are more patterns than products,
+ // so a sequence will have to fill in the elements. If it is negative,
+ // there are more products than patterns, which is a compile time error.
+ def elementArity = patternFixedArity - productArity
+ def patternFixedArity = treeInfo effectivePatternArity args
+ def productArity = productTypes.size
+ def isSingle = !isUnapplySeq && (patternFixedArity == 1)
+
+ def productTypes = if (isUnapplySeq) rawTypes dropRight 1 else rawTypes
+ def elementTypes = List.fill(elementArity)(elementType)
+ def varargsType = scalaRepeatedType(elementType)
+ }
+
+ private class VariantToSkolemMap extends TypeMap(trackVariance = true) {
+ private val skolemBuffer = mutable.ListBuffer[TypeSymbol]()
+
+ def skolems = try skolemBuffer.toList finally skolemBuffer.clear()
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+ case tp @ TypeRef(NoPrefix, tpSym, Nil) if tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ if (variance.isInvariant) {
+ // if (variance.isInvariant) tpSym.tpeHK.bounds
+ devWarning(s"variantToSkolem skipping rewrite of $tpSym due to invariance")
+ return tp
+ }
+ val bounds = (
+ if (variance.isPositive) TypeBounds.upper(tpSym.tpeHK)
+ else TypeBounds.lower(tpSym.tpeHK)
+ )
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ skolemBuffer += skolem
+ skolem.tpe_*
+ case tp1 => tp1
+ }
+ }
+ /*
+ * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+ * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+ *
+ * Consider the following example:
+ *
+ * class AbsWrapperCov[+A]
+ * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+ *
+ * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+ * wrapped // : Wrapped[_ <: T]
+ * }
+ *
+ * this method should type check if and only if Wrapped is covariant in its type parameter
+ *
+ * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+ *
+ * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+ * we can simply replace skolems that represent method type parameters as seen from the method's body
+ * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+ * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+ *
+ * see test/files/../t5189*.scala
+ */
+ private def convertToCaseConstructor(tree: Tree, caseClass: Symbol, pt: Type): Tree = {
+ val variantToSkolem = new VariantToSkolemMap
+ val caseConstructorType = tree.tpe.prefix memberType caseClass memberType caseClass.primaryConstructor
+ val tree1 = TypeTree(caseConstructorType) setOriginal tree
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
+ val freeVars = variantToSkolem.skolems
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ log(s"convert ${tree.summaryString}: ${tree.tpe} to case constructor, pt=$ptSafe")
+
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, caseClass.typeParams, ptSafe)
+
+ // simplify types without losing safety,
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+ val extrapolator = new ExistentialExtrapolation(freeVars)
+ def extrapolate(tp: Type) = extrapolator extrapolate tp
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 modifyType {
+ case MethodType(ctorArgs, restpe) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ copyMethodType(tree1.tpe, ctorArgs map (_ modifyInfo extrapolate), extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type
+ case tp => tp
+ }
+ }
+
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ def freshArgType(tp: Type): Type = tp match {
+ case MethodType(param :: _, _) => param.tpe
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(polyType)
+ case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType
+ case _ => UnapplyWithSingleArgError(fun) ; ErrorType
+ }
+ val shape = newExtractorShape(fun, args)
+ import shape.{ unapplyParamType, unapplyType, unapplyMethod }
+
+ def extractor = extractorForUncheckedType(shape.pos, unapplyParamType)
+ def canRemedy = unapplyParamType match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ def freshUnapplyArgType(): Type = {
+ val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
+ val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy)
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ pattp.substSym(freeVars, skolems)
+ }
+
+ val unapplyArg = (
+ context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo (
+ if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt
+ else freshUnapplyArgType()
+ )
+ )
+ // clearing the type is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapplyMethod), Ident(unapplyArg) :: Nil))
+
+ def makeTypedUnApply() = {
+ // the union of the expected type and the inferred type of the argument to unapply
+ val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
+ val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
+ val args1 = typedPatternArgs(fun1, args, mode)
+ val result = UnApply(fun1, args1) setPos tree.pos setType glbType
+
+ if (wrapInTypeTest)
+ wrapClassTagUnapply(result, extractor, glbType)
+ else
+ result
+ }
+
+ if (fun1.tpe.isErroneous)
+ duplErrTree
+ else if (unapplyMethod.isMacro && !fun1.isInstanceOf[Apply])
+ duplErrorTree(WrongShapeExtractorExpansion(tree))
+ else
+ makeTypedUnApply()
+ }
+
+ def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
+ // TODO: disable when in unchecked match
+ // we don't create a new Context for a Match, so find the CaseDef,
+ // then go out one level and navigate back to the match that has this case
+ val args = List(uncheckedPattern)
+ val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
+ // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
+ // and re-typechecks of the target of the unapply call in PATTERNmode,
+ // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
+ // but an arbitrary tree as is the case here
+ val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
+
+ log(sm"""
+ |wrapClassTagUnapply {
+ | pattern: $uncheckedPattern
+ | extract: $classTagExtractor
+ | pt: $pt
+ | res: $res
+ |}""".trim)
+
+ res
+ }
+
+ // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
+ // return the corresponding extractor (an instance of ClassTag[`pt`])
+ def extractorForUncheckedType(pos: Position, pt: Type): Tree = {
+ if (isPastTyper || (pt eq NoType)) EmptyTree else {
+ pt match {
+ case RefinedType(parents, decls) if !decls.isEmpty || (parents exists isUncheckable) => return EmptyTree
+ case _ =>
+ }
+ // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
+ // but at least make a proper type before passing it elsewhere
+ val pt1 = pt.dealiasWiden match {
+ case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+ case pt1 => pt1
+ }
+ if (isCheckable(pt1)) EmptyTree
+ else resolveClassTag(pos, pt1) match {
+ case tree if unapplyMember(tree.tpe).exists => tree
+ case _ => devWarning(s"Cannot create runtime type test for $pt1") ; EmptyTree
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
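For reference, the extractor shapes enumerated in the header comment of the new PatternTypers.scala can be exercised with a small self-contained sketch like the one below; the object names Foo, Bar and Baz here are illustrative only.

// Sketch only: the same pattern shape can be served by a case class,
// a fixed-arity unapply, or a variadic unapplySeq.
object ExtractorShapesSketch {
  case class Foo(a: Int, b: Int)

  object Bar {
    // fixed-arity extractor: yields exactly two values
    def unapply(x: (Int, Int)): Option[(Int, Int)] = Some(x)
  }

  object Baz {
    // variadic extractor: one fixed value plus a sequence tail,
    // i.e. the Option[(Int, Seq[Int])] shape from the header comment
    def unapplySeq(xs: List[Int]): Option[(Int, Seq[Int])] = xs match {
      case head :: tail => Some((head, tail))
      case Nil          => None
    }
  }

  def demo(): Unit = {
    Foo(1, 2)     match { case Foo(a, b)      => println(a + b) }
    ((3, 4))      match { case Bar(a, b)      => println(a + b) }
    List(5, 6, 7) match { case Baz(a, b @ _*) => println(a + b.sum) }
  }
}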
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 1b6963b598..32e908e03b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -113,6 +113,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
+ @inline final def savingInPattern[A](body: => A): A = {
+ val saved = inPattern
+ try body finally inPattern = saved
+ }
+
var checkedCombinations = Set[List[Type]]()
// only one overloaded alternative is allowed to define default arguments
@@ -211,7 +216,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
// Delaying calling memberType as long as possible
- if (inherited ne NoSymbol) {
+ if (inherited.exists) {
val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
@@ -1371,6 +1376,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
}
+ private def checkByNameRightAssociativeDef(tree: DefDef) {
+ tree match {
+ case DefDef(_, name, _, params :: _, _, _) =>
+ if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
+ unit.warning(tree.pos,
+ "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.")
+ case _ =>
+ }
+ }
+
/** Check that a deprecated val or def does not override a
* concrete, non-deprecated method. If it does, then
* deprecation is meaningless.
@@ -1399,21 +1414,40 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
false
}
- private def checkTypeRef(tp: Type, tree: Tree) = tp match {
+ private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match {
case TypeRef(pre, sym, args) =>
checkDeprecated(sym, tree.pos)
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
- if (!tp.isHigherKinded)
+ if (!tp.isHigherKinded && !skipBounds)
checkBounds(tree, pre, sym.owner, sym.typeParams, args)
case _ =>
}
- private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach (tp => checkTypeRef(tp, tree))
+ private def checkTypeRefBounds(tp: Type, tree: Tree) = {
+ var skipBounds = false
+ tp match {
+ case AnnotatedType(ann :: Nil, underlying, selfSym) if ann.symbol == UncheckedBoundsClass =>
+ skipBounds = true
+ underlying
+ case TypeRef(pre, sym, args) =>
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
+ tp
+ case _ =>
+ tp
+ }
+ }
+
+ private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach { tp =>
+ checkTypeRef(tp, tree, skipBounds = false)
+ checkTypeRefBounds(tp, tree)
+ }
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
def applyChecks(annots: List[AnnotationInfo]) = {
+ annots foreach (annot => checkCompileTimeOnly(annot.atp.typeSymbol, annot.pos))
checkAnnotations(annots map (_.atp), tree)
transformTrees(annots flatMap (_.args))
}
@@ -1437,8 +1471,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
doTypeTraversal(tree) {
- case AnnotatedType(annots, _, _) => applyChecks(annots)
- case _ =>
+ case tp @ AnnotatedType(annots, _, _) =>
+ applyChecks(annots)
+ case tp =>
}
case _ =>
}
@@ -1516,7 +1551,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkCompileTimeOnly(sym, tree.pos)
checkDelayedInitSelect(qual, sym, tree.pos)
- if (sym eq NoSymbol)
+ if (!sym.exists)
devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
else if (sym.hasLocalFlag)
varianceValidator.checkForEscape(sym, currentClass)
@@ -1594,6 +1629,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
checkAccessibilityOfReferencedTypes(tree)
}
+ tree match {
+ case dd: DefDef => checkByNameRightAssociativeDef(dd)
+ case _ =>
+ }
tree
case Template(parents, self, body) =>
@@ -1619,13 +1658,27 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal(tree) { // check all bounds, except those that are existential type parameters
- case ExistentialType(tparams, tpe) =>
+ var skipBounds = false
+ // check all bounds, except those that are existential type parameters
+ // or those within types annotated with @uncheckedBounds
+ doTypeTraversal(tree) {
+ case tp @ ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
- case t: TypeRef =>
- checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree)
+ case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) =>
+ // SI-7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs
+ // which might not conform to the constraints.
+ skipBounds = true
+ case tp: TypeRef =>
+ val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp)
+ checkTypeRef(tpWithWildcards, tree, skipBounds)
case _ =>
}
+ if (skipBounds) {
+ tree.tpe = tree.tpe.map {
+ _.filterAnnotations(_.symbol != UncheckedBoundsClass)
+ }
+ }
+
tree
case TypeApply(fn, args) =>
@@ -1648,6 +1701,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
case Ident(name) =>
+ checkCompileTimeOnly(tree.symbol, tree.pos)
transformCaseApply(tree,
if (name != nme.WILDCARD && name != tpnme.WILDCARD_STAR) {
assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug
@@ -1667,19 +1721,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ => tree
}
+
// skip refchecks in patterns....
result = result match {
case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
+ val pat1 = savingInPattern {
+ inPattern = true
+ transform(pat)
+ }
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
- val old = inPattern
- inPattern = true
- val res = deriveLabelDef(result)(transform)
- inPattern = old
- res
+ savingInPattern {
+ inPattern = true
+ deriveLabelDef(result)(transform)
+ }
+ case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) =>
+ savingInPattern {
+ // SI-7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals
+ // that we are in the user-supplied code in the case body.
+ //
+ // Relies on the translation of:
+ // (null: Any) match { case x: List[_] => x; x.reverse; case _ => }
+ // to:
+ // <synthetic> val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]);
+ // matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply.
+ inPattern = false
+ super.transform(result)
+ }
+ case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
case _ =>
super.transform(result)
}
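The new checkByNameRightAssociativeDef lint above targets the surprise described in SI-1980. A small sketch of the situation, with an illustrative class and operator name, follows; defining such an operator under -Xlint now produces the warning quoted in the hunk.

// Sketch only: a right-associative operator (name ending in ':') with a
// by-name parameter. Because `expr +=: sink` is desugared to roughly
// { val tmp = expr; sink.+=:(tmp) }, the left operand is evaluated eagerly,
// so the by-name parameter gives no deferral at such infix call sites.
object RightAssocByNameSketch {
  class Sink {
    def +=:(x: => Int): Sink = this // would now trigger the -Xlint warning
  }

  def demo(): Sink = {
    def sideEffect(): Int = { println("evaluated eagerly"); 42 }
    val sink = new Sink
    sideEffect() +=: sink // prints immediately, despite x being by-name
  }
}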
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 6933b10a0a..12d6bb2e6a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -269,8 +269,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
&& sym.enclClass != currentClass
&& !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
&& !sym.owner.isTrait
- && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass
+ && qual.symbol.info.member(sym.name).exists
&& !needsProtectedAccessor(sym, tree.pos)
)
if (shouldEnsureAccessor) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index b4a37f9943..1af176736b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -143,7 +143,7 @@ trait TypeDiagnostics {
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if ((sym eq null) || (sym eq NoSymbol)) {
+ if (!tree.hasExistingSymbol) {
if (isTyperInPattern) patternMessage
else exprMessage
}
@@ -350,11 +350,14 @@ trait TypeDiagnostics {
val strings = mutable.Map[String, Set[TypeDiag]]() withDefaultValue Set()
val names = mutable.Map[Name, Set[TypeDiag]]() withDefaultValue Set()
- def record(t: Type, sym: Symbol) = {
- val diag = TypeDiag(t, sym)
+ val localsSet = locals.toSet
- strings("" + t) += diag
- names(sym.name) += diag
+ def record(t: Type, sym: Symbol) = {
+ if (!localsSet(sym)) {
+ val diag = TypeDiag(t, sym)
+ strings("" + t) += diag
+ names(sym.name) += diag
+ }
}
for (tpe <- types ; t <- tpe) {
t match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index d2ff47626d..dd16b5be85 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -26,7 +26,7 @@ import Mode._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Adaptations with Tags with TypersTracking {
+trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers {
self: Analyzer =>
import global._
@@ -36,7 +36,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
final def forArgMode(fun: Tree, mode: Mode) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
- // printResult(s"forArgMode($fun, $mode) gets SCCmode")(mode | SCCmode)
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
// Also used to cache imports type-checked by namer.
@@ -63,6 +62,10 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
sealed abstract class SilentResult[+T] {
+ @inline final def fold[U](none: => U)(f: T => U): U = this match {
+ case SilentResultValue(value) => f(value)
+ case _ => none
+ }
@inline final def map[U](f: T => U): SilentResult[U] = this match {
case SilentResultValue(value) => SilentResultValue(f(value))
case x: SilentTypeError => x
@@ -90,13 +93,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
private final val InterpolatorCodeRegex = """\$\{.*?\}""".r
private final val InterpolatorIdentRegex = """\$\w+""".r
- // when true:
- // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
- // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
- protected def newPatternMatching = true // presently overridden in the presentation compiler
-
- abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed }
import TyperErrorGen._
@@ -912,121 +909,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
}
- /*
- * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
- * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
- *
- * Consider the following example:
- *
- * class AbsWrapperCov[+A]
- * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
- *
- * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
- * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
- * wrapped // : Wrapped[_ <: T]
- * }
- *
- * this method should type check if and only if Wrapped is covariant in its type parameter
- *
- * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
- * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
- * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
- *
- * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
- * we can simply replace skolems that represent method type parameters as seen from the method's body
- * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
- * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
- *
- * see test/files/../t5189*.scala
- */
- def adaptConstrPattern(): Tree = { // (5)
- def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
- val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
- // if the tree's symbol's type does not define an extractor, maybe the tree's type does.
- // this is the case when we encounter an arbitrary tree as the target of an unapply call
- // (rather than something that looks like a constructor call.) (for now, this only happens
- // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
- // more common place)
- val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
- def convertToCaseConstructor(clazz: Symbol): TypeTree = {
- // convert synthetic unapply of case class to case class constructor
- val prefix = tree.tpe.prefix
- val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
- .setOriginal(tree)
-
- val skolems = new mutable.ListBuffer[TypeSymbol]
- object variantToSkolem extends TypeMap(trackVariance = true) {
- def apply(tp: Type) = mapOver(tp) match {
- // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
- case TypeRef(NoPrefix, tpSym, Nil) if !variance.isInvariant && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
- // must initialize or tpSym.tpe might see random type params!!
- // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
- // TODO: why is that??
- tpSym.initialize
- val bounds = if (variance.isPositive) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- // origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
- skolems += skolem
- skolem.tpe
- case tp1 => tp1
- }
- }
-
- // have to open up the existential and put the skolems in scope
- // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
- val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
- val freeVars = skolems.toList
-
- // use "tree" for the context, not context.tree: don't make another CaseDef context,
- // as instantiateTypeVar's bounds would end up there
- val ctorContext = context.makeNewScope(tree, context.owner)
- freeVars foreach ctorContext.scope.enter
- newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
-
- // simplify types without losing safety,
- // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
- val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
- val extrapolated = tree1.tpe match {
- case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
- ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
- copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
- case tp => tp
- }
-
- // once the containing CaseDef has been type checked (see typedCase),
- // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
- tree1 setType extrapolated
- }
-
- if (extractor != NoSymbol) {
- // if we did some ad-hoc overloading resolution, update the tree's symbol
- // do not update the symbol if the tree's symbol's type does not define an unapply member
- // (e.g. since it's some method that returns an object with an unapply member)
- if (overloadedExtractorOfObject != NoSymbol)
- tree setSymbol overloadedExtractorOfObject
-
- tree.tpe match {
- case OverloadedType(pre, alts) => tree setType overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
- case _ =>
- }
- val unapply = unapplyMember(extractor.tpe)
- val clazz = unapplyParameterType(unapply)
-
- if (unapply.isCase && clazz.isCase) {
- convertToCaseConstructor(clazz)
- } else {
- tree
- }
- } else {
- val clazz = tree.tpe.typeSymbol.linkedClassOfClass
- if (clazz.isCase)
- convertToCaseConstructor(clazz)
- else
- CaseClassConstructorError(tree)
- }
- }
-
def insertApply(): Tree = {
assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
@@ -1213,7 +1095,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree))
macroExpandApply(this, tree, mode, pt)
else if (mode.typingConstructorPattern)
- adaptConstrPattern()
+ typedConstructorPattern(tree, pt)
else if (shouldInsertApply(tree))
insertApply()
else if (hasUndetsInMonoMode) { // (9)
@@ -2495,7 +2377,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
// list, so substitute the final result type of the method, i.e. the type
// of the case class.
if (pat1.tpe.paramSectionCount > 0)
- pat1 setType pat1.tpe.finalResultType
+ pat1 modifyType (_.finalResultType)
for (bind @ Bind(name, _) <- cdef.pat)
if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
@@ -2510,8 +2392,10 @@ trait Typers extends Adaptations with Tags with TypersTracking {
// insert a cast if something typechecked under the GADT constraints,
// but not in real life (i.e., now that's we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
- if (isFullyDefined(pt) && !(body1.tpe <:< pt))
+ if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
+ log(s"Adding cast to pattern because ${body1.tpe} does not conform to expected type $pt")
body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
+ }
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
@@ -3026,32 +2910,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
def typedArgs(args: List[Tree], mode: Mode) =
args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
- /** Type trees in `args0` against corresponding expected type in `adapted0`.
- *
- * The mode in which each argument is typed is derived from `mode` and
- * whether the arg was originally by-name or var-arg (need `formals0` for that)
- * the default is by-val, of course.
- *
- * (docs reverse-engineered -- AM)
- */
- def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
- if (args.isEmpty || adapted.isEmpty) Nil
- else {
- // No formals left or * indicates varargs.
- val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
- val isByName = formals.nonEmpty && isByNameParamType(formals.head)
- def typedMode = if (isByName) mode.onlySticky else mode.onlySticky | BYVALmode
- def body = typedArg(args.head, mode, typedMode, adapted.head)
- def arg1 = if (isVarArgs) context.withinStarPatterns(body) else body
-
- // formals may be empty, so don't call tail
- arg1 :: loop(args.tail, formals drop 1, adapted.tail)
- }
- }
- loop(args0, formals0, adapted0)
- }
-
/** Does function need to be instantiated, because a missing parameter
* in an argument closure overlaps with an uninstantiated formal?
*/
@@ -3288,22 +3146,20 @@ trait Typers extends Adaptations with Tags with TypersTracking {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
def handleMonomorphicCall: Tree = {
- // In order for checkDead not to be misled by the unfortunate special
- // case of AnyRef#synchronized (which is implemented with signature T => T
- // but behaves as if it were (=> T) => T) we need to know what is the actual
- // target of a call. Since this information is no longer available from
- // typedArg, it is recorded here.
- val args1 =
- // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
- // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
- // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
- // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
- // casting breaks SI-6145,
- // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
- if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+ // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+ // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+ // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+ // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+ // casting breaks SI-6145,
+ // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+ def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol)
+
+ val args1 = (
+ if (noExpectedType)
typedArgs(args, forArgMode(fun, mode))
else
- typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+ typedArgsForFormals(args, paramTypes, forArgMode(fun, mode))
+ )
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
@@ -3387,129 +3243,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
}
- def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
- def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
- def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
-
- val otpe = fun.tpe
-
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
-
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
-
- val uncheckedTypeExtractor =
- if (unappType.paramTypes.nonEmpty)
- extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
- else None
-
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println(s"UNAPP: need to typetest, arg: ${arg.tpe} unappType: $unappType")
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
-
- val typer1 = newTyper(unapplyContext)
- val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
-
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg setType pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
-
- // clearing the type is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapp), List(arg)))
-
- if (fun1.tpe.isErroneous) duplErrTree
- else {
- val resTp = fun1.tpe.finalResultType.dealiasWiden
- val nbSubPats = args.length
- val (formals, formalsExpanded) =
- extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
- if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
- else {
- val args1 = typedArgs(args, mode, formals, formalsExpanded)
- val pt1 = ensureFullyDefined(pt) // SI-1048
- val itype = glb(List(pt1, arg.tpe))
- arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
- val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
-
- // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
- // skip if the unapply's type is not a method type with (at least, but really it should be exactly) one argument
- // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
- if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
- else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- }
- }
- }
-
- def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
- // TODO: disable when in unchecked match
- // we don't create a new Context for a Match, so find the CaseDef, then go out one level and navigate back to the match that has this case
- // val thisCase = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- // val unchecked = thisCase.outer.tree.collect{case Match(selector, cases) if cases contains thisCase => selector} match {
- // case List(Typed(_, tpt)) if tpt.tpe hasAnnotation UncheckedClass => true
- // case t => println("outer tree: "+ (t, thisCase, thisCase.outer.tree)); false
- // }
- // println("wrapClassTagUnapply"+ (!isPastTyper && infer.containsUnchecked(pt), pt, uncheckedPattern))
- // println("wrapClassTagUnapply: "+ extractor)
- // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true))
-
- val args = List(uncheckedPattern)
- val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
- // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
- // and re-typechecks of the target of the unapply call in PATTERNmode,
- // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
- // but an arbitrary tree as is the case here
- doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
- }
-
- // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
- // return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
- // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
- // but at least make a proper type before passing it elsewhere
- val pt1 = pt.dealiasWiden match {
- case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
- case pt1 => pt1
- }
- pt1 match {
- // if at least one of the types in an intersection is checkable, use the checkable ones
- // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
- // Coll is an abstract type, but SeqLike of course is not
- case RefinedType(ps, _) if ps.length > 1 && (ps exists infer.isCheckable) =>
- None
-
- case ptCheckable if infer isUncheckable ptCheckable =>
- val classTagExtractor = resolveClassTag(pos, ptCheckable)
-
- if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
- Some(classTagExtractor)
- else None
-
- case _ => None
- }
- }
-
/**
* Convert an annotation constructor call into an AnnotationInfo.
*/
@@ -3757,11 +3490,15 @@ trait Typers extends Adaptations with Tags with TypersTracking {
/** convert local symbols and skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
- def defines(tree: Tree, sym: Symbol) =
- sym.isExistentialSkolem && sym.unpackLocation == tree ||
- tree.isDef && tree.symbol == sym
- def isVisibleParameter(sym: Symbol) =
- sym.isParameter && (sym.owner == owner) && (sym.isType || !owner.isAnonymousFunction)
+ def defines(tree: Tree, sym: Symbol) = (
+ sym.isExistentialSkolem && sym.unpackLocation == tree
+ || tree.isDef && tree.symbol == sym
+ )
+ def isVisibleParameter(sym: Symbol) = (
+ sym.isParameter
+ && (sym.owner == owner)
+ && (sym.isType || !owner.isAnonymousFunction)
+ )
def containsDef(owner: Symbol, sym: Symbol): Boolean =
(!sym.hasPackageFlag) && {
var o = sym.owner
@@ -3980,7 +3717,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
*
*/
def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
- log(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
+ debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
val treeInfo.Applied(treeSelection, _, _) = tree
def isDesugaredApply = treeSelection match {
case Select(`qual`, nme.apply) => true
@@ -4992,14 +4729,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
-
- def typedUnApply(tree: UnApply) = {
- val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
- val args1 = map2(tree.args, tpes)(typedPattern)
- treeCopy.UnApply(tree, fun1, args1) setType pt
- }
-
def issueTryWarnings(tree: Try): Try = {
def checkForCatchAll(cdef: CaseDef) {
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
@@ -5051,61 +4780,28 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
def typedTyped(tree: Typed) = {
- val expr = tree.expr
- val tpt = tree.tpt
- tpt match {
- case Function(List(), EmptyTree) =>
- // find out whether the programmer is trying to eta-expand a macro def
- // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
- // that typecheck must not trigger macro expansions, so we explicitly prohibit them
- // however we cannot do `context.withMacrosDisabled`
- // because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
- exprTyped match {
- case macroDef if treeInfo.isMacroApplication(macroDef) =>
- MacroEtaError(exprTyped)
- case _ =>
- typedEta(checkDead(exprTyped))
- }
-
- case t if treeInfo isWildcardStarType t =>
- val exprTyped = typed(expr, mode.onlySticky)
- def subArrayType(pt: Type) =
- if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
- else {
- val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- newExistentialType(List(tparam), arrayType(tparam.tpe))
- }
-
- val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
- case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
- }
- exprAdapted.tpe.baseType(baseClass) match {
- case TypeRef(_, _, List(elemtp)) =>
- treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
- case _ =>
- setError(tree)
+ if (treeInfo isWildcardStarType tree.tpt)
+ typedStarInPattern(tree, mode.onlySticky, pt)
+ else if (mode.inPatternMode)
+ typedInPattern(tree, mode.onlySticky, pt)
+ else tree match {
+ // find out whether the programmer is trying to eta-expand a macro def
+ // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+ // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+ // however we cannot do `context.withMacrosDisabled`
+ // because `expr` might contain nested macro calls (see SI-6673)
+ //
+ // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
+ // which means trailing underscore.
+ case Typed(expr, Function(Nil, EmptyTree)) =>
+ typed1(suppressMacroExpansion(expr), mode, pt) match {
+ case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
+ case exprTyped => typedEta(checkDead(exprTyped))
}
-
- case _ =>
- val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst)
- val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
-
- if (mode.inPatternMode) {
- val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
- // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- val ptDefined = ensureFullyDefined(pt) // FIXME this is probably redundant now that we don't dropExistenial in pattern mode.
- val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
- treeTyped setType ownType
-
- uncheckedTypeExtractor match {
- case None => treeTyped
- case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
- }
- } else
- treeTyped setType tptTyped.tpe
+ case Typed(expr, tpt) =>
+ val tpt1 = typedType(tpt, mode) // type the ascribed type first
+ val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type
+ treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe
}
}
@@ -5241,11 +4937,13 @@ trait Typers extends Adaptations with Tags with TypersTracking {
case _ => tree
}
}
- else
+ else {
// we should get here only when something before failed
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
+ devWarning(tree.pos, s"Assigning Any type to TypeTree because tree.original is null: tree is $tree/${System.identityHashCode(tree)}, sym=${tree.symbol}, tpe=${tree.tpe}")
tree setType AnyTpe
+ }
}
def typedFunction(fun: Function) = {
if (fun.symbol == NoSymbol)
@@ -5254,52 +4952,80 @@ trait Typers extends Adaptations with Tags with TypersTracking {
typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
}
- // begin typed1
- //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
- tree match {
- case tree: Ident => typedIdentOrWildcard(tree)
- case tree: Select => typedSelectOrSuperCall(tree)
- case tree: Apply => typedApply(tree)
+ // Trees only allowed during pattern mode.
+ def typedInPatternMode(tree: Tree): Tree = tree match {
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case _ => abort(s"unexpected tree in pattern mode: ${tree.getClass}\n$tree")
+ }
+
+ def typedTypTree(tree: TypTree): Tree = tree match {
case tree: TypeTree => typedTypeTree(tree)
- case tree: Literal => typedLiteral(tree)
- case tree: This => typedThis(tree)
- case tree: ValDef => typedValDef(tree)
- case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
- case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
- case tree: If => typedIf(tree)
- case tree: TypeApply => typedTypeApply(tree)
case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
- case tree: Bind => typedBind(tree)
- case tree: Function => typedFunction(tree)
- case tree: Match => typedVirtualizedMatch(tree)
- case tree: New => typedNew(tree)
- case tree: Assign => typedAssign(tree.lhs, tree.rhs)
- case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
- case tree: Super => typedSuper(tree)
case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
- case tree: Typed => typedTyped(tree)
- case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
- case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
- case tree: TypeDef => typedTypeDef(tree)
- case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
- case tree: PackageDef => typedPackageDef(tree)
- case tree: DocDef => typedDocDef(tree, mode, pt)
- case tree: Annotated => typedAnnotated(tree)
case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
- case tree: Return => typedReturn(tree)
- case tree: Try => typedTry(tree)
- case tree: Throw => typedThrow(tree)
- case tree: Alternative => typedAlternative(tree)
- case tree: Star => typedStar(tree)
- case tree: UnApply => typedUnApply(tree)
- case tree: ArrayValue => typedArrayValue(tree)
- case tree: ApplyDynamic => typedApplyDynamic(tree)
- case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree")
+ }
+
+ def typedMemberDef(tree: MemberDef): Tree = tree match {
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case _ => abort(s"unexpected member def: ${tree.getClass}\n$tree")
+ }
+
+ // Trees not allowed during pattern mode.
+ def typedOutsidePatternMode(tree: Tree): Tree = tree match {
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: DocDef => typedDocDef(tree, mode, pt)
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ }
+
+ // Trees allowed in or out of pattern mode.
+ def typedInAnyMode(tree: Tree): Tree = tree match {
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: This => typedThis(tree) // SI-6104
+ case tree: UnApply => abort(s"unexpected UnApply $tree") // turns out UnApply never reaches here
+ case _ =>
+ if (mode.inPatternMode)
+ typedInPatternMode(tree)
+ else
+ typedOutsidePatternMode(tree)
+ }
+
+ // begin typed1
+ tree match {
+ case tree: TypTree => typedTypTree(tree)
+ case tree: MemberDef => typedMemberDef(tree)
+ case _ => typedInAnyMode(tree)
}
}
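
As context for the reorganised `typed1` dispatch and the simplified `Typed` case above: a `Typed` node plays two roles, a type test when it occurs in pattern position and an ascription (typed against `tpt1.tpe.deconst`) everywhere else. A minimal, self-contained sketch in plain Scala, independent of compiler internals; the object name `TypedNodeDemo` is illustrative only:

    object TypedNodeDemo {
      // `case s: String` below is a Typed tree in pattern mode (a type test);
      // `42: Any` is a Typed tree outside patterns (an ascription).
      def describe(x: Any): String = x match {
        case s: String => s"string pattern: $s"
        case _         => "something else"
      }

      def main(args: Array[String]): Unit = {
        val ascribed = 42: Any
        println(describe("hi"))      // string pattern: hi
        println(describe(ascribed))  // something else
      }
    }
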
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 47c859bb5c..5049fec65b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -12,8 +12,7 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Unapplies extends ast.TreeDSL
-{
+trait Unapplies extends ast.TreeDSL {
self: Analyzer =>
import global._
@@ -21,7 +20,8 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
- private val unapplyParamName = nme.x_0
+ private def unapplyParamName = nme.x_0
+ private def caseMods = Modifiers(SYNTHETIC | CASE)
// In the typeCompleter (templateSig) of a case class (resp. its module),
// synthetic `copy` (resp. `apply`, `unapply`) methods are added. To compute
@@ -30,39 +30,17 @@ trait Unapplies extends ast.TreeDSL
// moduleClass symbol of the companion module.
class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
- /** returns type list for return type of the extraction
- * @see extractorFormalTypes
+ /** Returns unapply or unapplySeq if available, without further checks.
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
- assert(ufn.isMethod, ufn)
- val nbSubPats = args.length
- //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
- ufn.name match {
- case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
- if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
- else formals
- case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
- }
- }
+ def directUnapplyMember(tp: Type): Symbol = (tp member nme.unapply) orElse (tp member nme.unapplySeq)
- /** returns unapply or unapplySeq if available */
- def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
- case NoSymbol => tp member nme.unapplySeq
- case unapp => unapp
- }
+ /** Filters out unapplies with multiple (non-implicit) parameter lists,
+ * as they cannot be used as extractors
+ */
+ def unapplyMember(tp: Type): Symbol = directUnapplyMember(tp) filter (sym => !hasMultipleNonImplicitParamLists(sym))
object ExtractorType {
- def unapply(tp: Type): Option[Symbol] = {
- val member = unapplyMember(tp)
- if (member.exists) Some(member) else None
- }
- }
-
- /** returns unapply member's parameter type. */
- def unapplyParameterType(extractor: Symbol) = extractor.tpe.params match {
- case p :: Nil => p.tpe.typeSymbol
- case _ => NoSymbol
+ def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption
}
def copyUntyped[T <: Tree](tree: T): T =
@@ -93,25 +71,19 @@ trait Unapplies extends ast.TreeDSL
*/
private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
def caseFieldAccessorValue(selector: ValDef): Tree = {
- val accessorName = selector.name
- val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
- case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
- }
- privateLocalParamAccessor match {
- case None =>
- // Selecting by name seems to be the most straight forward way here to
- // avoid forcing the symbol of the case class in order to list the accessors.
- val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
- Ident(param) DOT maybeRenamedAccessorName
- case Some(sym) =>
- // But, that gives a misleading error message in neg/t1422.scala, where a case
- // class has an illegal private[this] parameter. We can detect this by checking
- // the modifiers on the param accessors.
- //
- // We just generate a call to that param accessor here, which gives us an inaccessible
- // symbol error, as before.
- Ident(param) DOT sym
+ // Selecting by name seems to be the most straightforward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ def selectByName = Ident(param) DOT caseAccessorName(caseclazz.symbol, selector.name)
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ def localAccessor = caseclazz.impl.body find {
+ case t @ ValOrDefDef(mods, selector.name, _, _) => mods.isPrivateLocal
+ case _ => false
}
+ localAccessor.fold(selectByName)(Ident(param) DOT _.symbol)
}
// Working with trees, rather than symbols, to avoid cycles like SI-5082
@@ -153,8 +125,6 @@ trait Unapplies extends ast.TreeDSL
gen.mkTemplate(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
}
- private val caseMods = Modifiers(SYNTHETIC | CASE)
-
/** The apply method corresponding to a case class
*/
def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
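
The reworked `unapplyMember` above keeps only `unapply`/`unapplySeq` members that can actually serve as extractors, filtering out those with multiple non-implicit parameter lists. A standalone sketch of the distinction, using only the standard library; the object names are made up for illustration:

    object Email {
      // Conventional single-parameter-list unapply: usable as an extractor.
      def unapply(s: String): Option[(String, String)] = s.split("@") match {
        case Array(user, host) => Some((user, host))
        case _                 => None
      }
    }

    object NotAnExtractor {
      // Hypothetical unapply with a second non-implicit parameter list; the
      // filter above excludes members of this shape, so `case NotAnExtractor(n)`
      // would not be accepted as a pattern.
      def unapply(s: String)(radix: Int): Option[Int] =
        scala.util.Try(Integer.parseInt(s, radix)).toOption
    }

    object UnapplyDemo extends App {
      "user@example.org" match {
        case Email(user, host) => println(s"user=$user host=$host")
        case _                 => println("not an email")
      }
    }
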
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 7f9b81e1ec..906a575d90 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -16,6 +16,7 @@ import File.pathSeparator
import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
import java.net.MalformedURLException
import java.util.regex.PatternSyntaxException
+import scala.reflect.runtime.ReflectionUtils
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -80,7 +81,7 @@ object ClassPath {
}
/** A useful name filter. */
- def isTraitImplementation(name: String) = name endsWith "$class.class"
+ def isTraitImplementation(name: String) = ReflectionUtils.isTraitImplementation(name)
def specToURL(spec: String): Option[URL] =
try Some(new URL(spec))
@@ -139,7 +140,7 @@ object ClassPath {
}
object DefaultJavaContext extends JavaContext {
- override def isValidName(name: String) = !isTraitImplementation(name)
+ override def isValidName(name: String) = !ReflectionUtils.scalacShouldntLoadClassfile(name)
}
private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
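
The change above routes the long-standing `$class.class` filename test through `ReflectionUtils`. The convention itself is easy to restate in isolation; this is a simplified sketch of the name filter only, not the fuller `scalacShouldntLoadClassfile` check now used by `DefaultJavaContext`:

    object NameFilterDemo extends App {
      // Trait implementation classes are emitted as files ending in "$class.class".
      def looksLikeTraitImpl(name: String): Boolean = name endsWith "$class.class"

      println(looksLikeTraitImpl("Foo$class.class")) // true
      println(looksLikeTraitImpl("Foo.class"))       // false
    }
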
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 752aac5c8c..ea3c9d8dde 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -8,6 +8,7 @@ package tools
package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
+import scala.compat.Platform.EOL
package object util {
@@ -78,6 +79,14 @@ package object util {
s"$clazz$msg @ $frame"
}
+ def stackTracePrefixString(ex: Throwable)(p: StackTraceElement => Boolean): String = {
+ val frames = ex.getStackTrace takeWhile p map (" at " + _)
+ val msg = ex.getMessage match { case null => "" ; case s => s": $s" }
+ val clazz = ex.getClass.getName
+
+ s"$clazz$msg" +: frames mkString EOL
+ }
+
lazy val trace = new SimpleTracer(System.out)
@deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
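
The new `stackTracePrefixString` helper renders the exception header plus only the leading frames accepted by a predicate, joined with the platform EOL. A hedged, standalone re-implementation of the same idea that can be run without the `scala.tools.nsc.util` package:

    object StackPrefixDemo extends App {
      def stackTracePrefixString(ex: Throwable)(p: StackTraceElement => Boolean): String = {
        val frames = ex.getStackTrace.takeWhile(p).map("  at " + _)
        val msg    = Option(ex.getMessage).fold("")(s => s": $s")
        (s"${ex.getClass.getName}$msg" +: frames).mkString(System.lineSeparator)
      }

      val ex = new IllegalStateException("boom")
      // Keep only frames from this demo object; the predicate is the caller's choice.
      println(stackTracePrefixString(ex)(_.getClassName contains "StackPrefixDemo"))
    }
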
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 8e1bcb5f87..4e3761454d 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -94,7 +94,8 @@ abstract class MacroImplementations {
def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
- def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx,
+ "conversions must follow a splice; use %% for literal %, %n for newline")
// STEP 1: handle argument conversion
// 1) "...${smth}" => okay, equivalent to "...${smth}%s"
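
The reworded diagnostic above concerns the `f` interpolator's escaping rules: a conversion must directly follow a splicee, a literal percent sign is written `%%`, and `%n` yields a newline. A tiny runnable example of the accepted forms:

    object PercentDemo extends App {
      val ratio = 0.1234
      // `%2.2f` follows the splicee directly; `%%` is a literal percent sign.
      println(f"ratio: ${ratio * 100}%2.2f%%")   // ratio: 12.34%
    }
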
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index afaca3396c..57ebe1b30d 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -12,6 +12,7 @@ import java.lang.{Class => jClass}
import scala.compat.Platform.EOL
import scala.reflect.NameTransformer
import scala.reflect.api.JavaUniverse
+import scala.reflect.io.NoAbstractFile
abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
@@ -136,7 +137,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
- phase = (new Run).typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
+ val run = new Run
+ run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
+ phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
reporter.reset()
@@ -269,17 +272,13 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def parse(code: String): Tree = {
- val run = new Run
reporter.reset()
- val wrappedCode = "object wrapper {" + EOL + code + EOL + "}"
- val file = new BatchSourceFile("<toolbox>", wrappedCode)
+ val file = new BatchSourceFile("<toolbox>", code)
val unit = new CompilationUnit(file)
- phase = run.parserPhase
- val parser = newUnitParser(unit)
- val wrappedTree = parser.parse()
+ val parsed = newUnitParser(unit).parseStats()
throwIfErrors()
- val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
parsed match {
+ case Nil => EmptyTree
case expr :: Nil => expr
case stats :+ expr => Block(stats, expr)
}
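
With the change above, toolbox parsing no longer wraps the code in a synthetic `object wrapper`: `parseStats()` yields the statements directly, and the result is `EmptyTree` for no statements, the expression itself for one, or a `Block` otherwise. A usage sketch, assuming scala-compiler and scala-reflect are on the classpath:

    import scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox

    object ToolBoxParseDemo extends App {
      val tb = currentMirror.mkToolBox()
      println(tb.parse("1 + 1"))            // single expression => that expression
      println(tb.parse("val x = 1; x + 1")) // several stats     => Block(stats, expr)
    }
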
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 9a6ba56c18..18a806e5ff 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -17,32 +17,38 @@ trait Parsers { self: Quasiquotes =>
abstract class Parser extends {
val global: self.global.type = self.global
} with ScalaParser {
- /** Wraps given code to obtain a desired parser mode.
- * This way we can just re-use standard parser entry point.
- */
- def wrapCode(code: String): String =
- s"object wrapper { self => $EOL $code $EOL }"
-
- def unwrapTree(wrappedTree: Tree): Tree = {
- val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
- parsed match {
- case tree :: Nil => tree
- case stats :+ tree => Block(stats, tree)
- }
- }
-
def parse(code: String): Tree = {
try {
- val wrapped = wrapCode(code)
- debug(s"wrapped code\n=${wrapped}\n")
- val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, wrapped)
- val tree = new QuasiquoteParser(file).parse()
- unwrapTree(tree)
+ val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, code)
+ new QuasiquoteParser(file).parseRule(entryPoint)
} catch {
- case mi: MalformedInput => c.abort(c.macroApplication.pos, s"syntax error: ${mi.msg}")
+ case mi: MalformedInput => c.abort(correspondingPosition(mi.offset), mi.msg)
+ }
+ }
+
+ def correspondingPosition(offset: Int): Position = {
+ val posMapList = posMap.toList
+ def containsOffset(start: Int, end: Int) = start <= offset && offset <= end
+ def fallbackPosition = posMapList match {
+ case (pos1, (start1, end1)) :: _ if start1 > offset => pos1
+ case _ :+ ((pos2, (start2, end2))) if offset > end2 => pos2.withPoint(pos2.point + (end2 - start2))
}
+ posMapList.sliding(2).collect {
+ case (pos1, (start1, end1)) :: _ if containsOffset(start1, end1) => (pos1, offset - start1)
+ case (pos1, (_, end1)) :: (_, (start2, _)) :: _ if containsOffset(end1, start2) => (pos1, end1)
+ case _ :: (pos2, (start2, end2)) :: _ if containsOffset(start2, end2) => (pos2, offset - start2)
+ }.map { case (pos, offset) =>
+ pos.withPoint(pos.point + offset)
+ }.toList.headOption.getOrElse(fallbackPosition)
}
+ override def token2string(token: Int): String = token match {
+ case EOF => "end of quote"
+ case _ => super.token2string(token)
+ }
+
+ def entryPoint: QuasiquoteParser => Tree
+
class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) {
override val treeBuilder = new ParserTreeBuilder {
// q"(..$xs)"
@@ -73,9 +79,11 @@ trait Parsers { self: Quasiquotes =>
} else
super.caseClause()
- def isHole = isIdent && holeMap.contains(in.name)
+ def isHole: Boolean = isIdent && holeMap.contains(in.name)
- override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+ override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+
+ override def isCaseDefStart: Boolean = super.isCaseDefStart || (in.token == EOF)
override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
@@ -85,6 +93,12 @@ trait Parsers { self: Quasiquotes =>
override def isDclIntro: Boolean = super.isDclIntro || (isHole && lookingAhead { isDclIntro })
+ override def isStatSep(token: Int) = token == EOF || super.isStatSep(token)
+
+ override def expectedMsg(token: Int): String =
+ if (isHole) expectedMsgTemplate(token2string(token), "splicee")
+ else super.expectedMsg(token)
+
// $mods def foo
// $mods T
override def readAnnots(annot: => Tree): List[Tree] = in.token match {
@@ -101,34 +115,26 @@ trait Parsers { self: Quasiquotes =>
}
}
- object TermParser extends Parser
-
- object CaseParser extends Parser {
- override def wrapCode(code: String) = super.wrapCode("something match { case " + code + " }")
-
- override def unwrapTree(wrappedTree: Tree): Tree = {
- val Match(_, head :: tail) = super.unwrapTree(wrappedTree)
- if (tail.nonEmpty)
- c.abort(c.macroApplication.pos, "Can't parse more than one casedef, consider generating a match tree instead")
- head
+ object TermParser extends Parser {
+ def entryPoint = _.templateStats() match {
+ case Nil => EmptyTree
+ case tree :: Nil => tree
+ case stats :+ tree => Block(stats, tree)
}
}
- object PatternParser extends Parser {
- override def wrapCode(code: String) = super.wrapCode("something match { case " + code + " => }")
-
- override def unwrapTree(wrappedTree: Tree): Tree = {
- val Match(_, List(CaseDef(pat, _, _))) = super.unwrapTree(wrappedTree)
- pat
- }
+ object TypeParser extends Parser {
+ def entryPoint = _.typ()
}
- object TypeParser extends Parser {
- override def wrapCode(code: String) = super.wrapCode("type T = " + code)
+ object CaseParser extends Parser {
+ def entryPoint = _.caseClause()
+ }
- override def unwrapTree(wrappedTree: Tree): Tree = {
- val TypeDef(_, _, _, rhs) = super.unwrapTree(wrappedTree)
- rhs
+ object PatternParser extends Parser {
+ def entryPoint = { parser =>
+ val pat = parser.noSeq.pattern1()
+ parser.treeBuilder.patvarTransformer.transform(pat)
}
}
}
\ No newline at end of file
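
Each quasiquote flavour now has its own parser entry point instead of a wrap/unwrap round trip: `q` uses template statements, `tq` a type, `cq` a single case clause, and `pq` a pattern. A user-side sketch, assuming scala-reflect on the classpath; the variable names are illustrative:

    import scala.reflect.runtime.universe._

    object QuasiquoteEntryDemo extends App {
      val term = q"List(1, 2, 3).map(_ + 1)"   // TermParser: templateStats()
      val tpe  = tq"Map[String, Int]"          // TypeParser: typ()
      val cse  = cq"x :: xs => x"              // CaseParser: caseClause()
      val pat  = pq"(a, b)"                    // PatternParser: pattern1()
      println(showRaw(pat))
    }
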
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index b680c25f76..b3ac1e293a 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -17,18 +17,31 @@ trait Placeholders { self: Quasiquotes =>
// Step 1: Transform Scala source with holes into vanilla Scala source
lazy val holeMap = new HoleMap()
+ lazy val posMap = mutable.ListMap[Position, (Int, Int)]()
lazy val code = {
val sb = new StringBuilder()
val sessionSuffix = randomUUID().toString.replace("-", "").substring(0, 8) + "$"
- foreach2(args, parts.init) { (tree, p) =>
- val (part, cardinality) = parseDots(p)
+ def appendPart(value: String, pos: Position) = {
+ val start = sb.length
+ sb.append(value)
+ val end = sb.length
+ posMap += pos -> (start, end)
+ }
+
+ def appendHole(tree: Tree, cardinality: Cardinality) = {
val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
- sb.append(part)
sb.append(placeholderName)
holeMap(placeholderName) = Hole(tree, cardinality)
}
- sb.append(parts.last)
+
+ foreach2(args, parts.init) { case (tree, (p, pos)) =>
+ val (part, cardinality) = parseDots(p)
+ appendPart(part, pos)
+ appendHole(tree, cardinality)
+ }
+ val (p, pos) = parts.last
+ appendPart(p, pos)
sb.toString
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index fe954e0bfd..ee99a5e280 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -17,7 +17,7 @@ abstract class Quasiquotes extends Parsers
lazy val (universe: Tree, args, parts, parse, reify) = c.macroApplication match {
case Apply(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), args0) =>
val parts1 = parts0.map {
- case Literal(Constant(s: String)) => s
+ case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
}
val reify0 = method0 match {
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index bdd6a02043..82f2c5dc74 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -8,8 +8,8 @@ package tools
package util
import scala.tools.reflect.WrappedProperties.AccessControl
-import scala.tools.nsc.{ Settings, GenericRunnerSettings }
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
+import scala.tools.nsc.{ Settings }
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index f260ee4093..3963447de3 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -396,7 +396,6 @@ abstract class CPSAnnotationChecker extends CPSUtils {
* for a tree. All this should do is add annotations. */
override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
- import scala.util.control._
if (!cpsEnabled) {
val report = try hasCpsParamTypes(tpe) catch { case _: MissingRequirementError => false }
if (report)
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath
index 73a67e45ed..9e773a39d2 100644
--- a/src/eclipse/interactive/.classpath
+++ b/src/eclipse/interactive/.classpath
@@ -3,7 +3,7 @@
<classpathentry kind="src" path="interactive"/>
<classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="output" path="build-quick-interactive"/>
</classpath>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 462cbb9c94..5a4448e01a 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -1,14 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" path="partest"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scalap"/>
+ <classpathentry kind="src" path="partest-extras"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M4/1.0-RC3/scala-partest_2.11.0-M4-1.0-RC3.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="output" path="build-quick-partest"/>
+ <classpathentry kind="output" path="build-quick-partest-extras"/>
</classpath>
diff --git a/src/eclipse/partest/.project b/src/eclipse/partest/.project
index 45c24332ba..5f52d4bf8f 100644
--- a/src/eclipse/partest/.project
+++ b/src/eclipse/partest/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>partest</name>
+ <name>partest-extras</name>
<comment></comment>
<projects>
</projects>
@@ -17,9 +17,9 @@
</natures>
<linkedResources>
<link>
- <name>build-quick-partest</name>
+ <name>build-quick-partest-extras</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/build/quick/classes/partest</locationURI>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/partest-extras</locationURI>
</link>
<link>
<name>lib</name>
@@ -27,9 +27,9 @@
<locationURI>SCALA_BASEDIR/lib</locationURI>
</link>
<link>
- <name>partest</name>
+ <name>partest-extras</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/src/partest</locationURI>
+ <locationURI>SCALA_BASEDIR/src/partest-extras</locationURI>
</link>
</linkedResources>
</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index b6ef5f35bb..c185bc5391 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -1,10 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="compiler"/>
- <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry combineaccessrules="false" exported="true" kind="src" path="/asm"/>
+ <classpathentry combineaccessrules="false" exported="true" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" exported="true" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" exported="true" kind="src" path="/continuations-library"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="build-quick-compiler"/>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index caafcf33b0..8e03c97657 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -3,8 +3,10 @@
<classpathentry kind="src" path="scaladoc"/>
<classpathentry combineaccessrules="false" kind="src" path="/partest"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-xml"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-parser-combinators"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index 8e4f88e0f0..fe3c3e4f18 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -7,5 +7,6 @@
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry kind="output" path="build-test-junit"/>
</classpath>
diff --git a/src/intellij/README b/src/intellij/README
index 9ef612bd0a..ade87749cd 100644
--- a/src/intellij/README
+++ b/src/intellij/README
@@ -1,13 +1,8 @@
Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
The following steps are required to use IntelliJ IDEA on Scala trunk:
- - compile "locker" using "ant locker.done"
- - Copy the *.iml.SAMPLE / *.ipr.SAMPLE files to *.iml / *.ipr
- - In IDEA, create a global library named "ant" which contains "ant.jar"
- - Also create an SDK entry named "1.6" containing the java 1.6 SDK
- - In the Scala Facet of the "library" and "reflect" modules, update the path in the
- command-line argument for "-sourcepath"
- - In the Project Settings, update the "Version Control" to match your checkout
-
-Known problems
- - Due to SI-4365, the "library" module has to be built using "-Yno-generic-signatures"
+ - compile "locker" using "ant locker.done". This will also download some JARs from
+ Maven to ./build/deps, which are included in IntelliJ's classpath.
+ - Run src/intellij/setup.sh
+ - Open ./src/intellij/scala-lang.ipr in IntelliJ
+ - File, Project Settings, Project, SDK. Create an SDK entry named "1.6" containing the java 1.6 SDK
diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE
index f8b1f31327..9fb9cd55eb 100644
--- a/src/intellij/compiler.iml.SAMPLE
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -19,9 +19,8 @@
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="asm" />
- <orderEntry type="library" name="ant" level="application" />
- <orderEntry type="library" name="jline" level="project" />
+ <orderEntry type="module" module-name="asm" exported="" />
+ <orderEntry type="library" exported="" name="ant" level="project" />
</component>
</module>
diff --git a/src/intellij/diff.sh b/src/intellij/diff.sh
new file mode 100755
index 0000000000..54f9248608
--- /dev/null
+++ b/src/intellij/diff.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+#
+# Diffs the SAMPLE files against the working project config.
+#
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+for f in "$SCRIPT_DIR"/*.{iml,ipr}; do
+ echo $f; diff -u $f.SAMPLE $f;
+done
diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE
index 9c1b7ec185..cac53dff15 100644
--- a/src/intellij/library.iml.SAMPLE
+++ b/src/intellij/library.iml.SAMPLE
@@ -5,7 +5,7 @@
<configuration>
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
- <option name="compilerOptions" value="-sourcepath /Users/luc/scala/scala/src/library -Yno-generic-signatures" />
+ <option name="compilerOptions" value="-sourcepath $BASE_DIR$/src/library" />
<option name="maximumHeapSize" value="1536" />
<option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
diff --git a/src/intellij/manual.iml.SAMPLE b/src/intellij/manual.iml.SAMPLE
index 62810e0cba..3295a4a877 100644
--- a/src/intellij/manual.iml.SAMPLE
+++ b/src/intellij/manual.iml.SAMPLE
@@ -18,7 +18,8 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="library" name="ant" level="application" />
+ <orderEntry type="module" module-name="xml" />
+ <orderEntry type="library" name="ant" level="project" />
</component>
</module>
diff --git a/src/intellij/parser-combinators.iml.SAMPLE b/src/intellij/parser-combinators.iml.SAMPLE
new file mode 100644
index 0000000000..1ef913dbe4
--- /dev/null
+++ b/src/intellij/parser-combinators.iml.SAMPLE
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../parser-combinators">
+ <sourceFolder url="file://$MODULE_DIR$/../parser-combinators" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ </component>
+</module>
diff --git a/src/intellij/partest.iml.SAMPLE b/src/intellij/partest.iml.SAMPLE
index ab4a32a9b3..893236b621 100644
--- a/src/intellij/partest.iml.SAMPLE
+++ b/src/intellij/partest.iml.SAMPLE
@@ -12,17 +12,13 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../partest">
- <sourceFolder url="file://$MODULE_DIR$/../partest" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../partest-extras">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-extras" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="actors" />
- <orderEntry type="module" module-name="scalap" />
- <orderEntry type="module" module-name="compiler" />
- <orderEntry type="library" name="ant" level="application" />
+ <orderEntry type="library" name="partest-deps" level="project" />
+ <orderEntry type="module" module-name="repl" />
</component>
</module>
diff --git a/src/intellij/reflect.iml.SAMPLE b/src/intellij/reflect.iml.SAMPLE
index 10973c503f..7d10522826 100644
--- a/src/intellij/reflect.iml.SAMPLE
+++ b/src/intellij/reflect.iml.SAMPLE
@@ -5,7 +5,7 @@
<configuration>
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
- <option name="compilerOptions" value="-sourcepath /Users/luc/scala/scala/src/reflect" />
+ <option name="compilerOptions" value="-sourcepath $BASE_DIR$/src/reflect" />
<option name="maximumHeapSize" value="1536" />
<option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
diff --git a/src/intellij/repl.iml.SAMPLE b/src/intellij/repl.iml.SAMPLE
index 5e11ff1cf6..fc78ffe8c2 100644
--- a/src/intellij/repl.iml.SAMPLE
+++ b/src/intellij/repl.iml.SAMPLE
@@ -20,6 +20,6 @@
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
+ <orderEntry type="library" name="repl-deps" level="project" />
</component>
</module>
-
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 61c813df01..f91a346b75 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -33,6 +33,9 @@
<component name="EntryPointsManager">
<entry_points version="2.0" />
</component>
+ <component name="HighlightingAdvisor">
+ <option name="SUGGEST_TYPE_AWARE_HIGHLIGHTING" value="false" />
+ </component>
<component name="InspectionProjectProfileManager">
<profiles>
<profile version="1.0" is_locked="false">
@@ -204,6 +207,7 @@
<module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
<module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
+ <module fileurl="file://$PROJECT_DIR$/parser-combinators.iml" filepath="$PROJECT_DIR$/parser-combinators.iml" />
<module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
<module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
<module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
@@ -212,6 +216,7 @@
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
+ <module fileurl="file://$PROJECT_DIR$/xml.iml" filepath="$PROJECT_DIR$/xml.iml" />
</modules>
</component>
<component name="ProjectResources">
@@ -228,6 +233,13 @@
<mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
</component>
<component name="libraryTable">
+ <library name="ant">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
<library name="compiler-locker">
<CLASSES>
<root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
@@ -238,13 +250,35 @@
<JAVADOC />
<SOURCES />
</library>
- <library name="jline">
+ <library name="junit">
<CLASSES>
- <root url="jar://$PROJECT_DIR$/../../lib/jline.jar!/" />
+ <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
</CLASSES>
<JAVADOC />
- <SOURCES />
+ <SOURCES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
+ </SOURCES>
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" type="SOURCES" />
+ </library>
+ <library name="partest-deps">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/partest" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/junit" />
+ </SOURCES>
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/partest" recursive="false" />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/junit" recursive="false" type="SOURCES" />
</library>
+ <library name="repl-deps">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/deps/repl" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ <jarDirectory url="file://$PROJECT_DIR$/../../build/deps/repl" recursive="false" />
+ </library>
</component>
</project>
-
diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE
index 8ea9d0dd71..a4d863800b 100644
--- a/src/intellij/scala.iml.SAMPLE
+++ b/src/intellij/scala.iml.SAMPLE
@@ -2,7 +2,9 @@
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../.." />
+ <content url="file://$MODULE_DIR$/../..">
+ <excludeFolder url="file://$MODULE_DIR$/../../build" />
+ </content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE
index 6cc609919c..07bea5bf5d 100644
--- a/src/intellij/scaladoc.iml.SAMPLE
+++ b/src/intellij/scaladoc.iml.SAMPLE
@@ -20,5 +20,8 @@
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="xml" />
+ <orderEntry type="module" module-name="parser-combinators" />
+ <orderEntry type="module" module-name="partest" />
</component>
</module>
diff --git a/src/intellij/setup.sh b/src/intellij/setup.sh
new file mode 100755
index 0000000000..d0e1abeb96
--- /dev/null
+++ b/src/intellij/setup.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+#
+# Generates IntelliJ IDEA project files based on the checked-in samples.
+#
+
+set -e
+export SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
+export BASE="$( cd "$( dirname "$0" )"/../.. && pwd )"
+echo "About to delete .ipr and .iml files and replace with the .SAMPLE files. Press enter to continue or CTRL-C to cancel."
+read
+
+(rm *.ipr *.iml 2>/dev/null)
+for f in $(ls "$SCRIPT_DIR"/*.SAMPLE); do
+ NEW_FILE=`echo $f | perl -pe 's/.SAMPLE//'`;
+
+ cp $f $NEW_FILE
+
+ # IntelliJ doesn't process the "compilerOptions" setting for variable
+ # replacement. If it did, we would just use "$PROJECT_DIR$". Instead,
+ # we do this replacement ourselves.
+ perl -pi -e 's/\$BASE_DIR\$/$ENV{"BASE"}/g' $NEW_FILE
+ echo "Created $NEW_FILE"
+done
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
index 3ce369be05..423be2062c 100644
--- a/src/intellij/test.iml.SAMPLE
+++ b/src/intellij/test.iml.SAMPLE
@@ -6,6 +6,8 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="xml" />
+ <orderEntry type="module" module-name="parser-combinators" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="actors" />
@@ -13,6 +15,7 @@
<orderEntry type="module" module-name="partest" />
<orderEntry type="module" module-name="asm" />
<orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="library" name="junit" level="project" />
</component>
</module>
diff --git a/src/intellij/xml.iml.SAMPLE b/src/intellij/xml.iml.SAMPLE
new file mode 100644
index 0000000000..b721f4e7f2
--- /dev/null
+++ b/src/intellij/xml.iml.SAMPLE
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../xml">
+ <sourceFolder url="file://$MODULE_DIR$/../xml" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ </component>
+</module>
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 28b84d67ba..492f0f4fb4 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -365,13 +365,18 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
*/
override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym }
+ protected type SymbolLoadersInInteractive = GlobalSymbolLoaders {
+ val global: Global.this.type
+ val platform: Global.this.platform.type
+ }
/** Symbol loaders in the IDE parse all source files loaded from a package for
* top-level idents. Therefore, we can detect top-level symbols that have a name
* different from their source file
*/
- override lazy val loaders: SymbolLoaders { val global: Global.this.type } = new BrowsingLoaders {
+ override lazy val loaders: SymbolLoadersInInteractive = new {
val global: Global.this.type = Global.this
- }
+ val platform: Global.this.platform.type = Global.this.platform
+ } with BrowsingLoaders
// ----------------- Polling ---------------------------------------
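
The override above wires the loaders' abstract `global` and `platform` members through early definitions and pins the result type with a structural refinement so that downstream code sees the precise singleton types. A toy sketch of the same shape, with made-up names and the 2.11-era early-definition syntax:

    object LoaderWiringDemo extends App {
      trait Loader { val tag: String; def describe = s"loader for $tag" }

      // Early defs initialize the abstract val before the trait body runs;
      // the refinement in the declared type keeps the member visible.
      val loaders: Loader { val tag: String } = new { val tag = "interactive" } with Loader

      println(loaders.describe)
    }
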
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index ddd11257c6..53b4fb2af2 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
* represented by an object in the underlying runtime system.
@@ -20,18 +20,16 @@ import scala.language.implicitConversions
* which provides useful non-primitive operations.
*/
final abstract class Boolean private extends AnyVal {
- /**
- * Negates a Boolean expression.
- *
- * - `!a` results in `false` if and only if `a` evaluates to `true` and
- * - `!a` results in `true` if and only if `a` evaluates to `false`.
- *
- * @return the negated expression
- */
+ /** Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
def unary_! : Boolean
- /**
- * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+ /** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
*
* `a == b` returns `true` if and only if
* - `a` and `b` are `true` or
@@ -48,8 +46,7 @@ final abstract class Boolean private extends AnyVal {
*/
def !=(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a || b` returns `true` if and only if
* - `a` is `true` or
@@ -62,8 +59,7 @@ final abstract class Boolean private extends AnyVal {
*/
def ||(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a && b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -78,8 +74,7 @@ final abstract class Boolean private extends AnyVal {
// def ||(x: => Boolean): Boolean
// def &&(x: => Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a | b` returns `true` if and only if
* - `a` is `true` or
@@ -90,8 +85,7 @@ final abstract class Boolean private extends AnyVal {
*/
def |(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a & b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -100,8 +94,7 @@ final abstract class Boolean private extends AnyVal {
*/
def &(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+ /** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
*
* `a ^ b` returns `true` if and only if
* - `a` is `true` and `b` is `false` or
@@ -135,8 +128,7 @@ object Boolean extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Boolean = x.asInstanceOf[java.lang.Boolean].booleanValue()
- /** The String representation of the scala.Boolean companion object.
- */
+ /** The String representation of the scala.Boolean companion object. */
override def toString = "object scala.Boolean"
}
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index 2510e859c0..413231c0d1 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Byte`, a 8-bit signed integer (equivalent to Java's `byte` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Byte` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Byte private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Byte private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Byte private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Byte] = null
}
object Byte extends AnyValCompanion {
- /** The smallest value representable as a Byte.
- */
+ /** The smallest value representable as a Byte. */
final val MinValue = java.lang.Byte.MIN_VALUE
- /** The largest value representable as a Byte.
- */
+ /** The largest value representable as a Byte. */
final val MaxValue = java.lang.Byte.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Byte extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Byte = x.asInstanceOf[java.lang.Byte].byteValue()
- /** The String representation of the scala.Byte companion object.
- */
+ /** The String representation of the scala.Byte companion object. */
override def toString = "object scala.Byte"
-
- /** Language mandated coercions from Byte to "wider" types.
- */
+ /** Language mandated coercions from Byte to "wider" types. */
+ import scala.language.implicitConversions
implicit def byte2short(x: Byte): Short = x.toShort
implicit def byte2int(x: Byte): Int = x.toInt
implicit def byte2long(x: Byte): Long = x.toLong
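For context on the Byte hunks above: it is the definition of an implicit conversion that requires the scala.language.implicitConversions feature, so relocating the import next to the coercion definitions keeps the rest of the generated file free of it, while call sites still pick up the widenings automatically. A minimal illustrative sketch, not part of the diff (the helper takesInt is hypothetical):

    object ByteWideningDemo {
      def takesInt(n: Int): Int = n              // hypothetical helper, for illustration only
      def main(args: Array[String]): Unit = {
        val b: Byte = 42
        println(takesInt(b))                     // Byte widens to Int (cf. byte2int above), prints 42
        val sum: Int = b + b                     // matches `def +(x: Byte): Int`: Byte + Byte is an Int
        println(sum)                             // prints 84
      }
    }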
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 1c9a2ba44f..ec2d48c181 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Char` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Char private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Char private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Char private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Char] = null
}
object Char extends AnyValCompanion {
- /** The smallest value representable as a Char.
- */
+ /** The smallest value representable as a Char. */
final val MinValue = java.lang.Character.MIN_VALUE
- /** The largest value representable as a Char.
- */
+ /** The largest value representable as a Char. */
final val MaxValue = java.lang.Character.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Char extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Char = x.asInstanceOf[java.lang.Character].charValue()
- /** The String representation of the scala.Char companion object.
- */
+ /** The String representation of the scala.Char companion object. */
override def toString = "object scala.Char"
-
- /** Language mandated coercions from Char to "wider" types.
- */
+ /** Language mandated coercions from Char to "wider" types. */
+ import scala.language.implicitConversions
implicit def char2int(x: Char): Int = x.toInt
implicit def char2long(x: Char): Long = x.toLong
implicit def char2float(x: Char): Float = x.toFloat
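The Char hunks follow the same pattern: per the signatures above, arithmetic on Char operands works on their integer code points and yields Int, and char2int/char2long/char2float/char2double cover the widening cases. A short illustrative sketch, not part of the diff:

    object CharWideningDemo {
      def main(args: Array[String]): Unit = {
        val c: Char = 'A'
        val code: Int = c + 'B'                  // `def +(x: Char): Int`: 65 + 66
        println(code)                            // prints 131
        val wide: Long = c                       // char2long widens the code point
        println(wide)                            // prints 65
      }
    }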
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index ce081bbec1..a58fa3ed25 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Double` are not
* represented by an object in the underlying runtime system.
@@ -28,334 +28,176 @@ final abstract class Double private extends AnyVal {
def toFloat: Float
def toDouble: Double
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Double
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Double
def +(x: String): String
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Double] = null
@@ -401,8 +243,7 @@ object Double extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Double = x.asInstanceOf[java.lang.Double].doubleValue()
- /** The String representation of the scala.Double companion object.
- */
+ /** The String representation of the scala.Double companion object. */
override def toString = "object scala.Double"
}
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index 4ff2d509b8..3c59057a8d 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Float` are not
* represented by an object in the underlying runtime system.
@@ -28,334 +28,176 @@ final abstract class Float private extends AnyVal {
def toFloat: Float
def toDouble: Double
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Float
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Float
def +(x: String): String
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Float] = null
@@ -401,12 +243,10 @@ object Float extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Float = x.asInstanceOf[java.lang.Float].floatValue()
- /** The String representation of the scala.Float companion object.
- */
+ /** The String representation of the scala.Float companion object. */
override def toString = "object scala.Float"
-
- /** Language mandated coercions from Float to "wider" types.
- */
+ /** Language mandated coercions from Float to "wider" types. */
+ import scala.language.implicitConversions
implicit def float2double(x: Float): Double = x.toDouble
}
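The Float signatures above encode the usual numeric promotion: any operation mixing a Float with a Double yields a Double, and float2double is the only language-mandated coercion listed for Float. A small illustrative sketch, not part of the diff (the helper needsDouble is hypothetical):

    object FloatWideningDemo {
      def needsDouble(x: Double): Double = x * 2 // hypothetical helper, for illustration only
      def main(args: Array[String]): Unit = {
        val f: Float = 1.5f
        val mixed: Double = f + 2.25             // `def +(x: Double): Double` on Float
        println(mixed)                           // prints 3.75
        println(needsDouble(f))                  // float2double widens f, prints 3.0
      }
    }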
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index 6a27195b10..72e5ebf81b 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Int` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Int private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Int private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Int private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Int] = null
}
object Int extends AnyValCompanion {
- /** The smallest value representable as a Int.
- */
+ /** The smallest value representable as an Int. */
final val MinValue = java.lang.Integer.MIN_VALUE
- /** The largest value representable as a Int.
- */
+ /** The largest value representable as an Int. */
final val MaxValue = java.lang.Integer.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Int extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Int = x.asInstanceOf[java.lang.Integer].intValue()
- /** The String representation of the scala.Int companion object.
- */
+ /** The String representation of the scala.Int companion object. */
override def toString = "object scala.Int"
-
- /** Language mandated coercions from Int to "wider" types.
- */
+ /** Language mandated coercions from Int to "wider" types. */
+ import scala.language.implicitConversions
implicit def int2long(x: Int): Long = x.toLong
implicit def int2float(x: Int): Float = x.toFloat
implicit def int2double(x: Int): Double = x.toDouble
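The coercions kept here are the language-mandated widenings from Int; a minimal sketch of their effect (standard library only, nothing beyond the definitions above) is:

    val n: Int = 7
    val asLong: Long = n              // widening mirrored by int2long
    val asDouble: Double = n          // widening mirrored by int2double
    def mean(xs: Double*): Double = xs.sum / xs.length
    mean(1, 2, 3)                     // Int arguments accepted where Double is expected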
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 4d369ae010..1bd0fe88b1 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Long` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Long private extends AnyVal {
* }}}
*/
def unary_~ : Long
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Long
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Long
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Long private extends AnyVal {
*/
def >>(x: Long): Long
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Long private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Long] = null
}
object Long extends AnyValCompanion {
- /** The smallest value representable as a Long.
- */
+ /** The smallest value representable as a Long. */
final val MinValue = java.lang.Long.MIN_VALUE
- /** The largest value representable as a Long.
- */
+ /** The largest value representable as a Long. */
final val MaxValue = java.lang.Long.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Long extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Long = x.asInstanceOf[java.lang.Long].longValue()
- /** The String representation of the scala.Long companion object.
- */
+ /** The String representation of the scala.Long companion object. */
override def toString = "object scala.Long"
-
- /** Language mandated coercions from Long to "wider" types.
- */
+ /** Language mandated coercions from Long to "wider" types. */
+ import scala.language.implicitConversions
implicit def long2float(x: Long): Float = x.toFloat
implicit def long2double(x: Long): Double = x.toDouble
}
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 4f91c51550..36b9ec4df9 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Short` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Short private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Short private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Short private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Short] = null
}
object Short extends AnyValCompanion {
- /** The smallest value representable as a Short.
- */
+ /** The smallest value representable as a Short. */
final val MinValue = java.lang.Short.MIN_VALUE
- /** The largest value representable as a Short.
- */
+ /** The largest value representable as a Short. */
final val MaxValue = java.lang.Short.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Short extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Short = x.asInstanceOf[java.lang.Short].shortValue()
- /** The String representation of the scala.Short companion object.
- */
+ /** The String representation of the scala.Short companion object. */
override def toString = "object scala.Short"
-
- /** Language mandated coercions from Short to "wider" types.
- */
+ /** Language mandated coercions from Short to "wider" types. */
+ import scala.language.implicitConversions
implicit def short2int(x: Short): Int = x.toInt
implicit def short2long(x: Short): Long = x.toLong
implicit def short2float(x: Short): Float = x.toFloat
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 42fb2f36e8..70f95750da 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -59,7 +59,8 @@ case class StringContext(parts: String*) {
*/
def checkLengths(args: Seq[Any]): Unit =
if (parts.length != args.length + 1)
- throw new IllegalArgumentException("wrong number of arguments for interpolated string")
+ throw new IllegalArgumentException("wrong number of arguments ("+ args.length
+ +") for interpolated string with "+ parts.length +" parts")
/** The simple string interpolator.
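The reworded message reports both counts, which makes the failure self-explanatory; a small sketch of the observable behaviour (the values follow directly from the code above):

    val sc = StringContext("Hello, ", " and ", "!")  // 3 parts, so exactly 2 arguments are expected
    sc.checkLengths(Seq("Jane", "Joe"))              // passes silently
    sc.checkLengths(Seq("Jane"))                     // throws IllegalArgumentException:
                                                     //   wrong number of arguments (1) for interpolated string with 3 parts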
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 0e59a184d1..018ad24a99 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
* `Unit`, `()`, and it is not represented by any object in the underlying
@@ -41,8 +41,7 @@ object Unit extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Unit = ()
- /** The String representation of the scala.Unit companion object.
- */
+ /** The String representation of the scala.Unit companion object. */
override def toString = "object scala.Unit"
}
diff --git a/src/library/scala/annotation/compileTimeOnly.scala b/src/library/scala/annotation/compileTimeOnly.scala
new file mode 100644
index 0000000000..942e9cad8c
--- /dev/null
+++ b/src/library/scala/annotation/compileTimeOnly.scala
@@ -0,0 +1,22 @@
+package scala.annotation
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates that an annottee should not be referred to after
+ * type checking (which includes macro expansion).
+ *
+ * Examples of potential use:
+ * 1) The annottee can only appear in the arguments of some other macro
+ * that will eliminate it from the AST during expansion.
+ * 2) The annottee is a macro and should have been expanded away,
+ * so if it hasn't, something wrong has happened.
+ * (Comes in handy to provide better support for new macro flavors,
+ * e.g. macro annotations, that can't be expanded by the vanilla compiler).
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.11.0
+ */
+@getter @setter @beanGetter @beanSetter @companionClass @companionMethod
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
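A hedged usage sketch: the Placeholders object and its placeholder method below are hypothetical, only the annotation itself comes from this patch. Marking a stub this way turns any reference that survives type checking into a compile error carrying the given message.

    import scala.annotation.compileTimeOnly

    object Placeholders {
      // hypothetical stub that a macro is expected to rewrite away during expansion
      @compileTimeOnly("`placeholder` may only be used inside an enclosing macro call")
      def placeholder[T]: T = sys.error("never reached: eliminated at compile time by a macro")
    }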
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index c01694960c..b11368acdf 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -161,6 +161,8 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @inheritdoc
*/
@inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
+ // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`.
+ // If any successful optimization attempts or other changes are made, please rehash them there too.
@tailrec
def loop(mapped: ListBuffer[B], unchanged: List[A], pending: List[A]): List[B] =
if (pending.isEmpty) {
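The property both copies of this logic must preserve is that an element-conserving function yields the original list instance rather than a copy; a minimal check:

    val xs = List("a", "b", "c")
    assert(xs.mapConserve(identity) eq xs)                        // nothing changed: same instance returned
    assert(xs.mapConserve(_.toUpperCase) == List("A", "B", "C"))  // a changed element forces a new list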
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index cc25b5b4b2..5ab2bb81c6 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -30,6 +30,7 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
def result: To = allocateAndCopy
def clear() = { chain.clear() }
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
+ import language.existentials // FIXME: See SI-7750
if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
newLazyCombiner(chain ++= that.chain)
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 4b8139702f..1c00c0e91f 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -14,6 +14,7 @@ package scala.concurrent
*
* @author Martin Odersky
* @version 1.0, 10/03/2003
+ * @deprecated("Use java.util.concurrent.locks.Lock", "2.11.0")
*/
class Lock {
var available = true
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index c049de3a28..315f56bd4e 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -12,7 +12,7 @@ package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
-import scala.collection.generic.{ Sorted }
+import scala.collection.generic.{ Sorted, IsTraversableLike }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
@@ -48,6 +48,10 @@ object ScalaRunTime {
names.toSet
}
+ // A helper method to make my life in the pattern matcher a lot easier.
+ def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr =
+ traversable conversion coll drop num
+
/** Return the class object representing an array with element class `clazz`.
*/
def arrayClass(clazz: jClass[_]): jClass[_] = {
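The drop helper keeps the static result type Repr, which is what the pattern matcher needs; a sketch, assuming the default IsTraversableLike instances for String and the standard collections are in implicit scope:

    import scala.runtime.ScalaRunTime

    val rest: String = ScalaRunTime.drop("abcdef", 2)           // "cdef", still typed as String
    val tail: List[Int] = ScalaRunTime.drop(List(1, 2, 3), 1)   // List(2, 3)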
@@ -267,7 +271,18 @@ object ScalaRunTime {
}
def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
- def isXmlClass(x: AnyRef) = packageOf(x) startsWith "scala.xml."
+
+ // We use reflection because the scala.xml package might not be available
+ def isSubClassOf(potentialSubClass: Class[_], ofClass: String) =
+ try {
+ val classLoader = potentialSubClass.getClassLoader
+ val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader)
+ clazz.isAssignableFrom(potentialSubClass)
+ } catch {
+ case cnfe: ClassNotFoundException => false
+ }
+ def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node")
+ def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData")
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
@@ -279,11 +294,12 @@ object ScalaRunTime {
case _: StringLike[_] => true
// Don't want to evaluate any elements in a view
case _: TraversableView[_, _] => true
- // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] -> catch those and more by isXmlClass(x)
+ // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData]
+ // -> catch those by isXmlNode and isXmlMetaData.
// Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
// collections which may have useful toString methods - ticket #3710
// or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
- case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlClass(x)
+ case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass)
// Otherwise, nothing could possibly go wrong
case _ => false
}
@@ -324,7 +340,7 @@ object ScalaRunTime {
// to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes.
try inner(arg)
catch {
- case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
+ case _: UnsupportedOperationException | _: AssertionError => "" + arg
}
}
@@ -335,20 +351,6 @@ object ScalaRunTime {
nl + s + "\n"
}
- private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
- if (sys.props contains "scala.debug.zip") {
- val xs = coll1.toIndexedSeq
- val ys = coll2.toIndexedSeq
- if (xs.length != ys.length) {
- Console.err.println(
- "Mismatched zip in " + what + ":\n" +
- " this: " + xs.mkString(", ") + "\n" +
- " that: " + ys.mkString(", ")
- )
- (new Exception).getStackTrace.drop(2).take(10).foreach(println)
- }
- }
- }
def box[T](clazz: jClass[T]): jClass[_] = clazz match {
case java.lang.Byte.TYPE => classOf[java.lang.Byte]
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index adf5a4f6b2..a144d51609 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -171,7 +171,7 @@ trait ProcessBuilder extends Source with Sink {
* a Stream that blocks when lines are not available but the process has not
* completed. Standard error is sent to the provided ProcessLogger. If the
* process exits with a non-zero value, the Stream will provide all lines up
- * to termination but will not throw an exception.
+ * to termination and then throw an exception.
*/
def lines(log: ProcessLogger): Stream[String]
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index d68cd004f8..1340a6c415 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -151,7 +151,7 @@ package scala.sys {
*
* // An overly complex way of computing size of a compressed file
* def gzFileSize(name: String) = {
- * val cat = Seq("zcat", "name")
+ * val cat = Seq("zcat", name)
* var count = 0
* def byteCounter(input: java.io.InputStream) = {
* while(input.read() != -1) count += 1
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 02c461f3c6..8b63a73638 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -131,10 +131,9 @@ private[scala] trait PropertiesTrait {
def javaVmName = propOrEmpty("java.vm.name")
def javaVmVendor = propOrEmpty("java.vm.vendor")
def javaVmVersion = propOrEmpty("java.vm.version")
- // this property must remain less-well-known until 2.11
- private def javaSpecVersion = propOrEmpty("java.specification.version")
- //private def javaSpecVendor = propOrEmpty("java.specification.vendor")
- //private def javaSpecName = propOrEmpty("java.specification.name")
+ def javaSpecVersion = propOrEmpty("java.specification.version")
+ def javaSpecVendor = propOrEmpty("java.specification.vendor")
+ def javaSpecName = propOrEmpty("java.specification.name")
def osName = propOrEmpty("os.name")
def scalaHome = propOrEmpty("scala.home")
def tmpDir = propOrEmpty("java.io.tmpdir")
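With the specification properties now public, callers can inspect the running VM directly; a small sketch (the printed values depend on the JVM the code runs on):

    import scala.util.Properties

    // e.g. "1.7", "Oracle Corporation", "Java Platform API Specification"
    println(Properties.javaSpecVersion)
    println(Properties.javaSpecVendor)
    println(Properties.javaSpecName)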
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 8eac0a2520..439b30e714 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -194,6 +194,44 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
else None
}
+ /** Tries to match the String representation of a [[scala.Char]].
+ * If the match succeeds, the result is the first matching
+ * group if any groups are defined, or an empty Sequence otherwise.
+ *
+ * For example:
+ *
+ * {{{
+ * val cat = "cat"
+ * // the case must consume the group to match
+ * val r = """(\p{Lower})""".r
+ * cat(0) match { case r(x) => true }
+ * cat(0) match { case r(_) => true }
+ * cat(0) match { case r(_*) => true }
+ * cat(0) match { case r() => true } // no match
+ *
+ * // there is no group to extract
+ * val r = """\p{Lower}""".r
+ * cat(0) match { case r(x) => true } // no match
+ * cat(0) match { case r(_) => true } // no match
+ * cat(0) match { case r(_*) => true } // matches
+ * cat(0) match { case r() => true } // matches
+ *
+ * // even if there are multiple groups, only one is returned
+ * val r = """((.))""".r
+ * cat(0) match { case r(_) => true } // matches
+ * cat(0) match { case r(_,_) => true } // no match
+ * }}}
+ *
+ * @param c The Char to match
+ * @return The match
+ */
+ def unapplySeq(c: Char): Option[Seq[Char]] = {
+ val m = pattern matcher c.toString
+ if (runMatcher(m)) {
+ if (m.groupCount > 0) Some(m group 1) else Some(Nil)
+ } else None
+ }
+
/** Tries to match on a [[scala.util.matching.Regex.Match]].
* A previously failed match results in None.
* If a successful match was made against the current pattern, then that result is used.
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
index d618e086f4..d618e086f4 100644
--- a/src/partest/scala/tools/partest/ASMConverters.scala
+++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala
diff --git a/src/partest/scala/tools/partest/AsmNode.scala b/src/partest-extras/scala/tools/partest/AsmNode.scala
index d181436676..e6a91498d1 100644
--- a/src/partest/scala/tools/partest/AsmNode.scala
+++ b/src/partest-extras/scala/tools/partest/AsmNode.scala
@@ -16,10 +16,11 @@ sealed trait AsmNode[+T] {
def visibleAnnotations: List[AnnotationNode]
def invisibleAnnotations: List[AnnotationNode]
def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
+ def erasedCharacteristics = f"$name%15s $desc%-30s$accessString"
- private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
- private def sigString = if (signature == null) "" else " " + signature
- override def toString = characteristics
+ private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
+ private def sigString = if (signature == null) "" else " " + signature
+ override def toString = characteristics
}
object AsmNode {
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 172fa29189..7650a892fd 100644
--- a/src/partest/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -48,19 +48,30 @@ abstract class BytecodeTest extends ASMConverters {
// descriptors and generic signatures? Method bodies are not considered, and
// the names of the classes containing the methods are substituted so they do
// not appear as differences.
- def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode): Boolean = {
+ def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.characteristics)
+
+ // Same as sameMethodAndFieldSignatures, but ignoring generic signatures.
+ // This allows for methods which receive the same descriptor but differing
+ // generic signatures. In particular, this happens with value classes,
+ // which get a generic signature where a method written in terms of the
+ // underlying values does not.
+ def sameMethodAndFieldDescriptors(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.erasedCharacteristics)
+
+ private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[_] => String): Boolean = {
val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
val name1 = clazzA.name
val name2 = clazzB.name
if (ms1.length != ms2.length) {
- println("Different member counts in $name1 and $name2")
+ println(s"Different member counts in $name1 and $name2")
false
}
else (ms1, ms2).zipped forall { (m1, m2) =>
- val c1 = m1.characteristics
- val c2 = m2.characteristics.replaceAllLiterally(name2, name1)
+ val c1 = f(m1)
+ val c2 = f(m2).replaceAllLiterally(name2, name1)
if (c1 == c2)
println(s"[ok] $m1")
else
diff --git a/src/partest/scala/tools/partest/JavapTest.scala b/src/partest-extras/scala/tools/partest/JavapTest.scala
index 3cb3dc6ca8..3cb3dc6ca8 100644
--- a/src/partest/scala/tools/partest/JavapTest.scala
+++ b/src/partest-extras/scala/tools/partest/JavapTest.scala
diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index 7381b8af54..7cc2dd39a9 100644
--- a/src/partest/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -7,7 +7,6 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
-import scala.tools.partest.nest.FileUtil
import java.lang.reflect.{ Method => JMethod, Field => JField }
/** A trait for testing repl code. It drops the first line
@@ -31,7 +30,7 @@ abstract class ReplTest extends DirectTest {
def show() = eval() foreach println
}
-abstract class SessionTest extends ReplTest with FileUtil {
+abstract class SessionTest extends ReplTest {
def session: String
override final def code = expected filter (_.startsWith(prompt)) map (_.drop(prompt.length)) mkString "\n"
def expected = session.stripMargin.lines.toList
@@ -39,6 +38,6 @@ abstract class SessionTest extends ReplTest with FileUtil {
override def show() = {
val out = eval().toList
if (out.size != expected.size) Console println s"Expected ${expected.size} lines, got ${out.size}"
- if (out != expected) Console print compareContents(expected, out, "expected", "actual")
+ if (out != expected) Console print nest.FileManager.compareContents(expected, out, "expected", "actual")
}
}
diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest-extras/scala/tools/partest/SigTest.scala
index fe233a4fb5..fe233a4fb5 100644
--- a/src/partest/scala/tools/partest/SigTest.scala
+++ b/src/partest-extras/scala/tools/partest/SigTest.scala
diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala
new file mode 100644
index 0000000000..114658b0cd
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/Util.scala
@@ -0,0 +1,52 @@
+package scala.tools.partest
+
+import scala.language.experimental.macros
+
+object Util {
+ /**
+ * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+ * {{{
+ * trace> "".isEmpty
+ * res: Boolean = true
+ *
+ * }}}
+ *
+ * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+ * test code in a string.
+ */
+ def trace[A](a: A) = macro traceImpl[A]
+
+ import scala.reflect.macros.Context
+ def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+ import c.universe._
+ import definitions._
+
+ // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
+ // because this impairs reflection refactorings
+ //
+ // val exprCode = c.literal(show(a.tree))
+ // val exprType = c.literal(show(a.actualType))
+ // reify {
+ // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+ // a.splice
+ // }
+
+ c.Expr(Block(
+ List(Apply(
+ Select(Ident(PredefModule), TermName("println")),
+ List(Apply(
+ Select(Apply(
+ Select(Ident(ScalaPackage), TermName("StringContext")),
+ List(
+ Literal(Constant("trace> ")),
+ Literal(Constant("\\nres: ")),
+ Literal(Constant(" = ")),
+ Literal(Constant("\\n")))),
+ TermName("s")),
+ List(
+ Literal(Constant(show(a.tree))),
+ Literal(Constant(show(a.actualType))),
+ a.tree))))),
+ a.tree))
+ }
+}
\ No newline at end of file
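
The macro above splices the shown source text and static type of its argument into a println and then returns the argument unchanged. A usage sketch, assuming partest-extras is on the classpath:

{{{
// Hypothetical test body exercising Util.trace; the output shown in the
// comments mirrors the scaladoc example above.
import scala.tools.partest.Util.trace

object Test extends App {
  val b = trace("".isEmpty)
  // trace> "".isEmpty
  // res: Boolean = true
  assert(b)
}
}}}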
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
index 18dd740208..18dd740208 100644
--- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
+++ b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
new file mode 100644
index 0000000000..d6b62e1d9e
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
@@ -0,0 +1,82 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A simple profiler class that counts method invocations. It is used in byte-code instrumentation by inserting a
+ * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented method.
+ *
+ * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It is made public only
+ * because it must be universally accessible for instrumentation needs. If you want to profile your test, use
+ * {@link Instrumentation} instead.
+ */
+public class Profiler {
+
+ private static boolean isProfiling = false;
+ private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
+
+ static public class MethodCallTrace {
+ final String className;
+ final String methodName;
+ final String methodDescriptor;
+
+ public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
+ this.className = className;
+ this.methodName = methodName;
+ this.methodDescriptor = methodDescriptor;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof MethodCallTrace)) {
+ return false;
+ } else {
+ MethodCallTrace that = (MethodCallTrace) obj;
+ return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
+ }
+ }
+ @Override
+ public int hashCode() {
+ return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
+ }
+ }
+
+ public static void startProfiling() {
+ isProfiling = true;
+ }
+
+ public static void stopProfiling() {
+ isProfiling = false;
+ }
+
+ public static boolean isProfiling() {
+ return isProfiling;
+ }
+
+ public static void resetProfiling() {
+ counts = new HashMap<MethodCallTrace, Integer>();
+ }
+
+ public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
+ if (isProfiling) {
+ MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
+ Integer counter = counts.get(trace);
+ if (counter == null) {
+ counts.put(trace, 1);
+ } else {
+ counts.put(trace, counter+1);
+ }
+ }
+ }
+
+ public static Map<MethodCallTrace, Integer> getStatistics() {
+ return new HashMap<MethodCallTrace, Integer>(counts);
+ }
+
+}
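
The profiler above is a plain invocation counter keyed by (class name, method name, descriptor). The sketch below, in Scala, exercises only the methods defined in this file to show that semantics; real tests are expected to go through `Instrumentation`, as the javadoc says, and the sketch assumes these classes are on the classpath.

{{{
// Counting semantics of Profiler: repeated calls with the same triple share
// one MethodCallTrace key. Not how tests should use it -- see Instrumentation.
import scala.tools.partest.instrumented.Profiler

object ProfilerSketch extends App {
  Profiler.startProfiling()
  // Normally the instrumented bytecode emits these calls at method entry.
  Profiler.methodCalled("instrumented/Foo", "bar", "()V")
  Profiler.methodCalled("instrumented/Foo", "bar", "()V")
  Profiler.stopProfiling()

  println(Profiler.getStatistics().size())  // 1 distinct trace, counted twice
  Profiler.resetProfiling()
  println(Profiler.getStatistics().size())  // 0 after reset
}
}}}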
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
new file mode 100644
index 0000000000..86f5e64516
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
@@ -0,0 +1,49 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.ClassFileTransformer;
+import java.security.ProtectionDomain;
+
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassWriter;
+
+public class ASMTransformer implements ClassFileTransformer {
+
+ private boolean shouldTransform(String className) {
+ return
+ // do not instrument instrumentation logic (in order to avoid infinite recursion)
+ !className.startsWith("scala/tools/partest/instrumented/") &&
+ !className.startsWith("scala/tools/partest/javaagent/") &&
+ // we instrument all classes from empty package
+ (!className.contains("/") ||
+ // we instrument all classes from scala package
+ className.startsWith("scala/") ||
+ // we instrument all classes from `instrumented` package
+ className.startsWith("instrumented/"));
+ }
+
+ public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+ if (shouldTransform(className)) {
+ ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+ @Override protected String getCommonSuperClass(final String type1, final String type2) {
+ // Since we are not recomputing the stack frame map, this should never be called. We override this method
+ // because the default implementation uses reflection and might try to load the class that we are
+ // currently processing. That leads to weird results like swallowed exceptions and classes not being
+ // transformed.
+ throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+ ") while transforming " + className);
+ }
+ };
+ ProfilerVisitor visitor = new ProfilerVisitor(writer);
+ ClassReader reader = new ClassReader(classfileBuffer);
+ reader.accept(visitor, 0);
+ return writer.toByteArray();
+ } else {
+ return classfileBuffer;
+ }
+ }
+}
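
`shouldTransform` is a whitelist: the default package, everything under `scala/`, and everything under `instrumented/`, minus the partest instrumentation and agent packages themselves. A standalone restatement of the predicate with a few illustrative class names (internal JVM names, `/`-separated):

{{{
// Restatement of the shouldTransform whitelist above, for illustration only.
object TransformFilterSketch extends App {
  def shouldTransform(className: String): Boolean =
    !className.startsWith("scala/tools/partest/instrumented/") &&
    !className.startsWith("scala/tools/partest/javaagent/") &&
    (!className.contains("/") ||              // default package
      className.startsWith("scala/") ||      // scala.*
      className.startsWith("instrumented/")) // instrumented.*

  Seq(
    "Foo",                                       // true: default package
    "scala/collection/immutable/List",           // true: scala.*
    "instrumented/Foo",                          // true: instrumented.*
    "scala/tools/partest/instrumented/Profiler", // false: the profiler itself
    "com/example/Foo"                            // false: anything else
  ).foreach(n => println(s"$n -> ${shouldTransform(n)}"))
}
}}}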
diff --git a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
index be0fee46a2..be0fee46a2 100644
--- a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
index 8306327b14..b1b100fbb0 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -10,7 +10,7 @@ import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
public class ProfilerVisitor extends ClassVisitor implements Opcodes {
-
+
private static String profilerClass = "scala/tools/partest/instrumented/Profiler";
public ProfilerVisitor(final ClassVisitor cv) {
@@ -53,7 +53,7 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes {
"(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
}
}
- return mv;
+ return mv;
}
}
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
new file mode 100644
index 0000000000..819a5cc39b
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -0,0 +1,25 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.Instrumentation;
+import java.lang.instrument.UnmodifiableClassException;
+
+/**
+ * Profiling agent that instruments byte-code to insert calls to
+ * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
+ * by using the ASM library for byte-code manipulation.
+ */
+public class ProfilingAgent {
+ public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
+ // NOTE: we are adding a transformer that won't be applied to classes that are already loaded.
+ // This should be ok because premain is executed before main, so the Scala library
+ // and the test case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
+ // not depend on the Scala library. In case our assumptions are wrong, we can always insert a call to
+ // inst.retransformClasses.
+ inst.addTransformer(new ASMTransformer(), false);
+ }
+}
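
The agent registers its transformer from `premain`, so it has to be attached with `-javaagent` before the tested code loads. The sketch below only shows how a forked JVM command line might be assembled; the jar path and main class are placeholders, not values taken from this commit.

{{{
// Assembling (not running) a java command that attaches the agent.
// Paths and class names are placeholders for illustration only.
object RunWithAgentSketch extends App {
  val agentJar  = "build/pack/lib/scala-partest-javaagent.jar" // placeholder
  val mainClass = "instrumented.SomeTest"                      // placeholder
  val cmd = Seq("java", s"-javaagent:$agentJar", "-cp", "classes", mainClass)
  println(cmd.mkString(" "))
  // import scala.sys.process._; cmd.!  // would actually fork the JVM
}
}}}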
diff --git a/src/partest/README b/src/partest/README
deleted file mode 100644
index 17594dbb1e..0000000000
--- a/src/partest/README
+++ /dev/null
@@ -1,31 +0,0 @@
-How partest chooses the compiler / library:
-
- * ''-Dpartest.build=build/four-pack'' -> will search for libraries in
- ''lib'' directory of given path
- * ''--pack'' -> will set ''partest.build=build/pack'', and run all tests.
- add ''--[kind]'' to run a selected set of tests.
- * auto detection:
- - partest.build property -> ''bin'' / ''lib'' directories
- - distribution (''dists/latest'')
- - supersabbus pack (''build/pack'')
- - sabbus quick (''build/quick'')
- - installed dist (test files in ''misc/scala-test/files'')
-
-How partest choses test files: the test files must be accessible from
-the directory on which partest is run. So the test files must be either
-at:
- * ./test/files
- * ./files (cwd is "test")
- * ./misc/scala-test/files (installed scala distribution)
-
-Other arguments:
- * --pos next files test a compilation success
- * --neg next files test a compilation failure
- * --run next files test the interpreter and all backends
- * --jvm next files test the JVM backend
- * --res next files test the resident compiler
- * --shootout next files are shootout tests
- * --script next files test the script runner
- * ''-Dpartest.scalac_opts=...'' -> add compiler options
- * ''--verbose'' -> print verbose messages
- * ''-Dpartest.debug=true'' -> print debug messages
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
deleted file mode 100644
index df4a81dee2..0000000000
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.nsc._
-
-/** For testing compiler internals directly.
- * Each source code string in "sources" will be compiled, and
- * the check function will be called with the source code and the
- * resulting CompilationUnit. The check implementation should
- * test for what it wants to test and fail (via assert or other
- * exception) if it is not happy.
- */
-abstract class CompilerTest extends DirectTest {
- def check(source: String, unit: global.CompilationUnit): Unit
-
- lazy val global: Global = newCompiler()
- lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
- import global._
- import definitions.{ compilerTypeFromTag }
-
- override def extraSettings = "-usejavacp -d " + testOutput.path
-
- def show() = (sources, units).zipped foreach check
-
- // Override at least one of these...
- def code = ""
- def sources: List[String] = List(code)
-
- // Utility functions
- class MkType(sym: Symbol) {
- def apply[M](implicit t: ru.TypeTag[M]): Type =
- if (sym eq NoSymbol) NoType
- else appliedType(sym, compilerTypeFromTag(t))
- }
- implicit def mkMkType(sym: Symbol) = new MkType(sym)
-
- def allMembers(root: Symbol): List[Symbol] = {
- def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
- val latest = roots flatMap (_.info.members) filterNot (seen contains _)
- if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
- else loop(seen ++ latest, latest)
- }
- loop(Set(), List(root))
- }
-
- class SymsInPackage(pkgName: String) {
- def pkg = rootMirror.getPackage(pkgName)
- def classes = allMembers(pkg) filter (_.isClass)
- def modules = allMembers(pkg) filter (_.isModule)
- def symbols = classes ++ terms filterNot (_ eq NoSymbol)
- def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
- def tparams = classes flatMap (_.info.typeParams)
- def tpes = symbols map (_.tpe) distinct
- }
-}
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
deleted file mode 100644
index 2e6c3baa02..0000000000
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.nsc._
-import settings.ScalaVersion
-import util.{ SourceFile, BatchSourceFile }
-import reporters.{Reporter, ConsoleReporter}
-import scala.tools.cmd.CommandLineParser
-
-/** A class for testing code which is embedded as a string.
- * It allows for more complete control over settings, compiler
- * configuration, sequence of events, etc. than does partest.
- */
-abstract class DirectTest extends App {
- // The program being tested in some fashion
- def code: String
- // produce the output to be compared against a checkfile
- def show(): Unit
-
- // the test file or dir, and output directory
- def testPath = SFile(sys.props("partest.test-path"))
- def testOutput = Directory(sys.props("partest.output"))
-
- // override to add additional settings with strings
- def extraSettings: String = ""
- // a default Settings object
- def settings: Settings = newSettings(CommandLineParser tokenize extraSettings)
- // a custom Settings object
- def newSettings(args: List[String]) = {
- val s = new Settings
- val allArgs = args ++ (CommandLineParser tokenize debugSettings)
- log("newSettings: allArgs = " + allArgs)
- s processArguments (allArgs, true)
- s
- }
- // new compiler
- def newCompiler(args: String*): Global = {
- val settings = newSettings((CommandLineParser tokenize ("-d \"" + testOutput.path + "\" " + extraSettings)) ++ args.toList)
- newCompiler(settings)
- }
-
- def newCompiler(settings: Settings): Global = Global(settings, reporter(settings))
-
- def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
-
- private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
- codes.toList.zipWithIndex map {
- case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
- }
-
- def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
- def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
-
- def compileString(global: Global)(sourceCode: String): Boolean = {
- withRun(global)(_ compileSources newSources(sourceCode))
- !global.reporter.hasErrors
- }
-
- def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
- sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
- }
-
- def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
- withRun(global) { run =>
- run compileSources files
- run.units.toList
- }
- }
-
- def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
- val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
- if (global.reporter.hasErrors) {
- global.reporter.flush()
- sys.error("Compilation failure.")
- }
- units
- }
-
- def withRun[T](global: Global)(f: global.Run => T): T = {
- global.reporter.reset()
- f(new global.Run)
- }
-
- // compile the code, optionally first adding to the settings
- def compile(args: String*) = compileString(newCompiler(args: _*))(code)
-
- /** Constructor/main body **/
- try show()
- catch { case t: Exception => println(t.getMessage) ; t.printStackTrace ; sys.exit(1) }
-
- /** Debugger interest only below this line **/
- protected def isDebug = (sys.props contains "partest.debug") || (sys.env contains "PARTEST_DEBUG")
- protected def debugSettings = sys.props.getOrElse("partest.debug.settings", "")
-
- final def log(msg: => Any) {
- if (isDebug) Console.err println msg
- }
-
- /**
- * Run a test only if the current java version is at least the version specified.
- */
- def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
-
- class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
- val javaVersion = System.getProperty("java.specification.version")
-
- // the "ScalaVersion" class parses Java specification versions just fine
- val requiredJavaVersion = ScalaVersion(version)
- val executingJavaVersion = ScalaVersion(javaVersion)
- val shouldRun = executingJavaVersion >= requiredJavaVersion
- val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
-
- def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
-
- /*
- * If the current java version is at least 'version' then 'yesRun' is evaluated
- * otherwise 'fallback' is
- */
- def otherwise(fallback: =>A): A = {
- logInfo()
- if (shouldRun) yesRun else fallback
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
deleted file mode 100644
index b12ec0de61..0000000000
--- a/src/partest/scala/tools/partest/IcodeTest.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.partest.nest.FileUtil.compareContents
-
-/** A trait for testing icode. All you need is this in a
- * partest source file:
- * {{{
- * object Test extends IcodeTest
- * }}}
- * And then the generated output will be the icode for everything
- * in that file. See source for possible customizations.
- */
-abstract class IcodeTest extends DirectTest {
- // override to check icode at a different point.
- def printIcodeAfterPhase = "icode"
- // override to use source code other than the file being tested.
- def code = testPath.slurp()
-
- override def extraSettings: String = "-usejavacp -Xprint-icode:" + printIcodeAfterPhase
-
- // Compile, read in all the *.icode files, delete them, and return their contents
- def collectIcode(args: String*): List[String] = {
- compile("-d" :: testOutput.path :: args.toList : _*)
- val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode")
-
- try icodeFiles sortBy (_.name) flatMap (f => f.lines.toList)
- finally icodeFiles foreach (f => f.delete())
- }
-
- // Default show() compiles the code with and without optimization and
- // outputs the diff.
- def show() {
- val lines1 = collectIcode("")
- val lines2 = collectIcode("-optimise")
-
- println(compareContents(lines1, lines2))
- }
-}
diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala
deleted file mode 100644
index 58d25d2f01..0000000000
--- a/src/partest/scala/tools/partest/MemoryTest.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-abstract class MemoryTest {
- def maxDelta: Double
- def calcsPerIter: Int
- def calc(): Unit
-
- def main(args: Array[String]) {
- val rt = Runtime.getRuntime()
- def memUsage() = {
- import java.lang.management._
- import scala.collection.JavaConverters._
- val pools = ManagementFactory.getMemoryPoolMXBeans.asScala
- pools.map(_.getUsage.getUsed).sum / 1000000d
- }
-
- val history = scala.collection.mutable.ListBuffer[Double]()
- def stressTestIter() = {
- var i = 0
- while (i < calcsPerIter) { calc(); i += 1 }
- 1 to 5 foreach (_ => rt.gc())
- history += memUsage
- }
-
- 1 to 5 foreach (_ => stressTestIter())
- val reference = memUsage()
- 1 to 5 foreach (_ => stressTestIter())
- 1 to 5 foreach (_ => rt.gc())
- val result = memUsage()
- history += result
-
- val delta = result - reference
- if (delta > maxDelta) {
- println("FAILED")
- history foreach (mb => println(mb + " Mb"))
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
deleted file mode 100644
index 8478edeb4d..0000000000
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.tools
-package partest
-
-import scala.concurrent.duration.Duration
-import scala.tools.nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import java.lang.Runtime.{ getRuntime => runtime }
-
-object PartestDefaults {
-
- def testRootName = propOrNone("partest.root")
- def srcDirName = propOrElse("partest.srcdir", "files")
- def testRootDir = testRootName map (x => Directory(x))
-
- // def classPath = propOrElse("partest.classpath", "")
- def classPath = PathResolver.Environment.javaUserClassPath // XXX
-
- def javaCmd = propOrElse("partest.javacmd", "java")
- def javacCmd = propOrElse("partest.javac_cmd", "javac")
- def javaOpts = propOrElse("partest.java_opts", "")
- def scalacOpts = propOrElse("partest.scalac_opts", "")
-
- def testBuild = propOrNone("partest.build")
- def errorCount = propOrElse("partest.errors", "0").toInt
- def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse runtime.availableProcessors
- def waitTime = propOrNone("partest.timeout") map (Duration.apply) getOrElse Duration("4 hours")
-
- //def timeout = "1200000" // per-test timeout
-}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
deleted file mode 100644
index 8b88021dbf..0000000000
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ /dev/null
@@ -1,207 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import scala.util.Properties.setProp
-import scala.tools.ant.sabbus.CompilationPathProperty
-import java.lang.reflect.Method
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{ Reference, FileSet}
-import org.apache.tools.ant.types.Commandline.Argument
-import scala.tools.ant.ScalaTask
-
-/** An Ant task to execute the Scala test suite (NSC).
- *
- * This task can take the following parameters as attributes:
- * - `srcdir`,
- * - `classpath`,
- * - `classpathref`,
- * - `erroronfailed`,
- * - `javacmd`,
- * - `javaccmd`,
- * - `scalacopts`,
- * - `debug`,
- * - `junitreportdir`.
- *
- * It also takes the following parameters as nested elements:
- * - `compilationpath`.
- *
- * @author Philippe Haller
- */
-class PartestTask extends Task with CompilationPathProperty with ScalaTask {
- type Path = org.apache.tools.ant.types.Path
-
- private var kinds: List[String] = Nil
- private var classpath: Option[Path] = None
- private var debug = false
- private var errorOnFailed: Boolean = true
- private var jUnitReportDir: Option[File] = None
- private var javaccmd: Option[File] = None
- private var javacmd: Option[File] = Option(sys.props("java.home")) map (x => new File(x, "bin/java"))
- private var scalacArgs: Option[Seq[Argument]] = None
- private var srcDir: Option[String] = None
- private var colors: Int = 0
-
- def setSrcDir(input: String) {
- srcDir = Some(input)
- }
-
- def setColors(input: String) {
- try colors = input.toInt catch { case _: NumberFormatException => () }
- if (colors > 0)
- sys.props("partest.colors") = colors.toString
- }
-
- def setClasspath(input: Path) {
- if (classpath.isEmpty)
- classpath = Some(input)
- else
- classpath.get.append(input)
- }
-
- def createClasspath(): Path = {
- if (classpath.isEmpty) classpath = Some(new Path(getProject()))
- classpath.get.createPath()
- }
-
- def setClasspathref(input: Reference) {
- createClasspath().setRefid(input)
- }
- def setErrorOnFailed(input: Boolean) {
- errorOnFailed = input
- }
-
- def setJavaCmd(input: File) {
- javacmd = Some(input)
- }
-
- def setKinds(input: String) {
- kinds = words(input)
- }
-
- def setJavacCmd(input: File) {
- javaccmd = Some(input)
- }
-
- def setScalacOpts(input: String) {
- val s = input.split(' ').map { s => val a = new Argument; a.setValue(s); a }
- scalacArgs = Some(scalacArgs.getOrElse(Seq()) ++ s)
- }
-
- def createCompilerArg(): Argument = {
- val a = new Argument
- scalacArgs = Some(scalacArgs.getOrElse(Seq()) :+ a)
- a
- }
-
- def setDebug(input: Boolean) {
- debug = input
- }
-
- def setJUnitReportDir(input: File) {
- jUnitReportDir = Some(input)
- }
-
- override def execute() {
- if (debug || sys.props.contains("partest.debug")) {
- nest.NestUI.setDebug()
- }
-
- srcDir foreach (x => setProp("partest.srcdir", x))
-
- val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.")
- val cpfiles = classpath.list map { fs => new File(fs) } toList
- def findCp(name: String) = cpfiles find (f =>
- (f.getName == s"scala-$name.jar")
- || (f.absolutePathSegments endsWith Seq("classes", name))
- ) getOrElse sys.error(s"Provided classpath does not contain a Scala $name element.")
-
- val scalaLibrary = findCp("library")
- val scalaReflect = findCp("reflect")
- val scalaCompiler = findCp("compiler")
- val scalaPartest = findCp("partest")
- val scalaActors = findCp("actors")
-
- def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
- val parts = a.getParts
- if (parts eq null) Nil else parts.toSeq
- })
-
- val antRunner = new scala.tools.partest.nest.AntRunner
- val antFileManager = antRunner.fileManager
-
- // antFileManager.failed = runFailed
- antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
- antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
- antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
- antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
- antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
- antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
-
- javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
- javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
- scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
-
- def runSet(kind: String, files: Array[File]): (Int, Int, List[String]) = {
- if (files.isEmpty) (0, 0, List())
- else {
- log(s"Running ${files.length} tests in '$kind' at $now")
- // log(s"Tests: ${files.toList}")
- val results = antRunner.reflectiveRunTestsForFiles(files, kind)
- val (passed, failed) = results partition (_.isOk)
- val numPassed = passed.size
- val numFailed = failed.size
- def failedMessages = failed map (_.longStatus)
-
- log(s"Completed '$kind' at $now")
-
- // create JUnit Report xml files if directory was specified
- jUnitReportDir foreach { d =>
- d.mkdir
-
- val report = testReport(kind, results, numPassed, numFailed)
- scala.xml.XML.save(d.getAbsolutePath+"/"+kind+".xml", report)
- }
-
- (numPassed, numFailed, failedMessages)
- }
- }
-
- val _results = kinds map (k => runSet(k, TestKinds testsFor k map (_.jfile) toArray))
- val allSuccesses = _results map (_._1) sum
- val allFailures = _results map (_._2) sum
- val allFailedPaths = _results flatMap (_._3)
-
- def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0)
- "Test suite finished with %d case%s failing:\n".format(allFailures, s)+
- allFailedPaths.mkString("\n")
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
- }
-
- private def oneResult(res: TestState) =
- <testcase name={res.testIdent}>{
- if (res.isOk) scala.xml.NodeSeq.Empty
- else <failure message="Test failed"/>
- }</testcase>
-
- private def testReport(kind: String, results: Iterable[TestState], succs: Int, fails: Int) =
- <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- <properties/>
- {
- results map oneResult
- }
- </testsuite>
-}
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
deleted file mode 100644
index 1f1c8a95ea..0000000000
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import java.security._
-import java.util._
-
-abstract class SecurityTest extends App {
- def throwIt(x: Any) = throw new AccessControlException("" + x)
- def propertyCheck(p: PropertyPermission): Unit = throwIt(p)
-
- def check(perm: Permission): Unit = perm match {
- case p: PropertyPermission => propertyCheck(p)
- case _ => ()
- }
-}
diff --git a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
deleted file mode 100644
index 7f3604c86c..0000000000
--- a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.reporters.StoreReporter
-import scala.collection.mutable
-
-trait StoreReporterDirectTest extends DirectTest {
- lazy val storeReporter: StoreReporter = new scala.tools.nsc.reporters.StoreReporter()
-
- /** Discards all but the first message issued at a given position. */
- def filteredInfos: Seq[storeReporter.Info] = storeReporter.infos.groupBy(_.pos).map(_._2.head).toList
-
- /** Hook into [[scala.tools.partest.DirectTest]] to install the custom reporter */
- override def reporter(settings: Settings) = storeReporter
-}
diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala
deleted file mode 100644
index b4e8afd0d2..0000000000
--- a/src/partest/scala/tools/partest/TestKinds.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.tools
-package partest
-
-import nest.PathSettings.srcDir
-
-object TestKinds {
- val standardKinds = ("pos neg run jvm res scalacheck scalap specialized instrumented presentation ant" split "\\s+").toList
-
- def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml")
- def denotesTestDir(p: Path) = kindOf(p) match {
- case "res" => false
- case _ => p.isDirectory && p.extension == ""
- }
- def denotesTestPath(p: Path) = denotesTestDir(p) || denotesTestFile(p)
-
- // TODO
- def isTestForPartest(p: Path) = (
- (p.name == "intentional-failure.scala")
- || (p.path contains "test-for-partest")
- )
-
- def kindOf(p: Path) = {
- p.toAbsolute.segments takeRight 2 head
-
- // (srcDir relativize p.toCanonical).segments match {
- // case (".." :: "scaladoc" :: xs) => xs.head
- // case xs => xs.head
- // }
- }
- def logOf(p: Path) = {
- p.parent / s"${p.stripExtension}-${kindOf(p)}.log"
- // p.parent / s"${p.stripExtension}.log"
- }
-
- // true if a test path matches the --grep expression.
- private def pathMatchesExpr(path: Path, expr: String) = {
- // Matches the expression if any source file contains the expr,
- // or if the checkfile contains it, or if the filename contains
- // it (the last is case-insensitive.)
- def matches(p: Path) = (
- (p.path.toLowerCase contains expr.toLowerCase)
- || (p.fileContents contains expr)
- )
- def candidates = {
- (path changeExtension "check") +: {
- if (path.isFile) List(path)
- else path.toDirectory.deepList() filter (_.isJavaOrScala) toList
- }
- }
-
- (candidates exists matches)
- }
-
- def groupedTests(paths: List[Path]): List[(String, List[Path])] =
- (paths.distinct groupBy kindOf).toList sortBy (standardKinds indexOf _._1)
-
- /** Includes tests for testing partest. */
- private def allTestsForKind(kind: String): List[Path] =
- (srcDir / kind toDirectory).list.toList filter denotesTestPath
-
- def testsForPartest: List[Path] = standardKinds flatMap allTestsForKind filter isTestForPartest
- def testsFor(kind: String): List[Path] = allTestsForKind(kind) filterNot isTestForPartest
- def grepFor(expr: String): List[Path] = standardTests filter (t => pathMatchesExpr(t, expr))
- def standardTests: List[Path] = standardKinds flatMap testsFor
- def failedTests: List[Path] = standardTests filter (p => logOf(p).isFile)
-}
diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala
deleted file mode 100644
index dbe8a222a5..0000000000
--- a/src/partest/scala/tools/partest/TestState.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.FatalError
-import scala.tools.nsc.util.stackTraceString
-
-sealed abstract class TestState {
- def testFile: File
- def what: String
- def reason: String
- def transcript: List[String]
-
- def isOk = false
- def isSkipped = false
- def testIdent = testFile.testIdent
- def transcriptString = transcript mkString EOL
-
- def identAndReason = testIdent + reasonString
- def status = s"$what - $identAndReason"
- def longStatus = status + transcriptString
- def reasonString = if (reason == "") "" else s" [$reason]"
-
- def shortStatus = if (isOk) "ok" else "!!"
-
- override def toString = status
-}
-
-object TestState {
- case class Uninitialized(testFile: File) extends TestState {
- def what = "uninitialized"
- def reason = what
- def transcript = Nil
- override def shortStatus = "??"
- }
- case class Pass(testFile: File) extends TestState {
- def what = "pass"
- override def isOk = true
- def transcript: List[String] = Nil
- def reason = ""
- }
- case class Updated(testFile: File) extends TestState {
- def what = "updated"
- override def isOk = true
- def transcript: List[String] = Nil
- def reason = "updated check file"
- override def shortStatus = "++"
- }
- case class Skip(testFile: File, reason: String) extends TestState {
- def what = "skip"
- override def isOk = true
- override def isSkipped = true
- def transcript: List[String] = Nil
- override def shortStatus = "--"
- }
- case class Fail(testFile: File, reason: String, transcript: List[String]) extends TestState {
- def what = "fail"
- }
- case class Crash(testFile: File, caught: Throwable, transcript: List[String]) extends TestState {
- def what = "crash"
- def reason = s"caught $caught_s - ${caught.getMessage}"
-
- private def caught_s = (caught.getClass.getName split '.').last
- private def stack_s = stackTraceString(caught)
- override def transcriptString = nljoin(super.transcriptString, caught_s)
- }
-}
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
deleted file mode 100644
index 5c177ac962..0000000000
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-import scala.reflect.{ classTag, ClassTag }
-
-trait TestUtil {
- /** Given function and block of code, evaluates code block,
- * calls function with nanoseconds elapsed, and returns block result.
- */
- def timed[T](f: Long => Unit)(body: => T): T = {
- val start = System.nanoTime
- val result = body
- val end = System.nanoTime
-
- f(end - start)
- result
- }
- /** Times body and returns (nanos, result).
- */
- def alsoNanos[T](body: => T): (Long, T) = {
- var nanos = 0L
- val result = timed(nanos = _)(body)
-
- (nanos, result)
- }
- def nanos(body: => Unit): Long = alsoNanos(body)._1
-
- def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
- try {
- code
- assert(false, "did not throw " + classTag[T])
- } catch {
- case ex: Exception if classTag[T].runtimeClass isInstance ex =>
- }
-}
-
-// Used in tests.
-object TestUtil extends TestUtil {
-}
diff --git a/src/partest/scala/tools/partest/antlib.xml b/src/partest/scala/tools/partest/antlib.xml
deleted file mode 100644
index b3b98e853f..0000000000
--- a/src/partest/scala/tools/partest/antlib.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<antlib>
- <taskdef name="partest"
- classname="scala.tools.partest.PartestTask"/>
-</antlib>
diff --git a/src/partest/scala/tools/partest/instrumented/Profiler.java b/src/partest/scala/tools/partest/instrumented/Profiler.java
deleted file mode 100644
index e267e197e7..0000000000
--- a/src/partest/scala/tools/partest/instrumented/Profiler.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.instrumented;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting
- * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class.
- *
- * WARANING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
- * because it must be universally accessible for instrumentation needs. If you want to profile your test use
- * {@link Instrumentation} instead.
- */
-public class Profiler {
-
- private static boolean isProfiling = false;
- private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
-
- static public class MethodCallTrace {
- final String className;
- final String methodName;
- final String methodDescriptor;
-
- public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
- this.className = className;
- this.methodName = methodName;
- this.methodDescriptor = methodDescriptor;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof MethodCallTrace)) {
- return false;
- } else {
- MethodCallTrace that = (MethodCallTrace) obj;
- return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
- }
- }
- @Override
- public int hashCode() {
- return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
- }
- }
-
- public static void startProfiling() {
- isProfiling = true;
- }
-
- public static void stopProfiling() {
- isProfiling = false;
- }
-
- public static boolean isProfiling() {
- return isProfiling;
- }
-
- public static void resetProfiling() {
- counts = new HashMap<MethodCallTrace, Integer>();
- }
-
- public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
- if (isProfiling) {
- MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
- Integer counter = counts.get(trace);
- if (counter == null) {
- counts.put(trace, 1);
- } else {
- counts.put(trace, counter+1);
- }
- }
- }
-
- public static Map<MethodCallTrace, Integer> getStatistics() {
- return new HashMap<MethodCallTrace, Integer>(counts);
- }
-
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
deleted file mode 100644
index 878c8613d5..0000000000
--- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.ClassFileTransformer;
-import java.security.ProtectionDomain;
-
-import scala.tools.asm.ClassReader;
-import scala.tools.asm.ClassWriter;
-
-public class ASMTransformer implements ClassFileTransformer {
-
- private boolean shouldTransform(String className) {
- return
- // do not instrument instrumentation logic (in order to avoid infinite recursion)
- !className.startsWith("scala/tools/partest/instrumented/") &&
- !className.startsWith("scala/tools/partest/javaagent/") &&
- // we instrument all classes from empty package
- (!className.contains("/") ||
- // we instrument all classes from scala package
- className.startsWith("scala/") ||
- // we instrument all classes from `instrumented` package
- className.startsWith("instrumented/"));
- }
-
- public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
- if (shouldTransform(className)) {
- ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
- @Override protected String getCommonSuperClass(final String type1, final String type2) {
- // Since we are not recomputing stack frame map, this should never be called we override this method because
- // default implementation uses reflection for implementation and might try to load the class that we are
- // currently processing. That leads to weird results like swallowed exceptions and classes being not
- // transformed.
- throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
- ") while transforming " + className);
- }
- };
- ProfilerVisitor visitor = new ProfilerVisitor(writer);
- ClassReader reader = new ClassReader(classfileBuffer);
- reader.accept(visitor, 0);
- return writer.toByteArray();
- } else {
- return classfileBuffer;
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
deleted file mode 100644
index 3b18987040..0000000000
--- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.Instrumentation;
-import java.lang.instrument.UnmodifiableClassException;
-
-/**
- * Profiling agent that instruments byte-code to insert calls to
- * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
- * by using ASM library for byte-code manipulation.
- */
-public class ProfilingAgent {
- public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
- // NOTE: we are adding transformer that won't be applied to classes that are already loaded
- // This should be ok because premain should be executed before main is executed so Scala library
- // and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
- // not depend on Scala library. In case our assumptions are wrong we can always insert call to
- // inst.retransformClasses.
- inst.addTransformer(new ASMTransformer(), false);
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
deleted file mode 100644
index 1d3b79171b..0000000000
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package nest
-
-class AntRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): List[TestState] =
- runTestsForFiles(kindFiles.toList, kind)
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
deleted file mode 100644
index b436675d3a..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-
-
-package scala.tools.partest
-package nest
-
-import java.io.{ FilenameFilter, IOException }
-import java.net.URI
-import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
-import scala.tools.nsc.{ io, util }
-import PathResolver.{ Environment, Defaults }
-
-class ConsoleFileManager extends FileManager {
- var testBuild: Option[String] = PartestDefaults.testBuild
- def testBuildFile = testBuild map (testParent / _)
-
- var testClasses: Option[String] = None
-
- def this(buildPath: String, rawClasses: Boolean) = {
- this()
- if (rawClasses)
- testClasses = Some(buildPath)
- else
- testBuild = Some(buildPath)
- // re-run because initialization of default
- // constructor must be updated
- findLatest()
- }
-
- def this(buildPath: String) = {
- this(buildPath, false)
- }
-
- def this(buildPath: String, rawClasses: Boolean, moreOpts: String) = {
- this(buildPath, rawClasses)
- SCALAC_OPTS = SCALAC_OPTS ++ moreOpts.split(' ').toSeq.filter(_.length > 0)
- }
-
- lazy val srcDir = PathSettings.srcDir
- lazy val testRootDir = PathSettings.testRoot
- lazy val testRootPath = testRootDir.toAbsolute.path
- def testParent = testRootDir.parent
-
- var CLASSPATH = PartestDefaults.classPath
- var JAVACMD = PartestDefaults.javaCmd
- var JAVAC_CMD = PartestDefaults.javacCmd
-
-
- vlog("CLASSPATH: "+CLASSPATH)
-
- if (!srcDir.isDirectory) {
- NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
- sys.exit(1)
- }
-
- CLASSPATH = {
- val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.toCanonical.path)
-
- // add all jars in libs
- (CLASSPATH :: libs.toList) mkString pathSeparator
- }
-
- def findLatest() {
- vlog("test parent: "+testParent)
-
- def prefixFileWith(parent: File, relPath: String) = (SFile(parent) / relPath).toCanonical
- def prefixFile(relPath: String) = (testParent / relPath).toCanonical
-
- if (!testClasses.isEmpty) {
- testClassesDir = Path(testClasses.get).toCanonical.toDirectory
- vlog("Running with classes in "+testClassesDir)
-
- latestLibFile = testClassesDir / "library"
- latestActorsFile = testClassesDir / "library" / "actors"
- latestReflectFile = testClassesDir / "reflect"
- latestCompFile = testClassesDir / "compiler"
- latestPartestFile = testClassesDir / "partest"
- }
- else if (testBuild.isDefined) {
- val dir = Path(testBuild.get)
- vlog("Running on "+dir)
- latestLibFile = dir / "lib/scala-library.jar"
- latestActorsFile = dir / "lib/scala-actors.jar"
- latestReflectFile = dir / "lib/scala-reflect.jar"
- latestCompFile = dir / "lib/scala-compiler.jar"
- latestPartestFile = dir / "lib/scala-partest.jar"
- }
- else {
- def setupQuick() {
- vlog("Running build/quick")
- latestLibFile = prefixFile("build/quick/classes/library")
- latestActorsFile = prefixFile("build/quick/classes/library/actors")
- latestReflectFile = prefixFile("build/quick/classes/reflect")
- latestCompFile = prefixFile("build/quick/classes/compiler")
- latestPartestFile = prefixFile("build/quick/classes/partest")
- }
-
- def setupInst() {
- vlog("Running dist (installed)")
- val p = testParent.getParentFile
- latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
- latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
- latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
- latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
- latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
- }
-
- def setupDist() {
- vlog("Running dists/latest")
- latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
- latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
- latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
- latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
- }
-
- def setupPack() {
- vlog("Running build/pack")
- latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
- latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
- latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
- latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
- }
-
- def mostRecentOf(base: String, names: String*) =
- names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
-
- // detect most recent build
- val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "reflect/reflect.properties", "library/library.properties")
- val packTime = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
- val distTime = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
- val instTime = mostRecentOf("lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
-
- val pairs = Map(
- (quickTime, () => setupQuick()),
- (packTime, () => setupPack()),
- (distTime, () => setupDist()),
- (instTime, () => setupInst())
- )
-
- // run setup based on most recent time
- pairs(pairs.keys max)()
- }
-
- LATEST_LIB = latestLibFile.getAbsolutePath
- LATEST_REFLECT = latestReflectFile.getAbsolutePath
- LATEST_COMP = latestCompFile.getAbsolutePath
- LATEST_PARTEST = latestPartestFile.getAbsolutePath
- LATEST_ACTORS = latestActorsFile.getAbsolutePath
- }
-
- var LATEST_LIB: String = ""
- var LATEST_REFLECT: String = ""
- var LATEST_COMP: String = ""
- var LATEST_PARTEST: String = ""
- var LATEST_ACTORS: String = ""
-
- var latestLibFile: File = _
- var latestActorsFile: File = _
- var latestReflectFile: File = _
- var latestCompFile: File = _
- var latestPartestFile: File = _
- //def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
- //def latestScalapFile: File = new File(latestLibFile.getParentFile, "scalap.jar")
- var testClassesDir: Directory = _
- // initialize above fields
- findLatest()
-
- /*
- def getFiles(kind: String, cond: Path => Boolean): List[File] = {
- def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
-
- val dir = Directory(srcDir / kind)
-
- if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
- else NestUI.failure("Directory '%s' not found" format dir)
-
- val files = dir.list filterNot ignoreDir filter cond toList
-
- ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
- }
- */
- var latestFjbgFile: File = _
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
deleted file mode 100644
index 332131ca3a..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-package nest
-
-import utils.Properties._
-import scala.tools.nsc.Properties.{ versionMsg, setProp }
-import scala.collection.{ mutable, immutable }
-import PathSettings.srcDir
-import TestKinds._
-import scala.reflect.internal.util.Collections.distinctBy
-import scala.tools.cmd.{ CommandLine, CommandLineParser, Instance }
-
-class ConsoleRunner(argstr: String) extends {
- val parsed = ConsoleRunnerSpec.creator(CommandLineParser tokenize argstr)
-} with DirectRunner with ConsoleRunnerSpec with Instance {
- import NestUI._
- import NestUI.color._
-
- // So we can ctrl-C a test run and still hear all
- // the buffered failure info.
- scala.sys addShutdownHook issueSummaryReport()
-
- var fileManager: ConsoleFileManager = _
-
- private var totalTests = 0
- private val passedTests = mutable.ListBuffer[TestState]()
- private val failedTests = mutable.ListBuffer[TestState]()
-
- def comment(s: String) = echo(magenta("# " + s))
- def levyJudgment() = {
- if (totalTests == 0) echoMixed("No tests to run.")
- else if (elapsedMillis == 0) echoMixed("Test Run ABORTED")
- else if (isSuccess) echoPassed("Test Run PASSED")
- else echoFailed("Test Run FAILED")
- }
-
- def passFailString(passed: Int, failed: Int, skipped: Int): String = {
- val total = passed + failed + skipped
- val isSuccess = failed == 0
- def p0 = s"$passed/$total"
- def p = ( if (isSuccess) bold(green(p0)) else p0 ) + " passed"
- def f = if (failed == 0) "" else bold(red("" + failed)) + " failed"
- def s = if (skipped == 0) "" else bold(yellow("" + skipped)) + " skipped"
-
- oempty(p, f, s) mkString ", "
- }
-
- private var summarizing = false
- private var elapsedMillis = 0L
- private var expectedFailures = 0
- private def isSuccess = failedTests.size == expectedFailures
-
- def issueSummaryReport() {
- // Don't run twice
- if (!summarizing) {
- summarizing = true
-
- val passed0 = passedTests.toList
- val failed0 = failedTests.toList
- val passed = passed0.size
- val failed = failed0.size
- val skipped = totalTests - (passed + failed)
- val passFail = passFailString(passed, failed, skipped)
- val elapsed = if (elapsedMillis > 0) " (elapsed time: " + elapsedString(elapsedMillis) + ")" else ""
- val message = passFail + elapsed
-
- if (failed0.nonEmpty) {
- if (isPartestVerbose) {
- echo(bold(cyan("##### Transcripts from failed tests #####\n")))
- failed0 foreach { state =>
- comment("partest " + state.testFile)
- echo(state.transcriptString + "\n")
- }
- }
-
- def files_s = failed0.map(_.testFile).mkString(""" \""" + "\n ")
- echo("# Failed test paths (this command will update checkfiles)")
- echo("test/partest --update-check \\\n " + files_s + "\n")
- }
-
- echo(message)
- levyJudgment()
- }
- }
-
- def run(): Unit = {
- if (optDebug) NestUI.setDebug()
- if (optVerbose) NestUI.setVerbose()
- if (optTerse) NestUI.setTerse()
- if (optShowDiff) NestUI.setDiffOnFail()
-
- // Early return on no args, version, or invalid args
- if (optVersion) return echo(versionMsg)
- if ((argstr == "") || optHelp) return NestUI.usage()
-
- val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath
- if (invalid.nonEmpty) {
- if (isPartestVerbose)
- invalid foreach (p => echoWarning(s"Discarding invalid test path " + p))
- else if (!isPartestTerse)
- echoWarning(s"Discarding ${invalid.size} invalid test paths")
- }
-
- optSourcePath foreach (x => setProp("partest.srcdir", x))
- optTimeout foreach (x => setProp("partest.timeout", x))
-
- fileManager =
- if (optBuildPath.isDefined) new ConsoleFileManager(optBuildPath.get)
- else if (optClassPath.isDefined) new ConsoleFileManager(optClassPath.get, true)
- else if (optPack) new ConsoleFileManager("build/pack")
- else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
-
- fileManager.updateCheck = optUpdateCheck
- fileManager.failed = optFailed
-
- val partestTests = (
- if (optSelfTest) TestKinds.testsForPartest
- else Nil
- )
-
- val grepExpr = optGrep getOrElse ""
-
- // If --grep is given we suck in every file it matches.
- val greppedTests = if (grepExpr == "") Nil else {
- val paths = grepFor(grepExpr)
- if (paths.isEmpty)
- echoWarning(s"grep string '$grepExpr' matched no tests.\n")
-
- paths.sortBy(_.toString)
- }
-
- val isRerun = optFailed
- val rerunTests = if (isRerun) TestKinds.failedTests else Nil
- def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests
-
- val givenKinds = standardKinds filter parsed.isSet
- val kinds = (
- if (optAll) standardKinds
- else if (givenKinds.nonEmpty) givenKinds
- else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all
- else Nil
- )
- val kindsTests = kinds flatMap testsFor
- val dir =
- if (fileManager.testClasses.isDefined) fileManager.testClassesDir
- else fileManager.testBuildFile getOrElse {
- fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
- }
-
- def testContributors = {
- List(
- if (partestTests.isEmpty) "" else "partest self-tests",
- if (rerunTests.isEmpty) "" else "previously failed tests",
- if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories",
- if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'",
- if (individualTests.isEmpty) "" else "specified tests"
- ) filterNot (_ == "") mkString ", "
- }
-
- def banner = {
- val vmBin = javaHome + fileSeparator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- s"""|Scala compiler classes in: $dir
- |Scala version is: $versionMsg
- |Scalac options are: ${fileManager.SCALAC_OPTS mkString " "}
- |Java binaries in: $vmBin
- |Java runtime is: $vmName
- |Java options are: $vmOpts
- |Source directory is: $srcDir
- |Available processors: ${Runtime.getRuntime().availableProcessors()}
- |Java Classpath: ${sys.props("java.class.path")}
- """.stripMargin
- }
-
- chatty(banner)
-
- val allTests: List[Path] = distinctBy(miscTests ++ kindsTests)(_.toCanonical) sortBy (_.toString)
- val grouped = (allTests groupBy kindOf).toList sortBy (x => standardKinds indexOf x._1)
-
- totalTests = allTests.size
- expectedFailures = propOrNone("partest.errors") match {
- case Some(num) => num.toInt
- case _ => 0
- }
- val expectedFailureMessage = if (expectedFailures == 0) "" else s" (expecting $expectedFailures to fail)"
- echo(s"Selected $totalTests tests drawn from $testContributors$expectedFailureMessage\n")
-
- val (_, millis) = timed {
- for ((kind, paths) <- grouped) {
- val num = paths.size
- val ss = if (num == 1) "" else "s"
- comment(s"starting $num test$ss in $kind")
- val results = runTestsForFiles(paths map (_.jfile), kind)
- val (passed, failed) = results partition (_.isOk)
-
- passedTests ++= passed
- failedTests ++= failed
- if (failed.nonEmpty) {
- comment(passFailString(passed.size, failed.size, 0) + " in " + kind)
- }
- echo("")
- }
- }
- this.elapsedMillis = millis
- issueSummaryReport()
- System exit ( if (isSuccess) 0 else 1 )
- }
-
- run()
-}
-
-object ConsoleRunner {
- def main(args: Array[String]): Unit = {
- new ConsoleRunner(args mkString " ")
- }
-}
-
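The run() method above de-duplicates the selected tests by canonical path (distinctBy(miscTests ++ kindsTests)(_.toCanonical)) before grouping them by kind. The helper itself is defined elsewhere in partest and does not appear in this diff; a minimal sketch of the idea, keeping the first occurrence per key and preserving the original ordering:

    // Sketch only: de-duplicate by a derived key, first occurrence wins, order preserved.
    def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = {
      val seen = scala.collection.mutable.HashSet[B]()
      xs filter (x => seen add f(x))     // HashSet#add returns true only for keys not seen before
    }

    // e.g. distinctBy(paths)(_.toCanonical) keeps one entry per canonical path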
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala b/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
deleted file mode 100644
index f9143013e9..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package scala.tools.partest.nest
-
-import language.postfixOps
-
-import scala.tools.cmd.{ CommandLine, Interpolation, Meta, Reference, Spec }
-
-trait ConsoleRunnerSpec extends Spec with Meta.StdOpts with Interpolation {
- def referenceSpec = ConsoleRunnerSpec
- def programInfo = Spec.Info(
- "console-runner",
- "Usage: NestRunner [options] [test test ...]",
- "scala.tools.partest.nest.ConsoleRunner")
-
- heading("Test categories:")
- val optAll = "all" / "run all tests" --?
- val optPos = "pos" / "run compilation tests (success)" --?
- val optNeg = "neg" / "run compilation tests (failure)" --?
- val optRun = "run" / "run interpreter and backend tests" --?
- val optJvm = "jvm" / "run JVM backend tests" --?
- val optRes = "res" / "run resident compiler tests" --?
- val optAnt = "ant" / "run Ant tests" --?
- val optScalap = "scalap" / "run scalap tests" --?
- val optSpecialized = "specialized" / "run specialization tests" --?
- val optScalacheck = "scalacheck" / "run ScalaCheck tests" --?
- val optInstrumented = "instrumented" / "run instrumented tests" --?
- val optPresentation = "presentation" / "run presentation compiler tests" --?
-
- heading("Test runner options:")
- val optFailed = "failed" / "run only those tests that failed during the last run" --?
- val optTimeout = "timeout" / "aborts the test suite after the given amount of time" --|
- val optPack = "pack" / "pick compiler/reflect/library in build/pack, and run all tests" --?
- val optGrep = "grep" / "run all tests whose source file contains the expression given to grep" --|
- val optUpdateCheck = "update-check" / "instead of failing tests with output change, update checkfile (use with care!)" --?
- val optBuildPath = "buildpath" / "set (relative) path to build jars (ex.: --buildpath build/pack)" --|
- val optClassPath = "classpath" / "set (absolute) path to build classes" --|
- val optSourcePath = "srcpath" / "set (relative) path to test source files (ex.: --srcpath pending)" --|
-
- heading("Test output options:")
- val optShowDiff = "show-diff" / "show diffs for failed tests" --?
- val optVerbose = "verbose" / "show verbose progress information" --?
- val optTerse = "terse" / "show terse progress information" --?
- val optDebug = "debug" / "enable debugging output" --?
-
- heading("Other options:")
- val optVersion = "version" / "show Scala version and exit" --?
- val optSelfTest = "self-test" / "run tests for partest itself" --?
- val optHelp = "help" / "show this page and exit" --?
-
-}
-
-object ConsoleRunnerSpec extends ConsoleRunnerSpec with Reference {
- type ThisCommandLine = CommandLine
- def creator(args: List[String]): ThisCommandLine = new CommandLine(ConsoleRunnerSpec, args)
-}
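The spec above declares two option shapes via the scala.tools.cmd DSL: "--?" for boolean switches and "--|" for options that take a value. The DSL itself is outside this diff; as a rough, illustrative sketch of the two shapes (names made up, not the real scala.tools.cmd implementation):

    // Illustration only; this is not the scala.tools.cmd API.
    def hasFlag(args: List[String], name: String): Boolean =
      args contains s"--$name"                                   // "--?" : present or not

    def valueOf(args: List[String], name: String): Option[String] = {
      val key = s"--$name"                                       // "--|" : takes the next token as its value
      (args zip args.drop(1)) collectFirst { case (`key`, v) => v }
    }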
diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
deleted file mode 100644
index 8e5ff2abc4..0000000000
--- a/src/partest/scala/tools/partest/nest/DirectCompiler.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-import scala.tools.nsc.util.{ FakePos, stackTraceString }
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.reflect.io.AbstractFile
-import scala.reflect.internal.util.Position
-import java.io.{ BufferedReader, PrintWriter, FileReader, Writer, FileWriter }
-
-class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
- shortname = true
- // override def error(pos: Position, msg: String): Unit
-}
-
-class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
- def this(cp: String) = this(cp, _ => ())
-
- nowarnings.value = false
- encoding.value = "UTF-8"
- classpath.value = cp
-}
-
-class PartestGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) {
- // override def abort(msg: String): Nothing
- // override def globalError(msg: String): Unit
- // override def supplementErrorMessage(msg: String): String
-}
-class DirectCompiler(val fileManager: FileManager) {
- def newGlobal(settings: Settings, reporter: Reporter): PartestGlobal =
- new PartestGlobal(settings, reporter)
-
- def newGlobal(settings: Settings, logWriter: FileWriter): Global =
- newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
-
- def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
- def newSettings(outdir: String): TestSettings = {
- val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
- val s = new TestSettings(cp)
- s.outdir.value = outdir
- s
- }
-
- def compile(runner: Runner, opts0: List[String], sources: List[File]): TestState = {
- import runner.{ sources => _, _ }
-
- val testSettings = new TestSettings(ClassPath.join(fileManager.LATEST_LIB, outDir.getPath))
- val logWriter = new FileWriter(logFile)
- val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile
- val opts = fileManager.updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir)
- val command = new CompilerCommand(opts, testSettings)
- val global = newGlobal(testSettings, logWriter)
- val reporter = global.reporter.asInstanceOf[ExtConsoleReporter]
- def errorCount = reporter.ERROR.count
-
- def defineSettings(s: Settings) = {
- s.outputDirs setSingleOutput outDir.getPath
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- prependToClasspaths(s, codelib)
- s.classpath append fileManager.CLASSPATH // adding this why?
-
- // add the instrumented library version to classpath
- if (kind == "specialized")
- prependToClasspaths(s, speclib)
-
- // check that option processing succeeded
- opts0.isEmpty || command.ok
- }
-
- if (!defineSettings(testSettings))
- if (opts0.isEmpty)
- reporter.error(null, s"bad settings: $testSettings")
- else
- reporter.error(null, opts0.mkString("bad options: ", space, ""))
-
- def ids = sources.map(_.testIdent) mkString space
- vlog(s"% scalac $ids")
-
- def execCompile() =
- if (command.shouldStopWithInfo) {
- logWriter append (command getInfoMessage global)
- runner genFail "compilation stopped with info"
- } else {
- new global.Run compile sources.map(_.getPath)
- if (!reporter.hasErrors) runner.genPass()
- else {
- reporter.printSummary()
- reporter.writer.close()
- runner.genFail(s"compilation failed with $errorCount errors")
- }
- }
-
- try { execCompile() }
- catch { case t: Throwable => reporter.error(null, t.getMessage) ; runner.genCrash(t) }
- finally { logWriter.close() }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
deleted file mode 100644
index 7bfa8c6e77..0000000000
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, FilenameFilter, IOException, StringWriter,
- FileInputStream, FileOutputStream, BufferedReader,
- FileReader, PrintWriter, FileWriter}
-import java.net.URI
-import scala.reflect.io.AbstractFile
-import scala.collection.mutable
-
-trait FileUtil {
- /**
- * Compares two files using difflib to produce a unified diff.
- *
- * @param f1 the first file to be compared
- * @param f2 the second file to be compared
- * @return the unified diff of the compared files or the empty string if they're equal
- */
- def compareFiles(f1: File, f2: File): String = {
- compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
- }
-
- /**
- * Compares two lists of lines using difflib to produce a unified diff.
- *
- * @param origLines the first seq of lines to be compared
- * @param newLines the second seq of lines to be compared
- * @param origName file name to be used in unified diff for `origLines`
- * @param newName file name to be used in unified diff for `newLines`
- * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal
- */
- def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
- import collection.JavaConverters._
-
- val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
- if (diff.getDeltas.isEmpty) ""
- else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
- }
-}
-object FileUtil extends FileUtil { }
-
-trait FileManager extends FileUtil {
-
- def testRootDir: Directory
- def testRootPath: String
-
- var JAVACMD: String
- var JAVAC_CMD: String
-
- var CLASSPATH: String
- var LATEST_LIB: String
- var LATEST_REFLECT: String
- var LATEST_COMP: String
- var LATEST_PARTEST: String
- var LATEST_ACTORS: String
-
- protected def relativeToLibrary(what: String): String = {
- def jarname = if (what startsWith "scala") s"$what.jar" else s"scala-$what.jar"
- if (LATEST_LIB endsWith ".jar")
- (SFile(LATEST_LIB).parent / jarname).toAbsolute.path
- else
- (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
- }
- def latestParserCBLib = relativeToLibrary("parser-combinators")
- def latestXmlLib = relativeToLibrary("xml")
- def latestScaladoc = relativeToLibrary("scaladoc")
- def latestInteractive = relativeToLibrary("interactive")
- def latestScalapFile = relativeToLibrary("scalap")
- def latestPaths = List(
- LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS,
- latestParserCBLib, latestXmlLib, latestScalapFile, latestScaladoc, latestInteractive
- )
- def latestFiles = latestPaths map (p => new java.io.File(p))
- def latestUrls = latestFiles map (_.toURI.toURL)
-
- var showDiff = false
- var updateCheck = false
- var showLog = false
- var failed = false
-
- var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
- var JAVA_OPTS = PartestDefaults.javaOpts
-
- /** Only when --debug is given. */
- lazy val testTimings = new mutable.HashMap[String, Long]
- def recordTestTiming(name: String, milliseconds: Long) =
- synchronized { testTimings(name) = milliseconds }
-
- def getLogFile(dir: File, fileBase: String, kind: String): File =
- new File(dir, fileBase + "-" + kind + ".log")
-
- def getLogFile(file: File, kind: String): File = {
- val dir = file.getParentFile
- val fileBase = basename(file.getName)
-
- getLogFile(dir, fileBase, kind)
- }
-
- def logFileExists(file: File, kind: String) =
- getLogFile(file, kind).canRead
-
- def overwriteFileWith(dest: File, file: File) =
- dest.isFile && copyFile(file, dest)
-
- def copyFile(from: File, dest: File): Boolean = {
- if (from.isDirectory) {
- assert(dest.isDirectory, "cannot copy directory to file")
- val subDir:Directory = Path(dest) / Directory(from.getName)
- subDir.createDirectory()
- from.listFiles.toList forall (copyFile(_, subDir))
- }
- else {
- val to = if (dest.isDirectory) new File(dest, from.getName) else dest
-
- try {
- SFile(to) writeAll SFile(from).slurp()
- true
- }
- catch { case _: IOException => false }
- }
- }
-
- def mapFile(file: File, replace: String => String) {
- val f = SFile(file)
-
- f.printlnAll(f.lines.toList map replace: _*)
- }
-
-  /** Massage args to merge plugins and fix paths.
-   * A plugin path may be given relative to the test root; a bare "." stands
-   * for the output dir (which is how ant passes in the plugins dir).
-   * While we're at it, mix in the baseline scalac options, too.
-   */
- def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): List[String] = {
- val dir = testRootDir
- // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual)
- def pathOrCwd(p: String) =
- if (p == ".") {
- val plugxml = "scalac-plugin.xml"
- val pout = if (out.isVirtual) Directory.makeTemp() else Path(out.path)
- val srcpath = Path(srcdir.path)
- val pd = (srcpath / plugxml).toFile
- if (pd.exists) pd copyTo (pout / plugxml)
- pout
- } else Path(p)
- def absolutize(path: String) = pathOrCwd(path) match {
- case x if x.isAbsolute => x.path
- case x => (dir / x).toAbsolute.path
- }
-
- val xprefix = "-Xplugin:"
- val (xplugs, others) = args partition (_ startsWith xprefix)
- val Xplugin = if (xplugs.isEmpty) Nil else List(xprefix +
- (xplugs map (_ stripPrefix xprefix) flatMap (_ split pathSeparator) map absolutize mkString pathSeparator)
- )
- SCALAC_OPTS.toList ::: others ::: Xplugin
- }
-}
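compareContents above is the only place partest touches the difflib API; for reference, the same call sequence in isolation (java-diff-utils on the classpath, inputs and file names chosen for the example):

    import scala.collection.JavaConverters._

    val original = List("foo", "bar", "baz")
    val revised  = List("foo", "BAR", "baz")
    val patch    = difflib.DiffUtils.diff(original.asJava, revised.asJava)
    val unified  =
      if (patch.getDeltas.isEmpty) ""                            // equal inputs produce no deltas
      else difflib.DiffUtils.generateUnifiedDiff("expected", "actual",
             original.asJava, patch, 1).asScala.mkString("\n")   // 1 line of context, as above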
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
deleted file mode 100644
index e398d2ead9..0000000000
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object NestRunner {
- def main(args: Array[String]) {
- new ReflectiveRunner main (args mkString " ")
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
deleted file mode 100644
index 5148115905..0000000000
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import java.io.PrintWriter
-
-class Colors(enabled: => Boolean) {
- import Console._
-
- val bold = colored(BOLD)
- val yellow = colored(YELLOW)
- val green = colored(GREEN)
- val blue = colored(BLUE)
- val red = colored(RED)
- val red_b = colored(RED_B)
- val green_b = colored(GREEN_B)
- val cyan = colored(CYAN)
- val magenta = colored(MAGENTA)
-
- private def colored(code: String): String => String =
- s => if (enabled) code + s + RESET else s
-}
-
-object NestUI {
- private val testNum = new java.util.concurrent.atomic.AtomicInteger(1)
- @volatile private var testNumberFmt = "%3d"
- // @volatile private var testNumber = 1
- private def testNumber = testNumberFmt format testNum.getAndIncrement()
- def resetTestNumber(max: Int = -1) {
- testNum set 1
- val width = if (max > 0) max.toString.length else 3
- testNumberFmt = s"%${width}d"
- }
-
- var colorEnabled = sys.props contains "partest.colors"
- val color = new Colors(colorEnabled)
- import color._
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- private var _outline = ""
- private var _success = ""
- private var _failure = ""
- private var _warning = ""
- private var _default = ""
-
- private var dotCount = 0
- private val DotWidth = 72
-
- def leftFlush() {
- if (dotCount != 0) {
- normal("\n")
- dotCount = 0
- }
- }
-
- def statusLine(state: TestState) = {
- import state._
- import TestState._
- val colorizer = state match {
- case _: Skip => yellow
- case _: Updated => cyan
- case s if s.isOk => green
- case _ => red
- }
- val word = bold(colorizer(state.shortStatus))
- f"$word $testNumber - $testIdent%-40s$reasonString"
- }
-
- def reportTest(state: TestState) = {
- if (isTerse && state.isOk) {
- if (dotCount >= DotWidth) {
- outline("\n.")
- dotCount = 1
- }
- else {
- outline(".")
- dotCount += 1
- }
- }
- else {
- echo(statusLine(state))
- if (!state.isOk && isDiffy) {
- val differ = bold(red("% ")) + "diff "
- state.transcript find (_ startsWith differ) foreach (echo(_))
- }
- }
- }
-
- def echo(message: String): Unit = synchronized {
- leftFlush()
- print(message + "\n")
- }
- def chatty(msg: String) = if (isVerbose) echo(msg)
-
- def echoSkipped(msg: String) = echo(yellow(msg))
- def echoPassed(msg: String) = echo(bold(green(msg)))
- def echoFailed(msg: String) = echo(bold(red(msg)))
- def echoMixed(msg: String) = echo(bold(yellow(msg)))
- def echoWarning(msg: String) = echo(bold(red(msg)))
-
- def initialize(number: Int) = number match {
- case MANY =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.BOLD + Console.GREEN
- _failure = Console.BOLD + Console.RED
- _warning = Console.BOLD + Console.YELLOW
- _default = Console.RESET
- case SOME =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.RESET
- _failure = Console.BOLD + Console.BLACK
- _warning = Console.BOLD + Console.BLACK
- _default = Console.RESET
- case _ =>
- }
-
- def outline(msg: String) = print(_outline + msg + _default)
- def outline(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_outline + msg + _default)
- }
-
- def success(msg: String) = print(_success + msg + _default)
- def success(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_success + msg + _default)
- }
-
- def failure(msg: String) = print(_failure + msg + _default)
- def failure(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_failure + msg + _default)
- }
-
- def warning(msg: String) = print(_warning + msg + _default)
-
- def normal(msg: String) = print(_default + msg)
- def normal(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_default + msg)
- }
-
- def usage() {
- println(ConsoleRunnerSpec.programInfo.usage)
- println(ConsoleRunnerSpec.helpMsg)
- sys.exit(1)
- }
-
- var _verbose = false
- var _debug = false
- var _terse = false
- var _diff = false
-
- def isVerbose = _verbose
- def isDebug = _debug
- def isTerse = _terse
- def isDiffy = _diff
-
- def setVerbose() {
- _verbose = true
- }
- def setDebug() {
- _debug = true
- }
- def setTerse() {
- _terse = true
- }
- def setDiffOnFail() {
- _diff = true
- }
- def verbose(msg: String) {
- if (isVerbose)
- System.err.println(msg)
- }
- def debug(msg: String) {
- if (isDebug)
- System.err.println(msg)
- }
-}
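The Colors class above takes `enabled` as a by-name parameter, so NestUI.colorEnabled is re-read on every colorizing call instead of being captured once at construction time. The pattern in isolation, assuming nothing beyond scala.Console:

    class Colorizer(enabled: => Boolean) {           // by-name: evaluated at each use
      private def colored(code: String): String => String =
        s => if (enabled) code + s + Console.RESET else s
      val green = colored(Console.GREEN)
      val red   = colored(Console.RED)
    }

    var useColor = false
    val c = new Colorizer(useColor)
    useColor = true                                  // still takes effect: `enabled` re-reads the var
    println(c.red("FAILED"))                         // printed in red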
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
deleted file mode 100644
index 030c515947..0000000000
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io.{ Path, File, Directory }
-import Path._
-
-object PathSettings {
- import PartestDefaults.{ testRootDir, srcDirName }
-
- private def cwd = Directory.Current getOrElse sys.error("user.dir property not set")
- private def isPartestDir(d: Directory) = (d.name == "test") && (d / srcDirName isDirectory)
- private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
- private def findJar(files: Iterator[File], name: String): Option[File] =
- files filter (_ hasExtension "jar") find { _.name startsWith name }
- private def findJarOrFail(name: String, ds: Directory*): File = findJar(ds flatMap (_.files) iterator, name) getOrElse
- sys.error(s"'${name}.jar' not found in '${ds map (_.path) mkString ", "}'.")
-
- // Directory <root>/test
- lazy val testRoot: Directory = testRootDir getOrElse {
- val candidates: List[Directory] = (cwd :: cwd.parents) flatMap (d => List(d, Directory(d / "test")))
-
- candidates find isPartestDir getOrElse sys.error("Directory 'test' not found.")
- }
-
- // Directory <root>/test/files or .../scaladoc
- def srcDir = Directory(testRoot / srcDirName toCanonical)
-
- // Directory <root>/test/files/lib
- lazy val srcLibDir = Directory(srcDir / "lib")
-
- // Directory <root>/test/files/speclib
- lazy val srcSpecLibDir = Directory(srcDir / "speclib")
-
- lazy val srcSpecLib: File = findJar(srcSpecLibDir, "instrumented") getOrElse {
- sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
- }
-
- // Directory <root>/test/files/codelib
- lazy val srcCodeLibDir = Directory(srcDir / "codelib")
-
- lazy val srcCodeLib: File = (
- findJar(srcCodeLibDir, "code")
- orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
- getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
- )
-
- lazy val instrumentationAgentLib: File = {
- findJar(buildPackLibDir.files, "scala-partest-javaagent") getOrElse {
- sys.error("No partest-javaagent jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
- }
- }
-
- // Directory <root>/build
- lazy val buildDir: Directory = {
- val bases = testRoot :: testRoot.parents
- // In the classic "ant" build, the relevant subdirectory is called build,
- // but in the postmodern "sbt" build, it is called target. Look for both.
- val dirs = Path.onlyDirs(bases flatMap (x => List(x / "build", x / "target")))
-
- dirs.headOption getOrElse sys.error("Neither 'build' nor 'target' dir found under test root " + testRoot + ".")
- }
-
- // Directory <root>/build/pack/lib
- lazy val buildPackLibDir = Directory(buildDir / "pack" / "lib")
-
- lazy val scalaCheck: File =
- findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
- sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
- }
-
- lazy val testInterface: File = findJarOrFail("test-interface", buildPackLibDir, srcLibDir)
-
- lazy val diffUtils: File =
- findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
-
- /** The platform-specific support jar, `tools.jar`.
- */
- lazy val platformTools: Option[File] = PathResolver.SupplementalLocations.platformTools
-}
-
-class PathSettings() {
- // def classpathAsURLs: List[URL]
-}
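testRoot above is located by walking up from the working directory and probing each ancestor (and its "test" child) for a partest layout. The same search expressed with plain java.io.File, assuming the default source dir name of "files":

    import java.io.File

    def findTestRoot(start: File): Option[File] = {
      val ancestors  = Iterator.iterate(start)(_.getParentFile).takeWhile(_ != null).toList
      val candidates = ancestors flatMap (d => List(d, new File(d, "test")))
      candidates find (d => d.getName == "test" && new File(d, "files").isDirectory)
    }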
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
deleted file mode 100644
index 3c77a03f1e..0000000000
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.Path
-import java.net.URLClassLoader
-
-/* This class is used to load an instance of DirectRunner using
- * a custom class loader.
- * The purpose is to "auto-detect" a good classpath for the
- * rest of the classes (Worker, CompileManager etc.), so that
- * the main NestRunner can be started merely by putting its
- * class on the classpath (ideally).
- */
-class ReflectiveRunner {
- // TODO: we might also use fileManager.CLASSPATH
- // to use the same classes as used by `scala` that
- // was used to start the runner.
- val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
-
- private def searchPath(option: String, as: List[String]): Option[String] = as match {
- case `option` :: r :: _ => Some(r)
- case _ :: rest => searchPath(option, rest)
- case Nil => None
- }
-
- def main(args: String) {
- val argList = (args.split("\\s")).toList
-
- if (isPartestDebug)
- showAllJVMInfo
-
- // find out which build to test
- val buildPath = searchPath("--buildpath", argList)
- val classPath = searchPath("--classpath", argList)
- val fileManager =
- if (!buildPath.isEmpty)
- new ConsoleFileManager(buildPath.get)
- else if (!classPath.isEmpty)
- new ConsoleFileManager(classPath.get, true)
- else if (argList contains "--pack")
- new ConsoleFileManager("build/pack")
- else // auto detection
- new ConsoleFileManager
-
- // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed
- // we hack into the classloader that will become parent classloader for scalac
- // this way we ensure that reflective macro lookup will pick correct Code.lift
- // it's also used to inject diffutils into the classpath when running partest from the test/partest script
- val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils, PathSettings.testInterface)
- val sepUrls = srcCodeLibAndDiff.map(_.toURI.toURL) ::: fileManager.latestUrls
- // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
- val sepLoader = new URLClassLoader(sepUrls.toArray, null)
-
- if (isPartestDebug)
- println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n "))
-
- // @partest maintainer: it seems to me that commented lines are incorrect
- // if classPath is not empty, then it has been provided by the --classpath option
- // which points to the root of Scala home (see ConsoleFileManager's testClasses and the true flag in the ctor for more information)
- // this doesn't mean that we had custom Java classpath set, so we don't have to override latestXXXFiles from the file manager
- //
- //val paths = classPath match {
- // case Some(cp) => Nil
- // case _ => files.toList map (_.path)
- //}
-
- setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*))
-
- // don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite
- //setProp("scala.home", latestLibFile.parent.parent.path)
- setProp("scala.home", "")
-
- if (isPartestDebug)
- for (prop <- List("java.class.path", "sun.boot.class.path", "java.ext.dirs"))
- println(prop + ": " + propOrEmpty(prop))
-
- try {
- val sepRunnerClass = sepLoader loadClass sepRunnerClassName
- val sepMainMethod = sepRunnerClass.getMethod("main", classOf[Array[String]])
- val cargs: Array[AnyRef] = Array(Array(args))
- sepMainMethod.invoke(null, cargs: _*)
- }
- catch {
- case cnfe: ClassNotFoundException =>
- cnfe.printStackTrace()
- NestUI.failure(sepRunnerClassName +" could not be loaded from:\n")
- sepUrls foreach (x => NestUI.failure(x + "\n"))
- }
- }
-}
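The core trick in ReflectiveRunner above is a URLClassLoader with a null parent, so only the explicitly listed jars are visible, followed by a reflective call to the runner's main method. Stripped of the partest specifics (class and jar names here are illustrative):

    import java.net.{ URL, URLClassLoader }

    def runIsolated(jars: Seq[URL], className: String, args: Array[String]): Unit = {
      val loader = new URLClassLoader(jars.toArray, null)        // null parent: no delegation to the app classpath
      val clazz  = loader loadClass className
      val main   = clazz.getMethod("main", classOf[Array[String]])
      main.invoke(null, Array[AnyRef](args): _*)                 // static-style invocation, one String[] argument
    }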
diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala
deleted file mode 100644
index d7d87bdcf5..0000000000
--- a/src/partest/scala/tools/partest/nest/Runner.scala
+++ /dev/null
@@ -1,894 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-package scala.tools.partest
-package nest
-
-import java.io.{ Console => _, _ }
-import java.net.URL
-import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
-import java.util.concurrent.Executors
-import java.util.concurrent.TimeUnit.NANOSECONDS
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.duration.Duration
-import scala.io.Codec
-import scala.reflect.internal.FatalError
-import scala.sys.process.{ Process, ProcessLogger }
-import scala.tools.nsc.Properties.{ envOrElse, isWin, jdkHome, javaHome, propOrElse, propOrEmpty, setProp }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.io.{ AbstractFile, PlainFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ Exceptional, ScalaClassLoader, stackTraceString }
-import scala.tools.scalap.Main.decompileScala
-import scala.tools.scalap.scalax.rules.scalasig.ByteCode
-import scala.util.{ Try, Success, Failure }
-import ClassPath.{ join, split }
-import PartestDefaults.{ javaCmd, javacCmd }
-import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
-
-trait PartestRunSettings {
- def gitPath: Path
- def reportPath: Path
- def logPath: Path
-
- def testPaths: List[Path]
-
- def gitDiffOptions: List[String]
- def extraScalacOptions: List[String]
- def extraJavaOptions: List[String]
-}
-
-class TestTranscript {
- import NestUI.color._
- private val buf = ListBuffer[String]()
- private def pass(s: String) = bold(green("% ")) + s
- private def fail(s: String) = bold(red("% ")) + s
-
- def add(action: String): this.type = { buf += action ; this }
- def append(text: String) { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) }
-
- // Colorize prompts according to pass/fail
- def fail: List[String] = buf.toList match {
- case Nil => Nil
- case xs => (xs.init map pass) :+ fail(xs.last)
- }
-}
-
-/** Run a single test. Rubber meets road. */
-class Runner(val testFile: File, fileManager: FileManager, val testRunParams: TestRunParams) {
- import fileManager._
-
- // Override to true to have the outcome of this test displayed
- // whether it passes or not; in general only failures are reported,
- // except for a . per passing test to show progress.
- def isEnumeratedTest = false
-
- private var _lastState: TestState = null
- private var _transcript = new TestTranscript
-
- def lastState = if (_lastState == null) Uninitialized(testFile) else _lastState
- def setLastState(s: TestState) = _lastState = s
- def transcript: List[String] = _transcript.fail ++ logFile.fileLines
- def pushTranscript(msg: String) = _transcript add msg
-
- val parentFile = testFile.getParentFile
- val kind = parentFile.getName
- val fileBase = basename(testFile.getName)
- val logFile = new File(parentFile, s"$fileBase-$kind.log")
- val outFile = logFile changeExtension "obj"
- val checkFile = testFile changeExtension "check"
- val flagsFile = testFile changeExtension "flags"
- val testIdent = testFile.testIdent // e.g. pos/t1234
-
- lazy val outDir = { outFile.mkdirs() ; outFile }
-
- type RanOneTest = (Boolean, LogContext)
-
- def showCrashInfo(t: Throwable) {
- System.err.println("Crashed running test $testIdent: " + t)
- if (!isPartestTerse)
- System.err.println(stackTraceString(t))
- }
- protected def crashHandler: PartialFunction[Throwable, TestState] = {
- case t: InterruptedException =>
- genTimeout()
- case t: Throwable =>
- showCrashInfo(t)
- logFile.appendAll(stackTraceString(t))
- genCrash(t)
- }
-
- def genPass() = Pass(testFile)
- def genFail(reason: String) = Fail(testFile, reason, _transcript.fail)
- def genTimeout() = Fail(testFile, "timed out", _transcript.fail)
- def genCrash(caught: Throwable) = Crash(testFile, caught, _transcript.fail)
- def genUpdated() = Updated(testFile)
-
- def speclib = PathSettings.srcSpecLib.toString // specialization lib
- def codelib = PathSettings.srcCodeLib.toString // reify lib
-
- // Prepend to a classpath, but without incurring duplicate entries
- def prependTo(classpath: String, path: String): String = {
- val segments = ClassPath split classpath
-
- if (segments startsWith path) classpath
- else ClassPath.join(path :: segments distinct: _*)
- }
-
- def prependToJavaClasspath(path: String) {
- val jcp = sys.props.getOrElse("java.class.path", "")
- prependTo(jcp, path) match {
- case `jcp` =>
- case cp => sys.props("java.class.path") = cp
- }
- }
- def prependToClasspaths(s: Settings, path: String) {
- prependToJavaClasspath(path)
- val scp = s.classpath.value
- prependTo(scp, path) match {
- case `scp` =>
- case cp => s.classpath.value = cp
- }
- }
-
- private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
-
- def javac(files: List[File]): TestState = {
- // compile using command-line javac compiler
- val args = Seq(
- javacCmd,
- "-d",
- outDir.getAbsolutePath,
- "-classpath",
- join(outDir.toString, CLASSPATH)
- ) ++ files.map(_.getAbsolutePath)
-
- pushTranscript(args mkString " ")
- val captured = StreamCapture(runCommand(args, logFile))
- if (captured.result) genPass() else {
- logFile appendAll captured.stderr
- genFail("java compilation failed")
- }
- }
-
- def testPrompt = kind match {
- case "res" => "nsc> "
- case _ => "% "
- }
-
- /** Evaluate an action body and update the test state.
- * @param failFn optionally map a result to a test state.
- */
- def nextTestAction[T](body: => T)(failFn: PartialFunction[T, TestState]): T = {
- val result = body
- setLastState( if (failFn isDefinedAt result) failFn(result) else genPass() )
- result
- }
- def nextTestActionExpectTrue(reason: String, body: => Boolean): Boolean = (
- nextTestAction(body) { case false => genFail(reason) }
- )
- def nextTestActionFailing(reason: String): Boolean = nextTestActionExpectTrue(reason, false)
-
- private def assembleTestCommand(outDir: File, logFile: File): List[String] = {
- // check whether there is a ".javaopts" file
- val argsFile = testFile changeExtension "javaopts"
- val argString = file2String(argsFile)
- if (argString != "")
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
- val testFullPath = testFile.getAbsolutePath
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // partest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
- val propertyOptions = List(
- "-Dfile.encoding=UTF-8",
- "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
- "-Dpartest.output="+outDir.getAbsolutePath,
- "-Dpartest.lib="+LATEST_LIB,
- "-Dpartest.reflect="+LATEST_REFLECT,
- "-Dpartest.cwd="+outDir.getParent,
- "-Dpartest.test-path="+testFullPath,
- "-Dpartest.testname="+fileBase,
- "-Djavacmd="+javaCmd,
- "-Djavaccmd="+javacCmd,
- "-Duser.language=en",
- "-Duser.country=US"
- ) ++ extras
-
- val classpath = if (extraClasspath != "") join(extraClasspath, CLASSPATH) else CLASSPATH
-
- javaCmd +: (
- (JAVA_OPTS.split(' ') ++ extraJavaOptions.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "").toList ++ Seq(
- "-classpath",
- join(outDir.toString, classpath)
- ) ++ propertyOptions ++ Seq(
- "scala.tools.nsc.MainGenericRunner",
- "-usejavacp",
- "Test",
- "jvm"
- )
- )
- }
-
-  /** Runs the command, redirecting standard out and
-   * standard error to the output file.
- */
- private def runCommand(args: Seq[String], outFile: File): Boolean = {
- //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
- val pl = ProcessLogger(outFile)
- val nonzero = 17 // rounding down from 17.3
- def run: Int = {
- val p = Process(args) run pl
- try p.exitValue
- catch {
- case e: InterruptedException =>
- NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
- p.destroy
- nonzero
- case t: Throwable =>
- NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
- p.destroy
- throw t
- }
- finally pl.close()
- }
- (pl buffer run) == 0
- }
-
- private def execTest(outDir: File, logFile: File): Boolean = {
- val cmd = assembleTestCommand(outDir, logFile)
-
- pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName)
- nextTestAction(runCommand(cmd, logFile)) {
- case false =>
- _transcript append EOL + logFile.fileContents
- genFail("non-zero exit code")
- }
- }
-
- override def toString = s"""Test($testIdent, lastState = $lastState)"""
-
- // result is unused
- def newTestWriters() = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- // diff = ""
-
- ((swr, wr))
- }
-
- def fail(what: Any) = {
- NestUI.verbose("scalac: compilation of "+what+" failed\n")
- false
- }
-
- /** Filter the diff for conditional blocks.
- * The check file can contain lines of the form:
- * `#partest java7`
- * where the line contains a conventional flag name.
- * In the diff output, these lines have the form:
- * `> #partest java7`
- * Blocks which don't apply are filtered out,
- * and what remains is the desired diff.
- * Line edit commands such as `0a1,6` don't count
- * as diff, so return a nonempty diff only if
- * material diff output was seen.
- * Filtering the diff output (instead of every check
- * file) means that we only post-process a test that
- * might be failing, in the normal case.
- */
- def diffilter(d: String) = {
- import scala.util.Properties.{javaVersion, isAvian}
- val prefix = "#partest"
- val margin = "> "
- val leader = margin + prefix
- // use lines in block so labeled? Default to sorry, Charlie.
- def retainOn(f: String) = {
- val (invert, token) =
- if (f startsWith "!") (true, f drop 1) else (false, f)
- val cond = token match {
- case "java7" => javaVersion startsWith "1.7"
- case "java6" => javaVersion startsWith "1.6"
- case "avian" => isAvian
- case "true" => true
- case _ => false
- }
- if (invert) !cond else cond
- }
- if (d contains prefix) {
- val sb = new StringBuilder
- var retain = true // use the current line
- var material = false // saw a line of diff
- for (line <- d.lines)
- if (line startsWith leader) {
- val rest = (line stripPrefix leader).trim
- retain = retainOn(rest)
- } else if (retain) {
- if (line startsWith margin) material = true
- sb ++= line
- sb ++= EOL
- }
- if (material) sb.toString else ""
- } else d
- }
-
- def currentDiff = (
- if (checkFile.canRead) diffilter(compareFiles(logFile, checkFile))
- else compareContents(augmentString(file2String(logFile)).lines.toList, Nil)
- )
-
- val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead)
- val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options")
- // --color=always --word-diff
-
- def gitDiff(f1: File, f2: File): Option[String] = {
- try gitRunner map { git =>
- val cmd = s"$git diff $gitDiffOptions $f1 $f2"
- val diff = Process(cmd).lines_!.drop(4).map(_ + "\n").mkString
-
- "\n" + diff
- }
- catch { case t: Exception => None }
- }
-
- /** Normalize the log output by applying test-specific filters
- * and fixing filesystem-specific paths.
- *
- * Line filters are picked up from `filter: pattern` at the top of sources.
- * The filtered line is detected with a simple "contains" test,
- * and yes, "filter" means "filter out" in this context.
- *
- * File paths are detected using the absolute path of the test root.
- * A string that looks like a file path is normalized by replacing
- * the leading segments (the root) with "$ROOT" and by replacing
- * any Windows backslashes with the one true file separator char.
- */
- def normalizeLog() {
- // Apply judiciously; there are line comments in the "stub implementations" error output.
- val slashes = """[/\\]+""".r
- def squashSlashes(s: String) = slashes replaceAllIn (s, "/")
-
- // this string identifies a path and is also snipped from log output.
- // to preserve more of the path, could use fileManager.testRootPath
- val elided = parentFile.getAbsolutePath
-
- // something to mark the elision in the log file (disabled)
- val ellipsis = "" //".../" // using * looks like a comment
-
- // no spaces in test file paths below root, because otherwise how to detect end of path string?
- val pathFinder = raw"""(?i)\Q${elided}${File.separator}\E([\${File.separator}\w]*)""".r
- def canonicalize(s: String): String = (
- pathFinder replaceAllIn (s, m => ellipsis + squashSlashes(m group 1))
- )
-
- def masters = {
- val files = List(new File(parentFile, "filters"), new File(PathSettings.srcDir.path, "filters"))
- files filter (_.exists) flatMap (_.fileLines) map (_.trim) filter (s => !(s startsWith "#"))
- }
- val filters = toolArgs("filter", split = false) ++ masters
- val elisions = ListBuffer[String]()
- //def lineFilter(s: String): Boolean = !(filters exists (s contains _))
- def lineFilter(s: String): Boolean = (
- filters map (_.r) forall { r =>
- val res = (r findFirstIn s).isEmpty
- if (!res) elisions += s
- res
- }
- )
-
- logFile.mapInPlace(canonicalize)(lineFilter)
- if (isPartestVerbose && elisions.nonEmpty) {
- import NestUI.color._
- val emdash = bold(yellow("--"))
- pushTranscript(s"filtering ${logFile.getName}$EOL${elisions mkString (emdash, EOL + emdash, EOL)}")
- }
- }
-
- def diffIsOk: Boolean = {
- // always normalize the log first
- normalizeLog()
- val diff = currentDiff
- // if diff is not empty, is update needed?
- val updating: Option[Boolean] = (
- if (diff == "") None
- else Some(fileManager.updateCheck)
- )
- pushTranscript(s"diff $logFile $checkFile")
- nextTestAction(updating) {
- case Some(true) =>
- NestUI.verbose("Updating checkfile " + checkFile)
- checkFile writeAll file2String(logFile)
- genUpdated()
- case Some(false) =>
- // Get a word-highlighted diff from git if we can find it
- val bestDiff = if (updating.isEmpty) "" else {
- if (checkFile.canRead)
- gitDiff(logFile, checkFile) getOrElse {
- s"diff $logFile $checkFile\n$diff"
- }
- else diff
- }
- _transcript append bestDiff
- genFail("output differs")
- // TestState.fail("output differs", "output differs",
- // genFail("output differs")
- // TestState.Fail("output differs", bestDiff)
- case None => genPass() // redundant default case
- } getOrElse true
- }
-
- /** 1. Creates log file and output directory.
- * 2. Runs script function, providing log file and output directory as arguments.
- * 2b. or, just run the script without context and return a new context
- */
- def runInContext(body: => Boolean): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- val succeeded = body
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- /** Grouped files in group order, and lex order within each group. */
- def groupedFiles(sources: List[File]): List[List[File]] = (
- if (sources.tail.nonEmpty) {
- val grouped = sources groupBy (_.group)
- grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName))
- }
- else List(sources)
- )
-
- /** Source files for the given test file. */
- def sources(file: File): List[File] = (
- if (file.isDirectory)
- file.listFiles.toList filter (_.isJavaOrScala)
- else
- List(file)
- )
-
- def newCompiler = new DirectCompiler(fileManager)
-
- def attemptCompile(sources: List[File]): TestState = {
- val state = newCompiler.compile(this, flagsForCompilation(sources), sources)
- if (!state.isOk)
- _transcript append ("\n" + file2String(logFile))
-
- state
- }
-
- // snort or scarf all the contributing flags files
- def flagsForCompilation(sources: List[File]): List[String] = {
- def argsplitter(s: String) = words(s) filter (_.nonEmpty)
- val perTest = argsplitter(flagsFile.fileContents)
- val perGroup = if (testFile.isDirectory) {
- sources flatMap { f => SFile(Path(f) changeExtension "flags").safeSlurp map argsplitter getOrElse Nil }
- } else Nil
- perTest ++ perGroup
- }
-
- def toolArgs(tool: String, split: Boolean = true): List[String] = {
- def argsplitter(s: String) = if (split) words(s) filter (_.nonEmpty) else List(s)
- def argsFor(f: File): List[String] = {
- import scala.util.matching.Regex
- val p = new Regex(s"(?:.*\\s)?${tool}:(?:\\s*)(.*)?", "args")
- val max = 10
- val src = Path(f).toFile.chars(codec)
- val args = try {
- src.getLines take max collectFirst {
- case s if (p findFirstIn s).nonEmpty => for (m <- p findFirstMatchIn s) yield m group "args"
- }
- } finally src.close()
- args.flatten map argsplitter getOrElse Nil
- }
- sources(testFile) flatMap argsFor
- }
-
- abstract class CompileRound {
- def fs: List[File]
- def result: TestState
- def description: String
-
- def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " "
- def isOk = result.isOk
- def mkScalacString(): String = {
- val flags = file2String(flagsFile) match {
- case "" => ""
- case s => " " + s
- }
- s"""scalac $fsString"""
- }
- override def toString = description + ( if (result.isOk) "" else "\n" + result.status )
- }
- case class OnlyJava(fs: List[File]) extends CompileRound {
- def description = s"""javac $fsString"""
- lazy val result = { pushTranscript(description) ; javac(fs) }
- }
- case class OnlyScala(fs: List[File]) extends CompileRound {
- def description = mkScalacString()
- lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
- }
- case class ScalaAndJava(fs: List[File]) extends CompileRound {
- def description = mkScalacString()
- lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
- }
-
- def compilationRounds(file: File): List[CompileRound] = (
- (groupedFiles(sources(file)) map mixedCompileGroup).flatten
- )
- def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = {
- val (scalaFiles, javaFiles) = allFiles partition (_.isScala)
- val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty
- val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles))
- val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles))
- val round3 = if (!isMixed) None else Some(OnlyScala(scalaFiles))
-
- List(round1, round2, round3).flatten
- }
-
- def runNegTest() = runInContext {
- val rounds = compilationRounds(testFile)
-
- // failing means Does Not Compile
- val failing = rounds find (x => nextTestActionExpectTrue("compilation failed", x.isOk) == false)
-
- // which means passing if it checks and didn't crash the compiler
- // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
- def checked(r: CompileRound) = r.result match {
- case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => false
- case _ => diffIsOk
- }
-
- failing map (checked) getOrElse nextTestActionFailing("expected compilation failure")
- }
-
- def runTestCommon(andAlso: => Boolean): (Boolean, LogContext) = runInContext {
- compilationRounds(testFile).forall(x => nextTestActionExpectTrue("compilation failed", x.isOk)) && andAlso
- }
-
- // Apache Ant 1.6 or newer
- def ant(args: Seq[String], output: File): Boolean = {
- val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
- val antLibDir = Directory(antDir / "lib")
- val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
- val antOptions =
- if (NestUI._verbose) List("-verbose", "-noinput")
- else List("-noinput")
- val cmd = javaCmd +: (
- JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- antLauncherPath,
- "org.apache.tools.ant.launch.Launcher"
- ) ++ antOptions ++ args
- )
-
- runCommand(cmd, output)
- }
-
- def runAntTest(): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
-
- val succeeded = try {
- val binary = "-Dbinary="+(
- if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
- else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
- else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
- else "installed"
- )
- val args = Array(binary, "-logfile", logFile.getPath, "-file", testFile.getPath)
- NestUI.verbose("ant "+args.mkString(" "))
-
- pushTranscript(s"ant ${args.mkString(" ")}")
- nextTestActionExpectTrue("ant failed", ant(args, logFile)) && diffIsOk
- }
- catch { // *catch-all*
- case e: Exception =>
- NestUI.warning("caught "+e)
- false
- }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def extraClasspath = kind match {
- case "specialized" => PathSettings.srcSpecLib.toString
- case _ => ""
- }
- def extraJavaOptions = kind match {
- case "instrumented" => "-javaagent:"+PathSettings.instrumentationAgentLib
- case _ => ""
- }
-
- def runScalacheckTest() = runTestCommon {
- NestUI verbose f"compilation of $testFile succeeded%n"
-
- // this classloader is test specific: its parent contains library classes and others
- val loader = {
- import PathSettings.scalaCheck
- val locations = List(outDir, scalaCheck.jfile) map (_.getAbsoluteFile.toURI.toURL)
- ScalaClassLoader.fromURLs(locations, getClass.getClassLoader)
- }
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
- def runInFramework(): Boolean = {
- import org.scalatools.testing._
- val f: Framework = loader.instantiate[Framework]("org.scalacheck.ScalaCheckFramework")
- val logger = new Logger {
- def ansiCodesSupported = false //params.env.isSet("colors")
- def error(msg: String) = logWriter println msg
- def warn(msg: String) = logWriter println msg
- def info(msg: String) = logWriter println msg
- def debug(msg: String) = logWriter println msg
- def trace(t: Throwable) = t printStackTrace logWriter
- }
- var bad = 0
- val handler = new EventHandler {
- // testName, description, result, error
- // Result = Success, Failure, Error, Skipped
- def handle(event: Event): Unit = event.result match {
- case Result.Success =>
- //case Result.Skipped => // an exhausted test is skipped, therefore bad
- case _ => bad += 1
- }
- }
- val loggers = Array(logger)
- val r = f.testRunner(loader, loggers).asInstanceOf[Runner2] // why?
- val claas = "Test"
- val fingerprint = f.tests collectFirst { case x: SubclassFingerprint if x.isModule => x }
- val args = toolArgs("scalacheck")
- vlog(s"Run $testFile with args $args")
- // set the context class loader for scaladoc/scalacheck tests (FIX ME)
- ScalaClassLoader(testRunParams.scalaCheckParentClassLoader).asContext {
- r.run(claas, fingerprint.get, handler, args.toArray) // synchronous?
- }
- val ok = (bad == 0)
- if (!ok) _transcript append logFile.fileContents
- ok
- }
- try nextTestActionExpectTrue("ScalaCheck test failed", runInFramework()) finally logWriter.close()
- }
-
- def runResidentTest() = {
- // simulate resident compiler loop
- val prompt = "\nnsc> "
- val (swr, wr) = newTestWriters()
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = parentFile
- val resFile = new File(dir, fileBase + ".res")
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getAbsoluteFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argList = List(
- "-d", outDir.getAbsoluteFile.getPath,
- "-Xresident",
- "-sourcepath", sourcepath)
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut, true)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
- // create compiler
- val settings = new Settings(workerError)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings)
- object compiler extends Global(command.settings, reporter)
-
- def resCompile(line: String): Boolean = {
- // NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- // NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(workerError)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett)
- // "scalac " + command.files.mkString(" ")
- pushTranscript("scalac " + command.files.mkString(" "))
- nextTestActionExpectTrue(
- "compilation failed",
- command.ok && {
- (new compiler.Run) compile command.files
- !reporter.hasErrors
- }
- )
- }
- def loop(): Boolean = {
- logWriter.print(prompt)
- resReader.readLine() match {
- case null | "" => logWriter.close() ; true
- case line => resCompile(line) && loop()
- }
- }
- // res/t687.res depends on ignoring its compilation failure
- // and just looking at the diff, so I made them all do that
- // because this is long enough.
- if (!Output.withRedirected(logWriter)(try loop() finally resReader.close()))
- setLastState(genPass())
-
- (diffIsOk, LogContext(logFile, swr, wr))
- }
-
- def run(): TestState = {
- if (kind == "neg" || (kind endsWith "-neg")) runNegTest()
- else kind match {
- case "pos" => runTestCommon(true)
- case "ant" => runAntTest()
- case "scalacheck" => runScalacheckTest()
- case "res" => runResidentTest()
- case "scalap" => runScalapTest()
- case "script" => runScriptTest()
- case _ => runTestCommon(execTest(outDir, logFile) && diffIsOk)
- }
-
- lastState
- }
-
- def runScalapTest() = runTestCommon {
- val isPackageObject = testFile.getName startsWith "package"
- val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package")
- val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader)
- val byteCode = ByteCode forClass (loader loadClass className)
- val result = decompileScala(byteCode.bytes, isPackageObject)
-
- logFile writeAll result
- diffIsOk
- }
- def runScriptTest() = {
- import scala.sys.process._
- val (swr, wr) = newTestWriters()
-
- val args = file2String(testFile changeExtension "args")
- val cmdFile = if (isWin) testFile changeExtension "bat" else testFile
- val succeeded = (((cmdFile + " " + args) #> logFile !) == 0) && diffIsOk
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def cleanup() {
- if (lastState.isOk)
- logFile.delete()
- if (!isPartestDebug)
- Directory(outDir).deleteRecursively()
- }
-}
-
-case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
-
-/** Extended by Ant- and ConsoleRunner for running a set of tests. */
-trait DirectRunner {
- def fileManager: FileManager
-
- import PartestDefaults.{ numThreads, waitTime }
-
- setUncaughtHandler
-
- def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = {
-
- NestUI.resetTestNumber(kindFiles.size)
-
- // this special class loader is for the benefit of scaladoc tests, which need a class path
- import PathSettings.{ testInterface, scalaCheck }
- val allUrls = scalaCheck.toURL :: testInterface.toURL :: fileManager.latestUrls
- val parentClassLoader = ScalaClassLoader fromURLs allUrls
- // add scalacheck.jar to a special classloader, but use our loader as parent with test-interface
- //val parentClassLoader = ScalaClassLoader fromURLs (List(scalaCheck.toURL), getClass().getClassLoader)
- val pool = Executors newFixedThreadPool numThreads
- val manager = new RunnerManager(kind, fileManager, TestRunParams(parentClassLoader))
- val futures = kindFiles map (f => pool submit callable(manager runTest f))
-
- pool.shutdown()
- Try (pool.awaitTermination(waitTime) {
- throw TimeoutException(waitTime)
- }) match {
- case Success(_) => futures map (_.get)
- case Failure(e) =>
- e match {
- case TimeoutException(d) =>
- NestUI warning "Thread pool timeout elapsed before all tests were complete!"
- case ie: InterruptedException =>
- NestUI warning "Thread pool was interrupted"
- ie.printStackTrace()
- }
- pool.shutdownNow() // little point in continuing
- // try to get as many completions as possible, in case someone cares
- val results = for (f <- futures) yield {
- try {
- Some(f.get(0, NANOSECONDS))
- } catch {
- case _: Throwable => None
- }
- }
- results.flatten
- }
- }
-}
-
-case class TimeoutException(duration: Duration) extends RuntimeException
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
- def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
- require (file != null)
- new LogContext(file, Some((swr, wr)))
- }
- def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-object Output {
- object outRedirect extends Redirecter(out)
- object errRedirect extends Redirecter(err)
-
- System.setOut(outRedirect)
- System.setErr(errRedirect)
-
- import scala.util.DynamicVariable
- private def out = java.lang.System.out
- private def err = java.lang.System.err
- private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
- class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
- def write(b: Int) = withStream(_ write b)
-
- private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
- override def write(b: Array[Byte]) = withStream(_ write b)
- override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
- override def flush = withStream(_.flush)
- override def close = withStream(_.close)
- })
-
- // this supports thread-safe nested output redirects
- def withRedirected[T](newstream: PrintStream)(func: => T): T = {
- // note down old redirect destination
- // this may be None in which case outRedirect and errRedirect print to stdout and stderr
- val saved = redirVar.value
- // set new redirecter
- // this one will redirect both out and err to newstream
- redirVar.value = Some(newstream)
-
- try func
- finally {
- newstream.flush()
- redirVar.value = saved
- }
- }
-}
-
-/** Use a Runner to run a test. */
-class RunnerManager(kind: String, fileManager: FileManager, params: TestRunParams) {
- import fileManager._
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
-
- def runTest(testFile: File): TestState = {
- val runner = new Runner(testFile, fileManager, params)
-
- // when option "--failed" is provided execute test only if log
- // is present (which means it failed before)
- if (fileManager.failed && !runner.logFile.canRead)
- runner.genPass()
- else {
- val (state, elapsed) =
- try timed(runner.run())
- catch {
- case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
- }
- NestUI.reportTest(state)
- runner.cleanup()
- state
- }
- }
-}
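[annotation, not part of the patch] The deleted Output object above threads System.out/System.err through a scala.util.DynamicVariable so redirects nest per thread. A minimal standalone sketch of that technique, with invented names (Redirect, withRedirected sink) and none of partest's wiring:

import java.io.{ ByteArrayOutputStream, OutputStream, PrintStream }
import scala.util.DynamicVariable

object Redirect {
  private val target = new DynamicVariable[Option[PrintStream]](None)
  // every byte written consults the dynamic variable, falling back to the real stream
  private def proxy(real: PrintStream) = new PrintStream(new OutputStream {
    def write(b: Int) = (target.value getOrElse real) write b
  }, true)

  System.setOut(proxy(System.out))   // install the proxy once, as Output does

  def withRedirected[T](sink: PrintStream)(body: => T): T =
    target.withValue(Some(sink))(body)   // scoped and thread-local
}

// usage: capture output written via System.out into a buffer
// (scala.Console keeps its own reference, so plain println is handled separately, as StreamCapture below shows)
val buf = new ByteArrayOutputStream()
Redirect.withRedirected(new PrintStream(buf, true)) { System.out.println("captured") }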
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
deleted file mode 100644
index 1cf3aa858f..0000000000
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-import scala.util.Properties.setProp
-import scala.collection.JavaConverters._
-
-object SBTRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): java.util.List[TestState] = {
- def failedOnlyIfRequired(files:List[File]):List[File]={
- if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
- }
- runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind).asJava
- }
-
- case class CommandLineOptions(classpath: Option[String] = None,
- tests: Map[String, Array[File]] = Map(),
- scalacOptions: Seq[String] = Seq(),
- justFailedTests: Boolean = false)
-
- def mainReflect(args: Array[String]): java.util.List[TestState] = {
- setProp("partest.debug", "true")
-
- val Argument = new scala.util.matching.Regex("-(.*)")
- def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
- case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
- case Seq("-cp", cp, rest @ _*) => parseArgs(rest, data.copy(classpath=Some(cp)))
- case Seq("-scalacoption", opt, rest @ _*) => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
- case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
- case Seq() => data
- case x => sys.error("Unknown command line options: " + x)
- }
- val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS = config.scalacOptions
- fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
-
- def findClasspath(jar: String, name: String): Option[String] = {
- val optJar = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+jar+".*\\.jar"))).headOption
- val optClassDir = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+name+File.separator+"classes"))).headOption
- optJar orElse optClassDir
- }
- // Find scala library jar file...
- fileManager.LATEST_LIB = findClasspath("scala-library", "scala-library") getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_REFLECT = findClasspath("scala-reflect", "scala-reflect") getOrElse sys.error("No scala-reflect found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
-
- // TODO - Do something useful here!!!
- fileManager.JAVAC_CMD = "javac"
- fileManager.failed = config.justFailedTests
- // TODO - Make this a flag?
- //fileManager.updateCheck = true
- // Now run and report...
- val runs = config.tests.filterNot(_._2.isEmpty)
- val result = runs.toList flatMap { case (kind, files) => reflectiveRunTestsForFiles(files, kind).asScala }
-
- result.asJava
- }
-
- def main(args: Array[String]): Unit = {
- val failures = mainReflect(args).asScala collect { case s if !s.isOk => s.longStatus }
- // Re-list all failures so we can go figure out what went wrong.
- failures foreach System.err.println
- if(!failures.isEmpty) sys.exit(1)
- }
-}
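[annotation, not part of the patch] SBTRunner's parseArgs above consumes the argument vector by recursive Seq pattern matching rather than a mutable cursor. A self-contained sketch of the same parsing style; the option names and Opts case class here are made up:

case class Opts(classpath: Option[String] = None,
                verbose: Boolean = false,
                files: List[String] = Nil)

def parse(args: Seq[String], acc: Opts = Opts()): Opts = args match {
  case Seq("-cp", cp, rest @ _*)   => parse(rest, acc.copy(classpath = Some(cp)))
  case Seq("--verbose", rest @ _*) => parse(rest, acc.copy(verbose = true))
  case Seq(file, rest @ _*)        => parse(rest, acc.copy(files = acc.files :+ file))
  case Seq()                       => acc
}

parse(List("-cp", "lib.jar", "--verbose", "A.scala"))
// Opts(Some(lib.jar), true, List(A.scala))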
diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala
deleted file mode 100644
index dc155b1787..0000000000
--- a/src/partest/scala/tools/partest/nest/StreamCapture.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-package scala.tools.partest
-package nest
-
-import java.io.{ Console => _, _ }
-
-object StreamCapture {
- case class Captured[T](stdout: String, stderr: String, result: T) {
- override def toString = s"""
- |result: $result
- |[stdout]
- |$stdout
- |[stderr]
- |$stderr""".stripMargin.trim
- }
-
- private def mkStream = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- val ostream = new PrintStream(new OutputStream { def write(b: Int): Unit = wr write b }, true) // autoFlush = true
-
- (ostream, () => { ostream.close() ; swr.toString })
- }
-
- def savingSystem[T](body: => T): T = {
- val savedOut = System.out
- val savedErr = System.err
- try body
- finally {
- System setErr savedErr
- System setOut savedOut
- }
- }
-
- def apply[T](body: => T): Captured[T] = {
- val (outstream, stdoutFn) = mkStream
- val (errstream, stderrFn) = mkStream
-
- val result = savingSystem {
- System setOut outstream
- System setErr errstream
- Console.withOut(outstream) {
- Console.withErr(errstream) {
- body
- }
- }
- }
- Captured(stdoutFn(), stderrFn(), result)
- }
-}
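[annotation, not part of the patch] A small usage-style sketch of the capture pattern deleted above: swap the JVM-global stream, run the body, restore it in a finally, and return what was written. Names are illustrative, not partest API:

import java.io.{ ByteArrayOutputStream, PrintStream }

def capturingOut[T](body: => T): (String, T) = {
  val buf   = new ByteArrayOutputStream()
  val sink  = new PrintStream(buf, true)
  val saved = System.out
  System.setOut(sink)
  val result =
    try Console.withOut(sink)(body)   // cover both java.lang.System.out and scala.Console
    finally { System.setOut(saved); sink.close() }
  (buf.toString, result)
}

val (out, n) = capturingOut { println("hi"); 42 }   // out == "hi\n", n == 42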
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
deleted file mode 100644
index 5a1afeb77f..0000000000
--- a/src/partest/scala/tools/partest/package.scala
+++ /dev/null
@@ -1,241 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools
-
-import java.util.concurrent.{ Callable, ExecutorService }
-import scala.concurrent.duration.Duration
-import scala.sys.process.javaVmArguments
-import scala.tools.partest.nest.NestUI
-import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
-
-package object partest {
- type File = java.io.File
- type SFile = scala.reflect.io.File
- type Directory = scala.reflect.io.Directory
- type Path = scala.reflect.io.Path
- type PathResolver = scala.tools.util.PathResolver
- type ClassPath[T] = scala.tools.nsc.util.ClassPath[T]
- type StringWriter = java.io.StringWriter
-
- val SFile = scala.reflect.io.File
- val Directory = scala.reflect.io.Directory
- val Path = scala.reflect.io.Path
- val PathResolver = scala.tools.util.PathResolver
- val ClassPath = scala.tools.nsc.util.ClassPath
-
- val space = "\u0020"
- val EOL = scala.compat.Platform.EOL
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: String*): String = oempty(xs: _*) mkString space
- def nljoin(xs: String*): String = oempty(xs: _*) mkString EOL
-
- implicit val codec = scala.io.Codec.UTF8
-
- def setUncaughtHandler() = {
- Thread.setDefaultUncaughtExceptionHandler(
- new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, t: Throwable) {
- val t1 = Exceptional unwrap t
- System.err.println(s"Uncaught exception on thread $thread: $t1")
- t1.printStackTrace()
- }
- }
- )
- }
-
- /** Sources have a numerical group, specified by name_7 and so on. */
- private val GroupPattern = """.*_(\d+)""".r
-
- implicit class FileOps(val f: File) {
- private def sf = SFile(f)
-
- def testIdent = {
- f.toString split """[/\\]+""" takeRight 2 mkString "/" // e.g. pos/t1234
- }
-
- def mapInPlace(mapFn: String => String)(filterFn: String => Boolean = _ => true): Unit =
- writeAll(fileLines filter filterFn map (x => mapFn(x) + EOL): _*)
-
- def appendAll(strings: String*): Unit = sf.appendAll(strings: _*)
- def writeAll(strings: String*): Unit = sf.writeAll(strings: _*)
- def absolutePathSegments: List[String] = f.getAbsolutePath split """[/\\]+""" toList
-
- def isJava = f.isFile && (sf hasExtension "java")
- def isScala = f.isFile && (sf hasExtension "scala")
- def isJavaOrScala = isJava || isScala
-
- def extension = sf.extension
- def hasExtension(ext: String) = sf hasExtension ext
- def changeExtension(ext: String): File = (sf changeExtension ext).jfile
-
- /** The group number for this source file, or -1 for no group. */
- def group: Int =
- sf.stripExtension match {
- case GroupPattern(g) if g.toInt >= 0 => g.toInt
- case _ => -1
- }
-
- def fileContents: String = try sf.slurp() catch { case _: java.io.FileNotFoundException => "" }
- def fileLines: List[String] = augmentString(fileContents).lines.toList
- }
-
- implicit class PathOps(p: Path) extends FileOps(p.jfile) { }
-
- implicit class Copier(val f: SFile) extends AnyVal {
- def copyTo(dest: Path): Unit = dest.toFile writeAll f.slurp(scala.io.Codec.UTF8)
- }
-
- implicit class LoaderOps(val loader: ClassLoader) extends AnyVal {
- import scala.util.control.Exception.catching
- /** Like ScalaClassLoader.create for the case where the result type is
- * available to the current class loader, implying that the current
- * loader is a parent of `loader`.
- */
- def instantiate[A >: Null](name: String): A = (
- catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
- (loader loadClass name).newInstance.asInstanceOf[A] orNull
- )
- }
-
- implicit class ExecutorOps(val executor: ExecutorService) {
- def awaitTermination[A](wait: Duration)(failing: => A = ()): Option[A] = (
- if (executor awaitTermination (wait.length, wait.unit)) None
- else Some(failing)
- )
- }
-
- implicit def temporaryPath2File(x: Path): File = x.jfile
- implicit def stringPathToJavaFile(path: String): File = new File(path)
-
- implicit lazy val postfixOps = scala.language.postfixOps
- implicit lazy val implicitConversions = scala.language.implicitConversions
-
- def fileSeparator = java.io.File.separator
- def pathSeparator = java.io.File.pathSeparator
-
- def pathToTestIdent(path: Path) = path.jfile.testIdent
-
- def canonicalizeSlashes(line: String) = line.replaceAll("""[/\\]+""", "/")
-
- def words(s: String): List[String] = (s.trim split "\\s+").toList
-
- def timed[T](body: => T): (T, Long) = {
- val t1 = System.currentTimeMillis
- val result = body
- val t2 = System.currentTimeMillis
-
- (result, t2 - t1)
- }
-
- def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
-
- def file2String(f: File): String = f.fileContents
-
- def basename(name: String): String = Path(name).stripExtension
-
- /** In order to allow for spaces in flags/options, this
- * parses .flags, .javaopts, javacopts etc files as follows:
- * If it is exactly one line, it is split (naively) on spaces.
- * If it contains more than one line, each line is its own
- * token, spaces and all.
- */
- def readOptionsFile(file: File): List[String] = {
- file.fileLines match {
- case x :: Nil => words(x)
- case xs => xs map (_.trim)
- }
- }
-
- def findProgram(name: String): Option[File] = {
- val pathDirs = sys.env("PATH") match {
- case null => List("/usr/local/bin", "/usr/bin", "/bin")
- case path => path split "[:;]" filterNot (_ == "") toList
- }
- pathDirs.iterator map (d => new File(d, name)) find (_.canExecute)
- }
-
- def now = (new java.util.Date).toString
- def elapsedString(millis: Long): String = {
- val elapsedSecs = millis/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
- }
-
- def vmArgString = javaVmArguments.mkString(
- "Java VM started with arguments: '",
- " ",
- "'"
- )
-
- def allPropertiesString = {
- import scala.collection.JavaConversions._
- System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
- }
-
- def showAllJVMInfo() {
- vlog(vmArgString)
- vlog(allPropertiesString)
- }
-
- import scala.language.experimental.macros
-
- /**
- * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
- * {{{
- * trace> "".isEmpty
- * res: Boolean = true
- *
- * }}}
- *
- * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
- * test code in a string.
- */
- def trace[A](a: A) = macro traceImpl[A]
-
- import scala.reflect.macros.Context
- def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
- import c.universe._
- import definitions._
-
- // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
- // because this impairs reflection refactorings
- //
- // val exprCode = c.literal(show(a.tree))
- // val exprType = c.literal(show(a.actualType))
- // reify {
- // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
- // a.splice
- // }
-
- c.Expr(Block(
- List(Apply(
- Select(Ident(PredefModule), newTermName("println")),
- List(Apply(
- Select(Apply(
- Select(Ident(ScalaPackage), newTermName("StringContext")),
- List(
- Literal(Constant("trace> ")),
- Literal(Constant("\\nres: ")),
- Literal(Constant(" = ")),
- Literal(Constant("\\n")))),
- newTermName("s")),
- List(
- Literal(Constant(show(a.tree))),
- Literal(Constant(show(a.actualType))),
- a.tree))))),
- a.tree))
- }
-
- def isPartestTerse = NestUI.isTerse
- def isPartestDebug = NestUI.isDebug
- def isPartestVerbose = NestUI.isVerbose
-
- def vlog(msg: => String) = if (isPartestVerbose) System.err.println(msg)
-}
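[annotation, not part of the patch] The readOptionsFile rule documented above (a single line is split naively on spaces; several lines yield one token per line, spaces preserved) in a file-free sketch, with an invented helper name:

def parseOptions(lines: List[String]): List[String] = lines match {
  case only :: Nil => only.trim.split("\\s+").toList   // one line: naive space split
  case many        => many.map(_.trim)                 // several lines: each line is one token
}

parseOptions(List("-deprecation -unchecked"))         // List(-deprecation, -unchecked)
parseOptions(List("-Dfile=with space", "-verbose"))   // List(-Dfile=with space, -verbose)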
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
deleted file mode 100644
index b9394b50c9..0000000000
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala Parallel Testing               **
-**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.tools.partest
-package utils
-
-/** Loads partest.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait {
- protected def propCategory = "partest"
- protected def pickJarBasedOn = classOf[nest.RunnerManager]
- override def isAvian = super.isAvian
-}
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 009d9dbfdb..5b6ff2325c 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -8,6 +8,7 @@ package reflect
package api
import scala.reflect.runtime.{universe => ru}
+import scala.annotation.compileTimeOnly
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
@@ -91,7 +92,7 @@ trait Exprs { self: Universe =>
* }}}
* because expr of type Expr[T] itself does not have a method foo.
*/
- // @compileTimeOnly("Cannot use splice outside reify")
+ @compileTimeOnly("splice must be enclosed within a reify {} block")
def splice: T
/**
@@ -108,7 +109,7 @@ trait Exprs { self: Universe =>
* object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
* }}}
*/
- // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
+ @compileTimeOnly("cannot use value except for signatures of macro implementations")
val value: T
override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
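[annotation, not part of the patch] For context on the @compileTimeOnly change: splice is only meaningful inside reify, where the compiler rewrites it away. A small legal use, assuming scala-reflect on the classpath:

import scala.reflect.runtime.universe._

val part: Expr[String]  = reify("hello")
val whole: Expr[String] = reify(part.splice + ", world")   // splice enclosed in reify: fine
// part.splice outside reify is now rejected at compile time by @compileTimeOnly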
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index d702555ba6..ec128e31a3 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -101,7 +101,7 @@ package api
* via `ModuleMirror.instance`). Entry point: `val mm = im.reflectMethod(<method symbol>)`.
* Example:
* {{{
- * scala> val methodX = typeOf[C].declaration(newTermName("x")).asMethod
+ * scala> val methodX = typeOf[C].declaration(TermName("x")).asMethod
* methodX: reflect.runtime.universe.MethodSymbol = method x
*
* scala> val mm = im.reflectMethod(methodX)
@@ -126,7 +126,7 @@ package api
* scala> val im = m.reflect(new C)
* im: reflect.runtime.universe.InstanceMirror = instance mirror for C@5f0c8ac1
*
- * scala> val fieldX = typeOf[C].declaration(newTermName("x")).asTerm.accessed.asTerm
+ * scala> val fieldX = typeOf[C].declaration(TermName("x")).asTerm.accessed.asTerm
* fieldX: reflect.runtime.universe.TermSymbol = value x
* scala> val fmX = im.reflectField(fieldX)
* fmX: reflect.runtime.universe.FieldMirror = field mirror for C.x (bound to C@5f0c8ac1)
@@ -136,7 +136,7 @@ package api
*
* scala> fmX.set(3) // NOTE: can set an underlying value of an immutable field!
*
- * scala> val fieldY = typeOf[C].declaration(newTermName("y")).asTerm.accessed.asTerm
+ * scala> val fieldY = typeOf[C].declaration(TermName("y")).asTerm.accessed.asTerm
* fieldY: reflect.runtime.universe.TermSymbol = variable y
*
* scala> val fmY = im.reflectField(fieldY)
@@ -255,7 +255,7 @@ trait Mirrors { self: Universe =>
* Note also that only accessor MethodMirrors, but not FieldMirrors will accurately reflect overriding behavior.
*
* To get a field symbol by the name of the field you would like to reflect,
- * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the field>)).asTerm.accessed`.
+ * use `<this mirror>.symbol.typeSignature.member(TermName(<name of the field>)).asTerm.accessed`.
* For further information about member lookup refer to `Symbol.typeSignature`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
@@ -275,7 +275,7 @@ trait Mirrors { self: Universe =>
* that can be used to invoke the method provided.
*
* To get a method symbol by the name of the method you would like to reflect,
- * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the method>)).asMethod`.
+ * use `<this mirror>.symbol.typeSignature.member(TermName(<name of the method>)).asMethod`.
* For further information about member lookup refer to `Symbol.typeSignature`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
@@ -299,7 +299,7 @@ trait Mirrors { self: Universe =>
* that can be used to get the instance of the object or inspect its companion class.
*
* To get a module symbol by the name of the object you would like to reflect,
- * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the object>)).asModule`.
+ * use `<this mirror>.symbol.typeSignature.member(TermName(<name of the object>)).asModule`.
* For further information about member lookup refer to `Symbol.typeSignature`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
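[annotation, not part of the patch] The doc examples above now use TermName(...) instead of newTermName(...). End to end, that lookup-and-invoke flow looks roughly like this, assuming the 2.11 reflection API:

import scala.reflect.runtime.{ universe => ru }

class C { def x = 2 }

val m       = ru.runtimeMirror(getClass.getClassLoader)
val im      = m.reflect(new C)
val methodX = ru.typeOf[C].declaration(ru.TermName("x")).asMethod
val mm      = im.reflectMethod(methodX)
mm()   // == 2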
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index f74e0ce014..87d7f9fd8e 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -33,13 +33,13 @@ trait Names {
* Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
* @group Names
*/
- implicit def stringToTermName(s: String): TermName = newTermName(s)
+ implicit def stringToTermName(s: String): TermName = TermName(s)
/** An implicit conversion from String to TypeName.
* Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
* @group Names
*/
- implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
+ implicit def stringToTypeName(s: String): TypeName = TypeName(s)
/** The abstract type of names.
* @group Names
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index f7a6a68946..443f34ccae 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -572,8 +572,8 @@ trait Trees { self: Universe =>
* @group Extractors
*/
abstract class DefDefExtractor {
- def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
- def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+ def apply(mods: Modifiers, name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+ def unapply(defDef: DefDef): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
}
/** The API that all def defs support
@@ -584,7 +584,7 @@ trait Trees { self: Universe =>
def mods: Modifiers
/** @inheritdoc */
- def name: Name
+ def name: TermName
/** The type parameters of the method. */
def tparams: List[TypeDef]
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index e3498a95a6..05aaa462c4 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -130,9 +130,9 @@ trait BaseTypeSeqs {
lazy val maxDepth = maxDepthOfElems
- protected def maxDepthOfElems: Int = {
- var d = 0
- for (i <- 1 until length) d = max(d, typeDepth(elems(i)))
+ protected def maxDepthOfElems: Depth = {
+ var d = Depth.Zero
+ 1 until length foreach (i => d = d max typeDepth(elems(i)))
d
}
@@ -234,7 +234,7 @@ trait BaseTypeSeqs {
override def map(g: Type => Type) = lateMap(g)
override def lateMap(g: Type => Type) = orig.lateMap(x => g(f(x)))
override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
- override protected def maxDepthOfElems: Int = elems.map(x => typeDepth(f(x))).max
+ override protected def maxDepthOfElems: Depth = elems.map(x => typeDepth(f(x))).max
override def toString = elems.mkString("MBTS(", ",", ")")
}
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index a7ce044780..e0a6757d34 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -335,13 +335,8 @@ object ClassfileConstants {
abstract class FlagTranslation {
import Flags._
- private var isAnnotation = false
- private var isClass = false
- private def initFields(flags: Int) = {
- isAnnotation = (flags & JAVA_ACC_ANNOTATION) != 0
- isClass = false
- }
- private def translateFlag(jflag: Int): Long = (jflag: @switch) match {
+ private def isAnnotation(flags: Int): Boolean = (flags & JAVA_ACC_ANNOTATION) != 0
+ private def translateFlag(jflag: Int, isAnnotation: Boolean, isClass: Boolean): Long = (jflag: @switch) match {
case JAVA_ACC_PRIVATE => PRIVATE
case JAVA_ACC_PROTECTED => PROTECTED
case JAVA_ACC_FINAL => FINAL
@@ -351,31 +346,28 @@ object ClassfileConstants {
case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
case _ => 0L
}
- private def translateFlags(jflags: Int, baseFlags: Long): Long = {
+ private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = {
+ def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass)
var res: Long = JAVA | baseFlags
/* fast, elegant, maintainable, pick any two... */
- res |= translateFlag(jflags & JAVA_ACC_PRIVATE)
- res |= translateFlag(jflags & JAVA_ACC_PROTECTED)
- res |= translateFlag(jflags & JAVA_ACC_FINAL)
- res |= translateFlag(jflags & JAVA_ACC_SYNTHETIC)
- res |= translateFlag(jflags & JAVA_ACC_STATIC)
- res |= translateFlag(jflags & JAVA_ACC_ABSTRACT)
- res |= translateFlag(jflags & JAVA_ACC_INTERFACE)
+ res |= translateFlag0(jflags & JAVA_ACC_PRIVATE)
+ res |= translateFlag0(jflags & JAVA_ACC_PROTECTED)
+ res |= translateFlag0(jflags & JAVA_ACC_FINAL)
+ res |= translateFlag0(jflags & JAVA_ACC_SYNTHETIC)
+ res |= translateFlag0(jflags & JAVA_ACC_STATIC)
+ res |= translateFlag0(jflags & JAVA_ACC_ABSTRACT)
+ res |= translateFlag0(jflags & JAVA_ACC_INTERFACE)
res
}
def classFlags(jflags: Int): Long = {
- initFields(jflags)
- isClass = true
- translateFlags(jflags, 0)
+ translateFlags(jflags, 0, isAnnotation(jflags), isClass = true)
}
def fieldFlags(jflags: Int): Long = {
- initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0, isAnnotation(jflags), isClass = false)
}
def methodFlags(jflags: Int): Long = {
- initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isAnnotation(jflags), isClass = false)
}
}
object FlagTranslation extends FlagTranslation { }
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6b7aa2dddf..90a1ab39d5 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -227,10 +227,7 @@ trait Definitions extends api.StandardDefinitions {
scope
}
/** Is this symbol a member of Object or Any? */
- def isUniversalMember(sym: Symbol) = (
- (sym ne NoSymbol)
- && (ObjectClass isSubClass sym.owner)
- )
+ def isUniversalMember(sym: Symbol) = ObjectClass isSubClass sym.owner
/** Is this symbol unimportable? Unimportable symbols include:
* - constructors, because <init> is not a real name
@@ -253,6 +250,13 @@ trait Definitions extends api.StandardDefinitions {
|| tp =:= AnyRefTpe
)
+ def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info)
+ def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match {
+ case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe)
+ case MethodType(_, MethodType(p :: _, _)) if !p.isImplicit => true
+ case _ => false
+ }
+
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
if (parents.head.typeSymbol == AnyClass) tpe
@@ -384,6 +388,7 @@ trait Definitions extends api.StandardDefinitions {
def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
+ def traversableDropMethod = getMemberMethod(ScalaRunTimeModule, nme.drop)
// classes with special meanings
lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
@@ -423,6 +428,15 @@ trait Definitions extends api.StandardDefinitions {
def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
def isVarArgTypes(formals: Seq[Type]) = formals.nonEmpty && isRepeatedParamType(formals.last)
+ def firstParamType(tpe: Type): Type = tpe.paramTypes match {
+ case p :: _ => p
+ case _ => NoType
+ }
+ def isImplicitParamss(paramss: List[List[Symbol]]) = paramss match {
+ case (p :: _) :: _ => p.isImplicit
+ case _ => false
+ }
+
def hasRepeatedParam(tp: Type): Boolean = tp match {
case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe)
case PolyType(_, restpe) => hasRepeatedParam(restpe)
@@ -430,7 +444,12 @@ trait Definitions extends api.StandardDefinitions {
}
// wrapping and unwrapping
- def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def dropRepeated(tp: Type): Type = (
+ if (isJavaRepeatedParamType(tp)) elementExtract(JavaRepeatedParamClass, tp) orElse tp
+ else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp
+ else tp
+ )
def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse tp
def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp
def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp
@@ -659,21 +678,23 @@ trait Definitions extends api.StandardDefinitions {
def isExactProductType(tp: Type): Boolean = isProductNSymbol(tp.typeSymbol)
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
- def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
+ @deprecated("No longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
case Some(x) => tpe.baseType(x).typeArgs
case _ => Nil
}
- def dropNullaryMethod(tp: Type) = tp match {
- case NullaryMethodType(restpe) => restpe
- case _ => tp
- }
-
- def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ @deprecated("No longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
+ def getterMemberTypes(tpe: Type, getters: List[Symbol]): List[Type] =
+ getters map (m => dropNullaryMethod(tpe memberType m))
+
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
def abstractFunctionForFunctionType(tp: Type) = {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
@@ -696,6 +717,71 @@ trait Definitions extends api.StandardDefinitions {
def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
def seqType(arg: Type) = appliedType(SeqClass, arg)
+ // FYI the long clunky name is because it's really hard to put "get" into the
+ // name of a method without it sounding like the method "get"s something, whereas
+ // this method is about a type member which just happens to be named get.
+ def typeOfMemberNamedGet(tp: Type) = resultOfMatchingMethod(tp, nme.get)()
+ def typeOfMemberNamedHead(tp: Type) = resultOfMatchingMethod(tp, nme.head)()
+ def typeOfMemberNamedApply(tp: Type) = resultOfMatchingMethod(tp, nme.apply)(IntTpe)
+ def typeOfMemberNamedDrop(tp: Type) = resultOfMatchingMethod(tp, nme.drop)(IntTpe)
+ def typeOfMemberNamedGetOrSelf(tp: Type) = typeOfMemberNamedGet(tp) orElse tp
+ def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp))
+ def typesOfCaseAccessors(tp: Type) = getterMemberTypes(tp, tp.typeSymbol.caseFieldAccessors)
+
+ /** If this is a case class, the case field accessors (which may be an empty list).
+ * Otherwise, if there are any product selectors, that list.
+ * Otherwise, a list containing only the type itself.
+ */
+ def typesOfSelectorsOrSelf(tp: Type): List[Type] = (
+ if (tp.typeSymbol.isCase)
+ typesOfCaseAccessors(tp)
+ else typesOfSelectors(tp) match {
+ case Nil => tp :: Nil
+ case tps => tps
+ }
+ )
+
+ /** If the given type has one or more product selectors, the type of the last one.
+ * Otherwise, the type itself.
+ */
+ def typeOfLastSelectorOrSelf(tp: Type) = typesOfSelectorsOrSelf(tp).last
+
+ def elementTypeOfLastSelectorOrSelf(tp: Type) = {
+ val last = typeOfLastSelectorOrSelf(tp)
+ ( typeOfMemberNamedHead(last)
+ orElse typeOfMemberNamedApply(last)
+ orElse elementType(ArrayClass, last)
+ )
+ }
+
+ /** Returns the method symbols for members _1, _2, ..., _N
+ * which exist in the given type.
+ */
+ def productSelectors(tpe: Type): List[Symbol] = {
+ def loop(n: Int): List[Symbol] = tpe member TermName("_" + n) match {
+ case NoSymbol => Nil
+ case m if m.paramss.nonEmpty => Nil
+ case m => m :: loop(n + 1)
+ }
+ loop(1)
+ }
+
+ /** If `tp` has a term member `name`, the first parameter list of which
+ * matches `paramTypes`, and which either has no further parameter
+ * lists or only an implicit one, then the result type of the matching
+ * method. Otherwise, NoType.
+ */
+ def resultOfMatchingMethod(tp: Type, name: TermName)(paramTypes: Type*): Type = {
+ def matchesParams(member: Symbol) = member.paramss match {
+ case Nil => paramTypes.isEmpty
+ case ps :: rest => (rest.isEmpty || isImplicitParamss(rest)) && (ps corresponds paramTypes)(_.tpe =:= _)
+ }
+ tp member name filter matchesParams match {
+ case NoSymbol => NoType
+ case member => (tp memberType member).finalResultType
+ }
+ }
+
def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg)
/** Can we tell by inspecting the symbol that it will never
@@ -904,7 +990,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
- lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.annotation.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
@@ -918,6 +1004,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ThrowsClass = requiredClass[scala.throws[_]]
lazy val TransientAttr = requiredClass[scala.transient]
lazy val UncheckedClass = requiredClass[scala.unchecked]
+ lazy val UncheckedBoundsClass = getClassIfDefined("scala.reflect.internal.annotations.uncheckedBounds")
lazy val UnspecializedClass = requiredClass[scala.annotation.unspecialized]
lazy val VolatileAttr = requiredClass[scala.volatile]
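[annotation, not part of the patch] The new productSelectors helper walks _1, _2, ... until a member is missing. A user-level analogue using only the public reflection API (simplified: it omits the paramss check the compiler version performs):

import scala.reflect.runtime.universe._

def productSelectors(tp: Type): List[Symbol] = {
  def loop(n: Int): List[Symbol] = tp.member(TermName("_" + n)) match {
    case NoSymbol => Nil
    case m        => m :: loop(n + 1)
  }
  loop(1)
}

productSelectors(typeOf[(Int, String)]).map(_.name)   // List(_1, _2)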
diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala
new file mode 100644
index 0000000000..357abf765f
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Depth.scala
@@ -0,0 +1,28 @@
+package scala
+package reflect
+package internal
+
+import Depth._
+
+final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] {
+ def max(that: Depth): Depth = if (this < that) that else this
+ def decr(n: Int): Depth = if (isAnyDepth) this else Depth(depth - n)
+ def incr(n: Int): Depth = if (isAnyDepth) this else Depth(depth + n)
+ def decr: Depth = decr(1)
+ def incr: Depth = incr(1)
+
+ def isNegative = depth < 0
+ def isZero = depth == 0
+ def isAnyDepth = this == AnyDepth
+
+ def compare(that: Depth): Int = if (depth < that.depth) -1 else if (this == that) 0 else 1
+ override def toString = s"Depth($depth)"
+}
+
+object Depth {
+ // A don't care value for the depth parameter in lubs/glbs and related operations.
+ final val AnyDepth = new Depth(Int.MinValue)
+ final val Zero = new Depth(0)
+
+ @inline final def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth)
+}
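[annotation, not part of the patch] Depth replaces the old raw Int with a value class plus a sentinel, so the "don't care" value can no longer drift through ordinary arithmetic. A standalone analogue of the same pattern, with invented names (Budget, Unlimited):

final class Budget private (val n: Int) extends AnyVal {
  def isUnlimited       = n == Int.MinValue
  def decr(k: Int)      = if (isUnlimited) this else Budget(n - k)
  def max(that: Budget) = if (n < that.n) that else this
  override def toString = if (isUnlimited) "Budget(unlimited)" else s"Budget($n)"
}
object Budget {
  val Unlimited = new Budget(Int.MinValue)
  val Zero      = new Budget(0)
  def apply(n: Int): Budget = if (n < 0) Unlimited else new Budget(n)
}

Budget(3).decr(1) max Budget.Zero   // Budget(2); the sentinel never leaks into arithmetic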
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index b0828e9c54..9ddf156128 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -411,6 +411,11 @@ trait Importers extends api.Importers { to: SymbolTable =>
if (my != null) {
addFixup(recreatedTreeCompleter(their, my))
tryFixup()
+ // we have to be careful with position import as some shared trees
+ // like EmptyTree, emptyValDef don't support position assignment
+ if (their.pos != NoPosition) {
+ my.setPos(importPosition(their.pos))
+ }
}
my
}
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 46a95c7d26..d1c215713e 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -233,7 +233,7 @@ trait Kinds {
/**
* The data structure describing the kind of a given type.
- *
+ *
* Proper types are represented using ProperTypeKind.
*
* Type constructors are represented using TypeConKind.
@@ -251,7 +251,7 @@ trait Kinds {
* it uses prescribed letters for each level: A, F, X, Y, Z.
*/
def scalaNotation: String
-
+
/** Kind notation used in http://adriaanm.github.com/files/higher.pdf.
* Proper types are expressed as *.
* Type constructors are expressed * -> *(lo, hi) -(+)-> *.
@@ -261,13 +261,13 @@ trait Kinds {
/** Contains bounds either as part of itself or its arguments.
*/
def hasBounds: Boolean = !bounds.isEmptyBounds
-
+
private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState
}
object Kind {
private[internal] sealed trait ScalaNotation
private[internal] sealed case class Head(order: Int, n: Option[Int], alias: Option[String]) extends ScalaNotation {
- override def toString: String = {
+ override def toString: String = {
alias getOrElse {
typeAlias(order) + n.map(_.toString).getOrElse("")
}
@@ -285,7 +285,7 @@ trait Kinds {
}
private[internal] sealed case class Text(value: String) extends ScalaNotation {
override def toString: String = value
- }
+ }
private[internal] case class StringState(tokens: Seq[ScalaNotation]) {
override def toString: String = tokens.mkString
def append(value: String): StringState = StringState(tokens :+ Text(value))
@@ -310,7 +310,7 @@ trait Kinds {
ts map {
case Head(`o`, _, a) => Head(o, None, a)
case t => t
- }
+ }
else ts
})
}
@@ -332,7 +332,7 @@ trait Kinds {
val order = 0
private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = {
s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString))
- }
+ }
def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString)
def starNotation: String = "*" + bounds.starNotation(_.toString)
}
@@ -344,7 +344,7 @@ trait Kinds {
class TypeConKind(val bounds: TypeBounds, val args: Seq[TypeConKind.Argument]) extends Kind {
import Kind.StringState
- val order = (args map {_.kind.order} max) + 1
+ val order = (args map (_.kind.order)).max + 1
def description: String =
if (order == 1) "This is a type constructor: a 1st-order-kinded type."
else "This is a type constructor that takes type constructor(s): a higher-kinded type."
@@ -380,7 +380,7 @@ trait Kinds {
object TypeConKind {
def apply(args: Seq[TypeConKind.Argument]): TypeConKind = this(TypeBounds.empty, args)
def apply(bounds: TypeBounds, args: Seq[TypeConKind.Argument]): TypeConKind = new TypeConKind(bounds, args)
- def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some(tck.bounds, tck.args)
+ def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some((tck.bounds, tck.args))
case class Argument(variance: Variance, kind: Kind)(val sym: Symbol) {}
}
@@ -389,7 +389,7 @@ trait Kinds {
*/
object inferKind {
import TypeConKind.Argument
-
+
abstract class InferKind {
protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind
protected def infer(sym: Symbol, topLevel: Boolean): Kind = infer(sym.tpeHK, sym.owner, topLevel)
@@ -398,7 +398,7 @@ trait Kinds {
}
def apply(pre: Type): InferKind = new InferKind {
- protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
+ protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
val bounds = if (topLevel) TypeBounds.empty
else tpe.asSeenFrom(pre, owner).bounds
if(!tpe.isHigherKinded) ProperTypeKind(bounds)
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 1603029340..206dff44e2 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -574,6 +574,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
case refTree: RefTree =>
if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
else print(tree.symbol)
+ case defTree: DefTree =>
+ print(tree.symbol)
case _ =>
print(tree.symbol.name)
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 64713b8d41..7a2287664a 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -626,6 +626,7 @@ trait StdNames {
val clone_ : NameType = "clone"
val collection: NameType = "collection"
val conforms: NameType = "conforms"
+ val compare: NameType = "compare"
val copy: NameType = "copy"
val create: NameType = "create"
val currentMirror: NameType = "currentMirror"
@@ -657,6 +658,7 @@ trait StdNames {
val get: NameType = "get"
val hashCode_ : NameType = "hashCode"
val hash_ : NameType = "hash"
+ val head : NameType = "head"
val immutable: NameType = "immutable"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
@@ -725,6 +727,7 @@ trait StdNames {
val toArray: NameType = "toArray"
val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
+ val toSeq: NameType = "toSeq"
val TopScope: NameType = "TopScope"
val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 2ae9f81a09..a6f9dfc164 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -52,8 +52,15 @@ abstract class SymbolTable extends macros.Universe
def globalError(msg: String): Unit = abort(msg)
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
+ protected def elapsedMessage(msg: String, start: Long) =
+ msg + " in " + (System.currentTimeMillis() - start) + "ms"
+
+ def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]")
+ def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
+
def shouldLogAtThisPhase = false
def isPastTyper = false
+ protected def isDeveloper: Boolean = settings.debug
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
@@ -63,8 +70,12 @@ abstract class SymbolTable extends macros.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug) log(msg)
- def devWarning(msg: => String): Unit = if (settings.debug) Console.err.println(msg)
+ def devWarning(msg: => String): Unit = if (isDeveloper) Console.err.println(msg)
def throwableAsString(t: Throwable): String = "" + t
+ def throwableAsString(t: Throwable, maxFrames: Int): String = t.getStackTrace take maxFrames mkString "\n at "
+
+ @inline final def devWarningDumpStack(msg: => String, maxFrames: Int): Unit =
+ devWarning(msg + "\n" + throwableAsString(new Throwable, maxFrames))
/** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t))
@@ -105,6 +116,13 @@ abstract class SymbolTable extends macros.Universe
result
}
+ @inline
+ final private[scala] def debuglogResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
+ if (cond(result))
+ debuglog(msg + ": " + result)
+
+ result
+ }
// For too long have we suffered in order to sort NAMES.
// I'm pretty sure there's a reasonable default for that.
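[annotation, not part of the patch] The informProgress/informTime helpers hoisted into SymbolTable above amount to a timestamp-diff log line gated on -verbose. A standalone rendition (the real ones go through inform and settings.verbose):

def elapsedMessage(msg: String, start: Long) =
  msg + " in " + (System.currentTimeMillis() - start) + "ms"
def informProgress(msg: String) = println("[" + msg + "]")
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))

val start = System.currentTimeMillis()
Thread.sleep(10)                                   // stand-in for real work
informTime("loaded package object scala", start)   // e.g. [loaded package object scala in 10ms]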
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index e41038cafc..d3a0ffb744 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -147,13 +147,16 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def name: NameType
def name_=(n: Name): Unit = {
if (shouldLogAtThisPhase) {
- val msg = s"Renaming $fullLocationString to $n"
+ def msg = s"In $owner, renaming $name -> $n"
if (isSpecialized) debuglog(msg) else log(msg)
}
}
def asNameType(n: Name): NameType
- private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ // The null check is for NoSymbol, which can't pass a reference to itself to the constructor and also
+ // can't call owner_= due to an assertion it contains.
+ private[this] var _rawowner = if (initOwner eq null) this else initOwner
private[this] var _rawflags: Long = _
def rawowner = _rawowner
@@ -610,7 +613,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
- final def isOverridableMember = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
+ final def isOverridableMember = !(isClass || isEffectivelyFinal) && safeOwner.isClass
/** Does this symbol denote a wrapper created by the repl? */
final def isInterpreterWrapper = (
@@ -999,13 +1002,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
+ /** In general when seeking the owner of a symbol, one should call `owner`.
+ * The other possibilities include:
+ * - call `safeOwner` if it is expected that the target may be NoSymbol
+ * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol
+ *
+ * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev.
+ * `assertOwner` aborts compilation immediately if called on NoSymbol.
+ */
def owner: Symbol = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
rawowner
}
-
- // Like owner, but NoSymbol.owner == NoSymbol instead of throwing an exception.
- final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+ final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+ final def assertOwner: Symbol = if (this eq NoSymbol) abort("no-symbol does not have an owner") else owner
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
@@ -1781,10 +1791,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
result
}
- @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
-
- final def toOption: Option[Symbol] = if (exists) Some(this) else None
-
// ------ cloneing -------------------------------------------------------------------
/** A clone of this symbol. */
@@ -2179,8 +2185,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* the recursive knot.
*/
private def canMatchInheritedSymbols = (
- (this ne NoSymbol)
- && owner.isClass
+ owner.isClass
&& !this.isClass
&& !this.isConstructor
)
@@ -2352,6 +2357,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
@inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
+ @inline final def fold[T](none: => T)(f: Symbol => T): T = if (this ne NoSymbol) f(this) else none
+ @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
// ------ toString -------------------------------------------------------------------
@@ -2514,7 +2524,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def infosString = infos.toString
- def debugLocationString = fullLocationString + " (flags: " + debugFlagString + ")"
+ def debugLocationString = {
+ val pre = flagString match {
+ case "" => ""
+ case s if s contains ' ' => "(" + s + ") "
+ case s => s + " "
+ }
+ pre + fullLocationString
+ }
private def defStringCompose(infoString: String) = compose(
flagString,
@@ -3340,7 +3357,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
override def associatedFile = NoAbstractFile
- override def ownerChain: List[Symbol] = List()
+ override def owner: Symbol = {
+ devWarningDumpStack("NoSymbol.owner", 15)
+ this
+ }
+ override def ownerChain: List[Symbol] = Nil
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
override def reset(completer: Type): this.type = this
@@ -3350,9 +3371,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
-
- override def owner: Symbol =
- abort("no-symbol does not have an owner")
}
protected def makeNoSymbol: NoSymbol = new NoSymbol
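[annotation, not part of the patch] The fold/map/toOption combinators added above all encode the same convention: a failed lookup yields NoSymbol, never null. The convention is visible directly through the public API:

import scala.reflect.runtime.universe._

val found   = typeOf[List[Int]].member(TermName("head"))
val missing = typeOf[List[Int]].member(TermName("no_such_member"))

found == NoSymbol     // false
missing == NoSymbol   // true
// toOption by hand, as the internal helper does:
val asOption = if (missing == NoSymbol) None else Some(missing)   // None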
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 5c92512193..34fe0afb1a 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -488,7 +488,7 @@ abstract class TreeInfo {
}
object WildcardStarArg {
- def unapply(tree: Typed): Option[Tree] = tree match {
+ def unapply(tree: Tree): Option[Tree] = tree match {
case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => Some(expr)
case _ => None
}
@@ -628,11 +628,12 @@ abstract class TreeInfo {
* case Extractor(a @ (b, c)) => 2
* }}}
*/
- def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ def effectivePatternArity(args: List[Tree]): Int = flattenedPatternArgs(args).length
+
+ def flattenedPatternArgs(args: List[Tree]): List[Tree] = args map unbind match {
case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
case xs => xs
- }).length
-
+ }
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
// tailcalls, cps,... use this flag combination to detect translated matches
@@ -772,6 +773,17 @@ abstract class TreeInfo {
unapply(dissectApplied(tree))
}
+ /** Locates the synthetic Apply node corresponding to an extractor's call to
+ * unapply (unwrapping nested Applies) and returns the fun part of that Apply.
+ */
+ object Unapplied {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Apply(fun, Ident(nme.SELECTOR_DUMMY) :: Nil) => Some(fun)
+ case Apply(fun, _) => unapply(fun)
+ case _ => None
+ }
+ }
+
/** Is this file the body of a compilation unit which should not
* have Predef imported? This is the case iff the first import in the
* unit explicitly refers to Predef.
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index ceb3b383d7..fab1f45358 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -185,8 +185,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
def replace(from: Tree, to: Tree): Tree =
new TreeReplacer(from, to, positionAware = false) transform this
- def hasSymbolWhich(f: Symbol => Boolean) =
- (symbol ne null) && (symbol ne NoSymbol) && f(symbol)
+ def hasExistingSymbol = (symbol ne null) && (symbol ne NoSymbol)
+ def hasSymbolWhich(f: Symbol => Boolean) = hasExistingSymbol && f(symbol)
def isErroneous = (tpe ne null) && tpe.isErroneous
def isTyped = (tpe ne null) && !tpe.isErroneous
@@ -309,10 +309,18 @@ trait Trees extends api.Trees { self: SymbolTable =>
def rhs: Tree
}
+ object ValOrDefDef {
+ def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
+ case ValDef(mods, name, tpt, rhs) => Some((mods, name, tpt, rhs))
+ case DefDef(mods, name, _, _, tpt, rhs) => Some((mods, name, tpt, rhs))
+ case _ => None
+ }
+ }
+
case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi
object ValDef extends ValDefExtractor
- case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
+ case class DefDef(mods: Modifiers, name: TermName, tparams: List[TypeDef],
vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef with DefDefApi
object DefDef extends DefDefExtractor
@@ -1017,14 +1025,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
trait CannotHaveAttrs extends Tree {
override def canHaveAttrs = false
- private def unsupported(what: String, args: Any*) =
- throw new UnsupportedOperationException(s"$what($args) inapplicable for "+self.toString)
+ private def requireLegal(value: Any, allowed: Any, what: String) =
+ require(value == allowed, s"can't set $what for $self to value other than $allowed")
super.setPos(NoPosition)
- override def setPos(pos: Position) = unsupported("setPos", pos)
+ override def setPos(pos: Position) = { requireLegal(pos, NoPosition, "pos"); this }
+ override def pos_=(pos: Position) = setPos(pos)
super.setType(NoType)
- override def tpe_=(t: Type) = if (t != NoType) unsupported("tpe_=", t)
+ override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this }
+ override def tpe_=(t: Type) = setType(t)
}
case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
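[annotation, not part of the patch] ValOrDefDef above lets callers treat a ValDef and a DefDef uniformly. A user-level analogue with the public Trees API, assuming the 2.11 signatures introduced in this diff (DefDef.name is a TermName):

import scala.reflect.runtime.universe._

def nameAndRhs(t: Tree): Option[(TermName, Tree)] = t match {
  case ValDef(_, name, _, rhs)       => Some((name, rhs))
  case DefDef(_, name, _, _, _, rhs) => Some((name, rhs))
  case _                             => None
}

nameAndRhs(ValDef(NoMods, TermName("x"), TypeTree(), Literal(Constant(1))))
// Some((x, 1))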
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 9c1342e68e..fd64d98ca2 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -36,7 +36,7 @@ trait TypeDebugging {
case ObjectClass => true
case _ => sym.hasPackageFlag
}
- def skipType(tpe: Type): Boolean = skipSym(tpe.typeSymbolDirect)
+ def skipType(tpe: Type): Boolean = (tpe eq null) || skipSym(tpe.typeSymbolDirect)
def skip(t: Tree): Boolean = t match {
case EmptyTree => true
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 11527d88ca..9c66dc476f 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -17,6 +17,7 @@ import scala.annotation.tailrec
import util.Statistics
import util.ThreeValues._
import Variance._
+import Depth._
/* A standard type pattern match:
case ErrorType =>
@@ -80,7 +81,8 @@ trait Types
with tpe.CommonOwners
with tpe.GlbLubs
with tpe.TypeMaps
- with tpe.TypeConstraints { self: SymbolTable =>
+ with tpe.TypeConstraints
+ with util.Collections { self: SymbolTable =>
import definitions._
import TypesStats._
@@ -92,12 +94,6 @@ trait Types
private final val LogPendingBaseTypesThreshold = DefaultLogThreshhold
private final val LogVolatileThreshold = DefaultLogThreshhold
- /** A don't care value for the depth parameter in lubs/glbs and related operations. */
- protected[internal] final val AnyDepth = -3
-
- /** Decrement depth unless it is a don't care. */
- protected[internal] final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
-
private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
private final val breakCycles = settings.breakCycles.value
/** In case anyone wants to turn off type parameter bounds being used
@@ -703,7 +699,7 @@ trait Types
case OverloadedType(_, alts) =>
OverloadedType(this, alts)
case tp =>
- tp.asSeenFrom(this, sym.owner)
+ if (sym eq NoSymbol) NoType else tp.asSeenFrom(this, sym.owner)
}
/** Substitute types `to` for occurrences of references to
@@ -783,8 +779,8 @@ trait Types
if (Statistics.canEnable) stat_<:<(that)
else {
(this eq that) ||
- (if (explainSwitch) explain("<:", isSubType, this, that)
- else isSubType(this, that, AnyDepth))
+ (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that)
+ else isSubType(this, that))
}
}
@@ -804,7 +800,7 @@ trait Types
case TypeRef(_, sym, args) =>
val that1 = existentialAbstraction(args map (_.typeSymbol), that)
(that ne that1) && (this <:< that1) && {
- log(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1")
+ debuglog(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1")
true
}
case _ =>
@@ -816,8 +812,8 @@ trait Types
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
val result =
(this eq that) ||
- (if (explainSwitch) explain("<:", isSubType, this, that)
- else isSubType(this, that, AnyDepth))
+ (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that)
+ else isSubType(this, that))
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
result
}
@@ -882,7 +878,7 @@ trait Types
/** The maximum depth (@see typeDepth)
* of each type in the BaseTypeSeq of this type except the first.
*/
- def baseTypeSeqDepth: Int = 1
+ def baseTypeSeqDepth: Depth = Depth(1)
/** The list of all baseclasses of this type (including its own typeSymbol)
* in linearization order, starting with the class itself and ending
@@ -1219,7 +1215,7 @@ trait Types
override def decls: Scope = supertype.decls
override def baseType(clazz: Symbol): Type = supertype.baseType(clazz)
override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq
- override def baseTypeSeqDepth: Int = supertype.baseTypeSeqDepth
+ override def baseTypeSeqDepth: Depth = supertype.baseTypeSeqDepth
override def baseClasses: List[Symbol] = supertype.baseClasses
}
@@ -1513,7 +1509,7 @@ trait Types
}
}
- override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
+ override def baseTypeSeqDepth: Depth = baseTypeSeq.maxDepth
override def baseClasses: List[Symbol] = {
val cached = baseClassesCache
@@ -2602,7 +2598,7 @@ trait Types
override def parents: List[Type] = resultType.parents
override def decls: Scope = resultType.decls
override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
- override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+ override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth
override def baseClasses: List[Symbol] = resultType.baseClasses
override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
override def boundSyms = resultType.boundSyms
@@ -2641,7 +2637,7 @@ trait Types
override def boundSyms = immutable.Set[Symbol](typeParams ++ resultType.boundSyms: _*)
override def prefix: Type = resultType.prefix
override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
- override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+ override def baseTypeSeqDepth: Depth = resultType.baseTypeSeqDepth
override def baseClasses: List[Symbol] = resultType.baseClasses
override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
override def narrow: Type = resultType.narrow
@@ -2776,13 +2772,13 @@ trait Types
def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
- def withTypeVars(op: Type => Boolean, depth: Int): Boolean = {
+ def withTypeVars(op: Type => Boolean, depth: Depth): Boolean = {
val quantifiedFresh = cloneSymbols(quantified)
val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
op(underlying1) && {
solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) &&
- isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
+ isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.inst))
}
}
}
@@ -2846,6 +2842,9 @@ trait Types
// but pattern-matching returned the original constr0 (a bug)
// now, pattern-matching returns the most recent constr
object TypeVar {
+ private val ConstantTrue = ConstantType(Constant(true))
+ private val ConstantFalse = ConstantType(Constant(false))
+
@inline final def trace[T](action: String, msg: => String)(value: T): T = {
if (traceTypeVars) {
val s = msg match {
@@ -2983,7 +2982,9 @@ trait Types
* or `encounteredHigherLevel` or `suspended` accesses should be necessary.
*/
def instValid = constr.instValid
- override def isGround = instValid && constr.inst.isGround
+ def inst = constr.inst
+ def instWithinBounds = constr.instWithinBounds
+ override def isGround = instValid && inst.isGround
/** The variable's skolemization level */
val level = skolemizationLevel
@@ -3002,6 +3003,7 @@ trait Types
this
else if (newArgs.size == params.size) {
val tv = TypeVar(origin, constr, newArgs, params)
+ tv.linkSuspended(this)
TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
}
else
@@ -3024,8 +3026,7 @@ trait Types
// When comparing to types containing skolems, remember the highest level
// of skolemization. If that highest level is higher than our initial
// skolemizationLevel, we can't re-use those skolems as the solution of this
- // typevar, which means we'll need to repack our constr.inst into a fresh
- // existential.
+ // typevar, which means we'll need to repack our inst into a fresh existential.
// were we compared to skolems at a higher skolemizationLevel?
// EXPERIMENTAL: value will not be considered unless enableTypeVarExperimentals is true
// see SI-5729 for why this is still experimental
@@ -3064,7 +3065,16 @@ trait Types
// </region>
// ignore subtyping&equality checks while true -- see findMember
- private[Types] var suspended = false
+ // OPT: This could be Either[TypeVar, Boolean], but this encoding was chosen instead to save allocations.
+ private var _suspended: Type = TypeVar.ConstantFalse
+ private[Types] def suspended: Boolean = (_suspended: @unchecked) match {
+ case TypeVar.ConstantFalse => false
+ case TypeVar.ConstantTrue => true
+ case tv: TypeVar => tv.suspended
+ }
+ private[Types] def suspended_=(b: Boolean): Unit = _suspended = if (b) TypeVar.ConstantTrue else TypeVar.ConstantFalse
+ // SI-7785 Link the suspended attribute of a TypeVar created in, say, a TypeMap (e.g. AsSeenFrom) to its originator
+ private[Types] def linkSuspended(origin: TypeVar): Unit = _suspended = origin
/** Called when a TypeVar is involved in a subtyping check. Result is whether
* this TypeVar could plausibly be a [super/sub]type of argument `tp` and if so,
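
The _suspended field above packs three states into a single reference of type Type: a shared "false" sentinel, a shared "true" sentinel, or a link to the TypeVar this one was derived from (the SI-7785 case), which avoids allocating an Either per type variable. A standalone sketch of the same encoding with plain sentinel objects, not the compiler's ConstantType values:

    object SuspendedEncodingDemo {
      sealed trait Slot
      case object TrueSlot  extends Slot
      case object FalseSlot extends Slot
      final class Var(private var slot: Slot = FalseSlot) extends Slot {
        def suspended: Boolean = slot match {
          case FalseSlot => false
          case TrueSlot  => true
          case v: Var    => v.suspended          // follow the link to the originator
        }
        def suspended_=(b: Boolean): Unit = slot = if (b) TrueSlot else FalseSlot
        def linkSuspended(origin: Var): Unit = slot = origin
      }
      def main(args: Array[String]): Unit = {
        val origin  = new Var
        val derived = new Var
        derived.linkSuspended(origin)   // e.g. a variable created by applyArgs
        origin.suspended = true
        println(derived.suspended)      // true: derived now tracks its originator
      }
    }
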
@@ -3170,8 +3180,8 @@ trait Types
// AM: I think we could use the `suspended` flag to avoid side-effecting during unification
if (suspended) // constraint accumulation is disabled
checkSubtype(tp, origin)
- else if (constr.instValid) // type var is already set
- checkSubtype(tp, constr.inst)
+ else if (instValid) // type var is already set
+ checkSubtype(tp, inst)
else isRelatable(tp) && {
unifySimple || unifyFull(tp) || (
// only look harder if our gaze is oriented toward Any
@@ -3187,14 +3197,14 @@ trait Types
}
def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
-// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG
+// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (instValid) "IV" else "")) //@MDEBUG
def checkIsSameType(tp: Type) = (
- if (typeVarLHS) constr.inst =:= tp
- else tp =:= constr.inst
+ if (typeVarLHS) inst =:= tp
+ else tp =:= inst
)
if (suspended) tp =:= origin
- else if (constr.instValid) checkIsSameType(tp)
+ else if (instValid) checkIsSameType(tp)
else isRelatable(tp) && {
val newInst = wildcardToTypeVarMap(tp)
(constr isWithinBounds newInst) && {
@@ -3233,7 +3243,7 @@ trait Types
)
override def normalize: Type = (
- if (constr.instValid) constr.inst
+ if (instValid) inst
// get here when checking higher-order subtyping of the typevar by itself
// TODO: check whether this ever happens?
else if (isHigherKinded) logResult("Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
@@ -3264,10 +3274,11 @@ trait Types
}
private def levelString = if (settings.explaintypes) level else ""
override def safeToString = (
- if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
- else if (constr.inst ne NoType) "=?" + constr.inst
+ if ((constr eq null) || (inst eq null)) "TVar<" + originName + "=null>"
+ else if (inst ne NoType) "=?" + inst
else (if(untouchable) "!?" else "?") + levelString + originName
)
+ def originString = s"$originName in $originLocation"
override def kind = "TypeVar"
def cloneInternal = {
@@ -3474,7 +3485,7 @@ trait Types
def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
if ((parents eq original.parents) && (decls eq original.decls)) original
else {
- val owner = if (original.typeSymbol == NoSymbol) NoSymbol else original.typeSymbol.owner
+ val owner = original.typeSymbol.owner
val result = refinedType(parents, owner)
val syms1 = decls.toList
for (sym <- syms1)
@@ -3852,7 +3863,7 @@ trait Types
/** The maximum allowable depth of lubs or glbs over types `ts`.
*/
- def lubDepth(ts: List[Type]): Int = {
+ def lubDepth(ts: List[Type]): Depth = {
val td = typeDepth(ts)
val bd = baseTypeSeqDepth(ts)
lubDepthAdjust(td, td max bd)
@@ -3862,16 +3873,17 @@ trait Types
* as a function over the maximum depth `td` of these types, and
* the maximum depth `bd` of all types in the base type sequences of these types.
*/
- private def lubDepthAdjust(td: Int, bd: Int): Int =
+ private def lubDepthAdjust(td: Depth, bd: Depth): Depth = (
if (settings.XfullLubs) bd
- else if (bd <= 3) bd
- else if (bd <= 5) td max (bd - 1)
- else if (bd <= 7) td max (bd - 2)
- else (td - 1) max (bd - 3)
+ else if (bd <= Depth(3)) bd
+ else if (bd <= Depth(5)) td max bd.decr
+ else if (bd <= Depth(7)) td max (bd decr 2)
+ else td.decr max (bd decr 3)
+ )
- private def symTypeDepth(syms: List[Symbol]): Int = typeDepth(syms map (_.info))
- private def typeDepth(tps: List[Type]): Int = maxDepth(tps)
- private def baseTypeSeqDepth(tps: List[Type]): Int = maxBaseTypeSeqDepth(tps)
+ private def symTypeDepth(syms: List[Symbol]): Depth = typeDepth(syms map (_.info))
+ private def typeDepth(tps: List[Type]): Depth = maxDepth(tps)
+ private def baseTypeSeqDepth(tps: List[Type]): Depth = maxbaseTypeSeqDepth(tps)
/** Is intersection of given types populated? That is,
* for all types tp1, tp2 in intersection
@@ -4014,9 +4026,12 @@ trait Types
def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
+ /** This appears to be equivalent to tp.isInstanceOf[SingletonType],
+ * except it excludes ConstantTypes.
+ */
def isSingleType(tp: Type) = tp match {
case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
- case _ => false
+ case _ => false
}
def isConstantType(tp: Type) = tp match {
@@ -4137,7 +4152,7 @@ trait Types
case _ => false
}
- def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = {
+ def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Depth): Boolean = {
def isSubArg(t1: Type, t2: Type, variance: Variance) = (
(variance.isContravariant || isSubType(t1, t2, depth))
&& (variance.isCovariant || isSubType(t2, t1, depth))
@@ -4146,7 +4161,7 @@ trait Types
corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
}
- def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean = {
+ def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = {
def directlySpecializedBy(member: Symbol): Boolean = (
member == sym
|| specializesSym(tp.narrow, member, sym.owner.thisType, sym, depth)
@@ -4166,7 +4181,7 @@ trait Types
/** Does member `sym1` of `tp1` have a stronger type
* than member `sym2` of `tp2`?
*/
- protected[internal] def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ protected[internal] def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Depth): Boolean = {
require((sym1 ne NoSymbol) && (sym2 ne NoSymbol), ((tp1, sym1, tp2, sym2, depth)))
val info1 = tp1.memberInfo(sym1)
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
@@ -4317,18 +4332,6 @@ trait Types
}
}
- /** like map2, but returns list `xs` itself - instead of a copy - if function
- * `f` maps all elements to themselves.
- */
- def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
- if (xs.isEmpty || ys.isEmpty) xs
- else {
- val x1 = f(xs.head, ys.head)
- val xs1 = map2Conserve(xs.tail, ys.tail)(f)
- if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
- else x1 :: xs1
- }
-
/** Do type arguments `targs` conform to formal parameters `tparams`?
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
@@ -4378,7 +4381,7 @@ trait Types
* Return `x` if the computation succeeds with result `x`.
* Return `NoType` if the computation fails.
*/
- def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Int): Type = tps match {
+ def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Depth): Type = tps match {
case tp :: Nil => tp
case TypeRef(_, sym, _) :: rest =>
val pres = tps map (_.prefix) // prefix normalizes automatically
@@ -4411,7 +4414,7 @@ trait Types
val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
val as = as0.distinct
if (as.size == 1) as.head
- else if (depth == 0) {
+ else if (depth.isZero) {
log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
// Don't return "Any" (or "Nothing") when we have to give up due to
// recursion depth. Return NoType, which prevents us from poisoning
@@ -4420,11 +4423,11 @@ trait Types
NoType
}
else {
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == variance.flip) glb(as, decr(depth))
+ if (tparam.variance == variance) lub(as, depth.decr)
+ else if (tparam.variance == variance.flip) glb(as, depth.decr)
else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
+ val l = lub(as, depth.decr)
+ val g = glb(as, depth.decr)
if (l <:< g) l
else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
// just err on the conservative side, i.e. with a bound that is too high.
@@ -4462,7 +4465,7 @@ trait Types
/** Make symbol `sym` a member of scope `tp.decls`
* where `thistp` is the narrowed owner type of the scope.
*/
- def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Int) {
+ def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Depth) {
assert(sym != NoSymbol)
// debuglog("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
if (!specializesSym(thistp, sym, depth)) {
@@ -4549,6 +4552,12 @@ trait Types
else (ps :+ SerializableTpe).toList
)
+ /** Adds the @uncheckedBounds annotation if the given `tp` has type arguments */
+ final def uncheckedBounds(tp: Type): Type = {
+ if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibility with older scala-reflect.jar
+ else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
+ }
+
/** Members of the given class, other than those inherited
* from Any or AnyRef.
*/
@@ -4589,23 +4598,15 @@ trait Types
private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
/** The maximum depth of type `tp` */
- def typeDepth(tp: Type): Int = tp match {
- case TypeRef(pre, sym, args) =>
- math.max(typeDepth(pre), typeDepth(args) + 1)
- case RefinedType(parents, decls) =>
- math.max(typeDepth(parents), symTypeDepth(decls.toList) + 1)
- case TypeBounds(lo, hi) =>
- math.max(typeDepth(lo), typeDepth(hi))
- case MethodType(paramtypes, result) =>
- typeDepth(result)
- case NullaryMethodType(result) =>
- typeDepth(result)
- case PolyType(tparams, result) =>
- math.max(typeDepth(result), symTypeDepth(tparams) + 1)
- case ExistentialType(tparams, result) =>
- math.max(typeDepth(result), symTypeDepth(tparams) + 1)
- case _ =>
- 1
+ def typeDepth(tp: Type): Depth = tp match {
+ case TypeRef(pre, sym, args) => typeDepth(pre) max typeDepth(args).incr
+ case RefinedType(parents, decls) => typeDepth(parents) max symTypeDepth(decls.toList).incr
+ case TypeBounds(lo, hi) => typeDepth(lo) max typeDepth(hi)
+ case MethodType(paramtypes, result) => typeDepth(result)
+ case NullaryMethodType(result) => typeDepth(result)
+ case PolyType(tparams, result) => typeDepth(result) max symTypeDepth(tparams).incr
+ case ExistentialType(tparams, result) => typeDepth(result) max symTypeDepth(tparams).incr
+ case _ => Depth(1)
}
def withUncheckedVariance(tp: Type): Type =
@@ -4616,19 +4617,19 @@ trait Types
// var d = 0
// for (tp <- tps) d = d max by(tp) //!!!OPT!!!
// d
- private[scala] def maxDepth(tps: List[Type]): Int = {
- @tailrec def loop(tps: List[Type], acc: Int): Int = tps match {
- case tp :: rest => loop(rest, math.max(acc, typeDepth(tp)))
+ private[scala] def maxDepth(tps: List[Type]): Depth = {
+ @tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match {
+ case tp :: rest => loop(rest, acc max typeDepth(tp))
case _ => acc
}
- loop(tps, 0)
+ loop(tps, Depth.Zero)
}
- private[scala] def maxBaseTypeSeqDepth(tps: List[Type]): Int = {
- @tailrec def loop(tps: List[Type], acc: Int): Int = tps match {
- case tp :: rest => loop(rest, math.max(acc, tp.baseTypeSeqDepth))
+ private[scala] def maxbaseTypeSeqDepth(tps: List[Type]): Depth = {
+ @tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match {
+ case tp :: rest => loop(rest, acc max tp.baseTypeSeqDepth)
case _ => acc
}
- loop(tps, 0)
+ loop(tps, Depth.Zero)
}
@tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
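
The recurring change in this file is the switch from raw Int depths to a dedicated Depth type: the magic constant AnyDepth = -3 and the free-standing decr helper removed above become members of a small wrapper. The new Depth.scala itself is not part of this excerpt; the following is a minimal sketch reconstructed from the call sites above (Depth(1), Depth.Zero, Depth.AnyDepth, incr, decr, max, isZero, isNegative, isAnyDepth), not the compiler's actual implementation.

    // Hypothetical sketch only: shape inferred from the call sites in this patch.
    final class Depth private (val depth: Int) extends AnyVal {
      def max(that: Depth): Depth  = if (depth < that.depth) that else this
      def decr(n: Int): Depth      = if (isAnyDepth) this else Depth(depth - n)
      def incr(n: Int): Depth      = if (isAnyDepth) this else Depth(depth + n)
      def decr: Depth              = decr(1)
      def incr: Depth              = incr(1)
      def <(that: Depth): Boolean  = depth < that.depth
      def <=(that: Depth): Boolean = depth <= that.depth
      def isNegative               = depth < 0
      def isZero                   = depth == 0
      def isAnyDepth               = this == Depth.AnyDepth
    }

    object Depth {
      final val AnyDepth = new Depth(-3) // the former bare "don't care" constant -3
      final val Zero     = new Depth(0)
      def apply(depth: Int): Depth = if (depth < 0) AnyDepth else new Depth(depth)
    }

As a value class the wrapper keeps most depth arithmetic allocation-free while giving the sentinel and the decrement logic named, type-checked homes.
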
diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala
index 3480161567..ecc5d99a40 100644
--- a/src/reflect/scala/reflect/internal/Variance.scala
+++ b/src/reflect/scala/reflect/internal/Variance.scala
@@ -60,8 +60,7 @@ final class Variance private (val flags: Int) extends AnyVal {
/** The symbolic annotation used to indicate the given kind of variance. */
def symbolicString = (
- if (isBivariant) "+/-"
- else if (isCovariant) "+"
+ if (isCovariant) "+"
else if (isContravariant) "-"
else ""
)
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
index 78df3c9617..bf00a7ac87 100644
--- a/src/reflect/scala/reflect/internal/Variances.scala
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -92,7 +92,9 @@ trait Variances {
val relative = relativeVariance(sym)
val required = relative * variance
if (!relative.isBivariant) {
- log(s"verifying $sym (${sym.variance}${sym.locationString}) is $required at $base in ${base.owner}")
+ def sym_s = s"$sym (${sym.variance}${sym.locationString})"
+ def base_s = s"$base in ${base.owner}" + (if (base.owner.isClass) "" else " in " + base.owner.enclClass)
+ log(s"verifying $sym_s is $required at $base_s")
if (sym.variance != required)
issueVarianceError(base, sym, required)
}
@@ -146,7 +148,7 @@ trait Variances {
)
tree match {
case defn: MemberDef if skip =>
- log(s"Skipping variance check of ${sym.defString}")
+ debuglog(s"Skipping variance check of ${sym.defString}")
case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
validateVariance(sym)
super.traverse(tree)
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
deleted file mode 100644
index 2c9f909629..0000000000
--- a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package scala
-package reflect
-package internal
-package annotations
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * Later on, this annotation should be removed and implemented with domain-specific macros.
- * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
- * then it should itself be declared as a macro.
- *
- * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
- * report an error if `outer` is not detected. In principle, we could use this approach right now,
- * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
- * track of the stack of the trees being typechecked.
- *
- * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
- * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
- * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
- * domain-specific logic.
- *
- * @param message the error message to print during compilation if a reference remains
- * after type checking
- * @since 2.10.1
- */
-@getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala
new file mode 100644
index 0000000000..ef299a600c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/package.scala
@@ -0,0 +1,6 @@
+package scala.reflect.internal
+
+package object annotations {
+ @deprecated("Use scala.annotation.compileTimeOnly instead", "2.11.0")
+ type compileTimeOnly = scala.annotation.compileTimeOnly
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
new file mode 100644
index 0000000000..a44bb54734
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
@@ -0,0 +1,13 @@
+package scala.reflect
+package internal
+package annotations
+
+/**
+ * An annotation that designates that the annotated type should not be checked for violations of
+ * type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized
+ * code that uses the inferred type of an expression as the type of an artificial val/def (for example,
+ * a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]].
+ *
+ * @since 2.10.3
+ */
+final class uncheckedBounds extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 6cffdbc193..f42dbf56e1 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -22,8 +22,8 @@ import scala.annotation.switch
* @version 1.0
*/
abstract class UnPickler {
- val global: SymbolTable
- import global._
+ val symbolTable: SymbolTable
+ import symbolTable._
/** Unpickle symbol table information descending from a class and/or module root
* from an array of bytes.
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 1d3c6b0f23..6fa536d84c 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -19,7 +19,7 @@ private[internal] trait GlbLubs {
private final val verifyLubs = true
- private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
+ private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Depth) {
import util.TableDef
import TableDef.Column
def str(tp: Type) = {
@@ -76,8 +76,8 @@ private[internal] trait GlbLubs {
* (except that type constructors have been applied to their dummyArgs)
* @See baseTypeSeq for a definition of sorted and upwards closed.
*/
- def lubList(ts: List[Type], depth: Int): List[Type] = {
- var lubListDepth = 0
+ def lubList(ts: List[Type], depth: Depth): List[Type] = {
+ var lubListDepth = Depth.Zero
// This catches some recursive situations which would otherwise
// befuddle us, e.g. pos/hklub0.scala
def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol))
@@ -89,7 +89,7 @@ private[internal] trait GlbLubs {
}
// pretypes is a tail-recursion-preserving accumulator.
@tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
- lubListDepth += 1
+ lubListDepth = lubListDepth.incr
if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head
@@ -181,13 +181,13 @@ private[internal] trait GlbLubs {
/** Eliminate from list of types all elements which are a subtype
* of some other element of the list. */
- private def elimSub(ts: List[Type], depth: Int): List[Type] = {
+ private def elimSub(ts: List[Type], depth: Depth): List[Type] = {
def elimSub0(ts: List[Type]): List[Type] = ts match {
case List() => List()
case List(t) => List(t)
case t :: ts1 =>
- val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
- if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
+ val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, depth.decr)))
+ if (rest exists (t1 => isSubType(t, t1, depth.decr))) rest else t :: rest
}
val ts0 = elimSub0(ts)
if (ts0.isEmpty || ts0.tail.isEmpty) ts0
@@ -251,8 +251,8 @@ private[internal] trait GlbLubs {
else if (isNumericSubType(t2, t1)) t1
else IntTpe)
- private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
+ private val lubResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ private val glbResults = new mutable.HashMap[(Depth, List[Type]), Type]
/** Given a list of types, finds all the base classes they have in
* common, then returns a list of type constructors derived directly
@@ -299,7 +299,7 @@ private[internal] trait GlbLubs {
}
/** The least upper bound wrt <:< of a list of types */
- protected[internal] def lub(ts: List[Type], depth: Int): Type = {
+ protected[internal] def lub(ts: List[Type], depth: Depth): Type = {
def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
case List() => NothingTpe
case List(t) => t
@@ -321,7 +321,7 @@ private[internal] trait GlbLubs {
lubType
case None =>
lubResults((depth, ts)) = AnyTpe
- val res = if (depth < 0) AnyTpe else lub1(ts)
+ val res = if (depth.isNegative) AnyTpe else lub1(ts)
lubResults((depth, ts)) = res
res
}
@@ -333,7 +333,7 @@ private[internal] trait GlbLubs {
val lubOwner = commonOwner(ts)
val lubBase = intersectionType(lubParents, lubOwner)
val lubType =
- if (phase.erasedTypes || depth == 0 ) lubBase
+ if (phase.erasedTypes || depth.isZero ) lubBase
else {
val lubRefined = refinedType(lubParents, lubOwner)
val lubThisType = lubRefined.typeSymbol.thisType
@@ -357,12 +357,12 @@ private[internal] trait GlbLubs {
val symtypes =
map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, depth.decr))
else if (symtypes.tail forall (symtypes.head =:= _))
proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
else {
def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ TypeBounds(glb(bnds map (_.lo), depth.decr), lub(bnds map (_.hi), depth.decr))
lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
.setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
}
@@ -432,8 +432,8 @@ private[internal] trait GlbLubs {
* The counter breaks this recursion after two calls.
* If the recursion is broken, no member is added to the glb.
*/
- private var globalGlbDepth = 0
- private final val globalGlbLimit = 2
+ private var globalGlbDepth = Depth.Zero
+ private final val globalGlbLimit = Depth(2)
/** The greatest lower bound of a list of types (as determined by `<:<`). */
def glb(ts: List[Type]): Type = elimSuper(ts) match {
@@ -451,7 +451,7 @@ private[internal] trait GlbLubs {
}
}
- protected[internal] def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
+ protected[internal] def glb(ts: List[Type], depth: Depth): Type = elimSuper(ts) match {
case List() => AnyTpe
case List(t) => t
case ts0 => glbNorm(ts0, depth)
@@ -459,7 +459,7 @@ private[internal] trait GlbLubs {
/** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
* with regard to `elimSuper`. */
- protected def glbNorm(ts: List[Type], depth: Int): Type = {
+ protected def glbNorm(ts: List[Type], depth: Depth): Type = {
def glb0(ts0: List[Type]): Type = ts0 match {
case List() => AnyTpe
case List(t) => t
@@ -479,7 +479,7 @@ private[internal] trait GlbLubs {
glbType
case _ =>
glbResults((depth, ts)) = NothingTpe
- val res = if (depth < 0) NothingTpe else glb1(ts)
+ val res = if (depth.isNegative) NothingTpe else glb1(ts)
glbResults((depth, ts)) = res
res
}
@@ -501,7 +501,7 @@ private[internal] trait GlbLubs {
val ts1 = ts flatMap refinedToParents
val glbBase = intersectionType(ts1, glbOwner)
val glbType =
- if (phase.erasedTypes || depth == 0) glbBase
+ if (phase.erasedTypes || depth.isZero) glbBase
else {
val glbRefined = refinedType(ts1, glbOwner)
val glbThisType = glbRefined.typeSymbol.thisType
@@ -514,15 +514,15 @@ private[internal] trait GlbLubs {
val symtypes = syms map glbThisType.memberInfo
assert(!symtypes.isEmpty)
proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
- if (proto.isTerm) glb(symtypes, decr(depth))
+ if (proto.isTerm) glb(symtypes, depth.decr)
else {
def isTypeBound(tp: Type) = tp match {
case TypeBounds(_, _) => true
case _ => false
}
def glbBounds(bnds: List[Type]): TypeBounds = {
- val lo = lub(bnds map (_.bounds.lo), decr(depth))
- val hi = glb(bnds map (_.bounds.hi), decr(depth))
+ val lo = lub(bnds map (_.bounds.lo), depth.decr)
+ val hi = glb(bnds map (_.bounds.hi), depth.decr)
if (lo <:< hi) TypeBounds(lo, hi)
else throw GlbFailure
}
@@ -539,7 +539,7 @@ private[internal] trait GlbLubs {
}
if (globalGlbDepth < globalGlbLimit)
try {
- globalGlbDepth += 1
+ globalGlbDepth = globalGlbDepth.incr
val dss = ts flatMap refinedToDecls
for (ds <- dss; sym <- ds.iterator)
if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
@@ -549,7 +549,7 @@ private[internal] trait GlbLubs {
case ex: NoCommonType =>
}
} finally {
- globalGlbDepth -= 1
+ globalGlbDepth = globalGlbDepth.decr
}
if (glbRefined.decls.isEmpty) glbBase else glbRefined
}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 63f17dff34..d8b3b04d0e 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -232,9 +232,7 @@ trait TypeComparers {
)
}
- def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
-
- def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
+ def isSubType(tp1: Type, tp2: Type, depth: Depth = Depth.AnyDepth): Boolean = try {
subsametypeRecursions += 1
//OPT cutdown on Function0 allocation
@@ -314,7 +312,7 @@ trait TypeComparers {
else TriState.Unknown
}
- private def isSubType1(tp1: Type, tp2: Type, depth: Int): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ private def isSubType1(tp1: Type, tp2: Type, depth: Depth): Boolean = typeRelationPreCheck(tp1, tp2) match {
case state if state.isKnown => state.booleanValue
case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => annotationsConform(tp1, tp2) && (tp1.withoutAnnotations <:< tp2.withoutAnnotations)
case _ => isSubType2(tp1, tp2, depth)
@@ -338,7 +336,7 @@ trait TypeComparers {
}
// @assume tp1.isHigherKinded || tp2.isHigherKinded
- def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ def isHKSubType(tp1: Type, tp2: Type, depth: Depth): Boolean = {
def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match {
case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side
case (_, TypeRef(_, NothingClass, _)) => false
@@ -357,7 +355,7 @@ trait TypeComparers {
}
/** Does type `tp1` conform to `tp2`? */
- private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ private def isSubType2(tp1: Type, tp2: Type, depth: Depth): Boolean = {
def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSubType(lhs, rhs, depth)
if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2))
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index 123c296f95..fdfe376c18 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -6,7 +6,6 @@ package tpe
import scala.collection.{ generic }
import generic.Clearable
-
private[internal] trait TypeConstraints {
self: SymbolTable =>
import definitions._
@@ -170,11 +169,14 @@ private[internal] trait TypeConstraints {
}
}
- def isWithinBounds(tp: Type): Boolean =
- lobounds.forall(_ <:< tp) &&
- hibounds.forall(tp <:< _) &&
- (numlo == NoType || (numlo weak_<:< tp)) &&
- (numhi == NoType || (tp weak_<:< numhi))
+ def instWithinBounds = instValid && isWithinBounds(inst)
+
+ def isWithinBounds(tp: Type): Boolean = (
+ lobounds.forall(_ <:< tp)
+ && hibounds.forall(tp <:< _)
+ && (numlo == NoType || (numlo weak_<:< tp))
+ && (numhi == NoType || (tp weak_<:< numhi))
+ )
var inst: Type = NoType // @M reduce visibility?
@@ -188,12 +190,17 @@ private[internal] trait TypeConstraints {
override def toString = {
val boundsStr = {
- val lo = loBounds filterNot typeIsNothing
- val hi = hiBounds filterNot typeIsAny
- val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
- val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
- lostr ++ histr mkString ("[", " | ", "]")
+ val lo = loBounds filterNot typeIsNothing match {
+ case Nil => ""
+ case tp :: Nil => " >: " + tp
+ case tps => tps.mkString(" >: (", ", ", ")")
+ }
+ val hi = hiBounds filterNot typeIsAny match {
+ case Nil => ""
+ case tp :: Nil => " <: " + tp
+ case tps => tps.mkString(" <: (", ", ", ")")
+ }
+ lo + hi
}
if (inst eq NoType) boundsStr
else boundsStr + " _= " + inst.safeToString
@@ -208,12 +215,7 @@ private[internal] trait TypeConstraints {
* solution direction for all contravariant variables.
* @param upper When `true` search for max solution else min.
*/
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Variance], upper: Boolean): Boolean =
- solve(tvars, tparams, variances, upper, AnyDepth)
-
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Variance], upper: Boolean, depth: Int): Boolean = {
+ def solve(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): Boolean = {
def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) {
if (tvar.constr.inst == NoType) {
@@ -236,25 +238,25 @@ private[internal] trait TypeConstraints {
if (!cyclic) {
if (up) {
if (bound.typeSymbol != AnyClass) {
- log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
+ debuglog(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
}
for (tparam2 <- tparams)
tparam2.info.bounds.lo.dealias match {
case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ debuglog(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
case _ =>
}
} else {
if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
- log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
+ debuglog(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
}
for (tparam2 <- tparams)
tparam2.info.bounds.hi.dealias match {
case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ debuglog(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
case _ =>
}
@@ -265,12 +267,16 @@ private[internal] trait TypeConstraints {
//println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
val newInst = (
if (up) {
- if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
- } else {
- if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
+ if (depth.isAnyDepth) glb(tvar.constr.hiBounds)
+ else glb(tvar.constr.hiBounds, depth)
+ }
+ else {
+ if (depth.isAnyDepth) lub(tvar.constr.loBounds)
+ else lub(tvar.constr.loBounds, depth)
}
)
- log(s"$tvar setInst $newInst")
+
+ debuglog(s"$tvar setInst $newInst")
tvar setInst newInst
//Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
}
@@ -278,6 +284,6 @@ private[internal] trait TypeConstraints {
// println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
foreach3(tvars, tparams, variances)(solveOne)
- tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
+ tvars forall (tv => tv.instWithinBounds || util.andFalse(log(s"Inferred type for ${tv.originString} does not conform to bounds: ${tv.constr}")))
}
}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index bebc419c7c..be61c45041 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -17,12 +17,11 @@ private[internal] trait TypeMaps {
* so it is no longer carries the too-stealthy name "deAlias".
*/
object normalizeAliases extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType =>
- def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
- mapOver(logResult(msg)(tp.normalize))
- case _ => mapOver(tp)
- }
+ def apply(tp: Type): Type = mapOver(tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType && tp.isHigherKinded => logResult(s"Normalized type alias function $tp")(tp.normalize)
+ case TypeRef(_, sym, _) if sym.isAliasType => tp.normalize
+ case tp => tp
+ })
}
/** Remove any occurrence of type <singleton> from this type and its parents */
@@ -395,7 +394,7 @@ private[internal] trait TypeMaps {
s"Widened lone occurrence of $tp1 inside existential to $word bound"
}
if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
- logResult(msg)(repl)
+ debuglogResult(msg)(repl)
else
tp1
case _ =>
@@ -524,7 +523,7 @@ private[internal] trait TypeMaps {
private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
val TypeRef(_, lhsSym, lhsArgs) = lhs
val TypeRef(_, rhsSym, rhsArgs) = rhs
- require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+ require(lhsSym.owner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
// Find the type parameter position; we'll use the corresponding argument.
// Why are we checking by name rather than by equality? Because for
@@ -539,7 +538,7 @@ private[internal] trait TypeMaps {
else {
// It's easy to get here when working on hardcore type machinery (not to
// mention when not doing so, see above) so let's provide a standout error.
- def own_s(s: Symbol) = s.nameString + " in " + s.safeOwner.nameString
+ def own_s(s: Symbol) = s.nameString + " in " + s.owner.nameString
def explain =
sm"""| sought ${own_s(lhsSym)}
| classSym ${own_s(rhsSym)}
@@ -944,7 +943,7 @@ private[internal] trait TypeMaps {
}
}
- /** A map to convert every occurrence of a type variable to a wildcard type. */
+ /** A map to convert each occurrence of a type variable to its origin. */
object typeVarToOriginMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeVar(origin, _) => origin
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 580ada8254..90ffe9d9e7 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -60,7 +60,7 @@ trait Erasure {
*/
protected def unboundedGenericArrayLevel(tp: Type): Int = tp match {
case GenericArray(level, core) if !(core <:< AnyRefTpe) => level
- case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")(ps map unboundedGenericArrayLevel max)
+ case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")((ps map unboundedGenericArrayLevel).max)
case _ => 0
}
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index e127d577e1..59af819dad 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -53,6 +53,42 @@ trait Collections {
}
lb.toList
}
+
+ /** like map2, but returns list `xs` itself - instead of a copy - if function
+ * `f` maps all elements to themselves.
+ */
+ final def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] = {
+ // Note to developers: there exists a duplication between this function and `List#mapConserve`.
+ // If any successful optimization attempts or other changes are made, please rehash them there too.
+ @tailrec
+ def loop(mapped: ListBuffer[A], unchanged: List[A], pending0: List[A], pending1: List[B]): List[A] = {
+ if (pending0.isEmpty || pending1.isEmpty) {
+ if (mapped eq null) unchanged
+ else mapped.prependToList(unchanged)
+ } else {
+ val head00 = pending0.head
+ val head01 = pending1.head
+ val head1 = f(head00, head01)
+
+ if ((head1 eq head00.asInstanceOf[AnyRef])) {
+ loop(mapped, unchanged, pending0.tail, pending1.tail)
+ } else {
+ val b = if (mapped eq null) new ListBuffer[A] else mapped
+ var xc = unchanged
+ while ((xc ne pending0) && (xc ne pending1)) {
+ b += xc.head
+ xc = xc.tail
+ }
+ b += head1
+ val tail0 = pending0.tail
+ val tail1 = pending1.tail
+ loop(b, tail0, tail0, tail1)
+ }
+ }
+ }
+ loop(null, xs, xs, ys)
+ }
+
final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = {
if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil
else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
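
For reference, the contract of map2Conserve is easiest to see with the simple recursive definition that this patch removes from Types.scala (shown earlier in this diff); the new Collections version is a tail-recursive rewrite with the same observable behaviour. A small self-contained usage example of that contract:

    object Map2ConserveDemo {
      // The simple reference definition removed from Types.scala above; the new
      // ListBuffer-based version in Collections.scala preserves this contract.
      def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
        if (xs.isEmpty || ys.isEmpty) xs
        else {
          val x1  = f(xs.head, ys.head)
          val xs1 = map2Conserve(xs.tail, ys.tail)(f)
          if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs else x1 :: xs1
        }
      def main(args: Array[String]): Unit = {
        val xs      = List("a", "b", "c")
        val same    = map2Conserve(xs, List(1, 2, 3))((s, _) => s)      // every element unchanged
        val changed = map2Conserve(xs, List(1, 2, 3))((s, n) => s * n)
        println(same eq xs)   // true: the original list instance is returned, no copy
        println(changed)      // List(a, bb, ccc)
      }
    }
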
diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index a7fd787dfc..63ea6e2c49 100644
--- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -44,7 +44,7 @@ trait ScalaClassLoader extends JClassLoader {
/** Create an instance of a class with this classloader */
def create(path: String): AnyRef =
- tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
+ tryToInitializeClass[AnyRef](path).map(_.newInstance()).orNull
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
@@ -116,7 +116,7 @@ object ScalaClassLoader {
/** True if supplied class exists in supplied path */
def classExists(urls: Seq[URL], name: String): Boolean =
- fromURLs(urls) tryToLoadClass name isDefined
+ (fromURLs(urls) tryToLoadClass name).isDefined
/** Finding what jar a clazz or instance came from */
def originOfClass(x: Class[_]): Option[URL] =
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index 97cc19952c..f61c1f3c50 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -92,7 +92,7 @@ trait TraceSymbolActivity {
while (ph != NoPhase && ph.name != "erasure") {
ph = ph.prev
}
- ph
+ if (ph eq NoPhase) phase else ph
}
private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body)
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
index c7a35d4637..4074d974d2 100644
--- a/src/reflect/scala/reflect/internal/util/TriState.scala
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -3,6 +3,8 @@ package reflect
package internal
package util
+import scala.language.implicitConversions
+
import TriState._
/** A simple true/false/unknown value, for those days when
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index 9b792a3f43..a8bc79d832 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -4,7 +4,7 @@ package reflect.internal.util
import java.lang.ref.{WeakReference, ReferenceQueue}
import scala.annotation.tailrec
import scala.collection.generic.Clearable
-import scala.collection.mutable.{Set => mSet}
+import scala.collection.mutable.{Set => MSet}
/**
* A HashSet where the elements are stored weakly. Elements in this set are elligible for GC if no other
@@ -16,8 +16,8 @@ import scala.collection.mutable.{Set => mSet}
* This set implmeentation is not in general thread safe without external concurrency control. However it behaves
* properly when GC concurrently collects elements in this set.
*/
-final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
-
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] {
+
import WeakHashSet._
def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
@@ -47,7 +47,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
candidate *= 2
}
candidate
- }
+ }
/**
* the underlying table of entries which is an array of Entry linked lists
@@ -65,7 +65,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
* find the bucket associated with an elements's hash code
*/
private[this] def bucketFor(hash: Int): Int = {
- // spread the bits around to try to avoid accidental collisions using the
+ // spread the bits around to try to avoid accidental collisions using the
// same algorithm as java.util.HashMap
var h = hash
h ^= h >>> 20 ^ h >>> 12
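
The comment above points at java.util.HashMap's (pre-Java-8) supplemental hash; only its first xor line is visible in this hunk. Sketched in full, together with the power-of-two masking that presumably follows (the mask itself is outside this excerpt):

    object HashSpreadDemo {
      // Supplemental hash as in java.util.HashMap (Java 6/7): mixes high bits into
      // the low bits so that a power-of-two mask still sees them.
      def spread(hash: Int): Int = {
        var h = hash
        h ^= (h >>> 20) ^ (h >>> 12)
        h ^ (h >>> 7) ^ (h >>> 4)
      }
      // tableSize is assumed to be a power of two, as computed earlier in WeakHashSet.
      def bucketFor(hash: Int, tableSize: Int): Int = spread(hash) & (tableSize - 1)
      def main(args: Array[String]): Unit =
        println(bucketFor("example".hashCode, 64))   // a bucket index in [0, 64)
    }
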
@@ -98,7 +98,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
@tailrec
- def queueLoop {
+ def queueLoop(): Unit = {
val stale = poll()
if (stale != null) {
val bucket = bucketFor(stale.hash)
@@ -109,11 +109,11 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
linkedListLoop(null, table(bucket))
- queueLoop
+ queueLoop()
}
}
-
- queueLoop
+
+ queueLoop()
}
/**
@@ -123,7 +123,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
val oldTable = table
table = new Array[Entry[A]](oldTable.size * 2)
threshhold = computeThreshHold
-
+
@tailrec
def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
@tailrec
@@ -225,7 +225,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def +=(elem: A) = this + elem
// from scala.reflect.interanl.Set
- override def addEntry(x: A) { this += x }
+ override def addEntry(x: A) { this += x }
// remove an element from this set and return this set
override def -(elem: A): this.type = elem match {
@@ -274,6 +274,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
override def foreach[U](f: A => U): Unit = iterator foreach f
+ // Declared with `()` because the underlying iterator runs `removeStaleEntries()` as a side effect
override def toList(): List[A] = iterator.toList
// Iterator over all the elements in this set in no particular order
@@ -292,7 +293,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
*/
private[this] var entry: Entry[A] = null
- /**
+ /**
* the element that will be the result of the next call to next()
*/
private[this] var lookaheadelement: A = null.asInstanceOf[A]
@@ -339,7 +340,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
* the entries must be stable. If any are garbage collected during validation
* then an assertion may inappropriately fire.
*/
- def fullyValidate {
+ def fullyValidate: Unit = {
var computedCount = 0
var bucket = 0
while (bucket < table.size) {
@@ -407,10 +408,10 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
e = e.tail
}
count
- }
+ }
}
- private[util] def diagnostics = new Diagnostics
+ private[util] def diagnostics = new Diagnostics
}
/**
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 49164d366c..df63a55090 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -7,6 +7,8 @@ import scala.language.existentials // SI-6541
package object util {
import StringOps.longestCommonPrefix
+ def andFalse(body: Unit): Boolean = false
+
// Shorten a name like Symbols$FooSymbol to FooSymbol.
private def shortenName(name: String): String = {
if (name == "") return ""
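
andFalse is what lets the solve change above log a bounds violation from inside a boolean expression: the side effect runs, and the call contributes false to the surrounding condition. A minimal standalone illustration (withinBounds here is a made-up stand-in for instWithinBounds):

    object AndFalseDemo {
      def andFalse(body: Unit): Boolean = false
      def withinBounds(x: Int): Boolean = x >= 0 && x <= 10
      def main(args: Array[String]): Unit = {
        val ok = List(3, 42, 7).forall(x =>
          withinBounds(x) || andFalse(println(s"$x does not conform to bounds")))
        println(ok)   // false, and the offending value was logged on the way out
      }
    }
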
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index eabf1dcbab..8260189459 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -126,7 +126,11 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
final class FileZipArchive(file: JFile) extends ZipArchive(file) {
def iterator: Iterator[Entry] = {
- val zipFile = new ZipFile(file)
+ val zipFile = try {
+ new ZipFile(file)
+ } catch {
+ case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe)
+ }
val root = new DirEntry("/")
val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
val enum = zipFile.entries()
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
index 434b7c1b9c..b0c816f4ad 100644
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -37,8 +37,7 @@ trait Context extends Aliases
with Typers
with Parsers
with Evals
- with ExprUtils
- with Synthetics {
+ with ExprUtils {
/** The compile-time universe. */
val universe: Universe
diff --git a/src/reflect/scala/reflect/macros/Synthetics.scala b/src/reflect/scala/reflect/macros/Synthetics.scala
deleted file mode 100644
index 5e422ee89f..0000000000
--- a/src/reflect/scala/reflect/macros/Synthetics.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-package scala
-package reflect
-package macros
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
- * exposes functions to introduce synthetic definitions.
- *
- * @define TOPLEVEL_TREE Top-level tree is a tree that represents a non-inner class or object in one of the currently compiled source files.
- * Note that top-level isn't equivalent to [[scala.reflect.api.Symbols#SymbolApi.isStatic]],
- * because static also embraces definitions nested in static objects
- *
- * @define INTRODUCE_TOP_LEVEL Allowed definitions include classes (represented by `ClassDef` trees), traits (represented
- * by `ClassDef` trees having the `TRAIT` flag set in `mods`) and objects (represented by `ModuleDef` trees).
- *
- * The definitions are put into the package with a prototype provided in `packagePrototype`.
- * Supported prototypes are (see [[PackageSpec]] for more details):
- * * Strings and names representing a fully-qualified name of the package
- * * Trees that can work as package ids
- * * Package or package class symbols
- *
- * Typical value for a package prototype is a fully-qualified name in a string.
- * For example, to generate a class available at `foo.bar.Test`, call this method as follows:
- *
- * introduceTopLevel("foo.bar", ClassDef(<mods>, TypeName("Test"), <tparams>, <template>))
- *
- * It is possible to add definitions to the empty package by using `nme.EMPTY_PACKAGE_NAME.toString`, but
- * that's not recommended, since such definitions cannot be seen from outside the empty package.
- *
- * Only the multi-parameter overload of this method can be used to introduce companions.
- * If companions are introduced by two different calls, then they will be put into different virtual files, and `scalac`
- * will show an error about companions being defined in different files. By the way, this also means that there's currently no way
- * to define a companion for an existing class or module
- */
-trait Synthetics {
- self: Context =>
-
- import universe._
-
- /** Looks up a top-level definition tree with a given fully-qualified name
- * (term name for modules, type name for classes). $TOPLEVEL_TREE.
- * If such a tree does not exist, returns `EmptyTree`.
- */
- def topLevelDef(name: Name): Tree
-
- /** Returns a reference to a top-level definition tree with a given fully-qualified name
- * (term name for modules, type name for classes). $TOPLEVEL_TREE.
- * If such a tree does not exist, returns `EmptyTree`.
- */
- def topLevelRef(name: Name): Tree
-
- /** Adds a top-level definition to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
- *
- * Returns a fully-qualified reference to the introduced definition.
- */
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: ImplDef): RefTree
-
- /** Adds a list of top-level definitions to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
- *
- * Returns a list of fully-qualified references to the introduced definitions.
- */
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: ImplDef*): List[RefTree]
-
- /** A factory which can create a package def from a prototype and a list of declarations.
- */
- trait PackageSpec[T] { def mkPackageDef(prototype: T, stats: List[Tree]): PackageDef }
-
- /** Hosts supported package specs.
- */
- object PackageSpec {
- /** Package def can be created from a fully-qualified name and a list of definitions.
- * The name is converted into an Ident or a chain of Selects.
- */
- implicit val stringIsPackageSpec = new PackageSpec[String] {
- def mkPackageDef(prototype: String, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
-
- /** Package def can be created from a fully-qualified term name and a list of definitions.
- * The name is converted into an Ident or a chain of Selects.
- */
- implicit val termNameIsPackageSpec = new PackageSpec[TermName] {
- def mkPackageDef(prototype: TermName, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
-
- /** Package def can be created from a package id tree and a list of definitions.
- * If the tree is not a valid package id, i.e. is not a term-name ident or a chain of term-name selects,
- * then the produced PackageDef will fail compilation at some point in the future.
- */
- implicit val refTreeIsPackageSpec = new PackageSpec[RefTree] {
- def mkPackageDef(prototype: RefTree, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
-
- /** Package def can be created from a package/package class symbol and a list of definitions.
- * If the provided symbol is not a package symbol or a package class symbol, package construction will throw an exception.
- */
- implicit val SymbolIsPackageSpec = new PackageSpec[Symbol] {
- def mkPackageDef(prototype: Symbol, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
- }
-
- protected def mkPackageDef(name: String, stats: List[Tree]): PackageDef
- protected def mkPackageDef(name: TermName, stats: List[Tree]): PackageDef
- protected def mkPackageDef(tree: RefTree, stats: List[Tree]): PackageDef
- protected def mkPackageDef(sym: Symbol, stats: List[Tree]): PackageDef
-}
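
For reference, a macro implementation invoked the API removed above roughly as in the sketch below. It assumes a 2.10/2.11-era macro `Context` named `c` and a class or module tree `generatedClass` that the macro has already built; the package name `demo.generated` is illustrative.

import scala.reflect.macros.Context

object IntroduceTopLevelSketch {
  // Registers `generatedClass` under package demo.generated and returns a
  // fully-qualified reference to it, per the scaladoc of the removed trait.
  def introduce(c: Context)(generatedClass: c.universe.ImplDef): c.universe.RefTree =
    c.introduceTopLevel("demo.generated", generatedClass)
}

The string prototype works because of the `PackageSpec` instances shown above; a `TermName`, a package id `RefTree` or a package `Symbol` would be accepted in the same way.
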
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 16405a88b4..dd77b084c5 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -18,7 +18,7 @@ import internal.pickling.ByteCodecs
import internal.pickling.UnPickler
import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
-import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
+import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacShouldntLoadClass}
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
@@ -529,7 +529,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
private object unpickler extends UnPickler {
- val global: thisUniverse.type = thisUniverse
+ val symbolTable: thisUniverse.type = thisUniverse
}
/** how connected????
@@ -696,8 +696,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val parents = try {
parentsLevel += 1
val jsuperclazz = jclazz.getGenericSuperclass
- val superclazz = if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)
- superclazz :: (jclazz.getGenericInterfaces.toList map typeToScala)
+ val ifaces = jclazz.getGenericInterfaces.toList map typeToScala
+ val isAnnotation = JavaAccFlags(jclazz).isAnnotation
+ if (isAnnotation) AnnotationClass.tpe :: ClassfileAnnotationClass.tpe :: ifaces
+ else (if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)) :: ifaces
} finally {
parentsLevel -= 1
}
@@ -709,14 +711,21 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def enter(sym: Symbol, mods: JavaAccFlags) =
( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym
- for (jinner <- jclazz.getDeclaredClasses)
+ def enterEmptyCtorIfNecessary(): Unit = {
+ if (jclazz.getConstructors.isEmpty)
+ clazz.info.decls.enter(clazz.newClassConstructor(NoPosition))
+ }
+
+ for (jinner <- jclazz.getDeclaredClasses) {
jclassAsScala(jinner) // inner class is entered as a side-effect
// no need to call enter explicitly
+ }
pendingLoadActions ::= { () =>
jclazz.getDeclaredFields foreach (f => enter(jfieldAsScala(f), f.javaFlags))
jclazz.getDeclaredMethods foreach (m => enter(jmethodAsScala(m), m.javaFlags))
jclazz.getConstructors foreach (c => enter(jconstrAsScala(c), c.javaFlags))
+ enterEmptyCtorIfNecessary()
}
if (parentsLevel == 0) {
@@ -949,7 +958,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val cls =
if (jclazz.isMemberClass && !nme.isImplClassName(jname))
lookupClass
- else if (jclazz.isLocalClass0 || isInvalidClassName(jname))
+ else if (jclazz.isLocalClass0 || scalacShouldntLoadClass(jname))
// local classes and implementation classes not preserved by unpickling - treat as Java
//
// upd. but only if they cannot be loaded as top-level classes
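
The first JavaMirrors hunk changes how Java annotation classes are mirrored: instead of `Any` plus the declared interfaces, an annotation class now gets `scala.annotation.Annotation` and `scala.annotation.ClassfileAnnotation` as parents. The second hunk enters a synthetic constructor symbol whenever `jclazz.getConstructors` is empty, so the class info always contains a constructor. A minimal sketch of the observable effect of the first change follows; the expected output is inferred from the hunk, not a captured run.

import scala.reflect.runtime.{universe => ru}

object AnnotationParentsSketch extends App {
  val deprecatedTpe = ru.typeOf[java.lang.Deprecated]
  // With this patch the base classes should include scala.annotation.Annotation and
  // scala.annotation.ClassfileAnnotation alongside java.lang.annotation.Annotation.
  deprecatedTpe.baseClasses.map(_.fullName).foreach(println)
}
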
diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
index 84f159be00..6a364ff0be 100644
--- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
@@ -2,7 +2,7 @@ package scala
package reflect
package runtime
-import internal.{SomePhase, NoPhase, Phase, TreeGen}
+import internal.{SomePhase, NoPhase, Phase}
/** A helper trait to initialize things that need to be set before JavaMirrors and other
* reflect specific traits are initialized */
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 2db9706007..710ec02acd 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -78,4 +78,10 @@ private[scala] object ReflectionUtils {
accessor setAccessible true
accessor invoke outer
}
+
+ def isTraitImplementation(fileName: String) = fileName endsWith "$class.class"
+
+ def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
+
+ def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
}
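
The new ReflectionUtils helpers are plain string predicates that flag trait-implementation classfiles (the `Foo$class.class` artifacts). Since the real object is `private[scala]`, the sketch below restates them locally; the file names are illustrative.

object ScalacShouldntLoadSketch extends App {
  // Local restatements of the helpers added above.
  def isTraitImplementation(fileName: String) = fileName endsWith "$class.class"
  def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)

  assert(scalacShouldntLoadClassfile("Foo$class.class"))  // trait implementation classfile: skip it
  assert(!scalacShouldntLoadClassfile("Foo.class"))       // ordinary classfile: load it
}
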
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 815cc0c885..3e01a6df02 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -5,6 +5,7 @@ package runtime
import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
import scala.collection.mutable
+import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
private[reflect] trait SymbolLoaders { self: SymbolTable =>
@@ -90,14 +91,6 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
}
}
- /** Is the given name valid for a top-level class? We exclude names with embedded $-signs, because
- * these are nested classes or anonymous classes.
- */
- def isInvalidClassName(name: Name) = {
- val dp = name pos '$'
- 0 < dp && dp < (name.length - 1)
- }
-
class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
with SynchronizedScope {
assert(pkgClass.isType)
@@ -107,7 +100,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val e = super.lookupEntry(name)
if (e != null)
e
- else if (isInvalidClassName(name) || (negatives contains name))
+ else if (scalacShouldntLoadClass(name) || (negatives contains name))
null
else {
val path =
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index f4b02c5bcd..c0146167df 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -4,6 +4,7 @@ package runtime
import scala.collection.mutable.WeakHashMap
import java.lang.ref.WeakReference
+import scala.reflect.internal.Depth
/** This trait overrides methods in reflect.internal, bracketing
* them in synchronized { ... } to make them thread-safe
@@ -57,7 +58,7 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
override def isDifferentType(tp1: Type, tp2: Type): Boolean =
subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
- override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean =
+ override def isSubType(tp1: Type, tp2: Type, depth: Depth): Boolean =
subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
private object lubglbLock
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 9353215e1e..6406dacc24 100644
--- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -16,32 +16,6 @@ trait ExprTyper {
import syntaxAnalyzer.UnitParser
import naming.freshInternalVarName
- object codeParser {
- val global: repl.global.type = repl.global
- def applyRule[T](code: String, rule: UnitParser => T): T = {
- reporter.reset()
- val scanner = newUnitParser(code)
- val result = rule(scanner)
-
- if (!reporter.hasErrors)
- scanner.accept(EOF)
-
- result
- }
- def stmts(code: String) = applyRule(code, _.templateStats())
- }
-
- /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
- var isIncomplete = false
- reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
- val trees = codeParser.stmts(line)
- if (reporter.hasErrors) Some(Nil)
- else if (isIncomplete) None
- else Some(trees)
- }
- }
-
def symbolOfLine(code: String): Symbol = {
def asExpr(): Symbol = {
val name = freshInternalVarName()
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 3a71930383..3eafa563bc 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -7,30 +7,25 @@ package scala
package tools.nsc
package interpreter
-import Predef.{ println => _, _ }
-import util.stringFromWriter
-import scala.reflect.internal.util._
-import java.net.URL
-import scala.sys.BooleanProp
-import scala.tools.nsc.io.AbstractFile
-import reporters._
+import PartialFunction.cond
+
+import scala.language.implicitConversions
+
+import scala.collection.mutable
+
+import scala.concurrent.{ Future, ExecutionContext }
+
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ BeanProperty, ClassTag, classTag }
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
+
import scala.tools.util.PathResolver
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import ScalaClassLoader.URLClassLoader
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, stackTracePrefixString }
import scala.tools.nsc.util.Exceptional.unwrap
-import scala.collection.{ mutable, immutable }
-import scala.reflect.BeanProperty
-import scala.util.Properties.versionString
+
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
-import java.io.{ StringWriter, Reader }
-import java.util.Arrays
-import IMain._
-import java.util.concurrent.Future
-import scala.reflect.runtime.{ universe => ru }
-import scala.reflect.{ ClassTag, classTag }
-import StdReplTags._
-import scala.language.implicitConversions
/** An interpreter for Scala code.
*
@@ -92,7 +87,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private var _classLoader: util.AbstractFileClassLoader = null // active classloader
private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
- def compilerClasspath: Seq[URL] = (
+ def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
else new PathResolver(settings).result.asURLs // the compiler's classpath
)
@@ -142,10 +137,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def initialize(postInitSignal: => Unit) {
synchronized {
if (_isInitialized == null) {
- _isInitialized = io.spawn {
- try _initialize()
- finally postInitSignal
- }
+ _isInitialized =
+ Future(try _initialize() finally postInitSignal)(ExecutionContext.global)
}
}
}
@@ -241,7 +234,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
lazy val isettings = new ISettings(this)
/** Instantiate a compiler. Overridable. */
- protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
+ protected def newCompiler(settings: Settings, reporter: reporters.Reporter): ReplGlobal = {
settings.outputDirs setSingleOutput replOutput.dir
settings.exposeEmptyPackage.value = true
new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
@@ -306,19 +299,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
def translatePath(path: String) = {
val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
- sym match {
- case NoSymbol => None
- case _ => Some(flatPath(sym))
- }
- }
- def translateEnclosingClass(n: String) = {
- def enclosingClass(s: Symbol): Symbol =
- if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner)
- enclosingClass(symbolOfTerm(n)) match {
- case NoSymbol => None
- case c => Some(flatPath(c))
- }
+ sym.toOption map flatPath
}
+ def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath
private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
/** Overridden here to try translating a simple name to the generated
@@ -334,7 +317,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private def makeClassLoader(): util.AbstractFileClassLoader =
new TranslatingClassLoader(parentClassLoader match {
case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new URLClassLoader(compilerClasspath, p)
+ case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
})
// Set the current Java "context" class loader to this interpreter's class loader
@@ -446,9 +429,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
val content = indentCode(line)
val trees = parse(content) match {
- case None => return Left(IR.Incomplete)
- case Some(Nil) => return Left(IR.Error) // parse error or empty input
- case Some(trees) => trees
+ case parse.Incomplete => return Left(IR.Incomplete)
+ case parse.Error => return Left(IR.Error)
+ case parse.Success(trees) => trees
}
repltrace(
trees map (t => {
@@ -466,7 +449,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// If the last tree is a bare expression, pinpoint where it begins using the
// AST node position and snap the line off there. Rewrite the code embodied
// by the last tree as a ValDef instead, so we can access the value.
- trees.last match {
+ val last = trees.lastOption.getOrElse(EmptyTree)
+ last match {
case _:Assign => // we don't want to include assignments
case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
@@ -478,7 +462,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
if (trees.size == 1) "val " + varName + " =\n" + content
else {
// The position of the last tree
- val lastpos0 = earliestPosition(trees.last)
+ val lastpos0 = earliestPosition(last)
// Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
// with increasingly hard to decipher positions as we move on to "() => 5",
// (x: Int) => x + 1, and more. So I abandon attempts to finesse and just
@@ -554,7 +538,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
var code = ""
var bound = false
- @throws(classOf[ScriptException])
+ @throws[ScriptException]
def compile(script: String): CompiledScript = {
if (!bound) {
quietBind("engine" -> this.asInstanceOf[ScriptEngine])
@@ -582,9 +566,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- @throws(classOf[ScriptException])
- def compile(reader: Reader): CompiledScript = {
- val writer = new StringWriter()
+ @throws[ScriptException]
+ def compile(reader: java.io.Reader): CompiledScript = {
+ val writer = new java.io.StringWriter()
var c = reader.read()
while(c != -1) {
writer.write(c)
@@ -604,7 +588,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
* escape. We could have wrapped runtime exceptions just like other
* exceptions in ScriptException, this is a choice.
*/
- @throws(classOf[ScriptException])
+ @throws[ScriptException]
def eval(context: ScriptContext): Object = {
val result = req.lineRep.evalEither match {
case Left(e: RuntimeException) => throw e
@@ -736,10 +720,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
throw t
val unwrapped = unwrap(t)
+
+ // Example input: $line3.$read$$iw$$iw$
+ val classNameRegex = (naming.lineRegex + ".*").r
+ def isWrapperInit(x: StackTraceElement) = cond(x.getClassName) {
+ case classNameRegex() if x.getMethodName == nme.CONSTRUCTOR.decoded => true
+ }
+ val stackTrace = util.stackTracePrefixString(unwrapped)(!isWrapperInit(_))
+
withLastExceptionLock[String]({
- directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
- util.stackTraceString(unwrapped)
- }, util.stackTraceString(unwrapped))
+ directBind[Throwable]("lastException", unwrapped)(StdReplTags.tagOfThrowable, classTag[Throwable])
+ stackTrace
+ }, stackTrace)
}
// TODO: split it out into a package object and a regular
@@ -871,7 +863,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def fullPath(vname: String) = s"${lineRep.readPath}$accessPath.`$vname`"
/** generate the source code for the object that computes this request */
- private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
+ private object ObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
def path = originalPath("$intp")
def envLines = {
if (!isReplPower) Nil // power mode only for now
@@ -894,7 +886,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
}
- private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
+ private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
/** We only want to generate this code when the result
* is a value which can be referred to as-is.
*/
@@ -946,7 +938,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
// compile the result-extraction object
- withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
+ val handls = if (printResults) handlers else Nil
+ withoutWarnings(lineRep compile ResultObjectSourceCode(handls))
}
}
@@ -993,11 +986,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- @throws(classOf[ScriptException])
+ @throws[ScriptException]
def eval(script: String, context: ScriptContext): Object = compile(script).eval(context)
- @throws(classOf[ScriptException])
- def eval(reader: Reader, context: ScriptContext): Object = compile(reader).eval(context)
+ @throws[ScriptException]
+ def eval(reader: java.io.Reader, context: ScriptContext): Object = compile(reader).eval(context)
override def finalize = close
@@ -1096,7 +1089,24 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
val repl: IMain.this.type = imain
} with ExprTyper { }
- def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
+ /** Parse a line and return the parsing result (error, incomplete, or success with a list of trees). */
+ object parse {
+ abstract sealed class Result
+ case object Error extends Result
+ case object Incomplete extends Result
+ case class Success(trees: List[Tree]) extends Result
+
+ def apply(line: String): Result = debugging(s"""parse("$line")""") {
+ var isIncomplete = false
+ reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
+ reporter.reset()
+ val trees = newUnitParser(line).parseStats()
+ if (reporter.hasErrors) Error
+ else if (isIncomplete) Incomplete
+ else Success(trees)
+ }
+ }
+ }
def symbolOfLine(code: String): Symbol =
exprTyper.symbolOfLine(code)
@@ -1155,10 +1165,12 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
*/
def isShow = code.lines exists (_.trim endsWith "// show")
if (isReplDebug || isShow) {
- beSilentDuring(parse(code)) foreach { ts =>
- ts foreach { t =>
- withoutUnwrapping(echo(asCompactString(t)))
- }
+ beSilentDuring(parse(code)) match {
+ case parse.Success(ts) =>
+ ts foreach { t =>
+ withoutUnwrapping(echo(asCompactString(t)))
+ }
+ case _ =>
}
}
}
@@ -1172,6 +1184,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** Utility methods for the Interpreter. */
object IMain {
+ import java.util.Arrays.{ asList => asJavaList }
+
class Factory extends ScriptEngineFactory {
@BeanProperty
val engineName = "Scala Interpreter"
@@ -1180,21 +1194,21 @@ object IMain {
val engineVersion = "1.0"
@BeanProperty
- val extensions: JList[String] = Arrays.asList("scala")
+ val extensions: JList[String] = asJavaList("scala")
@BeanProperty
val languageName = "Scala"
@BeanProperty
- val languageVersion = versionString
+ val languageVersion = scala.util.Properties.versionString
def getMethodCallSyntax(obj: String, m: String, args: String*): String = null
@BeanProperty
- val mimeTypes: JList[String] = Arrays.asList("application/x-scala")
+ val mimeTypes: JList[String] = asJavaList("application/x-scala")
@BeanProperty
- val names: JList[String] = Arrays.asList("scala")
+ val names: JList[String] = asJavaList("scala")
def getOutputStatement(toDisplay: String): String = null
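
With this change the REPL parsing entry point returns a small algebraic result instead of `Option[List[Tree]]`, so callers can tell a syntax error apart from incomplete input without the old `Some(Nil)` convention. A usage sketch, assuming an already-initialized interpreter `intp` on which `parse` is reachable as shown above:

import scala.tools.nsc.interpreter.IMain

object ParseResultSketch {
  def describe(intp: IMain, line: String): String =
    intp.parse(line) match {
      case intp.parse.Incomplete     => "incomplete: ask for more input"
      case intp.parse.Error          => "parse error"
      case intp.parse.Success(trees) => s"parsed ${trees.size} statement(s)"
    }
}
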
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index 8b8b668c9f..61db8d1748 100644
--- a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -190,10 +190,10 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
// literal Ints, Strings, etc.
object literals extends CompletionAware {
- def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
+ def simpleParse(code: String): Option[Tree] = newUnitParser(code).parseStats().lastOption
def completions(verbosity: Int) = Nil
- override def follow(id: String) = simpleParse(id) match {
+ override def follow(id: String) = simpleParse(id).flatMap {
case x: Literal => Some(new LiteralCompletion(x))
case _ => None
}
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index c6f0cca481..28b95aa442 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -71,7 +71,7 @@ trait MemberHandlers {
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
- override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
+ override def definedSymbols = if (symbol.exists) symbol :: Nil else Nil
}
/** Class to handle one member among all the members included
diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 7f577b3a8b..cf38a2ae3a 100644
--- a/src/repl/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -40,7 +40,7 @@ trait Naming {
// $line3.$read$$iw$$iw$Bippy@4a6a00ca
private def noMeta(s: String) = "\\Q" + s + "\\E"
- private lazy val lineRegex = {
+ lazy val lineRegex = {
val sn = sessionNames
val members = List(sn.read, sn.eval, sn.print) map noMeta mkString ("(?:", "|", ")")
debugging("lineRegex")(noMeta(sn.line) + """\d+[./]""" + members + """[$.]""")
diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index da6d271a68..f69a5b487d 100644
--- a/src/repl/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -316,7 +316,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
def unit(code: String) = newCompilationUnit(code)
- def trees(code: String) = parse(code) getOrElse Nil
+ def trees(code: String) = parse(code) match { case parse.Success(trees) => trees; case _ => Nil }
override def toString = s"""
|** Power mode status **
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
index ea100b25f2..9346b0553f 100644
--- a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
@@ -39,7 +39,7 @@ class StdReplVals(final val r: ILoop) extends ReplVals {
def lastRequest = intp.lastRequest
class ReplImplicits extends power.Implicits2 {
- import intp.global._
+ import intp.global.Symbol
private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global)
implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym)
diff --git a/src/repl/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala
index e400906a58..a4e1e25cbb 100644
--- a/src/repl/scala/tools/nsc/interpreter/Results.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Results.scala
@@ -19,4 +19,4 @@ object Results {
/** The input was incomplete. The caller should request more input.
*/
case object Incomplete extends Result
-}
+}
\ No newline at end of file
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index f82c38f5e7..5dc9b65436 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -145,8 +145,8 @@ package object interpreter extends ReplConfig with ReplStrings {
case sym: TypeSymbol => Some(sym)
case _ => None
}
- (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
- val (kind, tpe) = exitingTyper {
+ (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
+ val (kind, tpe) = exitingTyper {
val tpe = sym.tpeHK
(intp.global.inferKind(NoPrefix)(tpe, sym.owner), tpe)
}
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
deleted file mode 100644
index db4163c8af..0000000000
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ /dev/null
@@ -1,447 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-
-sealed abstract class Arbitrary[T] {
- val arbitrary: Gen[T]
-}
-
-/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
- * <p>
- * ScalaCheck
- * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
- * out of functions with the `Prop.property` method, and when
- * the `Arbitrary.arbitrary` method is used. For example, the
- * following code requires that there exists an implicit
- * `Arbitrary[MyClass]` instance:
- * </p>
- *
- * {{{
- * val myProp = Prop.forAll { myClass: MyClass =>
- * ...
- * }
- *
- * val myGen = Arbitrary.arbitrary[MyClass]
- * }}}
- *
- * <p>
- * The required implicit definition could look like this:
- * </p>
- *
- * {{{
- * implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- * }}}
- *
- * <p>
- * The factory method `Arbitrary(...)` takes a generator of type
- * `Gen[T]` and returns an instance of `Arbitrary[T]`.
- * </p>
- *
- * <p>
- * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
- * instances for common types, for convenient use in your properties and
- * generators.
- * </p>
- */
-object Arbitrary {
-
- import Gen.{value, choose, sized, listOf, listOf1,
- frequency, oneOf, containerOf, resize}
- import util.StdRand
- import scala.collection.{immutable, mutable}
- import java.util.Date
-
- /** Creates an Arbitrary instance */
- def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
- lazy val arbitrary = g
- }
-
- /** Returns an arbitrary generator for the type T. */
- def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
-
- /**** Arbitrary instances for each AnyVal ****/
-
- /** Arbitrary AnyVal */
- implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
- arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
- arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
- arbitrary[Double]
- ))
-
- /** Arbitrary instance of Boolean */
- implicit lazy val arbBool: Arbitrary[Boolean] =
- Arbitrary(oneOf(true, false))
-
- /** Arbitrary instance of Int */
- implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
- Gen.chooseNum(Int.MinValue, Int.MaxValue)
- )
-
- /** Arbitrary instance of Long */
- implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
- Gen.chooseNum(Long.MinValue, Long.MaxValue)
- )
-
- /** Arbitrary instance of Float */
- implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
- Gen.chooseNum(
- Float.MinValue, Float.MaxValue
- // I find that including these by default is a little TOO testy.
- // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Double */
- implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
- Gen.chooseNum(
- Double.MinValue / 2, Double.MaxValue / 2
- // As above. Perhaps behind some option?
- // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Char */
- implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
- Gen.frequency(
- (0xD800-Char.MinValue, Gen.choose(Char.MinValue,0xD800-1)),
- (Char.MaxValue-0xDFFF, Gen.choose(0xDFFF+1,Char.MaxValue))
- )
- )
-
- /** Arbitrary instance of Byte */
- implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
- Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
- )
-
- /** Arbitrary instance of Short */
- implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
- Gen.chooseNum(Short.MinValue, Short.MaxValue)
- )
-
- /** Absolutely, totally, 100% arbitrarily chosen Unit. */
- implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(value(()))
-
- /**** Arbitrary instances of other common types ****/
-
- /** Arbitrary instance of String */
- implicit lazy val arbString: Arbitrary[String] =
- Arbitrary(arbitrary[List[Char]] map (_.mkString))
-
- /** Arbitrary instance of Date */
- implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
- l <- arbitrary[Long]
- d = new Date
- } yield new Date(d.getTime + l))
-
- /** Arbitrary instance of Throwable */
- implicit lazy val arbThrowable: Arbitrary[Throwable] =
- Arbitrary(value(new Exception))
-
- /** Arbitrary BigInt */
- implicit lazy val arbBigInt: Arbitrary[BigInt] = {
- def chooseBigInt: Gen[BigInt] = sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
- def chooseReallyBigInt = chooseBigInt.combine(choose(32, 128))((x, y) => Some(x.get << y.get))
-
- Arbitrary(
- frequency(
- (5, chooseBigInt),
- (10, chooseReallyBigInt),
- (1, BigInt(0)),
- (1, BigInt(1)),
- (1, BigInt(-1)),
- (1, BigInt(Int.MaxValue) + 1),
- (1, BigInt(Int.MinValue) - 1),
- (1, BigInt(Long.MaxValue)),
- (1, BigInt(Long.MinValue)),
- (1, BigInt(Long.MaxValue) + 1),
- (1, BigInt(Long.MinValue) - 1)
- )
- )
- }
-
- /** Arbitrary BigDecimal */
- implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
- import java.math.MathContext._
- val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
- val bdGen = for {
- x <- arbBigInt.arbitrary
- mc <- mcGen
- limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
- scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
- } yield {
- try {
- BigDecimal(x, scale, mc)
- } catch {
- case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
- }
- }
- Arbitrary(bdGen)
- }
-
- /** Arbitrary java.lang.Number */
- implicit lazy val arbNumber: Arbitrary[Number] = {
- val gen = Gen.oneOf(
- arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
- arbitrary[Float], arbitrary[Double]
- )
- Arbitrary(gen map (_.asInstanceOf[Number]))
- // XXX TODO - restore BigInt and BigDecimal
- // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
- }
-
- /** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] = {
- import Prop._
- val undecidedOrPassed = forAll { b: Boolean =>
- b ==> true
- }
- Arbitrary(frequency(
- (4, falsified),
- (4, passed),
- (3, proved),
- (3, undecidedOrPassed),
- (2, undecided),
- (1, exception(null))
- ))
- }
-
- /** Arbitrary instance of test params
- * @deprecated (in 1.10.0) Use `arbTestParameters` instead.
- */
- @deprecated("Use 'arbTestParameters' instead", "1.10.0")
- implicit lazy val arbTestParams: Arbitrary[Test.Params] =
- Arbitrary(for {
- minSuccTests <- choose(10,200)
- maxDiscTests <- choose(100,500)
- mnSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- mxSize <- choose(mnSize, mnSize + sizeDiff)
- ws <- choose(1,4)
- } yield Test.Params(
- minSuccessfulTests = minSuccTests,
- maxDiscardedTests = maxDiscTests,
- minSize = mnSize,
- maxSize = mxSize,
- workers = ws
- ))
-
- /** Arbitrary instance of test parameters */
- implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
- Arbitrary(for {
- _minSuccTests <- choose(10,200)
- _maxDiscardRatio <- choose(0.2f,10f)
- _minSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- _maxSize <- choose(_minSize, _minSize + sizeDiff)
- _workers <- choose(1,4)
- } yield new Test.Parameters.Default {
- override val minSuccessfulTests = _minSuccTests
- override val maxDiscardRatio = _maxDiscardRatio
- override val minSize = _minSize
- override val maxSize = _maxSize
- override val workers = _workers
- })
-
- /** Arbitrary instance of gen params */
- implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
- Arbitrary(for {
- size <- arbitrary[Int] suchThat (_ >= 0)
- } yield Gen.Params(size, StdRand))
-
- /** Arbitrary instance of prop params */
- implicit lazy val arbPropParams: Arbitrary[Prop.Params] =
- Arbitrary(for {
- genPrms <- arbitrary[Gen.Params]
- } yield Prop.Params(genPrms, FreqMap.empty[immutable.Set[Any]]))
-
-
- // Higher-order types //
-
- /** Arbitrary instance of Gen */
- implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
- Arbitrary(frequency(
- (5, arbitrary[T] map (value(_))),
- (1, Gen.fail)
- ))
-
- /** Arbitrary instance of option type */
- implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
- Arbitrary(sized(n => if(n == 0) value(None) else resize(n - 1, arbitrary[T]).map(Some(_))))
-
- implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
- Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
-
- /** Arbitrary instance of immutable map */
- implicit def arbImmutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
- ): Arbitrary[immutable.Map[T,U]] = Arbitrary(
- for(seq <- arbitrary[Stream[(T,U)]]) yield immutable.Map(seq: _*)
- )
-
- /** Arbitrary instance of mutable map */
- implicit def arbMutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
- ): Arbitrary[mutable.Map[T,U]] = Arbitrary(
- for(seq <- arbitrary[Stream[(T,U)]]) yield mutable.Map(seq: _*)
- )
-
- /** Arbitrary instance of any buildable container (such as lists, arrays,
- * streams, etc). The maximum size of the container depends on the size
- * generation parameter. */
- implicit def arbContainer[C[_],T](implicit a: Arbitrary[T], b: Buildable[T,C]
- ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
-
- /** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
- ): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
-
-
- // Functions //
-
- /** Arbitrary instance of Function1 */
- implicit def arbFunction1[T1,R](implicit a: Arbitrary[R]
- ): Arbitrary[T1 => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1) => r
- )
-
- /** Arbitrary instance of Function2 */
- implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r
- )
-
- /** Arbitrary instance of Function3 */
- implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r
- )
-
- /** Arbitrary instance of Function4 */
- implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r
- )
-
- /** Arbitrary instance of Function5 */
- implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r
- )
-
-
- // Tuples //
-
- /** Arbitrary instance of 2-tuple */
- implicit def arbTuple2[T1,T2](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Arbitrary[(T1,T2)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- } yield (t1,t2))
-
- /** Arbitrary instance of 3-tuple */
- implicit def arbTuple3[T1,T2,T3](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Arbitrary[(T1,T2,T3)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- } yield (t1,t2,t3))
-
- /** Arbitrary instance of 4-tuple */
- implicit def arbTuple4[T1,T2,T3,T4](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Arbitrary[(T1,T2,T3,T4)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- } yield (t1,t2,t3,t4))
-
- /** Arbitrary instance of 5-tuple */
- implicit def arbTuple5[T1,T2,T3,T4,T5](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Arbitrary[(T1,T2,T3,T4,T5)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- } yield (t1,t2,t3,t4,t5))
-
- /** Arbitrary instance of 6-tuple */
- implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- } yield (t1,t2,t3,t4,t5,t6))
-
- /** Arbitrary instance of 7-tuple */
- implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- } yield (t1,t2,t3,t4,t5,t6,t7))
-
- /** Arbitrary instance of 8-tuple */
- implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8))
-
- /** Arbitrary instance of 9-tuple */
- implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- t9 <- arbitrary[T9]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9))
-
-}
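
The scaladoc of the deleted Arbitrary object describes the usual pattern for supplying an instance for a user-defined type. Below is a concrete sketch against the API in the deleted file; the `Point` type and the property are illustrative, not part of the patch.

import org.scalacheck.{Arbitrary, Prop}
import org.scalacheck.Arbitrary.arbitrary

object PointArbitrarySketch {
  case class Point(x: Int, y: Int)

  // The Arbitrary(...) factory wraps a Gen[Point] assembled from the Int instances.
  implicit val arbPoint: Arbitrary[Point] = Arbitrary(
    for {
      x <- arbitrary[Int]
      y <- arbitrary[Int]
    } yield Point(x, y)
  )

  // With the implicit in scope, Prop.forAll can quantify over Point.
  val roundTrip = Prop.forAll { p: Point => Point(p.x, p.y) == p }
}
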
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
deleted file mode 100644
index 4961c78a26..0000000000
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-case class Arg[+T](
- label: String,
- arg: T,
- shrinks: Int,
- origArg: T
-)(implicit prettyPrinter: T => Pretty) {
- lazy val prettyArg: Pretty = prettyPrinter(arg)
- lazy val prettyOrigArg: Pretty = prettyPrinter(origArg)
-}
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
deleted file mode 100644
index 604b68cb36..0000000000
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Gen._
-import Prop._
-import Shrink._
-
-/** See User Guide for usage examples */
-trait Commands extends Prop {
-
- /** The abstract state data type that encodes the state of the system
- * under test. The abstract state should model all the features we need
- * from the real state and leave out all details that aren't needed for
- * specifying our pre- and postconditions. The state type must be called
- * State and be immutable. */
- type State <: AnyRef
-
- class Binding(private val key: State) {
- def get: Any = bindings.find(_._1 eq key) match {
- case None => sys.error("No value bound")
- case Some(x) => x._2
- }
- }
-
- /** Abstract commands are defined as subtypes of the traits Command or SetCommand.
- * Each command must have a run method and a method that returns the new abstract
- * state, as it should look after the command has been run.
- * A command can also define a precondition that states how the current
- * abstract state must look if the command should be allowed to run.
- * Finally, we can also define a postcondition which verifies that the
- * system under test is in a correct state after the command execution. */
- trait Command {
-
- /** Used internally. */
- protected[Commands] def run_(s: State) = run(s)
-
- def run(s: State): Any
- def nextState(s: State): State
-
- /** Returns all preconditions merged into a single function */
- def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
-
- /** A precondition is a function that
- * takes the current abstract state as a parameter and returns a boolean
- * that says if the precondition is fulfilled or not. You can add several
- * conditions to the precondition list. */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
-
- /** Returns all postconditions merged into a single function */
- def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
-
- /** A postcondition is a function that
- * takes three parameters, s0, s1 and r. s0 is the abstract state before
- * the command was run, s1 is the abstract state after the command was
- * run, and r is the result from the command's run
- * method. The postcondition function should return a Boolean (or
- * a Prop instance) that says if the condition holds or not. You can add several
- * conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
- }
-
- /** A command that binds its result for later use */
- trait SetCommand extends Command {
- /** Used internally. */
- protected[Commands] final override def run_(s: State) = {
- val r = run(s)
- bindings += ((s,r))
- r
- }
-
- final def nextState(s: State) = nextState(s, new Binding(s))
- def nextState(s: State, b: Binding): State
- }
-
- private case class Cmds(cs: List[Command], ss: List[State]) {
- override def toString = cs.map(_.toString).mkString(", ")
- }
-
- private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
-
- private def initState() = {
- bindings.clear()
- initialState()
- }
-
- private def genCmds: Gen[Cmds] = {
- def sizedCmds(s: State)(sz: Int): Gen[Cmds] =
- if(sz <= 0) value(Cmds(Nil, Nil)) else for {
- c <- genCommand(s) suchThat (_.preCondition(s))
- Cmds(cs,ss) <- sizedCmds(c.nextState(s))(sz-1)
- } yield Cmds(c::cs, s::ss)
-
- for {
- s0 <- wrap(value(initialState()))
- cmds <- sized(sizedCmds(s0))
- } yield cmds
- }
-
- private def validCmds(s: State, cs: List[Command]): Option[Cmds] =
- cs match {
- case Nil => Some(Cmds(Nil, s::Nil))
- case c::_ if !c.preCondition(s) => None
- case c::cmds => for {
- Cmds(_, ss) <- validCmds(c.nextState(s), cmds)
- } yield Cmds(cs, s::ss)
- }
-
- private def runCommands(cmds: Cmds): Prop = cmds match {
- case Cmds(Nil, _) => proved
- case Cmds(c::cs, s::ss) =>
- c.postCondition(s,c.nextState(s),c.run_(s)) && runCommands(Cmds(cs,ss))
- case _ => sys.error("Should not be here")
- }
-
- private def commandsProp: Prop = {
- def shrinkCmds(cmds: Cmds) = cmds match { case Cmds(cs,_) =>
- shrink(cs)(shrinkContainer).flatMap(cs => validCmds(initialState(), cs).toList)
- }
-
- forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
- }
-
- def apply(p: Prop.Params) = commandsProp(p)
-
- /** initialState should reset the system under test to a well defined
- * initial state, and return the abstract version of that state. */
- def initialState(): State
-
- /** The command generator. Given an abstract state, the generator
- * should return a command that is allowed to run in that state. Note that
- * it is still necessary to define preconditions on the commands if there
- * are any. The generator just gives a hint of which commands are
- * suitable for a given state; the preconditions will still be checked before
- * a command runs. Sometimes you may want to adjust the distribution of
- * your command generator according to the state, or do other calculations
- * based on the state. */
- def genCommand(s: State): Gen[Command]
-
-}
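
The removed Commands trait only points at the User Guide for examples, so here is a minimal sketch of the pattern its scaladoc describes: a trivial counter with a single `Inc` command. The `Counter` class, `CounterState`, and the command itself are illustrative, not part of the patch.

import org.scalacheck.{Commands, Gen, Prop}

object CounterSpecSketch extends Commands {
  // System under test: a trivial mutable counter, recreated for every run.
  class Counter { private var n = 0; def inc(): Int = { n += 1; n } }
  var sut = new Counter

  // Abstract state: the expected counter value (wrapped, since State <: AnyRef).
  case class CounterState(n: Int)
  type State = CounterState

  def initialState(): State = { sut = new Counter; CounterState(0) }

  case object Inc extends Command {
    def run(s: State) = sut.inc()
    def nextState(s: State) = CounterState(s.n + 1)
    // run returns the new concrete value; it must equal the old abstract value plus one.
    postConditions += ((s0, s1, r) => if (r == s0.n + 1) Prop.proved else Prop.falsified)
  }

  def genCommand(s: State): Gen[Command] = Gen.value(Inc)
}
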
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
deleted file mode 100644
index d565322d99..0000000000
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Pretty._
-import util.FreqMap
-
-class ConsoleReporter(val verbosity: Int) extends Test.TestCallback {
-
- private val prettyPrms = Params(verbosity)
-
- override def onTestResult(name: String, res: Test.Result) = {
- if(verbosity > 0) {
- if(name == "") {
- val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- } else {
- val s = (if(res.passed) "+ " else "! ") + name + ": " +
- pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- }
- }
- }
-
-}
-
-object ConsoleReporter {
-
- /** Factory method, creates a ConsoleReporter with the
- * given verbosity */
- def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
-
- def testStatsEx(msg: String, res: Test.Result) = {
- lazy val m = if(msg.length == 0) "" else msg + ": "
- res.status match {
- case Test.Proved(_) => {}
- case Test.Passed => {}
- case f @ Test.Failed(_, _) => sys.error(m + f)
- case Test.Exhausted => {}
- case f @ Test.GenException(_) => sys.error(m + f)
- case f @ Test.PropException(_, _, _) => sys.error(m + f)
- }
- }
-
-}
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
deleted file mode 100644
index aec67159f1..0000000000
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ /dev/null
@@ -1,542 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import scala.collection.mutable.ListBuffer
-import util.Buildable
-import Prop._
-import Arbitrary._
-
-trait Choose[T] {
- def choose(min: T, max: T): Gen[T]
-}
-
-object Choose {
- import Gen.{fail, parameterized, value}
-
- implicit val chooseLong: Choose[Long] = new Choose[Long] {
- def choose(low: Long, high: Long) =
- if (low > high) fail
- else parameterized(prms => value(prms.choose(low,high)))
- }
-
- implicit val chooseDouble: Choose[Double] = new Choose[Double] {
- def choose(low: Double, high: Double) =
- if (low > high || (high-low > Double.MaxValue)) fail
- else parameterized(prms => value(prms.choose(low,high)))
- }
-
- implicit val chooseInt: Choose[Int] = new Choose[Int] {
- def choose(low: Int, high: Int) =
- chooseLong.choose(low, high).map(_.toInt)
- }
-
- implicit val chooseByte: Choose[Byte] = new Choose[Byte] {
- def choose(low: Byte, high: Byte) =
- chooseLong.choose(low, high).map(_.toByte)
- }
-
- implicit val chooseShort: Choose[Short] = new Choose[Short] {
- def choose(low: Short, high: Short) =
- chooseLong.choose(low, high).map(_.toShort)
- }
-
- implicit val chooseChar: Choose[Char] = new Choose[Char] {
- def choose(low: Char, high: Char) =
- chooseLong.choose(low, high).map(_.toChar)
- }
-
- implicit val chooseFloat: Choose[Float] = new Choose[Float] {
- def choose(low: Float, high: Float) =
- chooseDouble.choose(low, high).map(_.toFloat)
- }
-}
-
-case class FiniteGenRes[+T](
- r: T
-)
-
-sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
-
-
-/** Class that represents a generator. */
-sealed trait Gen[+T] {
-
- import Gen.choose
-
- var label = "" // TODO: Ugly mutable field
-
- /** Put a label on the generator to make test reports clearer */
- def label(l: String): Gen[T] = {
- label = l
- this
- }
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
- def apply(prms: Gen.Params): Option[T]
-
- def map[U](f: T => U): Gen[U] = Gen(prms => this(prms).map(f)).label(label)
-
- def map2[U, V](g: Gen[U])(f: (T, U) => V) =
- combine(g)((t, u) => t.flatMap(t => u.flatMap(u => Some(f(t, u)))))
-
- def map3[U, V, W](gu: Gen[U], gv: Gen[V])(f: (T, U, V) => W) =
- combine3(gu, gv)((t, u, v) => t.flatMap(t => u.flatMap(u => v.flatMap(v => Some(f(t, u, v))))))
-
- def map4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])(f: (T, U, V, W) => X) =
- combine4(gu, gv, gw)((t, u, v, w) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => Some(f(t, u, v, w)))))))
-
- def map5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])(f: (T, U, V, W, X) => Y) =
- combine5(gu, gv, gw, gx)((t, u, v, w, x) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => Some(f(t, u, v, w, x))))))))
-
- def map6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])(f: (T, U, V, W, X, Y) => Z) =
- combine6(gu, gv, gw, gx, gy)((t, u, v, w, x, y) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => y.flatMap(y => Some(f(t, u, v, w, x, y)))))))))
-
- def flatMap[U](f: T => Gen[U]): Gen[U] = Gen(prms => for {
- t <- this(prms)
- u <- f(t)(prms)
- } yield u)
-
- def filter(p: T => Boolean): Gen[T] = Gen(prms => for {
- t <- this(prms)
- u <- if (p(t)) Some(t) else None
- } yield u).label(label)
-
- def withFilter(p: T => Boolean) = new GenWithFilter[T](this, p)
-
- final class GenWithFilter[+A](self: Gen[A], p: A => Boolean) {
- def map[B](f: A => B): Gen[B] = self filter p map f
- def flatMap[B](f: A => Gen[B]): Gen[B] = self filter p flatMap f
- def withFilter(q: A => Boolean): GenWithFilter[A] = new GenWithFilter[A](self, x => p(x) && q(x))
- }
-
- def suchThat(p: T => Boolean): Gen[T] = filter(p)
-
- def combine[U,V](g: Gen[U])(f: (Option[T],Option[U]) => Option[V]): Gen[V] =
- Gen(prms => f(this(prms), g(prms)))
-
- def combine3[U, V, W](gu: Gen[U], gv: Gen[V])
- (f: (Option[T], Option[U], Option[V]) => Option[W]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms)))
-
- def combine4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])
- (f: (Option[T], Option[U], Option[V], Option[W]) => Option[X]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms)))
-
- def combine5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])
- (f: (Option[T], Option[U], Option[V], Option[W], Option[X]) => Option[Y]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms)))
-
- def combine6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])
- (f: (Option[T], Option[U], Option[V], Option[W], Option[X], Option[Y]) => Option[Z]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms), gy(prms)))
-
- def ap[U](g: Gen[T => U]) = flatMap(t => g.flatMap(u => Gen(p => Some(u(t)))))
-
- override def toString =
- if(label.length == 0) "Gen()" else "Gen(\"" + label + "\")"
-
- /** Returns a new property that holds if and only if both this
- * and the given generator generates the same result, or both
- * generators generate no result. */
- def ==[U](g: Gen[U]) = Prop(prms =>
- (this(prms.genPrms), g(prms.genPrms)) match {
- case (None,None) => proved(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => proved(prms)
- case _ => falsified(prms)
- }
- )
-
- def !=[U](g: Gen[U]) = forAll(this)(r => forAll(g)(_ != r))
-
- def !==[U](g: Gen[U]) = Prop(prms =>
- (this(prms.genPrms), g(prms.genPrms)) match {
- case (None,None) => falsified(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => falsified(prms)
- case _ => proved(prms)
- }
- )
-
- private var freq = 1
- def |[U >: T](g: Gen[U]): Gen[U] = {
- val h = Gen.frequency((freq, this), (1, g))
- h.freq = freq+1
- h
- }
-
- /** Generates a sample value by using default parameters */
- def sample: Option[T] = apply(Gen.Params())
-
-}
-
-
-/** Contains combinators for building generators. */
-object Gen {
-
- import Arbitrary._
- import Shrink._
-
- /** Record that encapsulates all parameters required for data generation */
- case class Params(
- size: Int = 100,
- rng: java.util.Random = util.StdRand
- ) {
- def resize(newSize: Int) = this.copy(size = newSize)
-
- /** @throws IllegalArgumentException if l is greater than h, or if
- * the range between l and h doesn't fit in a Long. */
- def choose(l: Long, h: Long): Long = {
- if (h < l) throw new IllegalArgumentException("Invalid range")
- val d = h - l + 1
- if (d <= 0) {
- var n = rng.nextLong
- while (n < l || n > h) {
- n = rng.nextLong
- }
- n
- } else {
- l + math.abs(rng.nextLong % d)
- }
- }
-
- /** @throws IllegalArgumentException if l is greater than h, or if
- * the range between l and h doesn't fit in a Double. */
- def choose(l: Double, h: Double) = {
- val d = h-l
- if (d < 0 || d > Double.MaxValue)
- throw new IllegalArgumentException("Invalid range")
- else if (d == 0) l
- else rng.nextDouble * (h-l) + l
- }
- }
-
- /* Generator factory method */
- def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
- def apply(p: Gen.Params) = g(p)
- }
-
- /* Convenience method for using the `frequency` method like this:
- * {{{
- * frequency((1, "foo"), (3, "bar"))
- * }}}
- */
- implicit def freqTuple[T](t: (Int, T)): (Int, Gen[T]) = (t._1, value(t._2))
-
-
- //// Various Generator Combinators ////
-
- /** Sequences generators. If any of the given generators fails, the
- * resulting generator will also fail. */
- def sequence[C[_],T](gs: Iterable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C[T]] = Gen(prms => {
- val builder = b.builder
- var none = false
- val xs = gs.iterator
- while(xs.hasNext && !none) xs.next.apply(prms) match {
- case None => none = true
- case Some(x) => builder += x
- }
- if(none) None else Some(builder.result())
- })
-
- /** Wraps a generator lazily. The given parameter is only evaluated once,
- * and not until the wrapper generator is evaluated. */
- def lzy[T](g: => Gen[T]) = new Gen[T] {
- lazy val h = g
- def apply(prms: Params) = h(prms)
- }
-
- /** Wraps a generator for later evaluation. The given parameter is
- * evaluated each time the wrapper generator is evaluated. */
- def wrap[T](g: => Gen[T]) = Gen(p => g(p))
-
- /** A generator that always generates the given value */
- implicit def value[T](x: T) = Gen(p => Some(x))
-
- /** A generator that never generates a value */
- def fail[T]: Gen[T] = Gen(p => None)
-
- /** A generator that generates a random value in the given (inclusive)
- * range. If the range is invalid, the generator will not generate any value.
- */
- def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] = {
- c.choose(min, max)
- }
-
- /** Creates a generator that can access its generation parameters */
- def parameterized[T](f: Params => Gen[T]): Gen[T] = Gen(prms => f(prms)(prms))
-
- /** Creates a generator that can access its generation size */
- def sized[T](f: Int => Gen[T]) = parameterized(prms => f(prms.size))
-
- /** Creates a resized version of a generator */
- def resize[T](s: Int, g: Gen[T]) = Gen(prms => g(prms.resize(s)))
-
- /** Chooses one of the given generators with a weighted random distribution */
- def frequency[T](gs: (Int,Gen[T])*): Gen[T] = {
- lazy val tot = (gs.map(_._1) :\ 0) (_+_)
-
- def pick(n: Int, l: List[(Int,Gen[T])]): Gen[T] = l match {
- case Nil => fail
- case (k,g)::gs => if(n <= k) g else pick(n-k, gs)
- }
-
- for {
- n <- choose(1,tot)
- x <- pick(n,gs.toList)
- } yield x
- }
-
- /** Picks a random value from a list */
- def oneOf[T](xs: Seq[T]): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0, xs.size-1)
- } yield xs(i)
-
- /** Picks a random generator from a list */
- def oneOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
- i <- choose(0, gs.length+1)
- x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
- } yield x
-
-
- //// List Generators ////
-
- /** Generates a container of any type for which there exists an implicit
- * [[org.scalacheck.util.Buildable]] instance. The elements in the container will
- * be generated by the given generator. The size of the generated container
- * is given by `n`. */
- def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit b: Buildable[T,C]
- ): Gen[C[T]] = sequence[C,T](new Iterable[Gen[T]] {
- def iterator = new Iterator[Gen[T]] {
- var i = 0
- def hasNext = i < n
- def next = { i += 1; g }
- }
- })
-
- /** Generates a container of any type for which there exists an implicit
- * [[org.scalacheck.util.Buildable]] instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def containerOf[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
- sized(size => for(n <- choose(0,size); c <- containerOfN[C,T](n,g)) yield c)
-
- /** Generates a non-empty container of any type for which there exists an
- * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def containerOf1[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
- sized(size => for(n <- choose(1,size); c <- containerOfN[C,T](n,g)) yield c)
-
- /** Generates a list of random length. The maximum length depends on the
- * size parameter. This method is equivalent to calling
- * `containerOf[List,T](g)`. */
- def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
-
- /** Generates a non-empty list of random length. The maximum length depends
- * on the size parameter. This method is equivalent to calling
- * `containerOf1[List,T](g)`. */
- def listOf1[T](g: => Gen[T]) = containerOf1[List,T](g)
-
- /** Generates a list of the given length. This method is equivalent to calling
- * `containerOfN[List,T](n,g)`. */
- def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
- /** A generator that picks a random number of elements from a list */
- def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
-
- /** A generator that picks a random number of values from the given generators */
- def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
- n <- choose(0, gs.length+2)
- x <- pick(n, g1, g2, gs: _*)
- } yield x
-
- /** A generator that picks a given number of elements from a list, randomly */
- def pick[T](n: Int, l: Iterable[T]): Gen[Seq[T]] =
- if(n > l.size || n < 0) fail
- else Gen(prms => {
- val buf = new ListBuffer[T]
- buf ++= l
- while(buf.length > n) {
- val g = choose(0, buf.length-1)
- buf.remove(g(prms).get)
- }
- Some(buf)
- })
-
- /** A generator that picks a given number of values from the given generators, randomly */
- def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gs: Gen[T]*): Gen[Seq[T]] = for {
- is <- pick(n, 0 until (gs.size+2))
- allGs = gs ++ (g1::g2::Nil)
- xs <- sequence[List,T](is.toList.map(allGs(_)))
- } yield xs
-
-
- //// Character Generators ////
-
- /* Generates a numerical character */
- def numChar: Gen[Char] = choose(48,57) map (_.toChar)
-
- /* Generates an upper-case alpha character */
- def alphaUpperChar: Gen[Char] = choose(65,90) map (_.toChar)
-
- /* Generates a lower-case alpha character */
- def alphaLowerChar: Gen[Char] = choose(97,122) map (_.toChar)
-
- /* Generates an alpha character */
- def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar))
-
- /* Generates an alphanumerical character */
- def alphaNumChar = frequency((1,numChar), (9,alphaChar))
-
- //// String Generators ////
-
- /* Generates a string that starts with a lower-case alpha character,
- * and only contains alphanumerical characters */
- def identifier: Gen[String] = for {
- c <- alphaLowerChar
- cs <- listOf(alphaNumChar)
- } yield (c::cs).mkString
-
- /* Generates a string of alpha characters */
- def alphaStr: Gen[String] = for(cs <- listOf(Gen.alphaChar)) yield cs.mkString
-
- /* Generates a string of digits */
- def numStr: Gen[String] = for(cs <- listOf(Gen.numChar)) yield cs.mkString
-
- //// Number Generators ////
-
- /** Generates positive numbers, uniformly distributed, with the
- * generation size parameter as an upper bound. */
- def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(one, fromInt(max)))
- }
-
- /** Generates negative numbers, uniformly distributed, with the
- * negated generation size parameter as a lower bound. */
- def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(-fromInt(max), -one))
- }
-
- /** Generates numbers within the given inclusive range, with
- * extra weight on zero, +/- unity, both extremities, and any special
- * numbers provided. The special numbers must lie within the given range,
- * otherwise they won't be included. */
- def chooseNum[T](minT: T, maxT: T, specials: T*)(
- implicit num: Numeric[T], c: Choose[T]
- ): Gen[T] = {
- import num._
- val basics = List(minT, maxT, zero, one, -one)
- val basicsAndSpecials = for {
- t <- specials ++ basics if t >= minT && t <= maxT
- } yield (1, value(t))
- val allGens = basicsAndSpecials ++ List(
- (basicsAndSpecials.length, c.choose(minT, maxT))
- )
- frequency(allGens: _*)
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] =
- arbitrary[T] map f
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,R](
- f: (T1,T2,T3,T4,T5,T6) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,R](
- f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9))
- }
-
-}
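
For reference, a brief usage sketch of the combinators removed above. This is a hypothetical example, not part of the original sources; it assumes the 1.10-era API shown in this file (a generator applied to Gen.Params yields an Option) and the standard Arbitrary instances shipped with the same ScalaCheck version.

    import org.scalacheck.Gen

    object GenSketch {
      // Weighted choice between two character generators, via `frequency`.
      val vowelHeavy: Gen[Char] = Gen.frequency(
        (3, Gen.oneOf(Seq('a', 'e', 'i', 'o', 'u'))),
        (1, Gen.alphaLowerChar)
      )

      // Fixed-length list of digit characters via `listOfN`.
      val fiveDigits: Gen[List[Char]] = Gen.listOfN(5, Gen.numChar)

      // `resultOf` feeds arbitrarily generated arguments to a function.
      case class Point(x: Int, y: Int)
      val points: Gen[Point] = Gen.resultOf(Point)

      def main(args: Array[String]): Unit = {
        // `sample` evaluates a generator with the default parameters.
        println(vowelHeavy.sample)  // e.g. Some(e)
        println(fiveDigits.sample)  // e.g. Some(List(7, 3, 0, 4, 9))
        println(points.sample)      // e.g. Some(Point(-12, 47))
      }
    }
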
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
deleted file mode 100644
index 3e8f6de5f6..0000000000
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import math.round
-
-
-sealed trait Pretty {
- def apply(prms: Pretty.Params): String
-
- def map(f: String => String) = Pretty(prms => f(Pretty.this(prms)))
-
- def flatMap(f: String => Pretty) = Pretty(prms => f(Pretty.this(prms))(prms))
-}
-
-object Pretty {
-
- case class Params(verbosity: Int)
-
- val defaultParams = Params(0)
-
- def apply(f: Params => String) = new Pretty { def apply(p: Params) = f(p) }
-
- def pretty[T <% Pretty](t: T, prms: Params): String = t(prms)
-
- def pretty[T <% Pretty](t: T): String = t(defaultParams)
-
- implicit def strBreak(s1: String) = new {
- def /(s2: String) = if(s2 == "") s1 else s1+"\n"+s2
- }
-
- def pad(s: String, c: Char, length: Int) =
- if(s.length >= length) s
- else s + List.fill(length-s.length)(c).mkString
-
- def break(s: String, lead: String, length: Int): String =
- if(s.length <= length) s
- else s.substring(0, length) / break(lead+s.substring(length), lead, length)
-
- def format(s: String, lead: String, trail: String, width: Int) =
- s.lines.map(l => break(lead+l+trail, " ", width)).mkString("\n")
-
- implicit def prettyAny(t: Any) = Pretty { p => t.toString }
-
- implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
-
- implicit def prettyList(l: List[Any]) = Pretty { p =>
- l.map("\""+_+"\"").mkString("List(", ", ", ")")
- }
-
- implicit def prettyThrowable(e: Throwable) = Pretty { prms =>
- val strs = e.getStackTrace.map { st =>
- import st._
- getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")"
- }
-
- val strs2 =
- if(prms.verbosity <= 0) Array[String]()
- else if(prms.verbosity <= 1) strs.take(5)
- else strs
-
- e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n")
- }
-
- implicit def prettyArgs(args: List[Arg[Any]]): Pretty = Pretty { prms =>
- if(args.isEmpty) "" else {
- for((a,i) <- args.zipWithIndex) yield {
- val l = if(a.label == "") "ARG_"+i else a.label
- val s =
- if(a.shrinks == 0 || prms.verbosity <= 1) ""
- else " (orig arg: "+a.prettyOrigArg(prms)+")"
-
- "> "+l+": "+a.prettyArg(prms)+""+s
- }
- }.mkString("\n")
- }
-
- implicit def prettyFreqMap(fm: Prop.FM) = Pretty { prms =>
- if(fm.total == 0) ""
- else {
- "> Collected test data: " / {
- for {
- (xs,r) <- fm.getRatios
- ys = xs - ()
- if !ys.isEmpty
- } yield round(r*100)+"% " + ys.mkString(", ")
- }.mkString("\n")
- }
- }
-
- implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
- if(ls.isEmpty) ""
- else "> Labels of failing property: " / ls.mkString("\n")
- val s = res.status match {
- case Test.Proved(args) => "OK, proved property."/pretty(args,prms)
- case Test.Passed => "OK, passed "+res.succeeded+" tests."
- case Test.Failed(args, l) =>
- "Falsified after "+res.succeeded+" passed tests."/labels(l)/pretty(args,prms)
- case Test.Exhausted =>
- "Gave up after only "+res.succeeded+" passed tests. " +
- res.discarded+" tests were discarded."
- case Test.PropException(args,e,l) =>
- "Exception raised on property evaluation."/labels(l)/pretty(args,prms)/
- "> Exception: "+pretty(e,prms)
- case Test.GenException(e) =>
- "Exception raised on argument generation."/
- "> Exception: "+pretty(e,prms)
- }
- val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
- s/t/pretty(res.freqMap,prms)
- }
-
- def prettyTime(millis: Long): String = {
- val min = millis/(60*1000)
- val sec = (millis-(60*1000*min)) / 1000d
- if(min <= 0) "%.3f sec ".format(sec)
- else "%d min %.3f sec ".format(min, sec)
- }
-}
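
A small hypothetical sketch of how the Pretty instances above are used; only API visible in the removed file is assumed.

    import org.scalacheck.Pretty

    object PrettySketch {
      def main(args: Array[String]): Unit = {
        // Build an instance directly from a Params => String function.
        val p: Pretty = Pretty { prms =>
          if (prms.verbosity > 0) "verbose rendering" else "terse rendering"
        }
        // A Pretty is applied to parameters to obtain the rendered string.
        println(p(Pretty.Params(verbosity = 2)))  // verbose rendering
        println(p(Pretty.defaultParams))          // terse rendering
        // `map` post-processes the rendered string.
        println(p.map(_.toUpperCase)(Pretty.defaultParams))  // TERSE RENDERING
      }
    }
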
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
deleted file mode 100644
index 38e00f260f..0000000000
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ /dev/null
@@ -1,818 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-import scala.collection._
-import scala.annotation.tailrec
-
-/** A property is a generator that generates a property result */
-trait Prop {
-
- import Prop.{Result,Params,Proof,True,False,Exception,Undecided,provedToTrue}
- import Test.cmdLineParser.{Success, NoSuccess}
- import Result.merge
-
- def apply(prms: Params): Result
-
- def map(f: Result => Result): Prop = Prop(prms => f(this(prms)))
-
- def flatMap(f: Result => Prop): Prop = Prop(prms => f(this(prms))(prms))
-
- def combine(p: Prop)(f: (Result, Result) => Result) =
- for(r1 <- this; r2 <- p) yield f(r1,r2)
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead.
- * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
- */
- @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
- def check(prms: Test.Params): Unit = Test.check(
- prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- def check(prms: Test.Parameters): Unit = Test.check(
- prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks this property and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- def mainRunner(args: Array[String]): Int = {
- Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- if (Test.check(params, this).passed) 0
- else 1
- case e: NoSuccess =>
- println("Incorrect options:"+"\n"+e+"\n")
- Test.cmdLineParser.printHelp
- -1
- }
- }
-
- /** Whether main should call System.exit with an exit code.
- * Defaults to true; override to change.
- */
- def mainCallsExit = true
-
- /** Convenience method that makes it possible to use this property
- * as an application that checks itself on execution */
- def main(args: Array[String]): Unit = {
- val code = mainRunner(args)
- if (mainCallsExit)
- System exit code
- }
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate false. */
- def &&(p: Prop) = combine(p)(_ && _)
-
- /** Returns a new property that holds if either this
- * or the given property (or both) hold. */
- def ||(p: Prop) = combine(p)(_ || _)
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate the same result
- * as the other property. */
- def ++(p: Prop): Prop = combine(p)(_ ++ _)
-
- /** Combines two properties through implication */
- def ==>(p: => Prop): Prop = flatMap { r1 =>
- if(r1.proved) p map { r2 => merge(r1,r2,r2.status) }
- else if(r1.success) p map { r2 => provedToTrue(merge(r1,r2,r2.status)) }
- else Prop(r1.copy(status = Undecided))
- }
-
- /** Returns a new property that holds if and only if both this
- * and the given property generate a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail. */
- def ==(p: Prop) = this.flatMap { r1 =>
- p.map { r2 =>
- Result.merge(r1, r2, if(r1.status == r2.status) True else False)
- }
- }
-
- override def toString = "Prop"
-
- /** Put a label on the property to make test reports clearer */
- def label(l: String) = map(_.label(l))
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
-}
-
-object Prop {
-
- import Gen.{value, fail, frequency, oneOf}
- import Arbitrary._
- import Shrink._
-
-
- // Types
-
- type Args = List[Arg[Any]]
- type FM = FreqMap[immutable.Set[Any]]
-
- /** Property parameters */
- case class Params(val genPrms: Gen.Params, val freqMap: FM)
-
- object Result {
- def apply(st: Status) = new Result(
- st,
- Nil,
- immutable.Set.empty[Any],
- immutable.Set.empty[String]
- )
-
- def merge(x: Result, y: Result, status: Status) = new Result(
- status,
- x.args ++ y.args,
- (x.collected.asInstanceOf[Set[AnyRef]] ++ y.collected).asInstanceOf[immutable.Set[Any]],
- x.labels ++ y.labels
- )
- }
-
- /** The result of evaluating a property */
- case class Result(
- status: Status,
- args: Args,
- collected: immutable.Set[Any],
- labels: immutable.Set[String]
- ) {
- def success = status match {
- case True => true
- case Proof => true
- case _ => false
- }
-
- def failure = status match {
- case False => true
- case Exception(_) => true
- case _ => false
- }
-
- def proved = status == Proof
-
- def addArg(a: Arg[Any]) = copy(args = a::args)
-
- def collect(x: Any) = copy(collected = collected+x)
-
- def label(l: String) = copy(labels = labels+l)
-
- import Result.merge
-
- def &&(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => this
- case (_,False) => r
-
- case (Undecided,_) => this
- case (_,Undecided) => r
-
- case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, r.status)
-
- case (True,True) => merge(this, r, True)
- }
-
- def ||(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,False) => merge(this, r, False)
- case (False,_) => r
- case (_,False) => this
-
- case (Proof,_) => this
- case (_,Proof) => r
-
- case (True,_) => this
- case (_,True) => r
-
- case (Undecided,Undecided) => merge(this, r, Undecided)
- }
-
- def ++(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (_, Undecided) => this
- case (Undecided, _) => r
-
- case (_, Proof) => this
- case (Proof, _) => r
-
- case (_, True) => this
- case (True, _) => r
-
- case (False, _) => this
- case (_, False) => r
- }
-
- def ==>(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => merge(this, r, Undecided)
-
- case (Undecided,_) => this
-
- case (Proof,_) => merge(this, r, r.status)
- case (True,_) => merge(this, r, r.status)
- }
-
- }
-
- sealed trait Status
-
- /** The property was proved */
- case object Proof extends Status
-
- /** The property was true */
- case object True extends Status
-
- /** The property was false */
- case object False extends Status
-
- /** The property could not be falsified or proved */
- case object Undecided extends Status
-
- /** Evaluating the property raised an exception */
- sealed case class Exception(e: Throwable) extends Status {
- override def equals(o: Any) = o match {
- case Exception(_) => true
- case _ => false
- }
- }
-
- def apply(f: Params => Result): Prop = new Prop {
- def apply(prms: Params) = f(prms)
- }
-
- def apply(r: Result): Prop = Prop(prms => r)
-
- def apply(b: Boolean): Prop = if(b) proved else falsified
-
-
- // Implicits
-
- /** A collection of property operators on [[Any]] values.
- * Import [[Prop.AnyOperators]] to make the operators available. */
- class ExtendedAny[T <% Pretty](x: => T) {
- /** See [[Prop.imply]] */
- def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
- /** See [[Prop.iff]] */
- def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
- @deprecated("Use 'Prop.throws' instead", "1.10.1")
- def throws[U <: Throwable](c: Class[U]): Prop = Prop.throws(c)(x)
- /** See [[Prop.?=]] */
- def ?=(y: T) = Prop.?=(x, y)
- /** See [[Prop.=?]] */
- def =?(y: T) = Prop.=?(x, y)
- }
-
- /** A collection of property operators on [[Boolean]] values.
- * Import [[Prop.BooleanOperators]] to make the operators available. */
- class ExtendedBoolean(b: => Boolean) {
- /** See [[Prop.==>]] */
- def ==>(p: => Prop) = Prop(b) ==> p
- }
-
- /** Implicit method that makes a number of property operators on values of
- * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
- * documentation on the operators. */
- @deprecated("Use 'Prop.AnyOperators' instead", "1.10.1")
- implicit def extendedAny[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on values of
- * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
- * documentation on the operators. */
- implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on boolean
- * values available in the current scope. See [[Prop.ExtendedBoolean]] for
- * documentation on the operators. */
- implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b)
-
- /** Implicit conversion of Boolean values to Prop values. */
- implicit def propBoolean(b: Boolean): Prop = Prop(b)
-
-
- // Private support functions
-
- private def provedToTrue(r: Result) = r.status match {
- case Proof => new Result(True, r.args, r.collected, r.labels)
- case _ => r
- }
-
-
- // Property combinators
-
- /** A property that is never proved or falsified */
- lazy val undecided = Prop(Result(Undecided))
-
- /** A property that is always false */
- lazy val falsified = Prop(Result(False))
-
- /** A property that is always proved */
- lazy val proved = Prop(Result(Proof))
-
- /** A property that is always passed */
- lazy val passed = Prop(Result(True))
-
- /** A property that denotes an exception */
- def exception(e: Throwable): Prop = Prop(Result(Exception(e)))
-
- /** A property that denotes an exception */
- lazy val exception: Prop = exception(null)
-
- /** Create a property that compares two values. If the values aren't equal,
- * the property will fail and report that the first value doesn't match the
- * expected (second) value. */
- def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
- if(x == y) proved else falsified :| {
- val exp = Pretty.pretty[T](y, Pretty.Params(0))
- val act = Pretty.pretty[T](x, Pretty.Params(0))
- "Expected "+exp+" but got "+act
- }
-
- /** Create a property that compares two values. If the values aren't equal,
- * the property will fail and report that the second value doesn't match the
- * expected (first) value. */
- def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
-
- /** A property that depends on the generator size */
- def sizedProp(f: Int => Prop): Prop = Prop { prms =>
- // provedToTrue since if the property is proved for
- // one size, it shouldn't be regarded as proved for
- // all sizes.
- provedToTrue(f(prms.genPrms.size)(prms))
- }
-
- /** Implication with several conditions */
- def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
- secure(if(f.isDefinedAt(x)) f(x) else undecided)
-
- /** Property holds only if the given partial function is defined at
- * `x`, and returns a property that holds */
- def iff[T](x: T, f: PartialFunction[T,Prop]): Prop =
- secure(if(f.isDefinedAt(x)) f(x) else falsified)
-
- /** Combines properties into one, which is true if and only if all the
- * properties are true */
- def all(ps: Prop*) = if(ps.isEmpty) proved else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ && _)
- )
-
- /** Combines properties into one, which is true if at least one of the
- * properties is true */
- def atLeastOne(ps: Prop*) = if(ps.isEmpty) falsified else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ || _)
- )
-
- /** A property that holds if at least one of the given generators
- * fails generating a value */
- def someFailing[T](gs: Seq[Gen[T]]) = atLeastOne(gs.map(_ == fail):_*)
-
- /** A property that holds iff none of the given generators
- * fails generating a value */
- def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
-
- /** A property that holds if the given statement throws an exception
- * of the specified type
- * @deprecated (in 1.10.1) Use `throws(...): Boolean` instead.
- */
- @deprecated("Use 'throws(...): Boolean' instead", "1.10.1")
- def throws[T <: Throwable](x: => Any, c: Class[T]): Prop = throws(c)(x)
-
- /** Returns true if the given statement throws an exception
- * of the specified type */
- def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean =
- try { x; false } catch { case e if c.isInstance(e) => true }
-
- /** Collect data for presentation in test report */
- def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
- val prop = f(t)
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def collect[T](t: T)(prop: Prop) = Prop { prms =>
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(())(prop)
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any, ifFalse: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(ifFalse)(prop)
-
- /** Wraps and protects a property */
- def secure[P <% Prop](p: => P): Prop =
- try { p: Prop } catch { case e: Throwable => exception(e) }
-
- /** Existential quantifier, using the default generator for the type. */
- def exists[A,P](f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty,
- aa: Arbitrary[A]
- ): Prop = exists(aa.arbitrary)(f)
-
- /** Existential quantifier for an explicit generator. */
- def exists[A,P](g: Gen[A])(f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty
- ): Prop = Prop { prms =>
- g(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- val r = p(prms).addArg(Arg(g.label,x,0,x))
- r.status match {
- case True => new Result(Proof, r.args, r.collected, r.labels)
- case False => new Result(Undecided, r.args, r.collected, r.labels)
- case _ => r
- }
- }
- }
-
- /** Universal quantifier for an explicit generator. Does not shrink failed
- * test cases. */
- def forAllNoShrink[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- pv: P => Prop,
- pp1: T1 => Pretty
- ): Prop = Prop { prms =>
- g1(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- provedToTrue(p(prms)).addArg(Arg(g1.label,x,0,x))
- }
- }
-
- /** Universal quantifier for two explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty,
- pp8: T8 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the given shrink function */
- def forAllShrink[T <% Pretty, P <% Prop](g: Gen[T],
- shrink: T => Stream[T])(f: T => P
- ): Prop = Prop { prms =>
-
- /** Returns the first failed result in Left or success in Right */
- def getFirstFailure(xs: Stream[T]): Either[(T,Result),(T,Result)] = {
- assert(!xs.isEmpty, "Stream cannot be empty")
- val results = xs.map { x =>
- val p = secure(f(x))
- (x, provedToTrue(p(prms)))
- }
- results.dropWhile(!_._2.failure).headOption match {
- case None => Right(results.head)
- case Some(xr) => Left(xr)
- }
- }
-
- def shrinker(x: T, r: Result, shrinks: Int, orig: T): Result = {
- val xs = shrink(x)
- val res = r.addArg(Arg(g.label,x,shrinks,orig))
- if(xs.isEmpty) res else getFirstFailure(xs) match {
- case Right(_) => res
- case Left((x2,r2)) => shrinker(x2, r2, shrinks+1, orig)
- }
- }
-
- g(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) => getFirstFailure(Stream.cons(x, Stream.empty)) match {
- case Right((x,r)) => r.addArg(Arg(g.label,x,0,x))
- case Left((x,r)) => shrinker(x,r,0,x)
- }
- }
-
- }
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- p: P => Prop,
- s1: Shrink[T1],
- pp1: T1 => Pretty
- ): Prop = forAllShrink(g1, shrink[T1])(f)
-
- /** Universal quantifier for two explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty,
- s8: Shrink[T8], pp8: T8 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,P] (
- f: A1 => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty
- ): Prop = forAllShrink(arbitrary[A1],shrink[A1])(f andThen p)
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,P] (
- f: (A1,A2) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,P] (
- f: (A1,A2,A3) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,P] (
- f: (A1,A2,A3,A4) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,P] (
- f: (A1,A2,A3,A4,A5) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,P] (
- f: (A1,A2,A3,A4,A5,A6) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,P] (
- f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,A8,P] (
- f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty,
- a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
-
- /** Ensures that the property expression passed in completes within the given space of time. */
- def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
- @tailrec private def attempt(prms: Params, endTime: Long): Result = {
- val result = wrappedProp.apply(prms)
- if (System.currentTimeMillis > endTime) {
- (if (result.failure) result else Result(False)).label("Timeout")
- } else {
- if (result.success) result
- else attempt(prms, endTime)
- }
- }
- def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
- }
-}
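
A hypothetical sketch of properties written against the combinators removed above, assuming the implicit Boolean-to-Prop conversion, the `:|` label operator and the default Arbitrary/Shrink/Pretty instances from the same ScalaCheck version.

    import org.scalacheck.Prop.{forAll, BooleanOperators}

    object PropSketch {
      // Universal quantification over arbitrary values, with a label.
      val concatLength = forAll { (xs: List[Int], ys: List[Int]) =>
        (xs ++ ys).length == xs.length + ys.length
      } :| "concat preserves total length"

      // Implication: empty lists yield an undecided result rather than a failure.
      val headIsFirst = forAll { (xs: List[Int]) =>
        xs.nonEmpty ==> (xs.reverse.last == xs.head)
      }

      def main(args: Array[String]): Unit = {
        // `check` runs each property with default parameters and prints a report.
        concatLength.check
        headIsFirst.check
      }
    }
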
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
deleted file mode 100644
index d4836d7420..0000000000
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-/** Represents a collection of properties, with convenient methods
- * for checking all properties at once. This class is itself a property, which
- * holds if and only if all of the contained properties hold.
- * <p>Properties are added in the following way:</p>
- *
- * {{{
- * object MyProps extends Properties("MyProps") {
- * property("myProp1") = forAll { (n:Int, m:Int) =>
- * n+m == m+n
- * }
- *
- * property("myProp2") = ((0/1) throws classOf[ArithmeticException])
- * }
- * }}}
- */
-class Properties(val name: String) extends Prop {
-
- import Test.cmdLineParser.{Success, NoSuccess}
-
- private val props = new scala.collection.mutable.ListBuffer[(String,Prop)]
-
- /** Returns one property which holds if and only if all of the
- * properties in this property collection hold */
- private def oneProperty: Prop = Prop.all((properties map (_._2)):_*)
-
- /** Returns all properties of this collection in a list of name/property
- * pairs. */
- def properties: Seq[(String,Prop)] = props
-
- def apply(p: Prop.Params) = oneProperty(p)
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- override def check(prms: Test.Parameters): Unit = Test.checkProperties(
- prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead.
- * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
- */
- @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
- override def check(prms: Test.Params): Unit = Test.checkProperties(
- prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- override def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- override def mainRunner(args: Array[String]): Int = {
- Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- val res = Test.checkProperties(params, this)
- val failed = res.filter(!_._2.passed).size
- failed
- case e: NoSuccess =>
- println("Incorrect options:"+"\n"+e+"\n")
- Test.cmdLineParser.printHelp
- -1
- }
- }
-
- /** Adds all properties from another property collection to this one. */
- def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
-
- /** Used for specifying properties. Usage:
- * {{{
- * property("myProp") = ...
- * }}}
- */
- class PropertySpecifier() {
- def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
- }
-
- lazy val property = new PropertySpecifier()
-}
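
As a hypothetical illustration of the grouping API deleted above, one collection can be nested in another via `include`; checking the outer collection then checks both.

    import org.scalacheck.{Prop, Properties}

    object StringProps extends Properties("String") {
      property("startsWith") = Prop.forAll { (a: String, b: String) =>
        (a + b).startsWith(a)
      }
    }

    object AllProps extends Properties("All") {
      // Copies every named property from StringProps into this collection.
      include(StringProps)
    }

Since Properties extends Prop, either object can also be run directly as an application to check its properties.
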
diff --git a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
deleted file mode 100644
index 7764101844..0000000000
--- a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-// vim: set ts=2 sw=2 et:
-
-package org.scalacheck
-
-import org.scalatools.testing._
-
-class ScalaCheckFramework extends Framework {
-
- private case object PropFingerprint extends TestFingerprint {
- val superClassName = "org.scalacheck.Prop"
- val isModule = false
- }
-
- private case object PropsFingerprint extends TestFingerprint {
- val superClassName = "org.scalacheck.Properties"
- val isModule = true
- }
-
- val name = "ScalaCheck"
-
- val tests = Array[Fingerprint](PropFingerprint, PropsFingerprint)
-
- def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 {
-
- private def asEvent(nr: (String, Test.Result)) = nr match {
- case (n: String, r: Test.Result) => new Event {
- val testName = n
- val description = n
- val result = r.status match {
- case Test.Passed => Result.Success
- case _:Test.Proved => Result.Success
- case _:Test.Failed => Result.Failure
- case Test.Exhausted => Result.Skipped
- case _:Test.PropException | _:Test.GenException => Result.Error
- }
- val error = r.status match {
- case Test.PropException(_, e, _) => e
- case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0)))
- case _ => null
- }
- }
- }
-
- def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) {
-
- val testCallback = new Test.TestCallback {
- override def onPropEval(n: String, w: Int, s: Int, d: Int) = {}
-
- override def onTestResult(n: String, r: Test.Result) = {
- for (l <- loggers) {
- import Pretty._
- l.info(
- (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(0))
- )
- }
- handler.handle(asEvent((n,r)))
- }
- }
-
- import Test.cmdLineParser.{Success, NoSuccess}
- val prms = Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- params.copy(_testCallback = testCallback, _customClassLoader = Some(loader))
- // TODO: Maybe handle this a bit better than throwing exception?
- case e: NoSuccess => throw new Exception(e.toString)
- }
-
- fingerprint match {
- case fp: SubclassFingerprint =>
- if(fp.isModule) {
- val obj = Class.forName(testClassName + "$", true, loader)
- val ps = obj.getField("MODULE$").get(null).asInstanceOf[Properties]
- Test.checkProperties(prms, ps)
- } else {
- val p = Class.forName(testClassName, true, loader).newInstance.asInstanceOf[Prop]
- handler.handle(asEvent((testClassName, Test.check(prms, p))))
- }
- }
- }
-
- }
-
-}
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
deleted file mode 100644
index 4895171a35..0000000000
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.Buildable
-import scala.collection.{ JavaConversions => jcl }
-
-sealed abstract class Shrink[T] {
- def shrink(x: T): Stream[T]
-}
-
-object Shrink {
-
- import Stream.{cons, empty}
- import scala.collection._
- import java.util.ArrayList
-
- /** Interleaves two streams */
- private def interleave[T](xs: Stream[T], ys: Stream[T]): Stream[T] =
- if(xs.isEmpty) ys
- else if(ys.isEmpty) xs
- else Stream(xs.head, ys.head) append interleave(xs.tail, ys.tail)
-
- /** Shrink instance factory */
- def apply[T](s: T => Stream[T]): Shrink[T] = new Shrink[T] {
- override def shrink(x: T) = s(x)
- }
-
- /** Shrink a value */
- def shrink[T](x: T)(implicit s: Shrink[T]): Stream[T] = s.shrink(x)
-
- /** Default shrink instance */
- implicit def shrinkAny[T]: Shrink[T] = Shrink(x => empty)
-
- /** Shrink instance of container */
- implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T],
- b: Buildable[T,C]
- ): Shrink[C[T]] = Shrink { xs: C[T] =>
-
- def removeChunks(n: Int, xs: Stream[T]): Stream[Stream[T]] =
- if(xs.isEmpty) empty
- else if(xs.tail.isEmpty) cons(empty, empty)
- else {
- val n1 = n / 2
- val n2 = n - n1
- lazy val xs1 = xs.take(n1)
- lazy val xs2 = xs.drop(n1)
- lazy val xs3 =
- for(ys1 <- removeChunks(n1,xs1) if !ys1.isEmpty) yield ys1 append xs2
- lazy val xs4 =
- for(ys2 <- removeChunks(n2,xs2) if !ys2.isEmpty) yield xs1 append ys2
-
- cons(xs1, cons(xs2, interleave(xs3,xs4)))
- }
-
- def shrinkOne(zs: Stream[T]): Stream[Stream[T]] =
- if(zs.isEmpty) empty
- else {
- val x = zs.head
- val xs = zs.tail
- (for(y <- shrink(x)) yield cons(y,xs)) append
- (for(ys <- shrinkOne(xs)) yield cons(x,ys))
- }
-
- val ys = v(xs)
- val zs = ys.toStream
- removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable)
-
- }
-
- /** Shrink instance of integer */
- implicit lazy val shrinkInt: Shrink[Int] = Shrink { n =>
-
- def halfs(n: Int): Stream[Int] =
- if(n == 0) empty else cons(n, halfs(n/2))
-
- if(n == 0) empty else {
- val ns = halfs(n/2).map(n - _)
- cons(0, interleave(ns, ns.map(-1 * _)))
- }
- }
-
- /** Shrink instance of String */
- implicit lazy val shrinkString: Shrink[String] = Shrink { s =>
- shrinkContainer[List,Char].shrink(s.toList).map(_.mkString)
- }
-
- /** Shrink instance of Option */
- implicit def shrinkOption[T](implicit s: Shrink[T]): Shrink[Option[T]] =
- Shrink {
- case None => empty
- case Some(x) => cons(None, for(y <- shrink(x)) yield Some(y))
- }
-
- /** Shrink instance of 2-tuple */
- implicit def shrinkTuple2[T1,T2](implicit
- s1: Shrink[T1], s2: Shrink[T2]
- ): Shrink[(T1,T2)] =
- Shrink { case (t1,t2) =>
- (for(x1 <- shrink(t1)) yield (x1, t2)) append
- (for(x2 <- shrink(t2)) yield (t1, x2))
- }
-
- /** Shrink instance of 3-tuple */
- implicit def shrinkTuple3[T1,T2,T3](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3]
- ): Shrink[(T1,T2,T3)] =
- Shrink { case (t1,t2,t3) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3))
- }
-
- /** Shrink instance of 4-tuple */
- implicit def shrinkTuple4[T1,T2,T3,T4](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4]
- ): Shrink[(T1,T2,T3,T4)] =
- Shrink { case (t1,t2,t3,t4) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4))
- }
-
- /** Shrink instance of 5-tuple */
- implicit def shrinkTuple5[T1,T2,T3,T4,T5](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5]
- ): Shrink[(T1,T2,T3,T4,T5)] =
- Shrink { case (t1,t2,t3,t4,t5) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5))
- }
-
- /** Shrink instance of 6-tuple */
- implicit def shrinkTuple6[T1,T2,T3,T4,T5,T6](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6]
- ): Shrink[(T1,T2,T3,T4,T5,T6)] =
- Shrink { case (t1,t2,t3,t4,t5,t6) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6))
- }
-
- /** Shrink instance of 7-tuple */
- implicit def shrinkTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7))
- }
-
- /** Shrink instance of 8-tuple */
- implicit def shrinkTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8)) append
- (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8))
- }
-
- /** Shrink instance of 9-tuple */
- implicit def shrinkTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8],
- s9: Shrink[T9]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8, t9)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8, t9)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8, t9)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8, t9)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8, t9)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8, t9)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8, t9)) append
- (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8, t9)) append
- (for(x9 <- shrink(t9)) yield (t1, t2, t3, t4, t5, t6, t7, t8, x9))
- }
-
-}
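
The deleted Shrink instances above all follow one pattern: produce a lazy stream of progressively simpler candidates for a failing value. A rough standalone sketch of the integer strategy (plain Scala, not the ScalaCheck API) halves the distance to zero and alternates signs:

  // Sketch only: mirrors the shrinkInt logic removed above.
  object ShrinkIntSketch {
    def halves(n: Int): Stream[Int] =
      if (n == 0) Stream.empty else n #:: halves(n / 2)

    def shrinkInt(n: Int): Stream[Int] =
      if (n == 0) Stream.empty
      else {
        val ns = halves(n / 2).map(n - _)      // candidates from n/2 up toward n
        0 #:: ns.flatMap(x => Stream(x, -x))   // interleave positive and negative
      }

    def main(args: Array[String]): Unit =
      // prints 0, 50, -50, 75, -75, 88, -88, 94, -94, 97, -97, 99, -99
      println(shrinkInt(100).toList.mkString(", "))
  }
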
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
deleted file mode 100644
index 6e9b6b88fd..0000000000
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ /dev/null
@@ -1,392 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-object Test {
-
- import util.FreqMap
- import scala.collection.immutable
- import Prop.FM
- import util.CmdLineParser
-
- /** Test parameters used by the `Test.check` method.
- */
- trait Parameters {
- /** The minimum number of tests that must succeed for ScalaCheck to
- * consider a property passed. */
- def minSuccessfulTests: Int
-
- /** The starting size given as parameter to the generators. */
- def minSize: Int
-
- /** The maximum size given as parameter to the generators. */
- def maxSize: Int
-
- /** The random number generator used. */
- def rng: java.util.Random
-
- /** The number of tests run in parallel. */
- def workers: Int
-
- /** A callback that ScalaCheck calls each time a test is executed. */
- def testCallback: TestCallback
-
- /** The maximum ratio between discarded and passed tests allowed before
- * ScalaCheck gives up and discards the property. At least
- * `minSuccessfulTests` will always be run, though. */
- def maxDiscardRatio: Float
-
- /** A custom class loader that should be used during test execution. */
- def customClassLoader: Option[ClassLoader]
-
- // private since we can't guarantee binary compatibility for this one
- private[scalacheck] def copy(
- _minSuccessfulTests: Int = Parameters.this.minSuccessfulTests,
- _minSize: Int = Parameters.this.minSize,
- _maxSize: Int = Parameters.this.maxSize,
- _rng: java.util.Random = Parameters.this.rng,
- _workers: Int = Parameters.this.workers,
- _testCallback: TestCallback = Parameters.this.testCallback,
- _maxDiscardRatio: Float = Parameters.this.maxDiscardRatio,
- _customClassLoader: Option[ClassLoader] = Parameters.this.customClassLoader
- ): Parameters = new Parameters {
- val minSuccessfulTests: Int = _minSuccessfulTests
- val minSize: Int = _minSize
- val maxSize: Int = _maxSize
- val rng: java.util.Random = _rng
- val workers: Int = _workers
- val testCallback: TestCallback = _testCallback
- val maxDiscardRatio: Float = _maxDiscardRatio
- val customClassLoader: Option[ClassLoader] = _customClassLoader
- }
- }
-
- /** Test parameters used by the `Test.check` method.
- *
- * To override default values, extend the
- * [[org.scalacheck.Test.Parameters.Default]] trait:
- *
- * {{{
- * val myParams = new Parameters.Default {
- * override val minSuccessfulTests = 600
- * override val maxDiscardRatio = 8
- * }
- * }}}
- */
- object Parameters {
- /** Default test parameters trait. This can be overridden if you need to
- * tweak the parameters. */
- trait Default extends Parameters {
- val minSuccessfulTests: Int = 100
- val minSize: Int = 0
- val maxSize: Int = Gen.Params().size
- val rng: java.util.Random = Gen.Params().rng
- val workers: Int = 1
- val testCallback: TestCallback = new TestCallback {}
- val maxDiscardRatio: Float = 5
- val customClassLoader: Option[ClassLoader] = None
- }
-
- /** Default test parameters instance. */
- val default: Parameters = new Default {}
- }
-
- /** Test parameters
- * @deprecated (in 1.10.0) Use [[org.scalacheck.Test.Parameters]] instead.
- */
- @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
- case class Params(
- minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = -1,
- minSize: Int = 0,
- maxSize: Int = Gen.Params().size,
- rng: java.util.Random = Gen.Params().rng,
- workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
- )
-
- @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
- private def paramsToParameters(params: Params) = new Parameters {
- val minSuccessfulTests = params.minSuccessfulTests
- val minSize = params.minSize
- val maxSize = params.maxSize
- val rng = params.rng
- val workers = params.workers
- val testCallback = params.testCallback
-
- // maxDiscardedTests is deprecated, but if someone
- // uses it let it override maxDiscardRatio
- val maxDiscardRatio =
- if(params.maxDiscardedTests < 0) Parameters.default.maxDiscardRatio
- else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
-
- val customClassLoader = Parameters.default.customClassLoader
- }
-
- /** Test statistics */
- case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
- def passed = status match {
- case Passed => true
- case Proved(_) => true
- case _ => false
- }
- }
-
- /** Test status */
- sealed trait Status
-
- /** ScalaCheck found enough cases for which the property holds, so the
- * property is considered correct. (It is not proved correct, though). */
- case object Passed extends Status
-
- /** ScalaCheck managed to prove the property correct */
- sealed case class Proved(args: Prop.Args) extends Status
-
- /** The property was proved wrong with the given concrete arguments. */
- sealed case class Failed(args: Prop.Args, labels: Set[String]) extends Status
-
- /** The property test was exhausted: it wasn't possible to generate enough
- * concrete arguments satisfying the preconditions to get enough passing
- * property evaluations. */
- case object Exhausted extends Status
-
- /** An exception was raised when trying to evaluate the property with the
- * given concrete arguments. */
- sealed case class PropException(args: Prop.Args, e: Throwable,
- labels: Set[String]) extends Status
-
- /** An exception was raised when trying to generate concrete arguments
- * for evaluating the property. */
- sealed case class GenException(e: Throwable) extends Status
-
- trait TestCallback { self =>
- /** Called each time a property is evaluated */
- def onPropEval(name: String, threadIdx: Int, succeeded: Int,
- discarded: Int): Unit = ()
-
- /** Called whenever a property has finished testing */
- def onTestResult(name: String, result: Result): Unit = ()
-
- def chain(testCallback: TestCallback) = new TestCallback {
- override def onPropEval(name: String, threadIdx: Int,
- succeeded: Int, discarded: Int
- ): Unit = {
- self.onPropEval(name,threadIdx,succeeded,discarded)
- testCallback.onPropEval(name,threadIdx,succeeded,discarded)
- }
-
- override def onTestResult(name: String, result: Result): Unit = {
- self.onTestResult(name,result)
- testCallback.onTestResult(name,result)
- }
- }
- }
-
- private def assertParams(prms: Parameters) = {
- import prms._
- if(
- minSuccessfulTests <= 0 ||
- maxDiscardRatio <= 0 ||
- minSize < 0 ||
- maxSize < minSize ||
- workers <= 0
- ) throw new IllegalArgumentException("Invalid test parameters")
- }
-
- private def secure[T](x: => T): Either[T,Throwable] =
- try { Left(x) } catch { case e: Throwable => Right(e) }
-
- private[scalacheck] lazy val cmdLineParser = new CmdLineParser {
- object OptMinSuccess extends IntOpt {
- val default = Parameters.default.minSuccessfulTests
- val names = Set("minSuccessfulTests", "s")
- val help = "Number of tests that must succeed in order to pass a property"
- }
- object OptMaxDiscarded extends IntOpt {
- val default = -1
- val names = Set("maxDiscardedTests", "d")
- val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property. NOTE: this option is deprecated, please use " +
- "the option maxDiscardRatio (-r) instead."
- }
- object OptMaxDiscardRatio extends FloatOpt {
- val default = Parameters.default.maxDiscardRatio
- val names = Set("maxDiscardRatio", "r")
- val help =
- "The maximum ratio between discarded and succeeded tests " +
- "allowed before ScalaCheck stops testing a property. At " +
- "least minSuccessfulTests will always be tested, though."
- }
- object OptMinSize extends IntOpt {
- val default = Parameters.default.minSize
- val names = Set("minSize", "n")
- val help = "Minimum data generation size"
- }
- object OptMaxSize extends IntOpt {
- val default = Parameters.default.maxSize
- val names = Set("maxSize", "x")
- val help = "Maximum data generation size"
- }
- object OptWorkers extends IntOpt {
- val default = Parameters.default.workers
- val names = Set("workers", "w")
- val help = "Number of threads to execute in parallel for testing"
- }
- object OptVerbosity extends IntOpt {
- val default = 1
- val names = Set("verbosity", "v")
- val help = "Verbosity level"
- }
-
- val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMaxDiscardRatio, OptMinSize,
- OptMaxSize, OptWorkers, OptVerbosity
- )
-
- def parseParams(args: Array[String]) = parseArgs(args) {
- optMap => Parameters.default.copy(
- _minSuccessfulTests = optMap(OptMinSuccess),
- _maxDiscardRatio =
- if (optMap(OptMaxDiscarded) < 0) optMap(OptMaxDiscardRatio)
- else optMap(OptMaxDiscarded).toFloat / optMap(OptMinSuccess),
- _minSize = optMap(OptMinSize),
- _maxSize = optMap(OptMaxSize),
- _workers = optMap(OptWorkers),
- _testCallback = ConsoleReporter(optMap(OptVerbosity))
- )
- }
- }
-
- /** Tests a property with the given testing parameters, and returns
- * the test results.
- * @deprecated (in 1.10.0) Use
- * `check(Parameters, Properties)` instead.
- */
- @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
- def check(params: Params, p: Prop): Result = {
- check(paramsToParameters(params), p)
- }
-
- /** Tests a property with the given testing parameters, and returns
- * the test results. */
- def check(params: Parameters, p: Prop): Result = {
- import params._
-
- assertParams(params)
- if(workers > 1) {
- assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- }
-
- val iterations = math.ceil(minSuccessfulTests / (workers: Double))
- val sizeStep = (maxSize-minSize) / (iterations*workers)
- var stop = false
-
- def worker(workerIdx: Int) =
- if (workers < 2) () => workerFun(workerIdx)
- else actors.Futures.future {
- params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
- workerFun(workerIdx)
- }
-
- def workerFun(workerIdx: Int) = {
- var n = 0 // passed tests
- var d = 0 // discarded tests
- var res: Result = null
- var fm = FreqMap.empty[immutable.Set[Any]]
- while(!stop && res == null && n < iterations) {
- val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
- val propPrms = Prop.Params(Gen.Params(size.round.toInt, params.rng), fm)
- secure(p(propPrms)) match {
- case Right(e) => res =
- Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
- case Left(propRes) =>
- fm =
- if(propRes.collected.isEmpty) fm
- else fm + propRes.collected
- propRes.status match {
- case Prop.Undecided =>
- d += 1
- testCallback.onPropEval("", workerIdx, n, d)
- // The below condition is kind of hacky. We have to have
- // some margin, otherwise workers might stop testing too
- // early because they have been exhausted, but the overall
- // test has not.
- if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
- res = Result(Exhausted, n, d, fm)
- case Prop.True =>
- n += 1
- testCallback.onPropEval("", workerIdx, n, d)
- case Prop.Proof =>
- n += 1
- res = Result(Proved(propRes.args), n, d, fm)
- stop = true
- case Prop.False =>
- res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
- stop = true
- case Prop.Exception(e) =>
- res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
- stop = true
- }
- }
- }
- if (res == null) {
- if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
- else Result(Exhausted, n, d, fm)
- } else res
- }
-
- def mergeResults(r1: () => Result, r2: () => Result) = {
- val Result(st1, s1, d1, fm1, _) = r1()
- val Result(st2, s2, d2, fm2, _) = r2()
- if (st1 != Passed && st1 != Exhausted)
- () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
- else if (st2 != Passed && st2 != Exhausted)
- () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
- else {
- if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
- () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
- else
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
- }
- }
-
- val start = System.currentTimeMillis
- val results = for(i <- 0 until workers) yield worker(i)
- val r = results.reduceLeft(mergeResults)()
- stop = true
- results foreach (_.apply())
- val timedRes = r.copy(time = System.currentTimeMillis-start)
- params.testCallback.onTestResult("", timedRes)
- timedRes
- }
-
- /** Check a set of properties.
- * @deprecated (in 1.10.0) Use
- * `checkProperties(Parameters, Properties)` instead.
- */
- @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
- def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
- checkProperties(paramsToParameters(prms), ps)
-
- /** Check a set of properties. */
- def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] =
- ps.properties.map { case (name,p) =>
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) =
- prms.testCallback.onPropEval(name,t,s,d)
- override def onTestResult(n: String, r: Result) =
- prms.testCallback.onTestResult(name,r)
- }
- val res = check(prms copy (_testCallback = testCallback), p)
- (name,res)
- }
-
-}
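
Much of the deleted Test.check above is bookkeeping around one formula: every property evaluation gets a generation size that grows linearly from minSize to maxSize, spread across the workers. A minimal sketch of that schedule (illustrative names, not ScalaCheck code):

  object SizeScheduleSketch {
    // size for the k-th evaluation of a given worker, as in the deleted workerFun
    def sizes(minSuccessfulTests: Int, minSize: Int, maxSize: Int,
              workers: Int, workerIdx: Int): Seq[Int] = {
      val iterations = math.ceil(minSuccessfulTests / workers.toDouble)
      val sizeStep   = (maxSize - minSize) / (iterations * workers)
      (0 until iterations.toInt).map { k =>
        (minSize + sizeStep * (workerIdx + workers * k)).round.toInt
      }
    }

    def main(args: Array[String]): Unit =
      // e.g. 100 tests over sizes 0 to 100 with a single worker: 0, 1, 2, ..., 99
      println(sizes(100, 0, 100, workers = 1, workerIdx = 0).mkString(", "))
  }
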
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
deleted file mode 100644
index 140c541a95..0000000000
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.collection._
-
-trait Buildable[T,C[_]] {
- def builder: mutable.Builder[T,C[T]]
- def fromIterable(it: Traversable[T]): C[T] = {
- val b = builder
- b ++= it
- b.result()
- }
-}
-
-object Buildable {
-
- implicit def buildableList[T] = new Buildable[T,List] {
- def builder = new mutable.ListBuffer[T]
- }
-
- implicit def buildableStream[T] = new Buildable[T,Stream] {
- def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
- }
-
- implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
- new Buildable[T,Array] {
- def builder = mutable.ArrayBuilder.make[T]
- }
-
- implicit def buildableMutableSet[T] = new Buildable[T,mutable.Set] {
- def builder = new mutable.SetBuilder(mutable.Set.empty[T])
- }
-
- implicit def buildableImmutableSet[T] = new Buildable[T,immutable.Set] {
- def builder = new mutable.SetBuilder(immutable.Set.empty[T])
- }
-
- implicit def buildableSet[T] = new Buildable[T,Set] {
- def builder = new mutable.SetBuilder(Set.empty[T])
- }
-
- import java.util.ArrayList
- implicit def buildableArrayList[T] = new Buildable[T,ArrayList] {
- def builder = new mutable.Builder[T,ArrayList[T]] {
- val al = new ArrayList[T]
- def +=(x: T) = {
- al.add(x)
- this
- }
- def clear() = al.clear()
- def result() = al
- }
- }
-
-}
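
The deleted Buildable type class pairs each collection type with a builder factory, and fromIterable materializes a collection from any Traversable. A small sketch in the same style, with a hypothetical Vector instance that is not part of the deleted file:

  import scala.collection.mutable

  trait Buildable[T, C[_]] {
    def builder: mutable.Builder[T, C[T]]
    def fromIterable(it: Traversable[T]): C[T] = {
      val b = builder
      b ++= it
      b.result()
    }
  }

  object BuildableSketch {
    // hypothetical extra instance, written like buildableList above
    implicit def buildableVector[T]: Buildable[T, Vector] = new Buildable[T, Vector] {
      def builder = Vector.newBuilder[T]
    }

    def main(args: Array[String]): Unit =
      println(implicitly[Buildable[Int, Vector]].fromIterable(List(1, 2, 3))) // Vector(1, 2, 3)
  }
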
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
deleted file mode 100644
index eb3a91fe59..0000000000
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.Reader
-import scala.util.parsing.input.Position
-import scala.collection.Set
-import org.scalacheck.Test
-
-trait CmdLineParser extends Parsers {
-
- type Elem = String
-
- trait Opt[+T] {
- val default: T
- val names: Set[String]
- val help: String
- }
- trait Flag extends Opt[Unit]
- trait IntOpt extends Opt[Int]
- trait FloatOpt extends Opt[Float]
- trait StrOpt extends Opt[String]
-
- class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
- def apply(flag: Flag): Boolean = opts.contains(flag)
- def apply[T](opt: Opt[T]): T = opts.get(opt) match {
- case None => opt.default
- case Some(v) => v.asInstanceOf[T]
- }
- def update[T](opt: Opt[T], optVal: T) = opts.update(opt, optVal)
- }
-
- val opts: Set[Opt[_]]
-
- private class ArgsReader(args: Array[String], i: Int) extends Reader[String] {
- val pos = new Position {
- val column = (args take i).foldLeft(1)(_ + _.length + 1)
- val line = 1
- val lineContents = args.mkString(" ")
- }
- val atEnd = i >= args.length
- def first = if(atEnd) null else args(i)
- def rest = if(atEnd) this else new ArgsReader(args, i+1)
- }
-
- private def getOpt(s: String) = {
- if(s == null || s.length == 0 || s.charAt(0) != '-') None
- else opts.find(_.names.contains(s.drop(1)))
- }
-
- private val opt: Parser[Opt[Any]] = accept("option name", {
- case s if getOpt(s).isDefined => getOpt(s).get
- })
-
- private val strVal: Parser[String] = accept("string", {
- case s if s != null => s
- })
-
- private val intVal: Parser[Int] = accept("integer", {
- case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
- })
-
- private val floatVal: Parser[Float] = accept("float", {
- case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
- => s.toFloat
- })
-
- private case class OptVal[T](o: Opt[T], v: T)
-
- private val optVal: Parser[OptVal[Any]] = opt into {
- case o: Flag => success(OptVal(o, ()))
- case o: IntOpt => intVal ^^ (v => OptVal(o, v))
- case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
- case o: StrOpt => strVal ^^ (v => OptVal(o, v))
- }
-
- val options: Parser[OptMap] = rep(optVal) ^^ { xs =>
- val map = new OptMap
- xs.foreach { case OptVal(o,v) => map(o) = v }
- map
- }
-
- def printHelp = {
- println("Available options:")
- opts.foreach { opt =>
- println(" " + opt.names.map("-"+_).mkString(", ") + ": " + opt.help)
- }
- }
-
- def parseArgs[T](args: Array[String])(f: OptMap => T) =
- phrase(options map f)(new ArgsReader(args,0))
-}
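
One detail of the deleted parser worth calling out is the OptMap lookup: an option that never appeared on the command line falls back to its declared default. A standalone sketch of that behaviour (no parser combinators, names are illustrative):

  object OptMapSketch {
    trait Opt[+T] { def default: T; def names: Set[String] }

    class OptMap {
      private val opts = scala.collection.mutable.HashMap.empty[Opt[_], Any]
      def apply[T](opt: Opt[T]): T = opts.get(opt) match {
        case None    => opt.default         // never set: fall back to the default
        case Some(v) => v.asInstanceOf[T]   // unchecked cast, as in the original
      }
      def update[T](opt: Opt[T], value: T): Unit = opts.update(opt, value)
    }

    object MinSuccess extends Opt[Int] {
      val default = 100
      val names   = Set("minSuccessfulTests", "s")
    }

    def main(args: Array[String]): Unit = {
      val m = new OptMap
      println(m(MinSuccess))   // 100 (default)
      m(MinSuccess) = 500
      println(m(MinSuccess))   // 500
    }
  }
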
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
deleted file mode 100644
index d0686aec72..0000000000
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-trait FreqMap[T] {
- protected val underlying: scala.collection.immutable.Map[T,Int]
- val total: Int
-
- def +(t: T) = new FreqMap[T] {
- private val n = FreqMap.this.underlying.get(t) match {
- case None => 1
- case Some(n) => n+1
- }
- val underlying = FreqMap.this.underlying + (t -> n)
- val total = FreqMap.this.total + 1
- }
-
- def -(t: T) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying.get(t) match {
- case None => FreqMap.this.underlying
- case Some(n) => FreqMap.this.underlying + (t -> (n-1))
- }
- val total = FreqMap.this.total + 1
- }
-
- def ++(fm: FreqMap[T]) = new FreqMap[T] {
- private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet
- private val mappings = keys.toStream.map { x =>
- (x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0))
- }
- val underlying = scala.collection.immutable.Map(mappings: _*)
- val total = FreqMap.this.total + fm.total
- }
-
- def --(fm: FreqMap[T]) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying transform {
- case (x,n) => n - fm.getCount(x).getOrElse(0)
- }
- lazy val total = (0 /: underlying.valuesIterator) (_ + _)
- }
-
- def getCount(t: T) = underlying.get(t)
-
- def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2)
-
- def getRatio(t: T) = for(c <- getCount(t)) yield (c: Float)/total
-
- def getRatios = for((t,c) <- getCounts) yield (t, (c: Float)/total)
-
- override def toString = underlying.toString
-}
-
-object FreqMap {
- def empty[T] = new FreqMap[T] {
- val underlying = scala.collection.immutable.Map.empty[T,Int]
- val total = 0
- }
-}
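
FreqMap is the frequency table that the deleted Test.check above folds collected property values into (the fm accumulator). A usage sketch against that deleted API, assuming an older ScalaCheck is still on the classpath:

  import org.scalacheck.util.FreqMap

  object FreqMapSketch {
    def main(args: Array[String]): Unit = {
      val fm = FreqMap.empty[String] + "even" + "even" + "odd"
      println(fm.total)            // 3
      println(fm.getCounts)        // List((even,2), (odd,1))
      println(fm.getRatio("even")) // Some(0.6666667) -- roughly two thirds
    }
  }
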
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
deleted file mode 100644
index 7c1dc8dcc4..0000000000
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-object StdRand extends java.util.Random
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index d407b93a4b..fe5ed47d43 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -7,12 +7,10 @@ package scala.tools.nsc
package doc
import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
-import scala.reflect.internal.Chars._
-import symtab._
import typechecker.Analyzer
+import scala.reflect.internal.Chars._
import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo, LinkToExternal }
-import scala.language.postfixOps
trait ScaladocAnalyzer extends Analyzer {
val global : Global // generally, a ScaladocGlobal
@@ -168,7 +166,7 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
}
override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
- override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption orNull
+ override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption.orNull
override def toString(link: LinkTo): String = "No link"
override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
override def warnNoLink: Boolean = false
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
index 20f24dc753..e654678c6d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package doc
import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
-import scala.reflect.internal.Chars._
import symtab._
import reporters.Reporter
import typechecker.Analyzer
@@ -23,9 +22,11 @@ trait ScaladocGlobalTrait extends Global {
val runsAfter = List[String]()
val runsRightAfter = None
}
- override lazy val loaders = new SymbolLoaders {
- val global: outer.type = outer
+ override lazy val loaders = new {
+ val global: outer.type = outer
+ val platform: outer.platform.type = outer.platform
+ } with GlobalSymbolLoaders {
// SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
// therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
// that are not in their correct place (see bug for details)
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index c4e3c115be..8f217e087c 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -305,10 +305,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else None
}
+ private def templateAndType(ancestor: Symbol): (TemplateImpl, TypeEntity) = (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
lazy val (linearizationTemplates, linearizationTypes) =
- reprSymbol.ancestors map { ancestor =>
- (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
- } unzip
+ (reprSymbol.ancestors map templateAndType).unzip
/* Subclass cache */
private lazy val subClassesCache = (
@@ -321,7 +320,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def directSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- /* Implcitly convertible class cache */
+ /* Implicitly convertible class cache */
private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = {
if (implicitlyConvertibleClassesCache == null)
@@ -841,7 +840,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def value = tree
}
}
- case None =>
+ case None =>
argTrees map { tree =>
new ValueArgument {
def parameter = None
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index 9f139cb5ea..123516bb2d 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -5,7 +5,6 @@
**
*/
-
package scala.tools.scalap
import scala.collection.mutable
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index 9c72bdbf1e..59f083ee76 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -5,13 +5,9 @@
**
*/
-
-package scala
-package tools.scalap
-
+package scala.tools.scalap
class ByteArrayReader(content: Array[Byte]) {
- import java.io._
/** the buffer containing the file
*/
@@ -105,9 +101,6 @@ class ByteArrayReader(content: Array[Byte]) {
def getDouble(bp: Int): Double = java.lang.Double.longBitsToDouble(getLong(bp))
/** skip next 'n' bytes
- */
- def skip(n: Int) {
- bp += n
- }
-
+ */
+ def skip(n: Int): Unit = bp += n
}
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index f62df285f9..d9d264bbbf 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -5,10 +5,8 @@
**
*/
-
package scala.tools.scalap
-
class Classfile(in: ByteArrayReader) {
import Classfiles._
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index 9295dd7aff..982a83cfa0 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -5,10 +5,8 @@
**
*/
-
package scala.tools.scalap
-
object Classfiles {
final val JAVA_MAGIC = 0xCAFEBABE
final val JAVA_MAJOR_VERSION = 45
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index 168050096d..21c4399d5c 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -6,13 +6,9 @@
*/
-package scala
-package tools.scalap
+package scala.tools.scalap
-import java.io._
-
-
-class CodeWriter(writer: Writer) {
+class CodeWriter(writer: java.io.Writer) {
private val nl = scala.compat.Platform.EOL
private var step = " "
diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala
index 76ce3f4173..69325c1ec8 100644
--- a/src/scalap/scala/tools/scalap/Decode.scala
+++ b/src/scalap/scala/tools/scalap/Decode.scala
@@ -5,17 +5,14 @@
**
*/
-// $Id$
-
package scala.tools.scalap
-import scala.tools.scalap.scalax.rules.scalasig._
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.util.ScalaClassLoader.appLoader
+import scala.tools.scalap.scalasig._
+
+import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.internal.pickling.ByteCodecs
import ClassFileParser.{ ConstValueIndex, Annotation }
-import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
/** Temporary decoder. This would be better off in the scala.tools.nsc
* but right now the compiler won't acknowledge scala.tools.scalap
@@ -31,7 +28,7 @@ object Decode {
/** Return the classfile bytes representing the scala sig classfile attribute.
* This has been obsoleted by the switch to annotations.
*/
- def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, appLoader)
+ def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, ScalaClassLoader.appLoader)
def scalaSigBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
val bytes = classLoader.classBytes(name)
val reader = new ByteArrayReader(bytes)
@@ -39,17 +36,16 @@ object Decode {
cf.scalaSigAttribute map (_.data)
}
- /** Return the bytes representing the annotation
- */
- def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, appLoader)
+ /** Return the bytes representing the annotation. */
+ def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, ScalaClassLoader.appLoader)
def scalaSigAnnotationBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
val bytes = classLoader.classBytes(name)
val byteCode = ByteCode(bytes)
val classFile = ClassFileParser.parse(byteCode)
import classFile._
- classFile annotation SCALA_SIG_ANNOTATION map { case Annotation(_, els) =>
- val bytesElem = els find (x => constant(x.elementNameIndex) == BYTES_VALUE) getOrElse null
+ classFile annotation Main.SCALA_SIG_ANNOTATION map { case Annotation(_, els) =>
+ val bytesElem = els find (x => constant(x.elementNameIndex) == Main.BYTES_VALUE) getOrElse null
val _bytes = bytesElem.elementValue match { case ConstValueIndex(x) => constantWrapped(x) }
val bytes = _bytes.asInstanceOf[StringBytesPair].bytes
val length = ByteCodecs.decode(bytes)
@@ -58,8 +54,7 @@ object Decode {
}
}
- /** private[scala] so nobody gets the idea this is a supported interface.
- */
+ /** private[scala] so nobody gets the idea this is a supported interface. */
private[scala] def caseParamNames(path: String): Option[List[String]] = {
val (outer, inner) = (path indexOf '$') match {
case -1 => (path, "")
@@ -67,7 +62,7 @@ object Decode {
}
for {
- clazz <- appLoader.tryToLoadClass[AnyRef](outer)
+ clazz <- ScalaClassLoader.appLoader.tryToLoadClass[AnyRef](outer)
ssig <- ScalaSigParser.parse(clazz)
}
yield {
@@ -85,11 +80,10 @@ object Decode {
}
}
- /** Returns a map of Alias -> Type for the given package.
- */
+ /** Returns a map of Alias -> Type for the given package. */
private[scala] def typeAliases(pkg: String) = {
for {
- clazz <- appLoader.tryToLoadClass[AnyRef](pkg + ".package")
+ clazz <- ScalaClassLoader.appLoader.tryToLoadClass[AnyRef](pkg + ".package")
ssig <- ScalaSigParser.parse(clazz)
}
yield {
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index 772cf6eacd..1ba89e4702 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -5,13 +5,11 @@
**
*/
-
package scala.tools.scalap
-import java.io._
import scala.reflect.NameTransformer
-class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer) {
+class JavaWriter(classfile: Classfile, writer: java.io.Writer) extends CodeWriter(writer) {
val cf = classfile
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 5da4227e53..44d7ef6a41 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -5,16 +5,16 @@
**
*/
-package scala
-package tools.scalap
+package scala.tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
-import scala.reflect.NameTransformer
-import scalax.rules.scalasig._
+
import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
-import scala.tools.util.PathResolver
-import ClassPath.DefaultJavaContext
-import scala.tools.nsc.io.{ PlainFile, AbstractFile }
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.io.AbstractFile
+
+import scala.tools.scalap.scalasig._
+
/**The main object used to execute scalap on the command-line.
*
@@ -104,7 +104,7 @@ class Main {
// we have to encode every fragment of a name separately, otherwise the NameTransformer
// will encode using unicode escaping dot separators as well
// we can afford allocations because this is not a performance critical code
- classname.split('.').map(NameTransformer.encode).mkString(".")
+ classname.split('.').map(scala.reflect.NameTransformer.encode).mkString(".")
}
val cls = path.findClass(encName)
if (cls.isDefined && cls.get.binary.isDefined) {
@@ -185,7 +185,7 @@ object Main extends Main {
val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
val path = cparg match {
case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
+ case _ => scala.tools.util.PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
}
// print the classpath if output is verbose
if (verbose)
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index 8b4ffb3efd..324330466f 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -6,18 +6,15 @@
*/
-package scala
-package tools.scalap
+package scala.tools.scalap
-import java.io._
-import java.util._
/** a parser class for parsing meta type information in classfiles
* generated by pico.
*/
class MetaParser(meta: String) {
- val scanner = new StringTokenizer(meta, "()[], \t<;", true)
+ val scanner = new java.util.StringTokenizer(meta, "()[], \t<;", true)
var token: String = _
val res = new StringBuffer
diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala
index 8f9a9d8606..432dd495e9 100644
--- a/src/scalap/scala/tools/scalap/Properties.scala
+++ b/src/scalap/scala/tools/scalap/Properties.scala
@@ -9,8 +9,7 @@
package scala.tools.scalap
/** Loads decoder.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait
-{
+object Properties extends scala.util.PropertiesTrait {
protected def propCategory = "decoder"
protected def pickJarBasedOn = classOf[Classfile]
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/rules/Memoisable.scala
index b4ce8cab23..418141bee7 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
+++ b/src/scalap/scala/tools/scalap/rules/Memoisable.scala
@@ -10,26 +10,24 @@
//
// -----------------------------------------------------------------------------
-package scala.tools.scalap
-package scalax
-package rules
+package scala.tools.scalap.rules
import scala.collection.mutable
trait MemoisableRules extends Rules {
- def memo[In <: Memoisable, Out, A, X](key : AnyRef)(toRule : => In => Result[Out, A, X]) = {
+ def memo[In <: Memoisable, Out, A, X](key: AnyRef)(toRule: => In => Result[Out, A, X]) = {
lazy val rule = toRule
from[In] { in => in.memo(key, rule(in)) }
}
- override def ruleWithName[In, Out, A, X](name : String, f : In => rules.Result[Out, A, X]) = super.ruleWithName(name, (in : In) => in match {
- case s : Memoisable => s.memo(name, f(in))
+ override def ruleWithName[In, Out, A, X](name: String, f: In => Result[Out, A, X]) = super.ruleWithName(name, (in: In) => in match {
+ case s: Memoisable => s.memo(name, f(in))
case _ => f(in)
})
}
trait Memoisable {
- def memo[A](key : AnyRef, a : => A) : A
+ def memo[A](key: AnyRef, a: => A): A
}
@@ -40,22 +38,19 @@ object DefaultMemoisable {
trait DefaultMemoisable extends Memoisable {
protected val map = new mutable.HashMap[AnyRef, Any]
- def memo[A](key : AnyRef, a : => A) = {
+ def memo[A](key: AnyRef, a: => A) = {
map.getOrElseUpdate(key, compute(key, a)).asInstanceOf[A]
}
- protected def compute[A](key : AnyRef, a : => A): Any = a match {
- case success : Success[_, _] => onSuccess(key, success); success
+ protected def compute[A](key: AnyRef, a: => A): Any = a match {
+ case success: Success[_, _] => onSuccess(key, success); success
case other =>
if(DefaultMemoisable.debug) println(key + " -> " + other)
other
}
- protected def onSuccess[S, T](key : AnyRef, result : Success[S, T]) {
+ protected def onSuccess[S, T](key: AnyRef, result: Success[S, T]) {
val Success(out, t) = result
if(DefaultMemoisable.debug) println(key + " -> " + t + " (" + out + ")")
}
}
-
-
-
diff --git a/src/scalap/scala/tools/scalap/rules/Result.scala b/src/scalap/scala/tools/scalap/rules/Result.scala
new file mode 100644
index 0000000000..ae05416d7a
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/rules/Result.scala
@@ -0,0 +1,69 @@
+// -----------------------------------------------------------------------------
+//
+// Scalax - The Scala Community Library
+// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
+//
+// The primary distribution site is http://scalax.scalaforge.org/
+//
+// This software is released under the terms of the Revised BSD License.
+// There is NO WARRANTY. See the file LICENSE for the full text.
+//
+// -----------------------------------------------------------------------------
+
+package scala.tools.scalap.rules;
+
+/** Represents the combined value of two rules applied in sequence.
+ *
+ * @see the Scala parser combinator
+ */
+case class ~[+A, +B](_1: A, _2: B) {
+ override def toString = "(" + _1 + " ~ " + _2 + ")"
+}
+
+
+sealed abstract class Result[+Out, +A, +X] {
+ def out: Out
+ def value: A
+ def error: X
+
+ implicit def toOption: Option[A]
+
+ def map[B](f: A => B): Result[Out, B, X]
+ def mapOut[Out2](f: Out => Out2): Result[Out2, A, X]
+ def map[Out2, B](f: (Out, A) => (Out2, B)): Result[Out2, B, X]
+ def flatMap[Out2, B](f: (Out, A) => Result[Out2, B, Nothing]): Result[Out2, B, X]
+ def orElse[Out2 >: Out, B >: A](other: => Result[Out2, B, Nothing]): Result[Out2, B, X]
+}
+
+case class Success[+Out, +A](out: Out, value: A) extends Result[Out, A, Nothing] {
+ def error = throw new ScalaSigParserError("No error")
+
+ def toOption = Some(value)
+
+ def map[B](f: A => B): Result[Out, B, Nothing] = Success(out, f(value))
+ def mapOut[Out2](f: Out => Out2): Result[Out2, A, Nothing] = Success(f(out), value)
+ def map[Out2, B](f: (Out, A) => (Out2, B)): Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
+ def flatMap[Out2, B](f: (Out, A) => Result[Out2, B, Nothing]): Result[Out2, B, Nothing]= f(out, value)
+ def orElse[Out2 >: Out, B >: A](other: => Result[Out2, B, Nothing]): Result[Out2, B, Nothing] = this
+}
+
+sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
+ def out = throw new ScalaSigParserError("No output")
+ def value = throw new ScalaSigParserError("No value")
+
+ def toOption = None
+
+ def map[B](f: Nothing => B) = this
+ def mapOut[Out2](f: Nothing => Out2) = this
+ def map[Out2, B](f: (Nothing, Nothing) => (Out2, B)) = this
+ def flatMap[Out2, B](f: (Nothing, Nothing) => Result[Out2, B, Nothing]) = this
+ def orElse[Out2, B](other: => Result[Out2, B, Nothing]) = other
+}
+
+case object Failure extends NoSuccess[Nothing] {
+ def error = throw new ScalaSigParserError("No error")
+}
+
+case class ScalaSigParserError(msg: String) extends RuntimeException(msg)
+
+case class Error[+X](error: X) extends NoSuccess[X]
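
The Result type added above is a small three-way ADT: Success carries the remaining input plus a value, Failure lets a caller try an alternative, and Error aborts. A usage sketch against that new API:

  import scala.tools.scalap.rules.{ Success, Failure, Error }

  object ResultSketch {
    def main(args: Array[String]): Unit = {
      val ok = Success("rest-of-input", 42)
      println(ok.map((n: Int) => n + 1))      // Success(rest-of-input,43)
      println(ok.toOption)                    // Some(42)
      println(ok.orElse(Success("", 0)))      // already successful: Success(rest-of-input,42)
      println(Failure.orElse(Success("", 0))) // falls through to the alternative: Success(,0)
      println(Error("boom"))                  // Error(boom)
    }
  }
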
diff --git a/src/scalap/scala/tools/scalap/rules/Rule.scala b/src/scalap/scala/tools/scalap/rules/Rule.scala
new file mode 100644
index 0000000000..0a00111f7a
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/rules/Rule.scala
@@ -0,0 +1,172 @@
+// -----------------------------------------------------------------------------
+//
+// Scalax - The Scala Community Library
+// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
+//
+// The primary distribution site is http://scalax.scalaforge.org/
+//
+// This software is released under the terms of the Revised BSD License.
+// There is NO WARRANTY. See the file LICENSE for the full text.
+//
+// -----------------------------------------------------------------------------
+
+package scala.tools.scalap.rules
+
+/** A Rule is a function from some input to a Result. The result may be:
+ * <ul>
+ * <li>Success, with a value of some type and an output that may serve as the input to subsequent rules.</li>
+ * <li>Failure. A failure may result in some alternative rule being applied.</li>
+ * <li>Error. No further rules should be attempted.</li>
+ * </ul>
+ *
+ * @author Andrew Foggin
+ *
+ * Inspired by the Scala parser combinator.
+ */
+trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
+ val factory: Rules
+ import factory._
+
+ def as(name: String) = ruleWithName(name, this)
+
+ def flatMap[Out2, B, X2 >: X](fa2ruleb: A => Out => Result[Out2, B, X2]) = mapResult {
+ case Success(out, a) => fa2ruleb(a)(out)
+ case Failure => Failure
+ case err @ Error(_) => err
+ }
+
+ def map[B](fa2b: A => B) = flatMap { a => out => Success(out, fa2b(a)) }
+
+ def filter(f: A => Boolean) = flatMap { a => out => if(f(a)) Success(out, a) else Failure }
+
+ def mapResult[Out2, B, Y](f: Result[Out, A, X] => Result[Out2, B, Y]) = rule {
+ in: In => f(apply(in))
+ }
+
+ def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]): Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
+ val factory = Rule.this.factory
+ lazy val choices = Rule.this :: other :: Nil
+ }
+
+ def orError[In2 <: In] = this orElse error[Any]
+
+ def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]) = orElse(other)
+
+ def ^^[B](fa2b: A => B) = map(fa2b)
+
+ def ^^?[B](pf: PartialFunction[A, B]) = filter (pf.isDefinedAt(_)) ^^ pf
+
+ def ??(pf: PartialFunction[A, Any]) = filter (pf.isDefinedAt(_))
+
+ def -^[B](b: B) = map { any => b }
+
+ /** Maps an Error */
+ def !^[Y](fx2y: X => Y) = mapResult {
+ case s @ Success(_, _) => s
+ case Failure => Failure
+ case Error(x) => Error(fx2y(x))
+ }
+
+ def >>[Out2, B, X2 >: X](fa2ruleb: A => Out => Result[Out2, B, X2]) = flatMap(fa2ruleb)
+
+ def >->[Out2, B, X2 >: X](fa2resultb: A => Result[Out2, B, X2]) = flatMap { a => any => fa2resultb(a) }
+
+ def >>?[Out2, B, X2 >: X](pf: PartialFunction[A, Rule[Out, Out2, B, X2]]) = filter(pf isDefinedAt _) flatMap pf
+
+ def >>&[B, X2 >: X](fa2ruleb: A => Out => Result[Any, B, X2]) = flatMap { a => out => fa2ruleb(a)(out) mapOut { any => out } }
+
+ def ~[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield new ~(a, b)
+
+ def ~-[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield a
+
+ def -~[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield b
+
+ def ~++[Out2, B >: A, X2 >: X](next: => Rule[Out, Out2, Seq[B], X2]) = for (a <- this; b <- next) yield a :: b.toList
+
+ /** Apply the result of this rule to the function returned by the next rule */
+ def ~>[Out2, B, X2 >: X](next: => Rule[Out, Out2, A => B, X2]) = for (a <- this; fa2b <- next) yield fa2b(a)
+
+ /** Apply the result of this rule to the function returned by the previous rule */
+ def <~:[InPrev, B, X2 >: X](prev: => Rule[InPrev, In, A => B, X2]) = for (fa2b <- prev; a <- this) yield fa2b(a)
+
+ def ~![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield new ~(a, b)
+
+ def ~-![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield a
+
+ def -~![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield b
+
+ def -[In2 <: In](exclude: => Rule[In2, Any, Any, Any]) = !exclude -~ this
+
+ /** ^~^(f) is equivalent to ^^ { case b1 ~ b2 => f(b1, b2) }
+ */
+ def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f: (B1, B2) => C) = map { a =>
+ (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
+ }
+
+ /** ^~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
+ */
+ def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
+ }
+
+ /** ^~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
+ */
+ def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f: (B1, B2, B3, B4) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
+ }
+
+ /** ^~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
+ */
+ def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f: (B1, B2, B3, B4, B5) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
+ }
+
+ /** ^~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
+ */
+ def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f: (B1, B2, B3, B4, B5, B6) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
+ }
+
+ /** ^~~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
+ */
+ def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f: (B1, B2, B3, B4, B5, B6, B7) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) }
+ }
+
+ /** >~>(f) is equivalent to >> { case b1 ~ b2 => f(b1, b2) }
+ */
+ def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f: (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a =>
+ (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
+ }
+
+ /** ^-^(f) is equivalent to ^^ { b2 => b1 => f(b1, b2) }
+ */
+ def ^-^ [B1, B2 >: A, C](f: (B1, B2) => C) = map { b2: B2 => b1: B1 => f(b1, b2) }
+
+ /** ^~>~^(f) is equivalent to ^^ { case b2 ~ b3 => b1 => f(b1, b2, b3) }
+ */
+ def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a =>
+ (a: B2 ~ B3) match { case b2 ~ b3 => b1: B1 => f(b1, b2, b3) }
+ }
+}
+
+
+trait Choice[-In, +Out, +A, +X] extends Rule[In, Out, A, X] {
+ def choices: List[Rule[In, Out, A, X]]
+
+ def apply(in: In) = {
+ def oneOf(list: List[Rule[In, Out, A, X]]): Result[Out, A, X] = list match {
+ case Nil => Failure
+ case first :: rest => first(in) match {
+ case Failure => oneOf(rest)
+ case result => result
+ }
+ }
+ oneOf(choices)
+ }
+
+ override def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]): Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
+ val factory = Choice.this.factory
+ lazy val choices = Choice.this.choices ::: other :: Nil
+ }
+}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/rules/Rules.scala
index 70926208b3..bdcc81c22d 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/rules/Rules.scala
@@ -11,11 +11,10 @@
// -----------------------------------------------------------------------------
package scala.tools.scalap
-package scalax
package rules
trait Name {
- def name : String
+ def name: String
override def toString = name
}
@@ -26,13 +25,18 @@ trait Name {
* Inspired by the Scala parser combinator.
*/
trait Rules {
- implicit def rule[In, Out, A, X](f : In => Result[Out, A, X]) : Rule[In, Out, A, X] = new DefaultRule(f)
- implicit def inRule[In, Out, A, X](rule : Rule[In, Out, A, X]) : InRule[In, Out, A, X] = new InRule(rule)
- implicit def seqRule[In, A, X](rule : Rule[In, In, A, X]) : SeqRule[In, A, X] = new SeqRule(rule)
+ import scala.language.implicitConversions
+ implicit def rule[In, Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X] = new DefaultRule(f)
+ implicit def inRule[In, Out, A, X](rule: Rule[In, Out, A, X]): InRule[In, Out, A, X] = new InRule(rule)
+ implicit def seqRule[In, A, X](rule: Rule[In, In, A, X]): SeqRule[In, A, X] = new SeqRule(rule)
- def from[In] = new {
- def apply[Out, A, X](f : In => Result[Out, A, X]) = rule(f)
+ trait FromRule[In] {
+ def apply[Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X]
+ }
+
+ def from[In] = new FromRule[In] {
+ def apply[Out, A, X](f: In => Result[Out, A, X]) = rule(f)
}
def state[s] = new StateRules {
@@ -40,30 +44,30 @@ trait Rules {
val factory = Rules.this
}
- def success[Out, A](out : Out, a : A) = rule { in : Any => Success(out, a) }
+ def success[Out, A](out: Out, a: A) = rule { in: Any => Success(out, a) }
- def failure = rule { in : Any => Failure }
+ def failure = rule { in: Any => Failure }
- def error[In] = rule { in : In => Error(in) }
- def error[X](err : X) = rule { in : Any => Error(err) }
+ def error[In] = rule { in: In => Error(in) }
+ def error[X](err: X) = rule { in: Any => Error(err) }
- def oneOf[In, Out, A, X](rules : Rule[In, Out, A, X] *) : Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
+ def oneOf[In, Out, A, X](rules: Rule[In, Out, A, X] *): Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
val factory = Rules.this
val choices = rules.toList
}
- def ruleWithName[In, Out, A, X](_name : String, f : In => Result[Out, A, X]) : Rule[In, Out, A, X] with Name =
+ def ruleWithName[In, Out, A, X](_name: String, f: In => Result[Out, A, X]): Rule[In, Out, A, X] with Name =
new DefaultRule(f) with Name {
val name = _name
}
- class DefaultRule[In, Out, A, X](f : In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
+ class DefaultRule[In, Out, A, X](f: In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
val factory = Rules.this
- def apply(in : In) = f(in)
+ def apply(in: In) = f(in)
}
/** Converts a rule into a function that throws an Exception on failure. */
- def expect[In, Out, A, Any](rule : Rule[In, Out, A, Any]) : In => A = (in) => rule(in) match {
+ def expect[In, Out, A, Any](rule: Rule[In, Out, A, Any]): In => A = (in) => rule(in) match {
case Success(_, a) => a
case Failure => throw new ScalaSigParserError("Unexpected failure")
case Error(x) => throw new ScalaSigParserError("Unexpected error: " + x)
@@ -82,30 +86,30 @@ trait StateRules {
type S
type Rule[+A, +X] = rules.Rule[S, S, A, X]
- val factory : Rules
+ val factory: Rules
import factory._
- def apply[A, X](f : S => Result[S, A, X]) = rule(f)
+ def apply[A, X](f: S => Result[S, A, X]) = rule(f)
- def unit[A](a : => A) = apply { s => Success(s, a) }
- def read[A](f : S => A) = apply { s => Success(s, f(s)) }
+ def unit[A](a: => A) = apply { s => Success(s, a) }
+ def read[A](f: S => A) = apply { s => Success(s, f(s)) }
def get = apply { s => Success(s, s) }
- def set(s : => S) = apply { oldS => Success(s, oldS) }
+ def set(s: => S) = apply { oldS => Success(s, oldS) }
- def update(f : S => S) = apply { s => Success(s, f(s)) }
+ def update(f: S => S) = apply { s => Success(s, f(s)) }
def nil = unit(Nil)
def none = unit(None)
 /** Create a rule that succeeds if f(in) is true. */
- def cond(f : S => Boolean) = get filter f
+ def cond(f: S => Boolean) = get filter f
/** Create a rule that succeeds if all of the given rules succeed.
@param rules the rules to apply in sequence.
*/
- def allOf[A, X](rules : Seq[Rule[A, X]]) = {
- def rep(in : S, rules : List[Rule[A, X]], results : List[A]) : Result[S, List[A], X] = {
+ def allOf[A, X](rules: Seq[Rule[A, X]]) = {
+ def rep(in: S, rules: List[Rule[A, X]], results: List[A]): Result[S, List[A], X] = {
rules match {
case Nil => Success(in, results.reverse)
case rule::tl => rule(in) match {
@@ -115,19 +119,19 @@ trait StateRules {
}
}
}
- in : S => rep(in, rules.toList, Nil)
+ in: S => rep(in, rules.toList, Nil)
}
/** Create a rule that succeeds with a list of all the provided rules that succeed.
@param rules the rules to apply in sequence.
*/
- def anyOf[A, X](rules : Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) }
+ def anyOf[A, X](rules: Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) }
/** Repeatedly apply a rule from initial value until finished condition is met. */
- def repeatUntil[T, X](rule : Rule[T => T, X])(finished : T => Boolean)(initial : T) = apply {
+ def repeatUntil[T, X](rule: Rule[T => T, X])(finished: T => Boolean)(initial: T) = apply {
// more compact using HoF but written this way so it's tail-recursive
- def rep(in : S, t : T) : Result[S, T, X] = {
+ def rep(in: S, t: T): Result[S, T, X] = {
if (finished(t)) Success(in, t)
else rule(in) match {
case Success(out, f) => rep(out, f(t)) // SI-5189 f.asInstanceOf[T => T]
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/rules/SeqRule.scala
index 51a789e041..e96a38b6be 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/rules/SeqRule.scala
@@ -10,81 +10,79 @@
//
// -----------------------------------------------------------------------------
-package scala.tools.scalap
-package scalax
-package rules
+package scala.tools.scalap.rules
/**
* A workaround for the difficulties of dealing with
* a contravariant 'In' parameter type...
*/
-class InRule[In, +Out, +A, +X](rule : Rule[In, Out, A, X]) {
+class InRule[In, +Out, +A, +X](rule: Rule[In, Out, A, X]) {
- def mapRule[Out2, B, Y](f : Result[Out, A, X] => In => Result[Out2, B, Y]) : Rule[In, Out2, B, Y] = rule.factory.rule {
- in : In => f(rule(in))(in)
+ def mapRule[Out2, B, Y](f: Result[Out, A, X] => In => Result[Out2, B, Y]): Rule[In, Out2, B, Y] = rule.factory.rule {
+ in: In => f(rule(in))(in)
}
/** Creates a rule that succeeds only if the original rule would fail on the given context. */
def unary_! : Rule[In, In, Unit, Nothing] = mapRule {
- case Success(_, _) => in : In => Failure
- case _ => in : In => Success(in, ())
+ case Success(_, _) => in: In => Failure
+ case _ => in: In => Success(in, ())
}
/** Creates a rule that succeeds if the original rule succeeds, but returns the original input. */
def & : Rule[In, In, A, X] = mapRule {
- case Success(_, a) => in : In => Success(in, a)
- case Failure => in : In => Failure
- case Error(x) => in : In => Error(x)
+ case Success(_, a) => in: In => Success(in, a)
+ case Failure => in: In => Failure
+ case Error(x) => in: In => Error(x)
}
}
-class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
+class SeqRule[S, +A, +X](rule: Rule[S, S, A, X]) {
import rule.factory._
def ? = rule mapRule {
- case Success(out, a) => in : S => Success(out, Some(a))
- case Failure => in : S => Success(in, None)
- case Error(x) => in : S => Error(x)
+ case Success(out, a) => in: S => Success(out, Some(a))
+ case Failure => in: S => Success(in, None)
+ case Error(x) => in: S => Error(x)
}
/** Creates a rule that always succeeds with a Boolean value.
* Value is 'true' if this rule succeeds, 'false' otherwise */
- def -? = ? map { _ isDefined }
+ def -? = ? map { _.isDefined }
def * = from[S] {
// tail-recursive function with reverse list accumulator
- def rep(in : S, acc : List[A]) : Result[S, List[A], X] = rule(in) match {
+ def rep(in: S, acc: List[A]): Result[S, List[A], X] = rule(in) match {
case Success(out, a) => rep(out, a :: acc)
case Failure => Success(in, acc.reverse)
- case err : Error[_] => err
+ case err: Error[_] => err
}
in => rep(in, Nil)
}
def + = rule ~++ *
- def ~>?[B >: A, X2 >: X](f : => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
+ def ~>?[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
- def ~>*[B >: A, X2 >: X](f : => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f*) yield fs.foldLeft[B](a) { (b, f) => f(b) }
+ def ~>*[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f*) yield fs.foldLeft[B](a) { (b, f) => f(b) }
- def ~*~[B >: A, X2 >: X](join : => Rule[S, S, (B, B) => B, X2]) = {
- this ~>* (for (f <- join; a <- rule) yield f(_ : B, a))
+ def ~*~[B >: A, X2 >: X](join: => Rule[S, S, (B, B) => B, X2]) = {
+ this ~>* (for (f <- join; a <- rule) yield f(_: B, a))
}
/** Repeats this rule one or more times with a separator (which is discarded) */
- def +/[X2 >: X](sep : => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
+ def +/[X2 >: X](sep: => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
/** Repeats this rule zero or more times with a separator (which is discarded) */
- def */[X2 >: X](sep : => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
+ def */[X2 >: X](sep: => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
- def *~-[Out, X2 >: X](end : => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
- def +~-[Out, X2 >: X](end : => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
+ def *~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
+ def +~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
/** Repeats this rule num times */
- def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
+ def times(num: Int): Rule[S, S, Seq[A], X] = from[S] {
val result = new scala.collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
- def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
+ def rep(i: Int, in: S): Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
else rule(in) match {
case Success(out, a) => {
@@ -92,10 +90,9 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
rep(i + 1, out)
}
case Failure => Failure
- case err : Error[_] => err
+ case err: Error[_] => err
}
}
in => rep(0, in)
}
}
-
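The optional and repetition combinators defined here (?, -?, *, +, times, and the separator forms) are what the classfile and ScalaSig parsers later in this patch are built from. A rough sketch of how they read at a use site, assuming the implicit Rule-to-SeqRule conversion supplied by the rules factory comes in with the wildcard import; every val below is hypothetical:

    import scala.language.postfixOps                      // or rely on the enabler in package.scala below
    import scala.tools.scalap.scalasig.ClassFileParser._  // brings u1, u2 and the factory implicits

    object RepetitionSketch {
      val pair       = u2 ~ u2       // two 16-bit values in sequence, as an Int ~ Int
      val maybeTag   = (u1 ?)        // zero or one byte, as an Option[Int]
      val allBytes   = (u1 *)        // zero or more bytes, as a List[Int]
      val fourShorts = u2 times 4    // exactly four entries, as a Seq[Int]
    }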
diff --git a/src/scalap/scala/tools/scalap/rules/package.scala b/src/scalap/scala/tools/scalap/rules/package.scala
new file mode 100644
index 0000000000..dcd5f7ac00
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/rules/package.scala
@@ -0,0 +1,6 @@
+package scala.tools.scalap
+
+package object rules {
+ // make some language features in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps
+}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalasig/ClassFileParser.scala
index 1a4b3456b8..9bd8402ccc 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/ClassFileParser.scala
@@ -1,18 +1,11 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+package scala.tools.scalap.scalasig
-
-import java.io.IOException
-
-import scala._
-import scala.Predef._
+import scala.tools.scalap.rules.{ Success, Failure, ~, RulesWithState }
object ByteCode {
- def apply(bytes : Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
+ def apply(bytes: Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
- def forClass(clazz : Class[_]) = {
+ def forClass(clazz: Class[_]) = {
val name = clazz.getName
val subPath = name.substring(name.lastIndexOf('.') + 1) + ".class"
val in = clazz.getResourceAsStream(subPath)
@@ -22,7 +15,7 @@ object ByteCode {
val bytes = new Array[Byte](rest)
while (rest > 0) {
val res = in.read(bytes, bytes.length - rest, rest)
- if (res == -1) throw new IOException("read error")
+ if (res == -1) throw new java.io.IOException("read error")
rest -= res
}
ByteCode(bytes)
@@ -33,19 +26,18 @@ object ByteCode {
}
}
-/** Represents a chunk of raw bytecode. Used as input for the parsers
- */
-class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
+/** Represents a chunk of raw bytecode. Used as input for the parsers. */
+class ByteCode(val bytes: Array[Byte], val pos: Int, val length: Int) {
assert(pos >= 0 && length >= 0 && pos + length <= bytes.length)
def nextByte = if (length == 0) Failure else Success(drop(1), bytes(pos))
- def next(n : Int) = if (length >= n) Success(drop(n), take(n)) else Failure
+ def next(n: Int) = if (length >= n) Success(drop(n), take(n)) else Failure
- def take(n : Int) = new ByteCode(bytes, pos, n)
- def drop(n : Int) = new ByteCode(bytes, pos + n, length - n)
+ def take(n: Int) = new ByteCode(bytes, pos, n)
+ def drop(n: Int) = new ByteCode(bytes, pos + n, length - n)
- def fold[X](x : X)(f : (X, Byte) => X) : X = {
+ def fold[X](x: X)(f: (X, Byte) => X): X = {
var result = x
var i = pos
while (i < pos + length) {
@@ -72,7 +64,7 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
StringBytesPair(str, chunk)
}
- def byte(i : Int) = bytes(pos) & 0xFF
+ def byte(i: Int) = bytes(pos) & 0xFF
}
/**
@@ -86,22 +78,22 @@ trait ByteCodeReader extends RulesWithState {
type S = ByteCode
type Parser[A] = Rule[A, String]
- val byte = apply(_ nextByte)
+ val byte = apply(_.nextByte)
val u1 = byte ^^ (_ & 0xFF)
- val u2 = bytes(2) ^^ (_ toInt)
- val u4 = bytes(4) ^^ (_ toInt) // should map to Long??
+ val u2 = bytes(2) ^^ (_.toInt)
+ val u4 = bytes(4) ^^ (_.toInt) // should map to Long??
- def bytes(n : Int) = apply(_ next n)
+ def bytes(n: Int) = apply(_ next n)
}
object ClassFileParser extends ByteCodeReader {
- def parse(byteCode : ByteCode) = expect(classFile)(byteCode)
+ def parse(byteCode: ByteCode) = expect(classFile)(byteCode)
def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode)
val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file")
val version = u2 ~ u2 ^^ { case minor ~ major => (major, minor) }
- val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_ isFull)
+ val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_.isFull)
// NOTE currently most constants just evaluate to a string description
// TODO evaluate to useful values
@@ -169,19 +161,19 @@ object ClassFileParser extends ByteCodeReader {
val classFile = header ~ fields ~ methods ~ attributes ~- !u1 ^~~~^ ClassFile
// TODO create a useful object, not just a string
- def memberRef(description : String) = u2 ~ u2 ^^ add1 {
+ def memberRef(description: String) = u2 ~ u2 ^^ add1 {
case classRef ~ nameAndTypeRef => pool => description + ": " + pool(classRef) + ", " + pool(nameAndTypeRef)
}
- def add1[T](f : T => ConstantPool => Any)(raw : T)(pool : ConstantPool) = pool add f(raw)
- def add2[T](f : T => ConstantPool => Any)(raw : T)(pool : ConstantPool) = pool add f(raw) add { pool => "<empty>" }
+ def add1[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw)
+ def add2[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw) add { pool => "<empty>" }
}
case class ClassFile(
- header : ClassFileHeader,
- fields : Seq[Field],
- methods : Seq[Method],
- attributes : Seq[Attribute]) {
+ header: ClassFileHeader,
+ fields: Seq[Field],
+ methods: Seq[Method],
+ attributes: Seq[Attribute]) {
def majorVersion = header.major
def minorVersion = header.minor
@@ -190,14 +182,14 @@ case class ClassFile(
def superClass = constant(header.superClassIndex)
def interfaces = header.interfaces.map(constant)
- def constant(index : Int) = header.constants(index) match {
+ def constant(index: Int) = header.constants(index) match {
case StringBytesPair(str, _) => str
case z => z
}
def constantWrapped(index: Int) = header.constants(index)
- def attribute(name : String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
+ def attribute(name: String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
val RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations"
def annotations = (attributes.find(attr => constant(attr.nameIndex) == RUNTIME_VISIBLE_ANNOTATIONS)
@@ -206,23 +198,23 @@ case class ClassFile(
def annotation(name: String) = annotations.flatMap(seq => seq.find(annot => constant(annot.typeIndex) == name))
}
-case class Attribute(nameIndex : Int, byteCode : ByteCode)
-case class Field(flags : Int, nameIndex : Int, descriptorIndex : Int, attributes : Seq[Attribute])
-case class Method(flags : Int, nameIndex : Int, descriptorIndex : Int, attributes : Seq[Attribute])
+case class Attribute(nameIndex: Int, byteCode: ByteCode)
+case class Field(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
+case class Method(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
case class ClassFileHeader(
- minor : Int,
- major : Int,
- constants : ConstantPool,
- flags : Int,
- classIndex : Int,
- superClassIndex : Int,
- interfaces : Seq[Int]) {
-
- def constant(index : Int) = constants(index)
+ minor: Int,
+ major: Int,
+ constants: ConstantPool,
+ flags: Int,
+ classIndex: Int,
+ superClassIndex: Int,
+ interfaces: Seq[Int]) {
+
+ def constant(index: Int) = constants(index)
}
-case class ConstantPool(len : Int) {
+case class ConstantPool(len: Int) {
val size = len - 1
private val buffer = new scala.collection.mutable.ArrayBuffer[ConstantPool => Any]
@@ -230,7 +222,7 @@ case class ConstantPool(len : Int) {
def isFull = buffer.length >= size
- def apply(index : Int) = {
+ def apply(index: Int) = {
// Note constant pool indices are 1-based
val i = index - 1
values(i) getOrElse {
@@ -241,9 +233,8 @@ case class ConstantPool(len : Int) {
}
}
- def add(f : ConstantPool => Any) = {
+ def add(f: ConstantPool => Any) = {
buffer += f
this
}
}
-
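Taken together, ByteCode and ClassFileParser form a small standalone classfile reader. A usage sketch under the new scala.tools.scalap.scalasig package layout; the ClassFileSketch object and the choice of scala.Option as the inspected class are illustrative only:

    import scala.tools.scalap.scalasig.{ ByteCode, ClassFileParser }

    object ClassFileSketch {
      def main(args: Array[String]): Unit = {
        // Load the raw bytes of a compiled class from the classpath...
        val byteCode  = ByteCode.forClass(classOf[Option[_]])
        // ...and run the ByteCodeReader-based parser over them.
        val classFile = ClassFileParser.parse(byteCode)
        println("classfile version " + classFile.majorVersion + "." + classFile.minorVersion)
        // Attribute names are constant-pool indices; resolve them through the header's pool.
        classFile.attributes.foreach(a => println("attribute: " + classFile.constant(a.nameIndex)))
      }
    }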
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala b/src/scalap/scala/tools/scalap/scalasig/Flags.scala
index 218639e4a2..b9925150d2 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/Flags.scala
@@ -1,10 +1,7 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+package scala.tools.scalap.scalasig
trait Flags {
- def hasFlag(flag : Long) : Boolean
+ def hasFlag(flag: Long): Boolean
def isImplicit = hasFlag(0x00000001)
def isFinal = hasFlag(0x00000002)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalasig/ScalaSig.scala
index fd70e0de35..311e4acd6f 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/ScalaSig.scala
@@ -5,24 +5,25 @@
**
*/
+package scala.tools.scalap.scalasig
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+import scala.language.implicitConversions
-import ClassFileParser.{ ConstValueIndex, Annotation }
import scala.reflect.internal.pickling.ByteCodecs
+import scala.tools.scalap.Main
+import scala.tools.scalap.rules._
+
+import ClassFileParser.{ ConstValueIndex, Annotation }
+
object ScalaSigParser {
- import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
def scalaSigFromAnnotation(classFile: ClassFile): Option[ScalaSig] = {
import classFile._
- classFile.annotation(SCALA_SIG_ANNOTATION) map {
+ classFile.annotation(Main.SCALA_SIG_ANNOTATION) map {
case Annotation(_, elements) =>
- val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == BYTES_VALUE).get
+ val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == Main.BYTES_VALUE).get
val bytes = ((bytesElem.elementValue match {case ConstValueIndex(index) => constantWrapped(index)})
.asInstanceOf[StringBytesPair].bytes)
val length = ByteCodecs.decode(bytes)
@@ -31,8 +32,8 @@ object ScalaSigParser {
}
}
- def scalaSigFromAttribute(classFile: ClassFile) : Option[ScalaSig] =
- classFile.attribute(SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse)
+ def scalaSigFromAttribute(classFile: ClassFile): Option[ScalaSig] =
+ classFile.attribute(Main.SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse)
def parse(classFile: ClassFile): Option[ScalaSig] = {
val scalaSig = scalaSigFromAttribute(classFile)
@@ -45,7 +46,7 @@ object ScalaSigParser {
}
}
- def parse(clazz : Class[_]): Option[ScalaSig] = {
+ def parse(clazz: Class[_]): Option[ScalaSig] = {
val byteCode = ByteCode.forClass(clazz)
val classFile = ClassFileParser.parse(byteCode)
@@ -54,10 +55,10 @@ object ScalaSigParser {
}
object ScalaSigAttributeParsers extends ByteCodeReader {
- def parse(byteCode : ByteCode) = expect(scalaSig)(byteCode)
+ def parse(byteCode: ByteCode) = expect(scalaSig)(byteCode)
val nat = apply {
- def natN(in : ByteCode, x : Int) : Result[ByteCode, Int, Nothing] = in.nextByte match {
+ def natN(in: ByteCode, x: Int): Result[ByteCode, Int, Nothing] = in.nextByte match {
case Success(out, b) => {
val y = (x << 7) + (b & 0x7f)
if ((b & 0x80) == 0) Success(out, y) else natN(out, y)
@@ -76,33 +77,33 @@ object ScalaSigAttributeParsers extends ByteCodeReader {
val longValue = read(_ toLong)
}
-case class ScalaSig(majorVersion : Int, minorVersion : Int, table : Seq[Int ~ ByteCode]) extends DefaultMemoisable {
+case class ScalaSig(majorVersion: Int, minorVersion: Int, table: Seq[Int ~ ByteCode]) extends DefaultMemoisable {
- case class Entry(index : Int, entryType : Int, byteCode : ByteCode) extends DefaultMemoisable {
+ case class Entry(index: Int, entryType: Int, byteCode: ByteCode) extends DefaultMemoisable {
def scalaSig = ScalaSig.this
- def setByteCode(byteCode : ByteCode) = Entry(index, entryType, byteCode)
+ def setByteCode(byteCode: ByteCode) = Entry(index, entryType, byteCode)
}
- def hasEntry(index : Int) = table isDefinedAt index
+ def hasEntry(index: Int) = table isDefinedAt index
- def getEntry(index : Int) = {
+ def getEntry(index: Int) = {
val entryType ~ byteCode = table(index)
Entry(index, entryType, byteCode)
}
- def parseEntry(index : Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index))
+ def parseEntry(index: Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index))
- implicit def applyRule[A](parser : ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this)
+ implicit def applyRule[A](parser: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this)
override def toString = "ScalaSig version " + majorVersion + "." + minorVersion + {
for (i <- 0 until table.size) yield i + ":\t" + parseEntry(i) // + "\n\t" + getEntry(i)
}.mkString("\n", "\n", "")
- lazy val symbols : Seq[Symbol] = ScalaSigParsers.symbols
+ lazy val symbols: Seq[Symbol] = ScalaSigParsers.symbols
- lazy val topLevelClasses : List[ClassSymbol] = ScalaSigParsers.topLevelClasses
- lazy val topLevelObjects : List[ObjectSymbol] = ScalaSigParsers.topLevelObjects
+ lazy val topLevelClasses: List[ClassSymbol] = ScalaSigParsers.topLevelClasses
+ lazy val topLevelObjects: List[ObjectSymbol] = ScalaSigParsers.topLevelObjects
}
object ScalaSigParsers extends RulesWithState with MemoisableRules {
@@ -112,14 +113,14 @@ object ScalaSigParsers extends RulesWithState with MemoisableRules {
val symTab = read(_.table)
val size = symTab ^^ (_.size)
- def entry(index : Int) = memo(("entry", index)) {
+ def entry(index: Int) = memo(("entry", index)) {
cond(_ hasEntry index) -~ read(_ getEntry index) >-> { entry => Success(entry, entry.entryType) }
}
- def parseEntry[A](parser : ScalaSigEntryParsers.EntryParser[A])(index : Int) : Parser[A] =
+ def parseEntry[A](parser: ScalaSigEntryParsers.EntryParser[A])(index: Int): Parser[A] =
entry(index) -~ parser >> { a => entry => Success(entry.scalaSig, a) }
- def allEntries[A](f : ScalaSigEntryParsers.EntryParser[A]) = size >> { n => anyOf((0 until n) map parseEntry(f)) }
+ def allEntries[A](f: ScalaSigEntryParsers.EntryParser[A]) = size >> { n => anyOf((0 until n) map parseEntry(f)) }
lazy val entries = allEntries(ScalaSigEntryParsers.entry) as "entries"
lazy val symbols = allEntries(ScalaSigEntryParsers.symbol) as "symbols"
@@ -136,20 +137,20 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
type S = ScalaSig#Entry
type EntryParser[A] = Rule[A, String]
- implicit def byteCodeEntryParser[A](rule : ScalaSigAttributeParsers.Parser[A]) : EntryParser[A] = apply { entry =>
+ implicit def byteCodeEntryParser[A](rule: ScalaSigAttributeParsers.Parser[A]): EntryParser[A] = apply { entry =>
rule(entry.byteCode) mapOut (entry setByteCode _)
}
- def toEntry[A](index : Int) = apply { sigEntry => ScalaSigParsers.entry(index)(sigEntry.scalaSig) }
+ def toEntry[A](index: Int) = apply { sigEntry => ScalaSigParsers.entry(index)(sigEntry.scalaSig) }
- def parseEntry[A](parser : EntryParser[A])(index : Int) = (toEntry(index) -~ parser)
+ def parseEntry[A](parser: EntryParser[A])(index: Int) = (toEntry(index) -~ parser)
- implicit def entryType(code : Int) = key filter (_ == code)
+ implicit def entryType(code: Int) = key filter (_ == code)
val index = read(_.index)
val key = read(_.entryType)
- lazy val entry : EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get
+ lazy val entry: EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get
val ref = byteCodeEntryParser(nat)
@@ -158,7 +159,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val name = termName | typeName as "name"
- def refTo[A](rule : EntryParser[A]) : EntryParser[A] = ref >>& parseEntry(rule)
+ def refTo[A](rule: EntryParser[A]): EntryParser[A] = ref >>& parseEntry(rule)
lazy val nameRef = refTo(name)
lazy val symbolRef = refTo(symbol)
@@ -169,7 +170,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
def symHeader(key: Int): EntryParser[Any] = (key -~ none | (key + 64) -~ nat)
- def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo
+ def symbolEntry(key: Int) = symHeader(key) -~ symbolInfo
val noSymbol = 3 -^ NoSymbol
val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
@@ -180,7 +181,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val extRef = 9 -~ nameRef ~ (symbolRef?) ~ get ^~~^ ExternalSymbol as "extRef"
val extModClassRef = 10 -~ nameRef ~ (symbolRef?) ~ get ^~~^ ExternalSymbol as "extModClassRef"
- lazy val symbol : EntryParser[Symbol] = oneOf(
+ lazy val symbol: EntryParser[Symbol] = oneOf(
noSymbol,
typeSymbol,
aliasSymbol,
@@ -195,7 +196,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val typeLevel = nat
val typeIndex = nat
- lazy val typeEntry : EntryParser[Type] = oneOf(
+ lazy val typeEntry: EntryParser[Type] = oneOf(
11 -^ NoType,
12 -^ NoPrefixType,
13 -~ symbolRef ^^ ThisType,
@@ -236,17 +237,17 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
lazy val topLevelClass = classSymbol filter isTopLevelClass
lazy val topLevelObject = objectSymbol filter isTopLevel
- def isTopLevel(symbol : Symbol) = symbol.parent match {
- case Some(ext : ExternalSymbol) => true
+ def isTopLevel(symbol: Symbol) = symbol.parent match {
+ case Some(ext: ExternalSymbol) => true
case _ => false
}
- def isTopLevelClass (symbol : Symbol) = !symbol.isModule && isTopLevel(symbol)
+ def isTopLevelClass (symbol: Symbol) = !symbol.isModule && isTopLevel(symbol)
}
- case class AttributeInfo(symbol : Symbol, typeRef : Type, value : Option[Any], values : Seq[String ~ Any]) // sym_Ref info_Ref {constant_Ref} {nameRef constantRef}
- case class Children(symbolRefs : Seq[Int]) //sym_Ref {sym_Ref}
+case class AttributeInfo(symbol: Symbol, typeRef: Type, value: Option[Any], values: Seq[String ~ Any]) // sym_Ref info_Ref {constant_Ref} {nameRef constantRef}
+case class Children(symbolRefs: Seq[Int]) //sym_Ref {sym_Ref}
- case class AnnotInfo(refs : Seq[Int]) // attarg_Ref {constant_Ref attarg_Ref}
+case class AnnotInfo(refs: Seq[Int]) // attarg_Ref {constant_Ref attarg_Ref}
/***************************************************
* | 49 TREE len_Nat 1 EMPTYtree
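With the signature parsers in place, extracting the pickled ScalaSig of a compiled Scala class takes only a few lines. A sketch against the relocated API; ScalaSigSketch and the inspected class are placeholders:

    import scala.tools.scalap.scalasig.ScalaSigParser

    object ScalaSigSketch {
      def main(args: Array[String]): Unit =
        // Looks for the pickled signature in the classfile attribute or annotation.
        ScalaSigParser.parse(classOf[Option[_]]) match {
          case Some(sig) =>
            println("ScalaSig " + sig.majorVersion + "." + sig.minorVersion)
            sig.topLevelClasses.foreach(c => println("class  " + c.path))
            sig.topLevelObjects.foreach(o => println("object " + o.path))
          case None =>
            println("no ScalaSig attribute or annotation found")
        }
    }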
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalasig/ScalaSigPrinter.scala
index e5a4ff649e..5929e0f59f 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/ScalaSigPrinter.scala
@@ -5,17 +5,14 @@
**
*/
+package scala.tools.scalap.scalasig
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+import scala.language.implicitConversions
import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
-import scala.tools.scalap.scalax.util.StringUtil
-import scala.reflect.NameTransformer
-import java.lang.String
+
+import scala.tools.scalap.rules.~
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
import stream._
@@ -136,7 +133,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
print(" {")
//Print class selftype
c.selfType match {
- case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
+ case Some(t: Type) => print("\n"); print(" this: " + toString(t) + " =>")
case None =>
}
print("\n")
@@ -186,22 +183,12 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
printWithIndent(level, "}\n")
}
- def genParamNames(t: {def paramTypes: Seq[Type]}): List[String] = t.paramTypes.toList.map(x => {
- var str = toString(x)
- val j = str.indexOf("[")
- if (j > 0) str = str.substring(0, j)
- str = StringUtil.trimStart(str, "=> ")
- val i = str.lastIndexOf(".")
- val res = if (i > 0) str.substring(i + 1) else str
- if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
- })
-
def printMethodType(t: Type, printResult: Boolean)(cont: => Unit): Unit = {
- def _pmt(mt: Type {def resultType: Type; def paramSymbols: Seq[Symbol]}) = {
+ def _pmt(mt: MethodType) = {
val paramEntries = mt.paramSymbols.map({
- case ms: MethodSymbol => ms.name + " : " + toString(ms.infoType)(TypeFlags(true))
+ case ms: MethodSymbol => ms.name + ": " + toString(ms.infoType)(TypeFlags(true))
case _ => "^___^"
})
val implicitWord = mt.paramSymbols.headOption match {
@@ -216,21 +203,21 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
mt.resultType match {
case mt: MethodType => printMethodType(mt, printResult)({})
case x => if (printResult) {
- print(" : ");
+ print(": ");
printType(x)
}
}
}
t match {
- case NullaryMethodType(resType) => if (printResult) { print(" : "); printType(resType) }
+ case NullaryMethodType(resType) => if (printResult) { print(": "); printType(resType) }
case mt@MethodType(resType, paramSymbols) => _pmt(mt)
case pt@PolyType(mt, typeParams) => {
print(typeParamString(typeParams))
printMethodType(mt, printResult)({})
}
//todo consider another method types
- case x => print(" : "); printType(x)
+ case x => print(": "); printType(x)
}
// Print rest of the symbol output
@@ -356,8 +343,8 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
}
case "scala.<byname>" => "=> " + toString(typeArgs.head)
case _ => {
- val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
- StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
+ val path = cutSubstring(symbol.path)(".package") //remove package object reference
+ trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
}
})
case TypeBoundsType(lower, upper) => {
@@ -402,7 +389,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
def typeArgString(typeArgs: Seq[Type]): String =
if (typeArgs.isEmpty) ""
- else typeArgs.map(toString).map(StringUtil.trimStart(_, "=> ")).mkString("[", ", ", "]")
+ else typeArgs.map(toString).map(trimStart(_, "=> ")).mkString("[", ", ", "]")
def typeParamString(params: Seq[Symbol]): String =
if (params.isEmpty) ""
@@ -423,7 +410,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
if (i > 0) name.substring(i + 2) else name
}
- def processName(name: String) = {
+ private def processName(name: String) = {
val stripped = stripPrivatePrefix(name)
val m = pattern.matcher(stripped)
var temp = stripped
@@ -433,7 +420,15 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
temp = temp.replaceAll(re, _syms(re))
}
val result = temp.replaceAll(placeholderPattern, "_")
- NameTransformer.decode(result)
+ scala.reflect.NameTransformer.decode(result)
}
+ private def trimStart(s: String, prefix: String) =
+ if (s != null && s.startsWith(prefix)) s.substring(prefix.length) else s
+
+ private def decapitalize(s: String) =
+ java.beans.Introspector.decapitalize(s)
+
+ private def cutSubstring(dom: String)(s: String) =
+ if (dom != null && s != null) dom.replace(s, "") else dom
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala b/src/scalap/scala/tools/scalap/scalasig/SourceFileAttributeParser.scala
index fc5a75c046..88d3d3b8b0 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/SourceFileAttributeParser.scala
@@ -1,7 +1,4 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+package scala.tools.scalap.scalasig
/**
* @author ilyas
@@ -16,13 +13,12 @@ object SourceFileAttributeParser extends ByteCodeReader {
/**
*
* SourceFile_attribute {
- u2 attribute_name_index;
- u4 attribute_length;
- u2 sourcefile_index;
+ u2 attribute_name_index;
+ u4 attribute_length;
+ u2 sourcefile_index;
}
*
* Contains only file index in ConstantPool, first two fields are already treated
* by {@link scalax.rules.scalasig.ClassFile.attribute#attribute}
*/
case class SourceFileInfo(sourceFileIndex: Int)
-
diff --git a/src/scalap/scala/tools/scalap/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalasig/Symbol.scala
new file mode 100644
index 0000000000..0656938150
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/scalasig/Symbol.scala
@@ -0,0 +1,70 @@
+package scala.tools.scalap.scalasig
+
+import ScalaSigEntryParsers._
+
+trait Symbol extends Flags {
+ def name: String
+ def parent: Option[Symbol]
+ def children: Seq[Symbol]
+
+ def path: String = parent.map(_.path + ".").getOrElse("") + name
+}
+
+case object NoSymbol extends Symbol {
+ def name = "<no symbol>"
+ def parent = None
+ def hasFlag(flag: Long) = false
+ def children = Nil
+}
+
+abstract class ScalaSigSymbol extends Symbol {
+ def applyRule[A](rule: EntryParser[A]): A = expect(rule)(entry)
+ def applyScalaSigRule[A](rule: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(rule)(entry.scalaSig)
+
+ def entry: ScalaSig#Entry
+ def index = entry.index
+
+ lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this))
+ lazy val attributes: Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this)
+}
+
+case class ExternalSymbol(name: String, parent: Option[Symbol], entry: ScalaSig#Entry) extends ScalaSigSymbol {
+ override def toString = path
+ def hasFlag(flag: Long) = false
+}
+
+case class SymbolInfo(name: String, owner: Symbol, flags: Int, privateWithin: Option[AnyRef], info: Int, entry: ScalaSig#Entry) {
+ def symbolString(any: AnyRef) = any match {
+ case sym: SymbolInfoSymbol => sym.index.toString
+ case other => other.toString
+ }
+
+ override def toString = name + ", owner=" + symbolString(owner) + ", flags=" + flags.toHexString + ", info=" + info + (privateWithin match {
+ case Some(any) => ", privateWithin=" + symbolString(any)
+ case None => " "
+ })
+}
+
+abstract class SymbolInfoSymbol extends ScalaSigSymbol {
+ def symbolInfo: SymbolInfo
+
+ def entry = symbolInfo.entry
+ def name = symbolInfo.name
+ def parent = Some(symbolInfo.owner)
+ def hasFlag(flag: Long) = (symbolInfo.flags & flag) != 0L
+
+ lazy val infoType = applyRule(parseEntry(typeEntry)(symbolInfo.info))
+}
+
+case class TypeSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol{
+ override def path = name
+}
+
+case class AliasSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol{
+ override def path = name
+}
+case class ClassSymbol(symbolInfo: SymbolInfo, thisTypeRef: Option[Int]) extends SymbolInfoSymbol {
+ lazy val selfType = thisTypeRef.map{(x: Int) => applyRule(parseEntry(typeEntry)(x))}
+}
+case class ObjectSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol
+case class MethodSymbol(symbolInfo: SymbolInfo, aliasRef: Option[Int]) extends SymbolInfoSymbol
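The Symbol hierarchy above forms a tree (via parent/children) with flag queries inherited from Flags. A small recursive walker over that tree, as a sketch; SymbolSketch and outline are hypothetical helpers:

    import scala.tools.scalap.scalasig._

    object SymbolSketch {
      // Print an indented outline of a symbol and everything nested inside it.
      def outline(sym: Symbol, indent: Int = 0): Unit = {
        val kind = sym match {
          case _: ClassSymbol  => "class"
          case _: ObjectSymbol => "object"
          case _: MethodSymbol => "def"
          case _: AliasSymbol  => "type"
          case _               => "symbol"
        }
        val mods = if (sym.isImplicit) "implicit " else ""
        println(("  " * indent) + mods + kind + " " + sym.name)
        sym.children.foreach(outline(_, indent + 1))
      }
    }

Combined with the ScalaSigParser sketch earlier, sig.topLevelClasses.foreach(c => SymbolSketch.outline(c)) prints the structure recorded in the signature.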
diff --git a/src/scalap/scala/tools/scalap/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalasig/Type.scala
new file mode 100644
index 0000000000..97dc28d223
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/scalasig/Type.scala
@@ -0,0 +1,22 @@
+package scala.tools.scalap.scalasig
+
+abstract class Type
+
+case object NoType extends Type
+case object NoPrefixType extends Type
+
+case class ThisType(symbol: Symbol) extends Type
+case class SingleType(typeRef: Type, symbol: Symbol) extends Type
+case class ConstantType(constant: Any) extends Type
+case class TypeRefType(prefix: Type, symbol: Symbol, typeArgs: Seq[Type]) extends Type
+case class TypeBoundsType(lower: Type, upper: Type) extends Type
+case class RefinedType(classSym: Symbol, typeRefs: List[Type]) extends Type
+case class ClassInfoType(symbol: Symbol, typeRefs: Seq[Type]) extends Type
+case class ClassInfoTypeWithCons(symbol: Symbol, typeRefs: Seq[Type], cons: String) extends Type
+case class MethodType(resultType: Type, paramSymbols: Seq[Symbol]) extends Type
+case class NullaryMethodType(resultType: Type) extends Type
+case class PolyType(typeRef: Type, symbols: Seq[TypeSymbol]) extends Type
+case class PolyTypeWithCons(typeRef: Type, symbols: Seq[TypeSymbol], cons: String) extends Type
+case class AnnotatedType(typeRef: Type, attribTreeRefs: List[Int]) extends Type
+case class AnnotatedWithSelfType(typeRef: Type, symbol: Symbol, attribTreeRefs: List[Int]) extends Type
+case class ExistentialType(typeRef: Type, symbols: Seq[Symbol]) extends Type
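Type is a plain ADT, so consumers can pattern-match on it directly; the full pretty-printing logic lives in ScalaSigPrinter earlier in this patch. A toy renderer covering a handful of cases, purely illustrative:

    import scala.tools.scalap.scalasig._

    object TypeSketch {
      def render(t: Type): String = t match {
        case TypeRefType(_, sym, args) =>
          sym.path + (if (args.isEmpty) "" else args.map(render).mkString("[", ", ", "]"))
        case SingleType(_, sym)         => sym.path + ".type"
        case ThisType(sym)              => sym.path + ".this.type"
        case ConstantType(c)            => "" + c
        case NullaryMethodType(res)     => "=> " + render(res)
        case MethodType(res, params)    => params.map(_.name).mkString("(", ", ", ")") + ": " + render(res)
        case PolyType(typeRef, tparams) => tparams.map(_.name).mkString("[", ", ", "]") + render(typeRef)
        case other                      => other.toString   // remaining cases: fall back to case-class toString
      }
    }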
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
deleted file mode 100644
index 17ad4bd053..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-// Scalax - The Scala Community Library
-// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-// The primary distribution site is http://scalax.scalaforge.org/
-//
-// This software is released under the terms of the Revised BSD License.
-// There is NO WARRANTY. See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules;
-
-/** Represents the combined value of two rules applied in sequence.
- *
- * @see the Scala parser combinator
- */
-case class ~[+A, +B](_1 : A, _2 : B) {
- override def toString = "(" + _1 + " ~ " + _2 + ")"
-}
-
-
-sealed abstract class Result[+Out, +A, +X] {
- def out : Out
- def value : A
- def error : X
-
- implicit def toOption : Option[A]
-
- def map[B](f : A => B) : Result[Out, B, X]
- def mapOut[Out2](f : Out => Out2) : Result[Out2, A, X]
- def map[Out2, B](f : (Out, A) => (Out2, B)) : Result[Out2, B, X]
- def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, X]
- def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, X]
-}
-
-case class Success[+Out, +A](out : Out, value : A) extends Result[Out, A, Nothing] {
- def error = throw new ScalaSigParserError("No error")
-
- def toOption = Some(value)
-
- def map[B](f : A => B) : Result[Out, B, Nothing] = Success(out, f(value))
- def mapOut[Out2](f : Out => Out2) : Result[Out2, A, Nothing] = Success(f(out), value)
- def map[Out2, B](f : (Out, A) => (Out2, B)) : Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
- def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing]= f(out, value)
- def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing] = this
-}
-
-sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
- def out = throw new ScalaSigParserError("No output")
- def value = throw new ScalaSigParserError("No value")
-
- def toOption = None
-
- def map[B](f : Nothing => B) = this
- def mapOut[Out2](f : Nothing => Out2) = this
- def map[Out2, B](f : (Nothing, Nothing) => (Out2, B)) = this
- def flatMap[Out2, B](f : (Nothing, Nothing) => Result[Out2, B, Nothing]) = this
- def orElse[Out2, B](other : => Result[Out2, B, Nothing]) = other
-}
-
-case object Failure extends NoSuccess[Nothing] {
- def error = throw new ScalaSigParserError("No error")
-}
-
-case class ScalaSigParserError(msg: String) extends RuntimeException(msg)
-
-case class Error[+X](error : X) extends NoSuccess[X] {
-}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
deleted file mode 100644
index 489a05ecd0..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-// Scalax - The Scala Community Library
-// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-// The primary distribution site is http://scalax.scalaforge.org/
-//
-// This software is released under the terms of the Revised BSD License.
-// There is NO WARRANTY. See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules
-
-/** A Rule is a function from some input to a Result. The result may be:
- * <ul>
- * <li>Success, with a value of some type and an output that may serve as the input to subsequent rules.</li>
- * <li>Failure. A failure may result in some alternative rule being applied.</li>
- * <li>Error. No further rules should be attempted.</li>
- * </ul>
- *
- * @author Andrew Foggin
- *
- * Inspired by the Scala parser combinator.
- */
-trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
- val factory : Rules
- import factory._
-
- def as(name : String) = ruleWithName(name, this)
-
- def flatMap[Out2, B, X2 >: X](fa2ruleb : A => Out => Result[Out2, B, X2]) = mapResult {
- case Success(out, a) => fa2ruleb(a)(out)
- case Failure => Failure
- case err @ Error(_) => err
- }
-
- def map[B](fa2b : A => B) = flatMap { a => out => Success(out, fa2b(a)) }
-
- def filter(f : A => Boolean) = flatMap { a => out => if(f(a)) Success(out, a) else Failure }
-
- def mapResult[Out2, B, Y](f : Result[Out, A, X] => Result[Out2, B, Y]) = rule {
- in : In => f(apply(in))
- }
-
- def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) : Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
- val factory = Rule.this.factory
- lazy val choices = Rule.this :: other :: Nil
- }
-
- def orError[In2 <: In] = this orElse error[Any]
-
- def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) = orElse(other)
-
- def ^^[B](fa2b : A => B) = map(fa2b)
-
- def ^^?[B](pf : PartialFunction[A, B]) = filter (pf.isDefinedAt(_)) ^^ pf
-
- def ??(pf : PartialFunction[A, Any]) = filter (pf.isDefinedAt(_))
-
- def -^[B](b : B) = map { any => b }
-
- /** Maps an Error */
- def !^[Y](fx2y : X => Y) = mapResult {
- case s @ Success(_, _) => s
- case Failure => Failure
- case Error(x) => Error(fx2y(x))
- }
-
- def >>[Out2, B, X2 >: X](fa2ruleb : A => Out => Result[Out2, B, X2]) = flatMap(fa2ruleb)
-
- def >->[Out2, B, X2 >: X](fa2resultb : A => Result[Out2, B, X2]) = flatMap { a => any => fa2resultb(a) }
-
- def >>?[Out2, B, X2 >: X](pf : PartialFunction[A, Rule[Out, Out2, B, X2]]) = filter(pf isDefinedAt _) flatMap pf
-
- def >>&[B, X2 >: X](fa2ruleb : A => Out => Result[Any, B, X2]) = flatMap { a => out => fa2ruleb(a)(out) mapOut { any => out } }
-
- def ~[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield new ~(a, b)
-
- def ~-[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield a
-
- def -~[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield b
-
- def ~++[Out2, B >: A, X2 >: X](next : => Rule[Out, Out2, Seq[B], X2]) = for (a <- this; b <- next) yield a :: b.toList
-
- /** Apply the result of this rule to the function returned by the next rule */
- def ~>[Out2, B, X2 >: X](next : => Rule[Out, Out2, A => B, X2]) = for (a <- this; fa2b <- next) yield fa2b(a)
-
- /** Apply the result of this rule to the function returned by the previous rule */
- def <~:[InPrev, B, X2 >: X](prev : => Rule[InPrev, In, A => B, X2]) = for (fa2b <- prev; a <- this) yield fa2b(a)
-
- def ~![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield new ~(a, b)
-
- def ~-![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield a
-
- def -~![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield b
-
- def -[In2 <: In](exclude : => Rule[In2, Any, Any, Any]) = !exclude -~ this
-
- /** ^~^(f) is equivalent to ^^ { case b1 ~ b2 => f(b1, b2) }
- */
- def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f : (B1, B2) => C) = map { a =>
- (a : B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
- }
-
- /** ^~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
- */
- def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, C](f : (B1, B2, B3) => C) = map { a =>
- (a : B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
- }
-
- /** ^~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
- */
- def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f : (B1, B2, B3, B4) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
- }
-
- /** ^~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
- */
- def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f : (B1, B2, B3, B4, B5) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
- }
-
- /** ^~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
- */
- def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f : (B1, B2, B3, B4, B5, B6) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
- }
-
- /** ^~~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
- */
- def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f : (B1, B2, B3, B4, B5, B6, B7) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) }
- }
-
- /** >~>(f) is equivalent to >> { case b1 ~ b2 => f(b1, b2) }
- */
- def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f : (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a =>
- (a : B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
- }
-
- /** ^-^(f) is equivalent to ^^ { b2 => b1 => f(b1, b2) }
- */
- def ^-^ [B1, B2 >: A, C](f : (B1, B2) => C) = map { b2 : B2 => b1 : B1 => f(b1, b2) }
-
- /** ^~>~^(f) is equivalent to ^^ { case b2 ~ b3 => b1 => f(b1, b2, b3) }
- */
- def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f : (B1, B2, B3) => C) = map { a =>
- (a : B2 ~ B3) match { case b2 ~ b3 => b1 : B1 => f(b1, b2, b3) }
- }
-}
-
-
-trait Choice[-In, +Out, +A, +X] extends Rule[In, Out, A, X] {
- def choices : List[Rule[In, Out, A, X]]
-
- def apply(in : In) = {
- def oneOf(list : List[Rule[In, Out, A, X]]) : Result[Out, A, X] = list match {
- case Nil => Failure
- case first :: rest => first(in) match {
- case Failure => oneOf(rest)
- case result => result
- }
- }
- oneOf(choices)
- }
-
- override def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) : Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
- val factory = Choice.this.factory
- lazy val choices = Choice.this.choices ::: other :: Nil
- }
-}
-
-
-
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
deleted file mode 100644
index dee1cf84ac..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
-
-import ScalaSigEntryParsers._
-
-trait Symbol extends Flags {
- def name : String
- def parent : Option[Symbol]
- def children : Seq[Symbol]
-
- def path : String = parent.map(_.path + ".").getOrElse("") + name
-}
-
-case object NoSymbol extends Symbol {
- def name = "<no symbol>"
- def parent = None
- def hasFlag(flag : Long) = false
- def children = Nil
-}
-
-abstract class ScalaSigSymbol extends Symbol {
- def applyRule[A](rule : EntryParser[A]) : A = expect(rule)(entry)
- def applyScalaSigRule[A](rule : ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(rule)(entry.scalaSig)
-
- def entry : ScalaSig#Entry
- def index = entry.index
-
- lazy val children : Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this))
- lazy val attributes : Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this)
-}
-
-case class ExternalSymbol(name : String, parent : Option[Symbol], entry : ScalaSig#Entry) extends ScalaSigSymbol {
- override def toString = path
- def hasFlag(flag : Long) = false
-}
-
-case class SymbolInfo(name : String, owner : Symbol, flags : Int, privateWithin : Option[AnyRef], info : Int, entry : ScalaSig#Entry) {
- def symbolString(any : AnyRef) = any match {
- case sym : SymbolInfoSymbol => sym.index.toString
- case other => other.toString
- }
-
- override def toString = name + ", owner=" + symbolString(owner) + ", flags=" + flags.toHexString + ", info=" + info + (privateWithin match {
- case Some(any) => ", privateWithin=" + symbolString(any)
- case None => " "
- })
-}
-
-abstract class SymbolInfoSymbol extends ScalaSigSymbol {
- def symbolInfo : SymbolInfo
-
- def entry = symbolInfo.entry
- def name = symbolInfo.name
- def parent = Some(symbolInfo.owner)
- def hasFlag(flag : Long) = (symbolInfo.flags & flag) != 0L
-
- lazy val infoType = applyRule(parseEntry(typeEntry)(symbolInfo.info))
-}
-
-case class TypeSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol{
- override def path = name
-}
-
-case class AliasSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol{
- override def path = name
-}
-case class ClassSymbol(symbolInfo : SymbolInfo, thisTypeRef : Option[Int]) extends SymbolInfoSymbol {
- lazy val selfType = thisTypeRef.map{(x: Int) => applyRule(parseEntry(typeEntry)(x))}
-}
-case class ObjectSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol
-case class MethodSymbol(symbolInfo : SymbolInfo, aliasRef : Option[Int]) extends SymbolInfoSymbol
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
deleted file mode 100644
index 0444e701f2..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
-
-abstract class Type
-
-case object NoType extends Type
-case object NoPrefixType extends Type
-
-case class ThisType(symbol : Symbol) extends Type
-case class SingleType(typeRef : Type, symbol : Symbol) extends Type
-case class ConstantType(constant : Any) extends Type
-case class TypeRefType(prefix : Type, symbol : Symbol, typeArgs : Seq[Type]) extends Type
-case class TypeBoundsType(lower : Type, upper : Type) extends Type
-case class RefinedType(classSym : Symbol, typeRefs : List[Type]) extends Type
-case class ClassInfoType(symbol : Symbol, typeRefs : Seq[Type]) extends Type
-case class ClassInfoTypeWithCons(symbol : Symbol, typeRefs : Seq[Type], cons: String) extends Type
-case class MethodType(resultType : Type, paramSymbols : Seq[Symbol]) extends Type
-case class NullaryMethodType(resultType : Type) extends Type
-case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type
-case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type
-case class AnnotatedType(typeRef : Type, attribTreeRefs : List[Int]) extends Type
-case class AnnotatedWithSelfType(typeRef : Type, symbol : Symbol, attribTreeRefs : List[Int]) extends Type
-case class ExistentialType(typeRef : Type, symbols : Seq[Symbol]) extends Type
diff --git a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala b/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala
deleted file mode 100644
index 6077eded0f..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.tools.scalap
-package scalax
-package util
-
-import java.beans.Introspector
-
-/**
- * @author ilyas
- */
-
-object StringUtil {
-
- def trimStart(s: String, prefix: String) = if (s != null && s.startsWith(prefix)) s.substring(prefix.length) else s
-
- def decapitalize(s: String) = Introspector.decapitalize(s)
-
- def cutSubstring(dom: String)(s: String) = if (dom != null && s != null) dom.replace(s, "") else dom
-
-}
diff --git a/src/xml/scala/xml/Elem.scala b/src/xml/scala/xml/Elem.scala
index 484cf98744..e9b87e516c 100755
--- a/src/xml/scala/xml/Elem.scala
+++ b/src/xml/scala/xml/Elem.scala
@@ -37,6 +37,7 @@ object Elem {
}
import scala.sys.process._
+ import scala.language.implicitConversions
/** Implicitly convert a [[scala.xml.Elem]] into a
* [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text
* elements of the element, trimming spaces, and then converting the result
diff --git a/starr.number b/starr.number
deleted file mode 100644
index d55aa7d7fc..0000000000
--- a/starr.number
+++ /dev/null
@@ -1 +0,0 @@
-starr.version=2.11.0-M4 \ No newline at end of file
diff --git a/test/build-partest.xml b/test/build-partest.xml
index 44502ffa61..22ad85ac03 100755
--- a/test/build-partest.xml
+++ b/test/build-partest.xml
@@ -7,18 +7,14 @@
<attribute name="srcdir" default="files"/> <!-- TODO: make targets for `pending` and other subdirs -->
<attribute name="colors" default="${partest.colors}"/>
<attribute name="scalacOpts" default="${scalac.args.optimise}"/>
- <attribute name="kinds" default="pos neg run jvm res scalap scalacheck specialized instrumented presentation"/>
+ <attribute name="kinds"/>
<sequential>
<property name="partest.dir" value="@{dir}" />
<partest srcdir="@{srcdir}"
kinds="@{kinds}"
colors="@{colors}"
scalacOpts="@{scalacOpts}"
- compilationpathref="partest.classpath">
- <compilationpath>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
- </partest>
+ compilationpathref="partest.compilation.path"/>
</sequential>
</macrodef>
</project>
diff --git a/test/files/ant/README b/test/files/ant/README
deleted file mode 100644
index 8cd8745970..0000000000
--- a/test/files/ant/README
+++ /dev/null
@@ -1,42 +0,0 @@
-README
-======
-
-Test cases in directory test/files/ant/ are executed by invoking an
-Ant script whose name ends with "build.xml" (eg. "fsc001-build.xml").
-
-The Scala Ant tasks fsc/scalac/scaladoc are instantiated from various
-binaries (quick/pack/latest/installed) and are executed with different
-combinations of Ant attributes/elements:
-
- +---------------------------+--------------------------+
- | Attributes | Nested elements |
-------------+---------------------------+--------------------------+
-fsc001 | srcdir,classpath (1) | compilerarg |
-fsc002 | srcref,classpathref (1) | compilerarg |
-fsc003 | (2) | compilerarg,src,include |
-------------+---------------------------+--------------------------+
-scalac001 | srcdir,classpath (1) | |
-scalac002 | srcref,classpathref (1) | |
-scalac003 | (2) | src,include |
-scalac004 | deprecation,unchecked (3) | |
-------------+---------------------------+--------------------------+
-scaladoc | srcdir,classpathref | |
-------------+---------------------------+--------------------------+
-
-Other attributes:
-(1) includes,destdir
-(2) destdir,classpathref
-(3) srcdir,includes,destdir,classpath
-
-
-The above test cases can also be run from the command prompt using one of
-the following shell commands:
-
-1) For quick/pack/latest binaries (-Dbinary=quick|pack|latest)
-
-$ ant -Dbinary=quick -Dproject.dir=$HOME/workspace/scala -f scalac001-build.xml
-
-2) For installed binaries (-Dbinary=installed)
-
-$ ant -Dbinary=installed -Dinstalled.dir=/opt/scala -f scalac001-build.xml
-
diff --git a/test/files/ant/fsc001-build.check b/test/files/ant/fsc001-build.check
deleted file mode 100644
index b5141f587b..0000000000
--- a/test/files/ant/fsc001-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc001-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc001-ant.obj
diff --git a/test/files/ant/fsc001-build.xml b/test/files/ant/fsc001-build.xml
deleted file mode 100644
index 0130f3615c..0000000000
--- a/test/files/ant/fsc001-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc001" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <pathconvert property="classpath" refid="build.classpath"/>
- <fsc
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpath="${classpath}">
- </fsc>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc001.scala b/test/files/ant/fsc001.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/fsc001.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/fsc002-build.check b/test/files/ant/fsc002-build.check
deleted file mode 100644
index 0c9c30dbfa..0000000000
--- a/test/files/ant/fsc002-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc002-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc002-ant.obj
diff --git a/test/files/ant/fsc002-build.xml b/test/files/ant/fsc002-build.xml
deleted file mode 100644
index db91070fa1..0000000000
--- a/test/files/ant/fsc002-build.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc002" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <path id="source.ref">
- <pathelement location="${source.dir}"/>
- </path>
- <fsc
- srcref="source.ref"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpathref="build.classpath">
- </fsc>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc002.scala b/test/files/ant/fsc002.scala
deleted file mode 100644
index 47131daac6..0000000000
--- a/test/files/ant/fsc002.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]): Unit =
- Console.println(args.toList)
-}
diff --git a/test/files/ant/fsc003-build.check b/test/files/ant/fsc003-build.check
deleted file mode 100644
index c8c9ed857e..0000000000
--- a/test/files/ant/fsc003-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc003-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc003-ant.obj
diff --git a/test/files/ant/fsc003-build.xml b/test/files/ant/fsc003-build.xml
deleted file mode 100644
index 5f71770bf2..0000000000
--- a/test/files/ant/fsc003-build.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc003" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <fsc
- destdir="${build.dir}"
- classpathref="build.classpath">
- <src path="${source.dir}"/>
- <include name="**/${ant.project.name}*.scala"/>
- </fsc>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc003.scala b/test/files/ant/fsc003.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/fsc003.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/imported.xml b/test/files/ant/imported.xml
deleted file mode 100644
index 182c80aadf..0000000000
--- a/test/files/ant/imported.xml
+++ /dev/null
@@ -1,150 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="imported">
-
- <!-- This file is imported by the main Ant script. -->
-
- <!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
- <property name="source.dir" value="${basedir}"/>
-
- <property file="${basedir}/build.properties"/>
-
- <property name="build.dir" location="${source.dir}/${ant.project.name}-ant.obj"/>
- <property name="log.dir" location="${source.dir}"/>
- <property name="log.file" value="${log.dir}/${ant.project.name}-build-ant.log"/>
- <property name="project.dir" value="../../.."/>
-
- <condition property="quick.binary">
- <equals arg1="${binary}" arg2="quick"/>
- </condition>
- <condition property="pack.binary">
- <equals arg1="${binary}" arg2="pack"/>
- </condition>
- <condition property="latest.binary">
- <equals arg1="${binary}" arg2="latest"/>
- </condition>
- <condition property="installed.binary">
- <equals arg1="${binary}" arg2="installed"/>
- </condition>
-
- <fail message="Property 'binary' must be set to either 'quick', 'pack', 'latest' or 'installed'.">
- <condition><not><or>
- <isset property="quick.binary"/>
- <isset property="pack.binary"/>
- <isset property="latest.binary"/>
- <isset property="installed.binary"/>
- </or></not></condition>
- </fail>
- <echo level="verbose" message="binary=${binary}"/>
- <echo level="verbose" message="build.dir=${build.dir}"/>
-
-<!-- ===========================================================================
-INITIALISATION
-============================================================================ -->
-
- <target name="quick.init" if="quick.binary">
- <property name="quick.dir" value="${project.dir}/build/quick"/>
- <fail message="Quick build could not be found.">
- <condition><not><available file="${quick.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${quick.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/classes/library/"/>
- <property name="scala-compiler.lib" value="${scala.dir}/classes/compiler/"/>
- </target>
-
- <target name="pack.init" if="pack.binary">
- <property name="pack.dir" value="${project.dir}/build/pack"/>
- <fail message="Pack build could not be found.">
- <condition><not><available file="${pack.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${pack.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- </target>
-
- <target name="latest.init" if="latest.binary">
- <property name="latest.dir" value="${project.dir}/dists/latest"/>
- <fail message="Latest build could not be found.">
- <condition><not><available file="${latest.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${latest.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- </target>
-
- <target name="installed.init" if="installed.binary">
- <property name="installed.dir" value="/opt/scala"/>
- <fail message="Installed distribution could not be found.">
- <condition><not><available file="${installed.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${installed.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- </target>
-
- <target name="init" depends="quick.init, pack.init, latest.init, installed.init">
- <echo level="verbose" message="scala.dir=${scala.dir}"/>
-
- <path id="scala.classpath">
- <pathelement location="${scala-library.lib}"/>
- <pathelement location="${scala-compiler.lib}"/>
- </path>
-
- <fail message="Scala library '${scala-library.lib}' or '${scala-compiler.lib}' is missing/broken">
- <condition><not><and>
- <available classname="scala.Predef"
- classpathref="scala.classpath"/>
- <available classname="scala.Option"
- classpathref="scala.classpath"/>
- <available classname="scala.runtime.ObjectRef"
- classpathref="scala.classpath"/>
- <available classname="scala.tools.ant.Scalac"
- classpathref="scala.classpath"/>
- <available classname="scala.tools.nsc.Main"
- classpathref="scala.classpath"/>
- <available classname="scala.tools.util.StringOps"
- classpathref="scala.classpath"/>
- </and></not></condition>
- </fail>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scala.classpath"/>
-
- <path id="build.classpath">
- <!--<pathelement location="${scala-actors.lib}"/>-->
- <pathelement location="${scala-library.lib}"/>
- <pathelement location="${build.dir}"/>
- </path>
-
- <!-- make sure the log file exists when the Ant build scripts -->
- <!-- are run manually from the command prompt -->
- <touch file="${log.file}"/>
- </target>
-
-<!-- ===========================================================================
-RUN
-============================================================================ -->
-
- <target name="run" depends="build, clean"/>
-
-<!-- ===========================================================================
-CLEAN
-============================================================================ -->
-
- <macrodef name="remove">
- <attribute name="dir"/>
- <sequential>
- <delete dir="@{dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </sequential>
- </macrodef>
-
- <target name="clean">
- <remove dir="${build.dir}"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac001-build.check b/test/files/ant/scalac001-build.check
deleted file mode 100644
index 05a43ba572..0000000000
--- a/test/files/ant/scalac001-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac001-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac001-ant.obj
diff --git a/test/files/ant/scalac001-build.xml b/test/files/ant/scalac001-build.xml
deleted file mode 100644
index 4ec7fc833c..0000000000
--- a/test/files/ant/scalac001-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac001" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <pathconvert property="classpath" refid="build.classpath"/>
- <scalac
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpath="${classpath}"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac001.scala b/test/files/ant/scalac001.scala
deleted file mode 100644
index 47131daac6..0000000000
--- a/test/files/ant/scalac001.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]): Unit =
- Console.println(args.toList)
-}
diff --git a/test/files/ant/scalac002-build.check b/test/files/ant/scalac002-build.check
deleted file mode 100644
index e7b3670a0c..0000000000
--- a/test/files/ant/scalac002-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac002-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac002-ant.obj
diff --git a/test/files/ant/scalac002-build.xml b/test/files/ant/scalac002-build.xml
deleted file mode 100644
index 07628afa64..0000000000
--- a/test/files/ant/scalac002-build.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac002" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <path id="source.ref">
- <pathelement location="${source.dir}"/>
- </path>
- <scalac
- srcref="source.ref"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac002.scala b/test/files/ant/scalac002.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/scalac002.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/scalac003-build.check b/test/files/ant/scalac003-build.check
deleted file mode 100644
index 7b0d3367ed..0000000000
--- a/test/files/ant/scalac003-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac003-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac003-ant.obj
diff --git a/test/files/ant/scalac003-build.xml b/test/files/ant/scalac003-build.xml
deleted file mode 100644
index 1d70aa115e..0000000000
--- a/test/files/ant/scalac003-build.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac003" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scalac
- destdir="${build.dir}"
- classpathref="build.classpath">
- <src path="${source.dir}"/>
- <include name="**/${ant.project.name}*.scala"/>
- </scalac>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac003.scala b/test/files/ant/scalac003.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/scalac003.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/scalac004-build.check b/test/files/ant/scalac004-build.check
deleted file mode 100644
index ffe9e8c79a..0000000000
--- a/test/files/ant/scalac004-build.check
+++ /dev/null
@@ -1,24 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac004-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac004-ant.obj
- [scalac] [...]/files/ant/scalac004.scala:9: warning: method exit in object Predef is deprecated: Use sys.exit(status) instead
- [scalac] Predef.exit(0) //deprecated in 2.9.0
- [scalac] ^
- [scalac] [...]/files/ant/scalac004.scala:6: warning: match is not exhaustive!
- [scalac] missing combination Nil
- [scalac]
- [scalac] xs match { //(xs: @unchecked) match {
- [scalac] ^
- [scalac] two warnings found
- [scalac] Compile succeeded with 2 warnings; see the compiler output for details.
diff --git a/test/files/ant/scalac004-build.xml b/test/files/ant/scalac004-build.xml
deleted file mode 100644
index 66c19a39fb..0000000000
--- a/test/files/ant/scalac004-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac004" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scalac
- deprecation="yes" unchecked="yes"
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac004.scala b/test/files/ant/scalac004.scala
deleted file mode 100644
index 66b2ba7985..0000000000
--- a/test/files/ant/scalac004.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- val xs = List(1, 2, 3, 4)
- xs match { //(xs: @unchecked) match {
- case x::xs => println(x)
- }
- Predef.exit(0) //deprecated in 2.9.0
- }
-}
diff --git a/test/files/ant/scaladoc-build.check b/test/files/ant/scaladoc-build.check
deleted file mode 100644
index 1c82456ad0..0000000000
--- a/test/files/ant/scaladoc-build.check
+++ /dev/null
@@ -1,15 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scaladoc-ant.obj
- [scaladoc] Documenting 1 source file to [...]/files/ant/scaladoc-ant.obj
- [scaladoc] model contains 3 documentable templates
diff --git a/test/files/ant/scaladoc-build.xml b/test/files/ant/scaladoc-build.xml
deleted file mode 100644
index fb4dc6fe69..0000000000
--- a/test/files/ant/scaladoc-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scaladoc" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scaladoc
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- deprecation="yes" unchecked="yes"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scaladoc.scala b/test/files/ant/scaladoc.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/scaladoc.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/jvm/opt_value_class.check b/test/files/jvm/opt_value_class.check
new file mode 100644
index 0000000000..a0c18c5ca0
--- /dev/null
+++ b/test/files/jvm/opt_value_class.check
@@ -0,0 +1,2 @@
+[ok] <init> ()V public
+[ok] unapply (Ljava/lang/Object;)Ljava/lang/String; public (Ljava/lang/Object;)Ljava/lang/String;
diff --git a/test/files/jvm/opt_value_class/Value_1.scala b/test/files/jvm/opt_value_class/Value_1.scala
new file mode 100644
index 0000000000..2440609b9e
--- /dev/null
+++ b/test/files/jvm/opt_value_class/Value_1.scala
@@ -0,0 +1,28 @@
+final class Opt[+A >: Null](val value: A) extends AnyVal {
+ def get: A = value
+ def isEmpty = value == null
+}
+object Opt {
+ final val None = new Opt[Null](null)
+ def unapply[A >: Null](x: A): Opt[A] = if (x == null) None else Opt(x)
+ def empty[A >: Null] = None
+ def apply[A >: Null](value: A): Opt[A] = if (value == null) None else new Opt[A](value)
+}
+
+class ValueExtract {
+ def unapply(x: Any): Opt[String] = x match {
+ case _: String => Opt("String")
+ case _: List[_] => Opt("List")
+ case _: Int => Opt("Int")
+ case _ => Opt.None
+ }
+}
+
+class Direct {
+ def unapply(x: Any): String = x match {
+ case _: String => "String"
+ case _: List[_] => "List"
+ case _: Int => "Int"
+ case _ => null
+ }
+}
diff --git a/test/files/jvm/opt_value_class/test.scala b/test/files/jvm/opt_value_class/test.scala
new file mode 100644
index 0000000000..7aea7deb99
--- /dev/null
+++ b/test/files/jvm/opt_value_class/test.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.BytecodeTest
+
+// import scala.tools.nsc.util.JavaClassPath
+// import java.io.InputStream
+// import scala.tools.asm
+// import asm.ClassReader
+// import asm.tree.{ClassNode, InsnList}
+// import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode1 = loadClassNode("ValueExtract")
+ val classNode2 = loadClassNode("Direct")
+ sameMethodAndFieldDescriptors(classNode1, classNode2)
+ }
+}
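The new opt_value_class test only compares bytecode descriptors, checking that the value class Opt[String] erases to the same signature as a plain String result. For context, a minimal sketch (not part of the test) of how such a name-based extractor is driven by the pattern matcher, assuming the Opt and ValueExtract definitions from Value_1.scala above:

object OptUsage {
  val ValueExtract = new ValueExtract
  def describe(x: Any): String = x match {
    // The matcher calls ValueExtract.unapply(x), then isEmpty/get on the returned
    // Opt[String]; no Option is allocated because Opt is a value class.
    case ValueExtract(kind) => kind
    case _                  => "unknown"
  }
}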
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
deleted file mode 100644
index 2f15402d18..0000000000
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b6f4dbb29f0c2ec1eba682414f60d52fea84f703 *scalacheck.jar
diff --git a/test/files/neg/compile-time-only-a.check b/test/files/neg/compile-time-only-a.check
new file mode 100644
index 0000000000..1c4c72171f
--- /dev/null
+++ b/test/files/neg/compile-time-only-a.check
@@ -0,0 +1,49 @@
+compile-time-only-a.scala:9: error: C3
+@compileTimeOnly("C3") case class C3(x: Int)
+ ^
+compile-time-only-a.scala:11: error: C4
+@compileTimeOnly("C4") case class C4(x: Int)
+ ^
+compile-time-only-a.scala:16: error: C5
+ implicit class C5(val x: Int) {
+ ^
+compile-time-only-a.scala:28: error: C1
+ new C1()
+ ^
+compile-time-only-a.scala:32: error: C2
+ C2
+ ^
+compile-time-only-a.scala:34: error: C3
+ new C3(2)
+ ^
+compile-time-only-a.scala:37: error: C4
+ new C4(2)
+ ^
+compile-time-only-a.scala:41: error: C5
+ 2.ext
+ ^
+compile-time-only-a.scala:42: error: C5
+ C5(2)
+ ^
+compile-time-only-a.scala:45: error: C6.x
+ val _ = c6.x
+ ^
+compile-time-only-a.scala:46: error: C6.foo
+ c6.foo
+ ^
+compile-time-only-a.scala:48: error: C6.y
+ c6.y = c6.y
+ ^
+compile-time-only-a.scala:48: error: C6.y
+ c6.y = c6.y
+ ^
+compile-time-only-a.scala:54: error: placebo
+@placebo
+ ^
+compile-time-only-a.scala:56: error: placebo
+ @placebo def x = (2: @placebo)
+ ^
+compile-time-only-a.scala:56: error: placebo
+ @placebo def x = (2: @placebo)
+ ^
+16 errors found
diff --git a/test/files/neg/compile-time-only-a.scala b/test/files/neg/compile-time-only-a.scala
new file mode 100644
index 0000000000..43d36dfab1
--- /dev/null
+++ b/test/files/neg/compile-time-only-a.scala
@@ -0,0 +1,57 @@
+import scala.annotation.compileTimeOnly
+
+@compileTimeOnly("C1") class C1
+object C1
+
+class C2
+@compileTimeOnly("C2") object C2
+
+@compileTimeOnly("C3") case class C3(x: Int)
+
+@compileTimeOnly("C4") case class C4(x: Int)
+object C4
+
+object pkg {
+ @compileTimeOnly("C5")
+ implicit class C5(val x: Int) {
+ def ext = ???
+ }
+}
+
+class C6(@compileTimeOnly("C6.x") val x: Int) {
+ @compileTimeOnly("C6.foo") def foo = 2
+ @compileTimeOnly("C6.Foo") type Foo = Int
+ @compileTimeOnly("C6.y") var y = 3
+}
+
+object Test extends App {
+ new C1()
+ C1
+
+ new C2()
+ C2
+
+ new C3(2)
+ C3(2)
+
+ new C4(2)
+ C4(2)
+
+ import pkg._
+ 2.ext
+ C5(2)
+
+ val c6 = new C6(2)
+ val _ = c6.x
+ c6.foo
+ type Foo = c6.Foo
+ c6.y = c6.y
+}
+
+@compileTimeOnly("placebo")
+class placebo extends scala.annotation.StaticAnnotation
+
+@placebo
+class Test {
+ @placebo def x = (2: @placebo)
+} \ No newline at end of file
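The compile-time-only-a test pins down which references to @compileTimeOnly symbols are rejected. The annotation is meant for definitions that should never survive to the back end, e.g. placeholders that a macro or other compile-time rewrite must eliminate. A minimal sketch with a hypothetical helper name, assuming the same scala.annotation.compileTimeOnly import as above:

import scala.annotation.compileTimeOnly

object DslSketch {
  // Hypothetical placeholder: any reference that is still present after
  // compile-time rewriting is reported with the message below.
  @compileTimeOnly("placeholder may only be used inside a dsl { ... } block")
  def placeholder: Int = sys.error("never reached at runtime")
}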
diff --git a/test/files/neg/compile-time-only-b.check b/test/files/neg/compile-time-only-b.check
new file mode 100644
index 0000000000..8292a0ddeb
--- /dev/null
+++ b/test/files/neg/compile-time-only-b.check
@@ -0,0 +1,7 @@
+compile-time-only-b.scala:13: error: splice must be enclosed within a reify {} block
+ val ignored3 = reify(fortyTwo).splice
+ ^
+compile-time-only-b.scala:14: error: cannot use value except for signatures of macro implementations
+ val ignored4 = reify(fortyTwo).value
+ ^
+two errors found
diff --git a/test/files/neg/compile-time-only-b.scala b/test/files/neg/compile-time-only-b.scala
new file mode 100644
index 0000000000..d5568dbe67
--- /dev/null
+++ b/test/files/neg/compile-time-only-b.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ // HAHA!!!
+ // no compileTimeOnly errors here, because scalac does constant folding
+ // the type of reify(42) is Expr[42.type]
+ // therefore the type of expr.splice is 42.type, which is then constfolded
+ val expr = reify(42)
+ val ignored1 = expr.splice
+ val ignored2 = expr.value
+
+ val fortyTwo = 42
+ val ignored3 = reify(fortyTwo).splice
+ val ignored4 = reify(fortyTwo).value
+} \ No newline at end of file
diff --git a/test/files/neg/javac-error.check b/test/files/neg/javac-error.check
deleted file mode 100644
index e7d1ccc1a1..0000000000
--- a/test/files/neg/javac-error.check
+++ /dev/null
@@ -1,10 +0,0 @@
-#partest java6
-javac-error/J.java:2: method does not override or implement a method from a supertype
- @Override public void foo() { }
- ^
-1 error
-#partest java7
-javac-error/J.java:2: error: method does not override or implement a method from a supertype
- @Override public void foo() { }
- ^
-1 error
diff --git a/test/files/neg/macro-abort.check b/test/files/neg/macro-abort.check
new file mode 100644
index 0000000000..1e58add533
--- /dev/null
+++ b/test/files/neg/macro-abort.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: aborted
+ Macros.abort
+ ^
+one error found
diff --git a/test/files/neg/macro-abort/Macros_1.scala b/test/files/neg/macro-abort/Macros_1.scala
new file mode 100644
index 0000000000..676c112098
--- /dev/null
+++ b/test/files/neg/macro-abort/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ c.abort(c.enclosingPosition, "aborted")
+ }
+ def abort = macro impl
+} \ No newline at end of file
diff --git a/test/files/neg/macro-abort/Test_2.scala b/test/files/neg/macro-abort/Test_2.scala
new file mode 100644
index 0000000000..1d0a7a25dc
--- /dev/null
+++ b/test/files/neg/macro-abort/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.abort
+} \ No newline at end of file
diff --git a/test/files/neg/macro-exception.check b/test/files/neg/macro-exception.check
new file mode 100644
index 0000000000..cee8b32ebd
--- /dev/null
+++ b/test/files/neg/macro-exception.check
@@ -0,0 +1,7 @@
+Test_2.scala:2: error: exception during macro expansion:
+java.lang.Exception
+ at Macros$.impl(Macros_1.scala:6)
+
+ Macros.exception
+ ^
+one error found
diff --git a/test/files/neg/macro-exception/Macros_1.scala b/test/files/neg/macro-exception/Macros_1.scala
new file mode 100644
index 0000000000..60e4020aec
--- /dev/null
+++ b/test/files/neg/macro-exception/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ throw new Exception()
+ }
+ def exception = macro impl
+} \ No newline at end of file
diff --git a/test/files/neg/macro-exception/Test_2.scala b/test/files/neg/macro-exception/Test_2.scala
new file mode 100644
index 0000000000..d82b21f2b2
--- /dev/null
+++ b/test/files/neg/macro-exception/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.exception
+} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper.check b/test/files/neg/macro-invalidusage-presuper.check
index f63a0eef80..c0b1ec0248 100644
--- a/test/files/neg/macro-invalidusage-presuper.check
+++ b/test/files/neg/macro-invalidusage-presuper.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:3: error: only type definitions and concrete field definitions allowed in early object initialization section
+Macros_Test_2.scala:3: error: only concrete field definitions allowed in early object initialization section
class D extends { def x = macro impl } with AnyRef
^
one error found
diff --git a/test/files/neg/quasiquotes-syntax-error-position.check b/test/files/neg/quasiquotes-syntax-error-position.check
new file mode 100644
index 0000000000..3bd813b1bb
--- /dev/null
+++ b/test/files/neg/quasiquotes-syntax-error-position.check
@@ -0,0 +1,32 @@
+quasiquotes-syntax-error-position.scala:5: error: '=' expected but identifier found.
+ q"def $a f"
+ ^
+quasiquotes-syntax-error-position.scala:6: error: illegal start of simple expression
+ q"$a("
+ ^
+quasiquotes-syntax-error-position.scala:7: error: '}' expected but end of quote found.
+ q"class $t { def foo = $a"
+ ^
+quasiquotes-syntax-error-position.scala:8: error: '.' expected but splicee found.
+ q"import $t $t"
+ ^
+quasiquotes-syntax-error-position.scala:9: error: illegal start of definition
+ q"package p"
+ ^
+quasiquotes-syntax-error-position.scala:10: error: ';' expected but '@' found.
+ q"foo@$a"
+ ^
+quasiquotes-syntax-error-position.scala:11: error: case classes without a parameter list are not allowed;
+use either case objects or case classes with an explicit `()' as a parameter list.
+ q"case class A"
+ ^
+quasiquotes-syntax-error-position.scala:12: error: identifier expected but ']' found.
+ tq"$t => $t $t]"
+ ^
+quasiquotes-syntax-error-position.scala:13: error: end of quote expected but 'case' found.
+ cq"pattern => body ; case pattern2 =>"
+ ^
+quasiquotes-syntax-error-position.scala:14: error: ')' expected but end of quote found.
+ pq"$a(bar"
+ ^
+10 errors found
diff --git a/test/files/neg/quasiquotes-syntax-error-position.scala b/test/files/neg/quasiquotes-syntax-error-position.scala
new file mode 100644
index 0000000000..b97af52cfc
--- /dev/null
+++ b/test/files/neg/quasiquotes-syntax-error-position.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+object test extends App {
+ val a = TermName("a")
+ val t = TypeName("t")
+ q"def $a f"
+ q"$a("
+ q"class $t { def foo = $a"
+ q"import $t $t"
+ q"package p"
+ q"foo@$a"
+ q"case class A"
+ tq"$t => $t $t]"
+ cq"pattern => body ; case pattern2 =>"
+ pq"$a(bar"
+} \ No newline at end of file
diff --git a/test/files/neg/t1980.check b/test/files/neg/t1980.check
new file mode 100644
index 0000000000..2fa27fa462
--- /dev/null
+++ b/test/files/neg/t1980.check
@@ -0,0 +1,12 @@
+t1980.scala:2: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.
+ def op1_:(x: => Any) = () // warn
+ ^
+t1980.scala:3: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.
+ def op2_:(x: Any, y: => Any) = () // warn
+ ^
+t1980.scala:4: warning: by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.
+ def op3_:(x: Any, y: => Any)(a: Any) = () // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t1980.flags b/test/files/neg/t1980.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/neg/t1980.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/t1980.scala b/test/files/neg/t1980.scala
new file mode 100644
index 0000000000..132865e694
--- /dev/null
+++ b/test/files/neg/t1980.scala
@@ -0,0 +1,9 @@
+object Test {
+ def op1_:(x: => Any) = () // warn
+ def op2_:(x: Any, y: => Any) = () // warn
+ def op3_:(x: Any, y: => Any)(a: Any) = () // warn
+
+ def op4() = () // no warn
+ def op5(x: => Any) = () // no warn
+ def op6_:(x: Any)(a: => Any) = () // no warn
+}
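The t1980 check warns because a right-associative call such as `a op1_: r` evaluates the left operand before entering `r.op1_:(...)`, so a by-name parameter cannot defer it. A sketch of the effect the -Xlint warning points at, with hypothetical names:

object Si1980Sketch {
  class Receiver { def op1_:(x: => Any) = () }
  def sideEffect(): Int = { println("evaluated eagerly"); 1 }

  val r = new Receiver
  // Right-associative desugaring evaluates the left operand first and only then
  // calls r.op1_:(...), so the by-name parameter does not delay the side effect.
  sideEffect() op1_: r
}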
diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check
index 4456a7fc19..22ee35a7e6 100644
--- a/test/files/neg/t2796.check
+++ b/test/files/neg/t2796.check
@@ -1,6 +1,9 @@
+t2796.scala:11: warning: early type members are deprecated. Move them to the regular body: the semantics are the same.
+ type X = Int // warn
+ ^
t2796.scala:7: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
val abstractVal = "T1.abstractVal" // warn
^
error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
+two warnings found
one error found
diff --git a/test/files/neg/t2796.flags b/test/files/neg/t2796.flags
index e8fb65d50c..d1b831ea87 100644
--- a/test/files/neg/t2796.flags
+++ b/test/files/neg/t2796.flags
@@ -1 +1 @@
--Xfatal-warnings \ No newline at end of file
+-deprecation -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t2796.scala b/test/files/neg/t2796.scala
index 3bcc9df562..fa2f2358b9 100644
--- a/test/files/neg/t2796.scala
+++ b/test/files/neg/t2796.scala
@@ -8,10 +8,9 @@ trait T1 extends {
} with Base
trait T2 extends {
- type X = Int // okay
+ type X = Int // warn
} with Base
-
class C1 extends {
val abstractVal = "C1.abstractVal" // okay
} with Base
diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check
index 0f2fe6f2d1..95b88a6b3d 100644
--- a/test/files/neg/t4425.check
+++ b/test/files/neg/t4425.check
@@ -1,4 +1,13 @@
-t4425.scala:3: error: isInstanceOf cannot test if value types are references.
+t4425.scala:3: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: Int)(y: Option[Int]): None.type exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
42 match { case _ X _ => () }
^
-one error found
+t4425.scala:8: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: Int)(y: Int): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ 42 match { case _ X _ => () }
+ ^
+t4425.scala:13: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ "" match { case _ X _ => () }
+ ^
+three errors found
diff --git a/test/files/neg/t4425.scala b/test/files/neg/t4425.scala
index d8cc6922f7..1714955c27 100644
--- a/test/files/neg/t4425.scala
+++ b/test/files/neg/t4425.scala
@@ -2,3 +2,13 @@ object Foo {
object X { def unapply(x : Int)(y : Option[Int] = None) = None }
42 match { case _ X _ => () }
}
+
+object Foo2 {
+ object X { def unapply(x : Int)(y: Int) = Some((2,2)) }
+ 42 match { case _ X _ => () }
+}
+
+object Foo3 {
+ object X { def unapply(x : String)(y: String) = Some((2,2)) }
+ "" match { case _ X _ => () }
+} \ No newline at end of file
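The new t4425 errors reject unapply methods that take a second non-implicit parameter list. For contrast, a sketch (illustrative only) of the single-parameter-list shape that does work as an extractor in the same infix match:

object Foo4 {
  // One parameter list: usable as an extractor, unlike the curried variants above.
  object X { def unapply(x: Int): Option[(Int, Int)] = Some((x, x)) }
  42 match { case a X b => (a, b) }
}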
diff --git a/test/files/neg/t4425b.check b/test/files/neg/t4425b.check
new file mode 100644
index 0000000000..1186e8b609
--- /dev/null
+++ b/test/files/neg/t4425b.check
@@ -0,0 +1,61 @@
+t4425b.scala:5: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:6: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:7: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:8: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:9: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:10: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:18: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:19: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:22: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:22: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:23: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:23: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:31: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:32: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:35: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:35: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:36: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:36: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+18 errors found
diff --git a/test/files/neg/t4425b.scala b/test/files/neg/t4425b.scala
new file mode 100644
index 0000000000..861e9521f6
--- /dev/null
+++ b/test/files/neg/t4425b.scala
@@ -0,0 +1,38 @@
+object Test1 {
+ object X { def unapply(x : String)(y: String) = throw new Exception }
+
+ def f1() {
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ }
+}
+
+object Test2 {
+ object X { def unapply(x : String) = throw new Exception }
+
+ def f1() {
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ }
+}
+
+object Test3 {
+ object X { def unapply(x : String) = None }
+
+ def f1() {
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ }
+}
diff --git a/test/files/neg/t5903a.check b/test/files/neg/t5903a.check
new file mode 100644
index 0000000000..cbdcfd1bdd
--- /dev/null
+++ b/test/files/neg/t5903a.check
@@ -0,0 +1,7 @@
+Test_2.scala:4: error: wrong number of patterns for <$anon: AnyRef> offering (SomeTree.type, SomeTree.type): expected 2, found 3
+ case nq"$x + $y + $z" => println((x, y))
+ ^
+Test_2.scala:4: error: not found: value x
+ case nq"$x + $y + $z" => println((x, y))
+ ^
+two errors found
diff --git a/test/files/neg/t5903a/Macros_1.scala b/test/files/neg/t5903a/Macros_1.scala
new file mode 100644
index 0000000000..e82be0fc68
--- /dev/null
+++ b/test/files/neg/t5903a/Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+ implicit class QuasiquoteInterpolation(c: StringContext) {
+ object nq {
+ def unapply(t: Tree) = macro QuasiquoteMacros.unapplyImpl
+ }
+ }
+}
+
+object QuasiquoteMacros {
+ def unapplyImpl(c: Context)(t: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = this
+ def _1 = SomeTree
+ def _2 = SomeTree
+ def unapply(t: Tree) = this
+ }.unapply($t)
+ """
+ }
+}
diff --git a/test/files/neg/t5903a/Test_2.scala b/test/files/neg/t5903a/Test_2.scala
new file mode 100644
index 0000000000..4d78dfb5e5
--- /dev/null
+++ b/test/files/neg/t5903a/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import NewQuasiquotes._
+ SomeTree match {
+ case nq"$x + $y + $z" => println((x, y))
+ }
+}
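The t5903a macro expands nq"..." into an anonymous name-based extractor exposing _1 and _2, which is why matching three splicees fails with the arity error in the check file. A hand-written sketch of the same protocol without the macro layer, reusing Tree and SomeTree from Macros_1.scala above:

object NameBasedSketch {
  object Pair {
    // isEmpty/get plus _1/_2 make this usable with exactly two sub-patterns.
    class Res { def isEmpty = false; def get = this; def _1 = SomeTree; def _2 = SomeTree }
    def unapply(t: Tree): Res = new Res
  }
  SomeTree match { case Pair(x, y) => (x, y) }
}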
diff --git a/test/files/neg/t5903b.check b/test/files/neg/t5903b.check
new file mode 100644
index 0000000000..faeb73ad03
--- /dev/null
+++ b/test/files/neg/t5903b.check
@@ -0,0 +1,9 @@
+Test_2.scala:4: error: type mismatch;
+ found : Int
+ required: String
+ case t"$x" => println(x)
+ ^
+Test_2.scala:4: error: not found: value x
+ case t"$x" => println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t5903b/Macros_1.scala b/test/files/neg/t5903b/Macros_1.scala
new file mode 100644
index 0000000000..b1b875969d
--- /dev/null
+++ b/test/files/neg/t5903b/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = "2"
+ def unapply(x: String) = this
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/neg/t5903b/Test_2.scala b/test/files/neg/t5903b/Test_2.scala
new file mode 100644
index 0000000000..0f6f80d327
--- /dev/null
+++ b/test/files/neg/t5903b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 2 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t5903c.check b/test/files/neg/t5903c.check
new file mode 100644
index 0000000000..c9476edd11
--- /dev/null
+++ b/test/files/neg/t5903c.check
@@ -0,0 +1,7 @@
+Test_2.scala:4: error: String is not supported
+ case t"$x" => println(x)
+ ^
+Test_2.scala:4: error: not found: value x
+ case t"$x" => println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t5903c/Macros_1.scala b/test/files/neg/t5903c/Macros_1.scala
new file mode 100644
index 0000000000..70efab3101
--- /dev/null
+++ b/test/files/neg/t5903c/Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ if (!(c.weakTypeOf[Int] =:= c.weakTypeOf[T])) c.abort(c.enclosingPosition, s"${c.weakTypeOf[T]} is not supported")
+ else {
+ q"""
+ new {
+ def isEmpty = false
+ def get = 2
+ def unapply(x: Int) = this
+ }.unapply($x)
+ """
+ }
+ }
+}
diff --git a/test/files/neg/t5903c/Test_2.scala b/test/files/neg/t5903c/Test_2.scala
new file mode 100644
index 0000000000..a1fd31dd49
--- /dev/null
+++ b/test/files/neg/t5903c/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ "2" match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t5903d.check b/test/files/neg/t5903d.check
new file mode 100644
index 0000000000..d5d3fdcc28
--- /dev/null
+++ b/test/files/neg/t5903d.check
@@ -0,0 +1,7 @@
+Test_2.scala:4: error: extractor macros can only expand into extractor calls
+ case t"$x" => println(x)
+ ^
+Test_2.scala:4: error: not found: value x
+ case t"$x" => println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t5903d/Macros_1.scala b/test/files/neg/t5903d/Macros_1.scala
new file mode 100644
index 0000000000..15ff226cff
--- /dev/null
+++ b/test/files/neg/t5903d/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply(x: Int) = macro Macros.unapplyImpl
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ class Match(x: Int) {
+ def isEmpty = false
+ def get = x
+ }
+ new { def unapply(x: Int) = new Match(x) }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/neg/t5903d/Test_2.scala b/test/files/neg/t5903d/Test_2.scala
new file mode 100644
index 0000000000..95c717a9d8
--- /dev/null
+++ b/test/files/neg/t5903d/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 42 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t5903e.check b/test/files/neg/t5903e.check
new file mode 100644
index 0000000000..3bdeb091a0
--- /dev/null
+++ b/test/files/neg/t5903e.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: value class may not be a member of another class
+ case t"$x" => println(x)
+ ^
+one error found
diff --git a/test/files/neg/t5903e/Macros_1.scala b/test/files/neg/t5903e/Macros_1.scala
new file mode 100644
index 0000000000..4e1ce89c9f
--- /dev/null
+++ b/test/files/neg/t5903e/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply(x: Int) = macro Macros.unapplyImpl
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ class Match(x: Int) extends AnyVal {
+ def isEmpty = false
+ def get = x
+ }
+ def unapply(x: Int) = new Match(x)
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/neg/t5903e/Test_2.scala b/test/files/neg/t5903e/Test_2.scala
new file mode 100644
index 0000000000..d69d472436
--- /dev/null
+++ b/test/files/neg/t5903e/Test_2.scala
@@ -0,0 +1,6 @@
+class C {
+ import Interpolation._
+ 42 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t6289.check b/test/files/neg/t6289.check
new file mode 100644
index 0000000000..f6f43cabd3
--- /dev/null
+++ b/test/files/neg/t6289.check
@@ -0,0 +1,10 @@
+#partest java6
+t6289/J.java:2: method does not override or implement a method from a supertype
+ @Override public void foo() { }
+ ^
+1 error
+#partest java7
+t6289/J.java:2: error: method does not override or implement a method from a supertype
+ @Override public void foo() { }
+ ^
+1 error
diff --git a/test/files/neg/javac-error.flags b/test/files/neg/t6289.flags
index 85d8eb2ba2..85d8eb2ba2 100644
--- a/test/files/neg/javac-error.flags
+++ b/test/files/neg/t6289.flags
diff --git a/test/files/neg/javac-error/J.java b/test/files/neg/t6289/J.java
index 83f50c9ae2..83f50c9ae2 100644
--- a/test/files/neg/javac-error/J.java
+++ b/test/files/neg/t6289/J.java
diff --git a/test/files/neg/javac-error/SUT_5.scala b/test/files/neg/t6289/SUT_5.scala
index 0a996352c0..0a996352c0 100644
--- a/test/files/neg/javac-error/SUT_5.scala
+++ b/test/files/neg/t6289/SUT_5.scala
diff --git a/test/files/neg/t6675.check b/test/files/neg/t6675.check
index 3a277af866..aecf04cb68 100644
--- a/test/files/neg/t6675.check
+++ b/test/files/neg/t6675.check
@@ -1,4 +1,4 @@
-t6675.scala:10: warning: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
+t6675.scala:10: warning: object X expects 3 patterns to hold (Int, Int, Int) but crushing into 3-tuple to fit single pattern (SI-6675)
"" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t7020.check b/test/files/neg/t7020.check
new file mode 100644
index 0000000000..f9600ca7fc
--- /dev/null
+++ b/test/files/neg/t7020.check
@@ -0,0 +1,19 @@
+t7020.scala:3: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:10: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:17: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:24: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t7020.flags b/test/files/neg/t7020.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7020.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7020.scala b/test/files/neg/t7020.scala
new file mode 100644
index 0000000000..cc5421bab1
--- /dev/null
+++ b/test/files/neg/t7020.scala
@@ -0,0 +1,30 @@
+object Test {
+ // warning was non-deterministic
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+}
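t7020 fixes the previously non-deterministic exhaustivity warning: the match covers 1, 2, 4, 5, 6, 7 and Nil but not the other list shapes listed in the check file. For contrast (not part of the test), a sketch of one way the warning would be silenced:

object ExhaustiveSketch {
  List(5) match {
    case 1 :: Nil | 2 :: Nil      =>
    case (x @ (4 | 5 | 6)) :: Nil =>
    case 7 :: Nil                 =>
    case Nil                      =>
    case _                        => // catch-all covers the inputs named in the warning
  }
}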
diff --git a/test/files/neg/t7214neg.check b/test/files/neg/t7214neg.check
new file mode 100644
index 0000000000..0660cccd02
--- /dev/null
+++ b/test/files/neg/t7214neg.check
@@ -0,0 +1,7 @@
+t7214neg.scala:28: error: wrong number of patterns for object Extractor offering Any: expected 1, found 0
+ case Extractor() =>
+ ^
+t7214neg.scala:28: error: wrong number of patterns for object Extractor offering Any: expected 1, found 0
+ case Extractor() =>
+ ^
+two errors found
diff --git a/test/files/neg/t7214neg.scala b/test/files/neg/t7214neg.scala
new file mode 100644
index 0000000000..ff1ea8082d
--- /dev/null
+++ b/test/files/neg/t7214neg.scala
@@ -0,0 +1,57 @@
+// pattern matcher crashes here trying to synthesize an uneeded outer test.
+// no-symbol does not have an owner
+// at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:49)
+// at scala.tools.nsc.Global.abort(Global.scala:253)
+// at scala.reflect.internal.Symbols$NoSymbol.owner(Symbols.scala:3248)
+// at scala.reflect.internal.Symbols$Symbol.effectiveOwner(Symbols.scala:678)
+// at scala.reflect.internal.Symbols$Symbol.isDefinedInPackage(Symbols.scala:664)
+// at scala.reflect.internal.TreeGen.mkAttributedSelect(TreeGen.scala:188)
+// at scala.reflect.internal.TreeGen.mkAttributedRef(TreeGen.scala:124)
+// at scala.tools.nsc.ast.TreeDSL$CODE$.REF(TreeDSL.scala:308)
+// at scala.tools.nsc.typechecker.PatternMatching$TreeMakers$TypeTestTreeMaker$treeCondStrategy$.outerTest(PatternMatching.scala:1209)
+class Crash {
+ type Alias = C#T
+
+ val c = new C
+ val t = new c.T
+
+ // Crash via a Typed Pattern...
+ (t: Any) match {
+ case e: Alias =>
+ }
+
+ // ... or via a Typed Extractor Pattern.
+ object Extractor {
+ def unapply(a: Alias): Option[Any] = None
+ }
+ (t: Any) match {
+ case Extractor() =>
+ case _ =>
+ }
+
+ // checking that correct outer tests are applied when
+ // aliases for path dependent types are involved.
+ val c2 = new C
+ type CdotT = c.T
+ type C2dotT = c2.T
+
+ val outerField = t.getClass.getDeclaredFields.find(_.getName contains ("outer")).get
+ outerField.setAccessible(true)
+
+ (t: Any) match {
+ case _: C2dotT =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c2 = $c2") // this matches on 2.10.0
+ case _: CdotT =>
+ case _ =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c = $c")
+ }
+}
+
+class C {
+ class T
+}
+
+object Test extends App {
+ new Crash
+}
+
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
index 709ab6db3e..d2c40f4df8 100644
--- a/test/files/neg/t7325.check
+++ b/test/files/neg/t7325.check
@@ -1,19 +1,19 @@
-t7325.scala:2: error: percent signs not directly following splicees must be escaped
+t7325.scala:2: error: conversions must follow a splice; use %% for literal %, %n for newline
println(f"%")
^
-t7325.scala:4: error: percent signs not directly following splicees must be escaped
+t7325.scala:4: error: conversions must follow a splice; use %% for literal %, %n for newline
println(f"%%%")
^
-t7325.scala:6: error: percent signs not directly following splicees must be escaped
+t7325.scala:6: error: conversions must follow a splice; use %% for literal %, %n for newline
println(f"%%%%%")
^
t7325.scala:16: error: wrong conversion string
println(f"${0}%")
^
-t7325.scala:19: error: percent signs not directly following splicees must be escaped
+t7325.scala:19: error: conversions must follow a splice; use %% for literal %, %n for newline
println(f"${0}%%%d")
^
-t7325.scala:21: error: percent signs not directly following splicees must be escaped
+t7325.scala:21: error: conversions must follow a splice; use %% for literal %, %n for newline
println(f"${0}%%%%%d")
^
6 errors found
diff --git a/test/files/neg/t7501.check b/test/files/neg/t7501.check
new file mode 100644
index 0000000000..2ded07c7ed
--- /dev/null
+++ b/test/files/neg/t7501.check
@@ -0,0 +1,7 @@
+t7501_2.scala:2: error: value name is not a member of A
+ def foo(a: A) = a.name
+ ^
+t7501_2.scala:4: error: not found: type X
+ type TP = X // already failed before this fix
+ ^
+two errors found
diff --git a/test/files/neg/t7501/t7501_1.scala b/test/files/neg/t7501/t7501_1.scala
new file mode 100644
index 0000000000..323c327623
--- /dev/null
+++ b/test/files/neg/t7501/t7501_1.scala
@@ -0,0 +1,12 @@
+object Test2 {
+ def test[X](name: String) = 12
+}
+class strangeTest(x: Int) extends scala.annotation.StaticAnnotation
+
+trait A {
+ // When picking the type of `test`, the value parameter
+ // `x` was pickled with the owner `trait A`. On unpickling,
+ // it was taken to be a member!
+ @strangeTest(Test2.test("test"))
+ def test(x: String): Unit
+}
diff --git a/test/files/neg/t7501/t7501_2.scala b/test/files/neg/t7501/t7501_2.scala
new file mode 100644
index 0000000000..044caea3c3
--- /dev/null
+++ b/test/files/neg/t7501/t7501_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ def foo(a: A) = a.name
+
+ type TP = X // already failed before this fix
+}
diff --git a/test/files/neg/t7694b.check b/test/files/neg/t7694b.check
new file mode 100644
index 0000000000..ea3d7736f8
--- /dev/null
+++ b/test/files/neg/t7694b.check
@@ -0,0 +1,7 @@
+t7694b.scala:8: error: type arguments [_3,_4] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
+ def d = if (true) (null: L[A, A]) else (null: L[B, B])
+ ^
+t7694b.scala:9: error: type arguments [_1,_2] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
+ val v = if (true) (null: L[A, A]) else (null: L[B, B])
+ ^
+two errors found
diff --git a/test/files/neg/t7715.check b/test/files/neg/t7715.check
new file mode 100644
index 0000000000..4ee6b6c95d
--- /dev/null
+++ b/test/files/neg/t7715.check
@@ -0,0 +1,13 @@
+t7715.scala:8: error: error in interpolated string: identifier or block expected
+ days map s"On the $_th day of Christmas" foreach println
+ ^
+t7715.scala:10: error: error in interpolated string: identifier or block expected
+ val rf = (n: Int) => s"\\*{$_}"(n).r
+ ^
+t7715.scala:17: error: unbound placeholder parameter
+ days zip days map s"${_: Int} by ${_: Int}".tupled foreach println
+ ^
+t7715.scala:17: error: unbound placeholder parameter
+ days zip days map s"${_: Int} by ${_: Int}".tupled foreach println
+ ^
+four errors found
diff --git a/test/files/neg/t7715.scala b/test/files/neg/t7715.scala
new file mode 100644
index 0000000000..637ab8df6d
--- /dev/null
+++ b/test/files/neg/t7715.scala
@@ -0,0 +1,18 @@
+
+import PartialFunction.cond
+import util._
+
+object Test extends App {
+ val days = (1 to 12).toList
+
+ days map s"On the $_th day of Christmas" foreach println
+
+ val rf = (n: Int) => s"\\*{$_}"(n).r
+ def stars(n: Int)(s: String) = {
+ val r = rf(n)
+ cond(s) { case r(_*) => true }
+ }
+ Console println stars(5)("*****")
+
+ days zip days map s"${_: Int} by ${_: Int}".tupled foreach println
+}
diff --git a/test/files/neg/t7721.check b/test/files/neg/t7721.check
new file mode 100644
index 0000000000..e056b9a293
--- /dev/null
+++ b/test/files/neg/t7721.check
@@ -0,0 +1,21 @@
+t7721.scala:11: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Concrete => x.bippy + x.conco
+ ^
+t7721.scala:15: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+ case x: Concrete with Foo => x.bippy + x.conco
+ ^
+t7721.scala:19: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Bar => x.bippy + x.barry
+ ^
+t7721.scala:39: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Concrete => x.bippy + x.dingo + x.conco
+ ^
+t7721.scala:43: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+ case x: Concrete with Foo => x.bippy + x.dingo + x.conco
+ ^
+t7721.scala:47: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t7721.flags b/test/files/neg/t7721.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7721.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7721.scala b/test/files/neg/t7721.scala
new file mode 100644
index 0000000000..27884c9e35
--- /dev/null
+++ b/test/files/neg/t7721.scala
@@ -0,0 +1,140 @@
+import scala.language.reflectiveCalls
+
+trait A {
+ trait Concrete { def conco: Int = 1 }
+ type Foo <: { def bippy: Int }
+ type Bar <: { def barry: Int }
+
+ implicit def barTag: scala.reflect.ClassTag[Bar]
+
+ def f1(x: Any) = x match {
+ case x: Foo with Concrete => x.bippy + x.conco
+ case _ => -1
+ }
+ def f2(x: Any) = x match {
+ case x: Concrete with Foo => x.bippy + x.conco
+ case _ => -1
+ }
+ def f3(x: Any) = x match {
+ case x: Foo with Bar => x.bippy + x.barry
+ case _ => -1
+ }
+ def f4(x: Any) = x match {
+ case x: (Foo @unchecked) => x.bippy // warns, suppressed
+ case _ => -1
+ }
+ def f5(x: Any) = x match {
+ case x: (Bar @unchecked) => x.barry // warns (but about the "outer reference"), suppressed
+ case _ => -1
+ }
+}
+
+trait B extends A {
+ type Foo <: { def bippy: Int ; def dingo: Int }
+ type Bar <: { def barry: Int ; def bongo: Int }
+
+ override implicit def barTag: scala.reflect.ClassTag[Bar]
+
+ override def f1(x: Any) = x match {
+ case x: Foo with Concrete => x.bippy + x.dingo + x.conco
+ case _ => -1
+ }
+ override def f2(x: Any) = x match {
+ case x: Concrete with Foo => x.bippy + x.dingo + x.conco
+ case _ => -1
+ }
+ override def f3(x: Any) = x match {
+ case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+ case _ => -1
+ }
+ override def f4(x: Any) = x match {
+ case x: (Foo @unchecked) => x.bippy + x.dingo // warns, suppressed
+ case _ => -1
+ }
+ override def f5(x: Any) = x match {
+ case x: (Bar @unchecked) => x.barry + x.bongo // warns (but about the "outer reference"), suppressed
+ case _ => -1
+ }
+}
+
+object Test {
+ abstract class Base extends A {
+ trait Foo {
+ def bippy = 2
+ def dingo = 3
+ }
+ trait Bar {
+ def barry = 2
+ def bongo = 3
+ }
+ implicit def barTag: scala.reflect.ClassTag[Bar] = scala.reflect.ClassTag(classOf[Bar])
+
+ def run() {
+ println("f1")
+ wrap(f1(new Concrete {}))
+ wrap(f1(new Foo {}))
+ wrap(f1(new Bar {}))
+ wrap(f1(new Foo with Concrete {}))
+ wrap(f1(new Concrete with Foo {}))
+
+ println("\nf2")
+ wrap(f2(new Concrete {}))
+ wrap(f2(new Foo {}))
+ wrap(f2(new Bar {}))
+ wrap(f2(new Foo with Concrete {}))
+ wrap(f2(new Concrete with Foo {}))
+ wrap(f2(new Bar with Concrete {}))
+ wrap(f2(new Concrete with Bar {}))
+ wrap(f2(new Concrete with Foo with Bar {}))
+ wrap(f2(new Foo with Bar with Concrete {}))
+
+ println("\nf3")
+ wrap(f3(new Concrete {}))
+ wrap(f3(new Foo {}))
+ wrap(f3(new Bar {}))
+ wrap(f3(new Foo with Concrete {}))
+ wrap(f3(new Concrete with Foo {}))
+ wrap(f3(new Bar with Concrete {}))
+ wrap(f3(new Concrete with Bar {}))
+ wrap(f3(new Concrete with Foo with Bar {}))
+ wrap(f3(new Foo with Bar with Concrete {}))
+
+ println("\nf4")
+ wrap(f4(new Concrete {}))
+ wrap(f4(new Foo {}))
+ wrap(f4(new Bar {}))
+ wrap(f4(new Foo with Concrete {}))
+ wrap(f4(new Concrete with Foo {}))
+ wrap(f4(new Bar with Concrete {}))
+ wrap(f4(new Concrete with Bar {}))
+ wrap(f4(new Concrete with Foo with Bar {}))
+ wrap(f4(new Foo with Bar with Concrete {}))
+
+ println("\nf5")
+ wrap(f5(new Concrete {}))
+ wrap(f5(new Foo {}))
+ wrap(f5(new Bar {}))
+ wrap(f5(new Foo with Concrete {}))
+ wrap(f5(new Concrete with Foo {}))
+ wrap(f5(new Bar with Concrete {}))
+ wrap(f5(new Concrete with Bar {}))
+ wrap(f5(new Concrete with Foo with Bar {}))
+ wrap(f5(new Foo with Bar with Concrete {}))
+ }
+ }
+
+ object ao extends Base
+ object bo extends Base with B
+
+ private def wrap(body: => Any) {
+ try println(body)
+ catch { case ex: NoSuchMethodException => println(ex) }
+ }
+
+ def main(args: Array[String]) {
+ ao.run()
+ bo.run()
+ }
+}
+
+// java.lang.NoSuchMethodException: Test$$anon$1.bippy() \ No newline at end of file
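The warnings in the check file above come from erasure: `Foo` and `Bar` are abstract types, so a pattern like `case x: Foo with Concrete` has no runtime representation to test against unless evidence such as a `ClassTag` is in scope, which is exactly what the test supplies for `Bar` via `barTag`. A minimal sketch of that contrast, using names of our own rather than the test's:

import scala.reflect.ClassTag

object ErasureSketch {
  trait Box { type Elem }

  // Unchecked: Elem is erased, so the pattern cannot actually be verified.
  def isElem(b: Box, x: Any): Boolean = x match {
    case _: b.Elem => true    // warning: abstract type pattern is unchecked
    case _         => false
  }

  // Checked: with a ClassTag in scope the type test uses the runtime class.
  def isT[T: ClassTag](x: Any): Boolean = x match {
    case _: T => true         // no unchecked warning; the ClassTag drives the test
    case _    => false
  }
}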
diff --git a/test/files/neg/t7752.check b/test/files/neg/t7752.check
new file mode 100644
index 0000000000..0a015d3f37
--- /dev/null
+++ b/test/files/neg/t7752.check
@@ -0,0 +1,27 @@
+t7752.scala:25: error: overloaded method value foo with alternatives:
+ [A](heading: String, rows: A*)(A,) <and>
+ [A, B](heading: (String, String), rows: (A, B)*)(A, B) <and>
+ [A, B, C](heading: (String, String, String), rows: (A, B, C)*)(A, B, C) <and>
+ [A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*)(A, B, C, D) <and>
+ [A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*)(A, B, C, D, E) <and>
+ [A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*)(A, B, C, D, E, F) <and>
+ [A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*)(A, B, C, D, E, F, G) <and>
+ [A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*)(A, B, C, D, E, F, G, H) <and>
+ [A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*)(A, B, C, D, E, F, G, H, I) <and>
+ [A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*)(A, B, C, D, E, F, G, H, I, J) <and>
+ [A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*)(A, B, C, D, E, F, G, H, I, J, K) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*)(A, B, C, D, E, F, G, H, I, J, K, L) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*)(A, B, C, D, E, F, G, H, I, J, K, L, M) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)
+ cannot be applied to (Int)
+ foo((1))
+ ^
+one error found
diff --git a/test/files/neg/t7752.scala b/test/files/neg/t7752.scala
new file mode 100644
index 0000000000..40ba2103b1
--- /dev/null
+++ b/test/files/neg/t7752.scala
@@ -0,0 +1,26 @@
+object Test {
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*): Tuple22[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*): Tuple21[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*): Tuple20[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*): Tuple19[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*): Tuple18[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*): Tuple17[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*): Tuple16[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*): Tuple15[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*): Tuple14[A,B,C,D,E,F,G,H,I,J,K,L,M,N] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*): Tuple13[A,B,C,D,E,F,G,H,I,J,K,L,M] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*): Tuple12[A,B,C,D,E,F,G,H,I,J,K,L] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*): Tuple11[A,B,C,D,E,F,G,H,I,J,K] = null
+ def foo[A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*): Tuple10[A,B,C,D,E,F,G,H,I,J] = null
+ def foo[A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*): Tuple9[A,B,C,D,E,F,G,H,I] = null
+ def foo[A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*): Tuple8[A,B,C,D,E,F,G,H] = null
+ def foo[A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*): Tuple7[A,B,C,D,E,F,G] = null
+ def foo[A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*): Tuple6[A,B,C,D,E,F] = null
+ def foo[A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*): Tuple5[A,B,C,D,E] = null
+ def foo[A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*): Tuple4[A,B,C,D] = null
+ def foo[A, B, C](heading: (String, String, String), rows: (A, B, C)*): Tuple3[A,B,C] = null
+ def foo[A, B](heading: (String, String), rows: (A, B)*): Tuple2[A,B] = null
+ def foo[A](heading: String, rows: A*): Tuple1[A] = null
+
+ foo((1))
+} \ No newline at end of file
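For the check file above, note that in Scala `(1)` is only a parenthesized `Int`, not a one-element tuple, so the argument fits none of the `heading` parameter types and the error lists every alternative. A one-element tuple has to be spelled out explicitly; a tiny illustration of our own:

val justAnInt: Int        = (1)        // parentheses merely group; this is the Int 1
val oneTuple: Tuple1[Int] = Tuple1(1)  // a 1-tuple must be written out explicitly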
diff --git a/test/files/neg/t7756a.check b/test/files/neg/t7756a.check
new file mode 100644
index 0000000000..8d42717e47
--- /dev/null
+++ b/test/files/neg/t7756a.check
@@ -0,0 +1,7 @@
+t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence]
+ locally(null: TA[Object])
+ ^
+t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence]
+ locally(null: TA[Object])
+ ^
+two errors found
diff --git a/test/files/neg/t7756a.scala b/test/files/neg/t7756a.scala
new file mode 100644
index 0000000000..4453e84963
--- /dev/null
+++ b/test/files/neg/t7756a.scala
@@ -0,0 +1,11 @@
+object Test {
+ def test: Unit = {
+ trait TA[X <: CharSequence]
+ 0 match {
+ case _ =>
+ // the bounds violation isn't reported. RefChecks seems to be too broadly disabled under virtpatmat: see 65340ed4ad2e
+ locally(null: TA[Object])
+ ()
+ }
+ }
+}
diff --git a/test/files/neg/t7756b.check b/test/files/neg/t7756b.check
new file mode 100644
index 0000000000..2817a7e230
--- /dev/null
+++ b/test/files/neg/t7756b.check
@@ -0,0 +1,6 @@
+t7756b.scala:3: warning: comparing values of types Int and String using `==' will always yield false
+ case _ => 0 == ""
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7756b.flags b/test/files/neg/t7756b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t7756b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t7756b.scala b/test/files/neg/t7756b.scala
new file mode 100644
index 0000000000..a2de29c8e7
--- /dev/null
+++ b/test/files/neg/t7756b.scala
@@ -0,0 +1,5 @@
+object Test {
+ 0 match {
+ case _ => 0 == ""
+ }
+}
diff --git a/test/files/neg/t7757a.check b/test/files/neg/t7757a.check
new file mode 100644
index 0000000000..de24e23004
--- /dev/null
+++ b/test/files/neg/t7757a.check
@@ -0,0 +1,4 @@
+t7757a.scala:1: error: ';' expected but '@' found.
+trait Foo @annot
+ ^
+one error found
diff --git a/test/files/neg/t7757a.scala b/test/files/neg/t7757a.scala
new file mode 100644
index 0000000000..24f6c16cb4
--- /dev/null
+++ b/test/files/neg/t7757a.scala
@@ -0,0 +1 @@
+trait Foo @annot \ No newline at end of file
diff --git a/test/files/neg/t7757b.check b/test/files/neg/t7757b.check
new file mode 100644
index 0000000000..3e5a0f1fa6
--- /dev/null
+++ b/test/files/neg/t7757b.check
@@ -0,0 +1,4 @@
+t7757b.scala:2: error: expected start of definition
+@annot2
+ ^
+one error found
diff --git a/test/files/neg/t7757b.scala b/test/files/neg/t7757b.scala
new file mode 100644
index 0000000000..e9a537dba1
--- /dev/null
+++ b/test/files/neg/t7757b.scala
@@ -0,0 +1,2 @@
+trait Foo2
+@annot2 \ No newline at end of file
diff --git a/test/files/neg/t997.check b/test/files/neg/t997.check
index 186095f44a..be1e92c369 100644
--- a/test/files/neg/t997.check
+++ b/test/files/neg/t997.check
@@ -1,7 +1,10 @@
-t997.scala:13: error: wrong number of arguments for object Foo
+t997.scala:13: error: wrong number of patterns for object Foo offering (String, String): expected 2, found 3
+"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
+ ^
+t997.scala:13: error: wrong number of patterns for object Foo offering (String, String): expected 2, found 3
"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
^
t997.scala:13: error: not found: value a
"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
^
-two errors found
+three errors found
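The reworded diagnostic above ("offering (String, String): expected 2, found 3") refers to an extractor whose `unapply` yields a pair; a hypothetical extractor of the same shape, just to show what the message is counting:

object Foo {
  def unapply(s: String): Option[(String, String)] = Some((s, s))
}

// "x" match { case Foo(a, b)    => ... }   // arity matches: expected 2, found 2
// "x" match { case Foo(a, b, c) => ... }   // rejected: expected 2, found 3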
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index cf58bc3dfd..ecf8916c46 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -22,7 +22,7 @@ object Macros {
var b1 = new Transformer {
override def transform(tree: Tree): Tree = tree match {
case Ident(x) if (x==n) => Ident(TermName("_arg"))
- case tt @ TypeTree() if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
+ case tt: TypeTree if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
// without the fix to LazyTreeCopier.Annotated, we would need to uncomment the line below to make the macro work
// that's because the pattern match in the input expression gets expanded into Typed(<x>, TypeTree(<Int @unchecked>))
// with the original of the TypeTree being Annotated(<@unchecked>, Ident(<x>))
diff --git a/test/files/pos/erasure-nsquared.scala b/test/files/pos/erasure-nsquared.scala
new file mode 100644
index 0000000000..b0e30ade58
--- /dev/null
+++ b/test/files/pos/erasure-nsquared.scala
@@ -0,0 +1,35 @@
+trait BigCast {
+ def bar(x: Int): AnyRef = (
+ null
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ .asInstanceOf[List[AnyRef]].head
+ )
+}
diff --git a/test/files/pos/extractor-types.scala b/test/files/pos/extractor-types.scala
new file mode 100644
index 0000000000..bb9659a13c
--- /dev/null
+++ b/test/files/pos/extractor-types.scala
@@ -0,0 +1,30 @@
+package p1 {
+ object Ex { def unapply(p: Any): Option[_ <: Int] = null }
+ object Foo { val Ex(_) = null }
+}
+// a.scala:2: error: error during expansion of this match (this is a scalac bug).
+// The underlying error was: type mismatch;
+// found : Some[_$1(in value x$1)] where type _$1(in value x$1)
+// required: Some[_$1(in method unapply)]
+// object Foo { val Ex(_) = null }
+// ^
+// one error found
+
+package p2 {
+ trait Other {
+ class Quux
+ object Baz { def unapply(x: Any): Option[Quux] = None }
+ }
+ trait Reifiers {
+ def f() {
+ val u2: Other = null
+ (null: Any) match { case u2.Baz(x) => println(x) } //: u2.Quux) }
+ // The underlying error was: type mismatch;
+ // found : Other#Quux
+ // required: u2.Quux
+ // x match { case u2.Baz(x) => println(x: u2.Quux) }
+ // ^
+ // one error found
+ }
+ }
+}
diff --git a/test/files/pos/optmatch.scala b/test/files/pos/optmatch.scala
new file mode 100644
index 0000000000..354be65da7
--- /dev/null
+++ b/test/files/pos/optmatch.scala
@@ -0,0 +1,33 @@
+// final case class NonZeroLong(value: Long) extends AnyVal {
+// def get: Long = value
+// def isEmpty: Boolean = get == 0l
+// }
+
+class NonZeroLong(val value: Long) extends AnyVal {
+ def get: Long = value
+ def isEmpty: Boolean = get == 0l
+}
+object NonZeroLong {
+ def unapply(value: Long): NonZeroLong = new NonZeroLong(value)
+}
+
+
+object Foo {
+ def unapply(x: Int): NonZeroLong = new NonZeroLong(1L << x)
+ // public long unapply(int);
+ // 0: lconst_1
+ // 1: iload_1
+ // 2: lshl
+ // 3: lreturn
+}
+
+object Test {
+ def f(x: Int): Int = x match {
+ case Foo(1024l) => 1
+ case _ => 2
+ }
+ def main(args: Array[String]): Unit = {
+ println(f(10))
+ println(f(11))
+ }
+}
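The test above uses the name-based extractor protocol: since `NonZeroLong` defines `isEmpty` and `get`, `unapply` may return it directly instead of an `Option`, and because it is a value class over `Long` the extractor compiles down to primitive code (the bytecode comment in `Foo` shows `unapply` reducing to a shift and an `lreturn`). A minimal sketch of the same protocol with a hypothetical extractor of our own:

// Any result type with isEmpty/get works as an extractor result (name-based matching).
final class PositiveInt(val get: Int) extends AnyVal {
  def isEmpty: Boolean = get <= 0
}

object Positive {
  def unapply(x: Int): PositiveInt = new PositiveInt(x)
}

object PositiveDemo {
  def describe(x: Int): String = x match {
    case Positive(n) => s"positive: $n"   // binds n = result.get; no Option allocated
    case _           => "not positive"
  }
}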
diff --git a/test/files/pos/overloaded-unapply.scala b/test/files/pos/overloaded-unapply.scala
new file mode 100644
index 0000000000..4105a25f10
--- /dev/null
+++ b/test/files/pos/overloaded-unapply.scala
@@ -0,0 +1,8 @@
+trait Baz {
+ type Type >: Null
+
+ case class HoleType(a: String, b: String, c: String)
+ object HoleType { def unapply(tpe: Type): Option[HoleType] = ??? }
+
+ (null: Type) match { case HoleType(holeTpe) => holeTpe }
+}
diff --git a/test/files/pos/patmat-extract-tparam.scala b/test/files/pos/patmat-extract-tparam.scala
new file mode 100644
index 0000000000..6417b49c2b
--- /dev/null
+++ b/test/files/pos/patmat-extract-tparam.scala
@@ -0,0 +1,13 @@
+trait Bip[T] { def h: T }
+trait BoolBip extends Bip[Boolean]
+
+class A {
+ def g(x: Boolean): Unit = ()
+ def f(xs: List[Bip[_]]) = xs foreach { case x: BoolBip => g(x.h) }
+}
+
+class B {
+ def g(x: Boolean): Unit = ()
+ def g(x: Int): Unit = ()
+ def f(xs: List[Bip[_]]) = xs foreach { case x: BoolBip => g(x.h) }
+}
diff --git a/test/files/pos/t6797.scala b/test/files/pos/t6797.scala
new file mode 100644
index 0000000000..ef1afa1eb3
--- /dev/null
+++ b/test/files/pos/t6797.scala
@@ -0,0 +1,4 @@
+object Test extends App /* workaround: don't extend App */ {
+ private class Matcher(aParam: Option[String] = None)
+ private val stringMatcher = new Matcher
+}
diff --git a/test/files/pos/t7014/ThreadSafety.java b/test/files/pos/t7014/ThreadSafety.java
new file mode 100644
index 0000000000..ed508804e3
--- /dev/null
+++ b/test/files/pos/t7014/ThreadSafety.java
@@ -0,0 +1,9 @@
+package t7014;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME) // must be exactly RUNTIME retention (those we parse)
+public @interface ThreadSafety {
+ ThreadSafetyLevel level();
+} \ No newline at end of file
diff --git a/test/files/pos/t7014/ThreadSafetyLevel.java b/test/files/pos/t7014/ThreadSafetyLevel.java
new file mode 100644
index 0000000000..4df1dc787a
--- /dev/null
+++ b/test/files/pos/t7014/ThreadSafetyLevel.java
@@ -0,0 +1,8 @@
+package t7014; // package needed due to another bug in scalac's java parser
+
+// since we parse eagerly, we have not yet parsed the classfile when parsing the annotation,
+// and on doing so, fail to find a symbol for the COMPLETELY_THREADSAFE reference
+// from the annotation's argument to the enum's member
+// for now, let's just not crash -- should implement lazy completing at some point
+@ThreadSafety(level=ThreadSafetyLevel.COMPLETELY_THREADSAFE)
+public enum ThreadSafetyLevel { COMPLETELY_THREADSAFE }
diff --git a/test/files/pos/t7014/t7014.scala b/test/files/pos/t7014/t7014.scala
new file mode 100644
index 0000000000..faec4c7740
--- /dev/null
+++ b/test/files/pos/t7014/t7014.scala
@@ -0,0 +1,4 @@
+package t7014
+
+import ThreadSafetyLevel.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed
+ \ No newline at end of file
diff --git a/test/files/pos/t7486-named.scala b/test/files/pos/t7486-named.scala
new file mode 100644
index 0000000000..253293e5f1
--- /dev/null
+++ b/test/files/pos/t7486-named.scala
@@ -0,0 +1,8 @@
+
+object Test {
+ def fold(empty: Any) = ()
+ implicit val notAnnotatedImplicit = new {
+ fold(empty = 0)
+ def empty[A]: Any = ???
+ }
+}
diff --git a/test/pending/pos/t7486.scala b/test/files/pos/t7486.scala
index 6dd7f4c4ac..6dd7f4c4ac 100644
--- a/test/pending/pos/t7486.scala
+++ b/test/files/pos/t7486.scala
diff --git a/test/files/pos/t7690.scala b/test/files/pos/t7690.scala
new file mode 100644
index 0000000000..e8911a93e8
--- /dev/null
+++ b/test/files/pos/t7690.scala
@@ -0,0 +1,17 @@
+object A
+trait B[T]
+
+object C {
+ implicit def notUsed[L[x]](in: L[Int]): B[L[Int]] = ???
+
+ class E(val ls: Int) {
+ def x(f: Int => Boolean): Boolean = f(ls)
+ }
+ implicit def isUsed(ls: Int): E = new E(ls)
+
+ def amethod(in: Int): Boolean =
+ in.x { i =>
+ import A._
+ "asdf" == i.toString
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7694.scala b/test/files/pos/t7694.scala
new file mode 100644
index 0000000000..9852d5ec79
--- /dev/null
+++ b/test/files/pos/t7694.scala
@@ -0,0 +1,40 @@
+trait A
+trait B
+
+trait L[A2, B2 <: A2] {
+ def bar(a: Any, b: Any) = 0
+}
+
+object Lub {
+ // use named args transforms to include TypeTree(<lub.tpe>) in the AST before refchecks.
+ def foo(a: L[_, _], b: Any) = 0
+
+ foo(b = 0, a = if (true) (null: L[A, A]) else (null: L[B, B]))
+
+ (if (true) (null: L[A, A]) else (null: L[B, B])).bar(b = 0, a = 0)
+}
+
+/*
+The LUB ends up as:
+
+TypeRef(
+ TypeSymbol(
+ abstract trait L#7038[A2#7039, B2#7040 <: A2#7039] extends AnyRef#2197
+
+ )
+ args = List(
+ AbstractTypeRef(
+ AbstractType(
+ type _1#13680 >: A#7036 with B#7037 <: Object#1752
+ )
+ )
+ AbstractTypeRef(
+ AbstractType(
+ type _2#13681 >: A#7036 with B#7037 <: Object#1752
+ )
+ )
+ )
+)
+
+Note that type _2#13681 is *not* bound by _1#13680
+*/
diff --git a/test/files/pos/t7716.scala b/test/files/pos/t7716.scala
new file mode 100644
index 0000000000..40117051ed
--- /dev/null
+++ b/test/files/pos/t7716.scala
@@ -0,0 +1,16 @@
+object Test {
+ def test: Unit = {
+ val e: java.lang.Enum[_] = java.util.concurrent.TimeUnit.SECONDS
+ e match { case x => println(x) }
+
+
+ trait TA[X <: CharSequence]
+ val ta: TA[_] = new TA[String] {}
+
+ ta match {
+ case _ => println("hi")
+ }
+
+ def f(ta: TA[_]) = ta match { case _ => "hi" }
+ }
+}
diff --git a/test/files/pos/t7785.scala b/test/files/pos/t7785.scala
new file mode 100644
index 0000000000..1de693d137
--- /dev/null
+++ b/test/files/pos/t7785.scala
@@ -0,0 +1,34 @@
+import scala.language._
+
+trait R[+Repr]
+
+trait TraversableOps {
+ implicit val R: R[Nothing] = ???
+
+  // Removing the implicit parameter from both fixes the crash;
+  // removing it from only one of them gives a valid compiler error instead.
+ trait OpsDup1[Repr] {
+ def force(implicit bf: R[Repr]): Any
+ }
+
+ trait Ops[Repr] extends OpsDup1[Repr] {
+ def force(implicit bf: R[Repr], dummy: DummyImplicit): Any
+ }
+
+ implicit def ct2ops[T, C[+X]](t: C[T]):
+ Ops[C[T]]
+
+ def force[T](t: Option[T]) =
+ // ct2ops(t).force
+ t.force //Fails compilation on 2.10.2.
+
+
+ /* To get a closer look at the crash:
+ :power
+ val foo = typeOf[C].member(TermName("foo"))
+ val pt = analyzer.HasMember(TermName("force"))
+ val instantiated = foo.info.finalResultType.instantiateTypeParams(foo.typeParams, foo.typeParams.map(TypeVar(_)))
+ instantiated <:< pt
+ */
+ def foo[T, C[+X]]: Ops[C[T]]
+}
diff --git a/test/files/pos/t942/Amount_1.java b/test/files/pos/t942/Amount_1.java
new file mode 100644
index 0000000000..d9d37d127b
--- /dev/null
+++ b/test/files/pos/t942/Amount_1.java
@@ -0,0 +1,5 @@
+import java.util.concurrent.Callable;
+
+public abstract class Amount_1<Q> extends Object
+ implements Callable<Amount_1<?>> {
+}
diff --git a/test/files/pos/t942/Test_2.scala b/test/files/pos/t942/Test_2.scala
new file mode 100644
index 0000000000..3cc84dae3c
--- /dev/null
+++ b/test/files/pos/t942/Test_2.scala
@@ -0,0 +1,3 @@
+abstract class Foo {
+ val x: Amount_1[Foo]
+}
diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index c884b6425b..f2233f1828 100755
--- a/test/files/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -51,10 +51,6 @@ object Test extends InteractiveTest {
new Typer(context) with InteractiveTyper with ScaladocTyper
}
- override lazy val loaders = new scala.tools.nsc.symtab.SymbolLoaders {
- val global: outer.type = outer
- }
-
def chooseLink(links: List[LinkTo]): LinkTo = links.head
def internalLink(sym: Symbol, site: Symbol) = None
def toString(link: LinkTo) = link.toString
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
index b20a734fe6..4b297ff220 100644
--- a/test/files/run/analyzerPlugins.scala
+++ b/test/files/run/analyzerPlugins.scala
@@ -8,7 +8,9 @@ object Test extends DirectTest {
def code = """
class testAnn extends annotation.TypeConstraint
- class A(param: Double) extends { val x: Int = 1; val y = "two"; type T = A } with AnyRef {
+ class A(param: Double) extends { val x: Int = 1; val y = "two" } with AnyRef {
+ type T = A
+
val inferField = ("str": @testAnn)
val annotField: Boolean @testAnn = false
@@ -81,7 +83,7 @@ object Test extends DirectTest {
output += s"pluginsPt($pt, ${treeClass(tree)})"
pt
}
-
+
override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
output += s"pluginsTyped($tpe, ${treeClass(tree)})"
tpe
diff --git a/test/files/run/deprecate-early-type-defs.check b/test/files/run/deprecate-early-type-defs.check
new file mode 100644
index 0000000000..1ee01df13e
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.check
@@ -0,0 +1,3 @@
+deprecate-early-type-defs.scala:1: warning: early type members are deprecated. Move them to the regular body: the semantics are the same.
+object Test extends { type T = Int } with App
+ ^
diff --git a/test/files/run/deprecate-early-type-defs.flags b/test/files/run/deprecate-early-type-defs.flags
new file mode 100644
index 0000000000..c36e713ab8
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.flags
@@ -0,0 +1 @@
+-deprecation \ No newline at end of file
diff --git a/test/files/run/deprecate-early-type-defs.scala b/test/files/run/deprecate-early-type-defs.scala
new file mode 100644
index 0000000000..99e42166f2
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.scala
@@ -0,0 +1 @@
+object Test extends { type T = Int } with App \ No newline at end of file
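As the warning in the check file says, moving the early type member into the regular body preserves the semantics; the non-deprecated spelling of the one-liner above is simply:

object Test extends App { type T = Int }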
diff --git a/test/files/run/interpolation.flags b/test/files/run/interpolation.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/run/interpolation.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/run/interpolationArgs.check b/test/files/run/interpolationArgs.check
index 155991e618..983214cbee 100644
--- a/test/files/run/interpolationArgs.check
+++ b/test/files/run/interpolationArgs.check
@@ -1,2 +1,2 @@
-java.lang.IllegalArgumentException: wrong number of arguments for interpolated string
-java.lang.IllegalArgumentException: wrong number of arguments for interpolated string
+java.lang.IllegalArgumentException: wrong number of arguments (1) for interpolated string with 3 parts
+java.lang.IllegalArgumentException: wrong number of arguments (1) for interpolated string with 1 parts
diff --git a/test/files/run/interpolationArgs.flags b/test/files/run/interpolationArgs.flags
deleted file mode 100644
index e1b37447c9..0000000000
--- a/test/files/run/interpolationArgs.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental \ No newline at end of file
diff --git a/test/files/run/interpolationMultiline1.flags b/test/files/run/interpolationMultiline1.flags
deleted file mode 100644
index 48fd867160..0000000000
--- a/test/files/run/interpolationMultiline1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
diff --git a/test/files/run/interpolationMultiline2.flags b/test/files/run/interpolationMultiline2.flags
deleted file mode 100644
index e1b37447c9..0000000000
--- a/test/files/run/interpolationMultiline2.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental \ No newline at end of file
diff --git a/test/files/run/macro-auto-duplicate.check b/test/files/run/macro-auto-duplicate.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/macro-auto-duplicate/Macros_1.scala b/test/files/run/macro-auto-duplicate/Macros_1.scala
new file mode 100644
index 0000000000..e3df05ba50
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val x = Ident(newTermName("x"))
+ def defAndUseX(rhs: Tree) = {
+ Block(List(ValDef(NoMods, newTermName("x"), TypeTree(), rhs)), x)
+ }
+ val xi4 = defAndUseX(Literal(Constant(4)))
+ val xs2 = defAndUseX(Literal(Constant("2")))
+ c.Expr[String](Apply(Select(xi4, newTermName("$plus")), List(xs2)))
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macro-toplevel/Test_2.scala b/test/files/run/macro-auto-duplicate/Test_2.scala
index eee2d6ae13..f697da6020 100644
--- a/test/files/run/macro-toplevel/Test_2.scala
+++ b/test/files/run/macro-auto-duplicate/Test_2.scala
@@ -1,6 +1,3 @@
-import Macros._
-
object Test extends App {
println(Macros.foo)
- println(Macros.foo2)
} \ No newline at end of file
diff --git a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala
index af80147a90..85a581585f 100644
--- a/test/files/run/macro-duplicate/Impls_Macros_1.scala
+++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala
@@ -26,4 +26,4 @@ object Macros {
}
def foo = macro impl
-} \ No newline at end of file
+}
diff --git a/test/files/run/macro-expand-unapply-b.check b/test/files/run/macro-expand-unapply-b.check
deleted file mode 100644
index 5272f0d00a..0000000000
--- a/test/files/run/macro-expand-unapply-b.check
+++ /dev/null
@@ -1,2 +0,0 @@
-(1,List(2))
-List(1)
diff --git a/test/files/run/macro-expand-unapply-b.flags b/test/files/run/macro-expand-unapply-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-expand-unapply-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala b/test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala
deleted file mode 100644
index d0300bdf7e..0000000000
--- a/test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-import language.experimental.macros
-import scala.reflect.macros.Context
-
-object Macros {
- implicit class ContextExtensions(c: StringContext) {
- object q {
- def unapply(x: Any): Option[Any] = macro impl
- }
- }
-
- def impl(c: Context)(x: c.Expr[Any]): c.Expr[Option[Any]] = {
- import c.universe._
- import Flag._
-
- // parts here will be string literals - static parts of the string interpolation
- // e.g. for q"$x, $y" parts will be Literal(Constant("")), Literal(Constant(", ")) and Literal(Constant(""))
- val Apply(Select(Select(Apply(_, List(Apply(_, parts))), _), _), _) = c.macroApplication
- val nresults = parts.length - 1
-
- def results() =
- ((1 to (nresults - 1)).toList map (i => Literal(Constant(i)))) :+ // (n - 1) results of type Int
- Apply(Ident(TermName("List")), List(Literal(Constant(nresults)))) // and also one result of a different type
- def extractorBody() =
- if (nresults == 0) Literal(Constant(true))
- else if (nresults == 1) Apply(Ident(TermName("Some")), results())
- else Apply(Ident(TermName("Some")), List(Apply(Ident(TermName("Tuple" + nresults)), results())))
-
- val name = TermName(java.util.UUID.randomUUID().toString.replace("-", ""))
- val mdef = ModuleDef(NoMods, name, Template(List(Select(Ident(TermName("scala")), TypeName("AnyRef"))), emptyValDef, List(
- DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
- Block(List(pendingSuperCall), Literal(Constant(())))),
- DefDef(Modifiers(), TermName("unapply"), List(), List(List(ValDef(Modifiers(PARAM), TermName("x"), Ident(TypeName("Any")), EmptyTree))), TypeTree(),
- extractorBody()))))
- c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, mdef)
- c.Expr[Option[Any]](Apply(Select(Ident(name), TermName("unapply")), List(x.tree)))
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-b/Test_2.scala b/test/files/run/macro-expand-unapply-b/Test_2.scala
deleted file mode 100644
index 5352160dfe..0000000000
--- a/test/files/run/macro-expand-unapply-b/Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Test extends App {
- import Macros._
- def whatever() = null
- val q"$x1, $y1" = whatever()
- println(x1, y1)
- val q"$x2" = whatever()
- println(x2)
-}
diff --git a/test/files/run/macro-toplevel-companion-a.flags b/test/files/run/macro-toplevel-companion-a.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-toplevel-companion-a.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala b/test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala
deleted file mode 100644
index 23e8694ddc..0000000000
--- a/test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.reflect.macros.Context
-import language.experimental.macros
-
-object Macros {
- def impl(c: Context) = {
- import c.universe._
- val synthetic = reify{ class C { override def toString = "C" }; object C { implicit val c = new C } }.tree
- val defs = synthetic.asInstanceOf[Block].stats.asInstanceOf[List[ImplDef]]
- if (c.topLevelRef(TypeName("C")).isEmpty) c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, defs: _*)
- c.literalUnit
- }
-
- def foo = macro impl
-} \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-a/Test_2.scala b/test/files/run/macro-toplevel-companion-a/Test_2.scala
deleted file mode 100644
index 78b65b5b1f..0000000000
--- a/test/files/run/macro-toplevel-companion-a/Test_2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-import Macros._
-
-object Test extends App {
- foo;
- implicitly[C];
- foo;
- implicitly[C];
-} \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-b.check b/test/files/run/macro-toplevel-companion-b.check
deleted file mode 100644
index bd30dc75d3..0000000000
--- a/test/files/run/macro-toplevel-companion-b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-reflective compilation has failed:
-
-Companions 'class C' and 'object C' must be defined in same file:
- Found in <synthetic file name> and <synthetic file name>
diff --git a/test/files/run/macro-toplevel-companion-b.flags b/test/files/run/macro-toplevel-companion-b.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-toplevel-companion-b.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala b/test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala
deleted file mode 100644
index f30adc2965..0000000000
--- a/test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.reflect.macros.Context
-import language.experimental.macros
-
-object Macros {
- def impl(c: Context) = {
- import c.universe._
- val Block(List(cdef: ClassDef), _) = reify{ class C }.tree
- val classRef = c.topLevelRef(TypeName("C")) orElse c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, cdef)
- val Block(List(mdef: ModuleDef), _) = reify{ object C }.tree
- val moduleRef = c.topLevelRef(TermName("C")) orElse c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, mdef)
- c.literalUnit
- }
-
- def foo = macro impl
-} \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-b/Test_2.scala b/test/files/run/macro-toplevel-companion-b/Test_2.scala
deleted file mode 100644
index 4e766bde89..0000000000
--- a/test/files/run/macro-toplevel-companion-b/Test_2.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{universe => ru}
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.{ToolBox, ToolBoxError}
-import Macros._
-
-object Test extends App {
- val tb = cm.mkToolBox()
- try tb.compile(Select(Ident(TermName("Macros")), TermName("foo")))
- catch { case ToolBoxError(message, _) => println("""(Found in|and) .*?compileLateSynthetic-.*?\.scala""".r.replaceAllIn(message, m => m.group(1) + " <synthetic file name>")) }
-} \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-c.check b/test/files/run/macro-toplevel-companion-c.check
deleted file mode 100644
index 4052c472f8..0000000000
--- a/test/files/run/macro-toplevel-companion-c.check
+++ /dev/null
@@ -1,3 +0,0 @@
-error: Companions 'class C' and 'object C' must be defined in same file:
- Found in <synthetic file name> and newSource1.scala
-
diff --git a/test/files/run/macro-toplevel-companion-c.flags b/test/files/run/macro-toplevel-companion-c.flags
deleted file mode 100644
index cd66464f2f..0000000000
--- a/test/files/run/macro-toplevel-companion-c.flags
+++ /dev/null
@@ -1 +0,0 @@
--language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-c.scala b/test/files/run/macro-toplevel-companion-c.scala
deleted file mode 100644
index c315f8b942..0000000000
--- a/test/files/run/macro-toplevel-companion-c.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-import scala.tools.partest._
-import java.io._
-
-object Test extends DirectTest {
- def code = ???
-
- def macros_1 = """
- package test
-
- import scala.reflect.macros.Context
- import language.experimental.macros
-
- object Macros {
- def impl(c: Context) = {
- import c.universe._
- val Block(List(cdef: ClassDef), _) = reify{ class C }.tree
- val ref = c.topLevelRef(TypeName("test.C")) orElse c.introduceTopLevel("test", cdef)
- c.literalUnit
- }
-
- def foo = macro impl
- }
- """
- def compileMacros() = {
- val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
- compileString(newCompiler("-language:experimental.macros", "-cp", classpath, "-d", testOutput.path))(macros_1)
- }
-
- def test_2 = """
- package test
- object C { Macros.foo }
- """
- def compileTest() = {
- val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
- compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(test_2)
- }
-
- def show(): Unit = {
- // redirect err to string, for logging
- val prevErr = System.err
- val baos = new ByteArrayOutputStream()
- System.setErr(new PrintStream(baos))
- log("Compiling Macros_1...")
- if (compileMacros()) {
- log("Compiling Test_2...")
- if (compileTest()) log("Success!") else log("Failed...")
- }
- println("""(Found in|and) .*?compileLateSynthetic-.*?\.scala""".r.replaceAllIn(baos.toString, m => m.group(1) + " <synthetic file name>"))
- System.setErr(prevErr)
- }
-} \ No newline at end of file
diff --git a/test/files/run/macro-toplevel.check b/test/files/run/macro-toplevel.check
deleted file mode 100644
index 257c3764fd..0000000000
--- a/test/files/run/macro-toplevel.check
+++ /dev/null
@@ -1,2 +0,0 @@
-I've been created from Macros.foo
-I've been created from Macros.foo
diff --git a/test/files/run/macro-toplevel/Macros_1.scala b/test/files/run/macro-toplevel/Macros_1.scala
deleted file mode 100644
index f681c86735..0000000000
--- a/test/files/run/macro-toplevel/Macros_1.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.reflect.macros.Context
-import language.experimental.macros
-
-object Macros {
- def impl(c: Context) = {
- import c.universe._
- val msg = "I've been created from " + c.macroApplication
- val Block(List(synthetic: ClassDef), _) = reify{ class SomeUniqueName { def hello = c.literal(msg).splice } }.tree
- val ref = c.topLevelRef(synthetic.name) orElse c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, synthetic)
- c.Expr[String](Select(Apply(Select(New(ref), nme.CONSTRUCTOR), List()), TermName("hello")))
- }
-
- def foo = macro impl
- def foo2 = macro impl
-}
diff --git a/test/files/run/matchonseq.scala b/test/files/run/matchonseq.scala
index 49b406a6ec..f6f320245a 100644
--- a/test/files/run/matchonseq.scala
+++ b/test/files/run/matchonseq.scala
@@ -1,8 +1,8 @@
-object Test extends App{
- Vector(1,2,3) match {
- case head +: tail => println("It worked! head=" + head)
+object Test extends App {
+ Vector(1,2,3) match {
+ case head +: tail => println("It worked! head=" + head)
}
- Vector(1,2,3) match {
- case init :+ last => println("It worked! last=" + last)
+ Vector(1,2,3) match {
+ case init :+ last => println("It worked! last=" + last)
}
}
diff --git a/test/files/run/name-based-patmat.check b/test/files/run/name-based-patmat.check
new file mode 100644
index 0000000000..1cc605ea3d
--- /dev/null
+++ b/test/files/run/name-based-patmat.check
@@ -0,0 +1,10 @@
+catdog
+2 catdogs! A ha ha!
+3 catdogs! A ha ha!
+catdog
+2 catdogs! A ha ha!
+3 catdogs! A ha ha!
+1
+1
+2
+3
diff --git a/test/files/run/name-based-patmat.scala b/test/files/run/name-based-patmat.scala
new file mode 100644
index 0000000000..2c429c141f
--- /dev/null
+++ b/test/files/run/name-based-patmat.scala
@@ -0,0 +1,75 @@
+final class MiniSome[T](val get: T) extends AnyVal { def isEmpty = false }
+
+package p1 {
+ class Triple(val x: Any) extends AnyRef with Product3[String, String, String] {
+ private def s = "" + x
+ override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+ def isEmpty = false
+ def get = this
+ def _1 = s
+ def _2 = "2 " + s + "s! A ha ha!"
+ def _3 = "3 " + s + "s! A ha ha!"
+
+ override def toString = s"Triple(${_1}, ${_2}, ${_3})"
+ }
+
+ object Triple {
+ def unapply(x: Any): Triple = new Triple(x)
+ }
+}
+
+package p2 {
+ class Triple(val x: Any) {
+ private def s = "" + x
+ def isEmpty = false
+ def get = this
+ def _1 = s
+ def _2 = "2 " + s + "s! A ha ha!"
+ def _3 = "3 " + s + "s! A ha ha!"
+ override def toString = s"Triple(${_1}, ${_2}, ${_3})"
+ }
+
+ object Triple {
+ def unapply(x: Any): Triple = new Triple(x)
+ }
+}
+
+package p3 {
+ case class Foo(x: Int, y: Int, zs: Int*)
+
+ object Bar {
+ def f(x: Foo) = x match {
+ case Foo(5, 10, 15, 20, _*) => 1
+ case Foo(5, 10, 15, _*) => 2
+ case Foo(5, 10, _*) => 3
+ case Foo(5, 10) => 4 // should warn unreachable
+ case _ => 5
+ }
+ }
+}
+
+object Test {
+
+ // def f(x: Any) = x match {
+ // case p1.Foo(x, y, z) => println((x, y, z))
+ // case x => println(x)
+ // }
+
+ def main(args: Array[String]): Unit = {
+ "catdog" match {
+ case p1.Triple(x, y, z) => List(x, y, z) foreach println
+ case x => println("fail: " + x)
+ }
+ // TODO
+ "catdog" match {
+ case p2.Triple(x, y, z) => List(x, y, z) foreach println
+ case x => println("fail: " + x)
+ }
+
+ println(p3.Bar.f(p3.Foo(5, 10, 15, 20, 25)))
+ println(p3.Bar.f(p3.Foo(5, 10, 15, 20)))
+ println(p3.Bar.f(p3.Foo(5, 10, 15)))
+ println(p3.Bar.f(p3.Foo(5, 10)))
+ // println(p3.Bar.f(p3.Foo(5)))
+ }
+}
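The `p1`/`p2` variants above both satisfy the name-based extraction protocol: the value returned by `unapply` needs `isEmpty` and `get`, and for arity greater than one the result of `get` supplies `_1` ... `_N` (with or without extending `ProductN`). A compact sketch of the same idea at arity two, with hypothetical names:

// unapply returns any type exposing isEmpty/get; get's result provides _1 and _2.
class Pair(val _1: String, val _2: Int) {
  def isEmpty = false
  def get     = this
}

object KV {
  def unapply(s: String): Pair = {
    val Array(k, v) = s.split('=')   // sketch assumes exactly one '=' in the input
    new Pair(k, v.toInt)
  }
}

object KVDemo extends App {
  "answer=42" match { case KV(key, n) => println(s"$key -> $n") }
}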
diff --git a/test/files/run/patmat-behavior-2.check b/test/files/run/patmat-behavior-2.check
new file mode 100644
index 0000000000..a928fe7918
--- /dev/null
+++ b/test/files/run/patmat-behavior-2.check
@@ -0,0 +1,24 @@
+f1(Foo(1)) == true
+f1(Foo(1, 2)) == false
+f1(Foo(1, 2, 3)) == false
+
+f2(Foo(1)) == false
+f2(Foo(1, 2)) == true
+f2(Foo(1, 2, 3)) == false
+
+f3(Foo(1)) == false
+f3(Foo(1, 2)) == false
+f3(Foo(1, 2, 3)) == true
+
+f1seq(Foo(1)) == true
+f1seq(Foo(1, 2)) == true
+f1seq(Foo(1, 2, 3)) == true
+
+f2seq(Foo(1)) == false
+f2seq(Foo(1, 2)) == true
+f2seq(Foo(1, 2, 3)) == true
+
+f3seq(Foo(1)) == false
+f3seq(Foo(1, 2)) == false
+f3seq(Foo(1, 2, 3)) == true
+
diff --git a/test/files/run/patmat-behavior-2.scala b/test/files/run/patmat-behavior-2.scala
new file mode 100644
index 0000000000..b31f773772
--- /dev/null
+++ b/test/files/run/patmat-behavior-2.scala
@@ -0,0 +1,50 @@
+case class Foo(x: Int, ys: Int*) {
+ // We write our own toString because of SI-7735
+ override def toString = (x +: ys).mkString("Foo(", ", ", ")")
+}
+
+object Test {
+ def f1(x: Any) = x match {
+ case Foo(x) => true
+ case _ => false
+ }
+ def f2(x: Any) = x match {
+ case Foo(x, y) => true
+ case _ => false
+ }
+ def f3(x: Any) = x match {
+ case Foo(x, y, z) => true
+ case _ => false
+ }
+ def f1seq(x: Any) = x match {
+ case Foo(x, ys @ _*) => true
+ case _ => false
+ }
+ def f2seq(x: Any) = x match {
+ case Foo(x, y, zs @ _*) => true
+ case _ => false
+ }
+ def f3seq(x: Any) = x match {
+ case Foo(x, y, z, qs @ _*) => true
+ case _ => false
+ }
+
+ val x1 = Foo(1)
+ val x2 = Foo(1, 2)
+ val x3 = Foo(1, 2, 3)
+
+ val fs = List[Any => Boolean](f1, f2, f3)
+ val fseqs = List[Any => Boolean](f1seq, f2seq, f3seq)
+ val xs = List[Foo](x1, x2, x3)
+
+ def main(args: Array[String]): Unit = {
+ for ((f, i) <- fs.zipWithIndex) {
+ xs foreach (x => println(s"f${i+1}($x) == ${f(x)}"))
+ println("")
+ }
+ for ((f, i) <- fseqs.zipWithIndex) {
+ xs foreach (x => println(s"f${i+1}seq($x) == ${f(x)}"))
+ println("")
+ }
+ }
+}
diff --git a/test/files/run/patmat-behavior.check b/test/files/run/patmat-behavior.check
new file mode 100644
index 0000000000..273a1434fb
--- /dev/null
+++ b/test/files/run/patmat-behavior.check
@@ -0,0 +1,90 @@
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
diff --git a/test/files/run/patmat-behavior.scala b/test/files/run/patmat-behavior.scala
new file mode 100644
index 0000000000..8b6370d796
--- /dev/null
+++ b/test/files/run/patmat-behavior.scala
@@ -0,0 +1,95 @@
+package s {
+ sealed trait C[+A]
+
+ case class C00[+A]() extends C[A]
+ case class C10[+A](x: A) extends C[A]
+ case class C20[+A](x: A, y: A) extends C[A]
+ case class C01[+A](xs: A*) extends C[A]
+ case class C11[+A](x: A, ys: A*) extends C[A]
+ case class C21[+A](x: A, y: A, zs: A*) extends C[A]
+
+ object E00 { def unapply[A](x: Any): Boolean = ??? }
+ object E10 { def unapply[A](x: Any): Option[A] = ??? }
+ object E20 { def unapply[A](x: Any): Option[(A, A)] = ??? }
+ object E01 { def unapplySeq[A](x: Any): Option[Seq[A]] = ??? }
+ object E11 { def unapplySeq[A](x: Any): Option[(A, Seq[A])] = ??? }
+ object E21 { def unapplySeq[A](x: Any): Option[(A, A, Seq[A])] = ??? }
+
+ object F00 { def unapply[A](x: C[A]): Boolean = ??? }
+ object F10 { def unapply[A](x: C[A]): Option[A] = ??? }
+ object F20 { def unapply[A](x: C[A]): Option[(A, A)] = ??? }
+ object F01 { def unapplySeq[A](x: C[A]): Option[Seq[A]] = ??? }
+ object F11 { def unapplySeq[A](x: C[A]): Option[(A, Seq[A])] = ??? }
+ object F21 { def unapplySeq[A](x: C[A]): Option[(A, A, Seq[A])] = ??? }
+
+ object G00 { def unapply[A](x: C00[A]): Boolean = ??? }
+ object G10 { def unapply[A](x: C10[A]): Option[A] = ??? }
+ object G20 { def unapply[A](x: C20[A]): Option[(A, A)] = ??? }
+ object G01 { def unapplySeq[A](x: C01[A]): Option[Seq[A]] = ??? }
+ object G11 { def unapplySeq[A](x: C11[A]): Option[(A, Seq[A])] = ??? }
+ object G21 { def unapplySeq[A](x: C21[A]): Option[(A, A, Seq[A])] = ??? }
+}
+import s._
+
+package pos {
+ object Test {
+ def ga1(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga2(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga3(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga4(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga5(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga6(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+
+ def gb1[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb2[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb3[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb4[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb5[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb6[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+
+ def gc1[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc2[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc3[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc4[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc5[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc6[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+
+ def gd1[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd2[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd3[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd4[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd5[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd6[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ }
+}
+
+package neg {
+ object Fail {
+ def gb1[A](x: C00[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb2[A](x: C10[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb3[A](x: C20[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb4[A](x: C01[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb5[A](x: C11[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb6[A](x: C21[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+
+ def gc1[A](x: C00[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc2[A](x: C10[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc3[A](x: C20[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc4[A](x: C01[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc5[A](x: C11[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc6[A](x: C21[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+
+ }
+}
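
The warnings above all follow one pattern: the scrutinee is statically one leaf of the sealed hierarchy, while the extractor's unapply accepts a different leaf, so the type test the pattern matcher inserts can never succeed. A minimal sketch of the same situation, with hypothetical names P and Q standing in for the C*/G* pairs:

sealed trait T[+A]
case class P[+A](x: A) extends T[A]
case class Q[+A](x: A) extends T[A]

object ExP { def unapply[A](p: P[A]): Option[A] = Some(p.x) }

object FruitlessDemo {
  // The scrutinee is a Q[A]; ExP only accepts P[A], so the compiler warns that
  // the generated type test is fruitless (a Q[A] can never also be a P[A]).
  def f[A](q: Q[A]) = q match {
    case ExP(x) => x
    case _      => sys.error("unmatched")
  }
}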
diff --git a/test/files/run/patmat-bind-typed.check b/test/files/run/patmat-bind-typed.check
new file mode 100644
index 0000000000..8baef1b4ab
--- /dev/null
+++ b/test/files/run/patmat-bind-typed.check
@@ -0,0 +1 @@
+abc
diff --git a/test/files/run/patmat-bind-typed.scala b/test/files/run/patmat-bind-typed.scala
new file mode 100644
index 0000000000..10de921c51
--- /dev/null
+++ b/test/files/run/patmat-bind-typed.scala
@@ -0,0 +1,8 @@
+object Test {
+ def f(xs: List[Any]) = for (key @ (dummy: String) <- xs) yield key
+
+ def main(args: Array[String]): Unit = {
+ f("abc" :: Nil) foreach println
+ f(5 :: Nil) foreach println
+ }
+}
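
The typed bind key @ (dummy: String) in the generator acts as a filter: elements that are not Strings are skipped rather than raising a MatchError, which is why only "abc" is printed. A rough hand-written equivalent (a sketch, not the exact desugaring):

object TypedBindSketch {
  def f(xs: List[Any]): List[String] =
    xs collect { case key: String => key }   // non-String elements are dropped

  def main(args: Array[String]): Unit = {
    f("abc" :: Nil) foreach println          // abc
    f(5 :: Nil) foreach println              // prints nothing
  }
}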
diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check
index f5258efeb7..352aefaf25 100644
--- a/test/files/run/reflection-magicsymbols-invoke.check
+++ b/test/files/run/reflection-magicsymbols-invoke.check
@@ -82,7 +82,7 @@ Array
it's important to print the list of Array's members
if some of them change (possibly, adding and/or removing magic symbols), we must update this test
constructor Array: (_length: Int)Array[T]
-constructor Object: ()java.lang.Object
+constructor Cloneable: ()java.lang.Cloneable
method !=: (x$1: Any)Boolean
method !=: (x$1: AnyRef)Boolean
method ##: ()Int
diff --git a/test/files/run/repl-javap-app.check b/test/files/run/repl-javap-app.check
index db1f09b977..490860585c 100644
--- a/test/files/run/repl-javap-app.check
+++ b/test/files/run/repl-javap-app.check
@@ -6,14 +6,13 @@ scala> :javap -app MyApp$
public final void delayedEndpoint$MyApp$1();
Code:
Stack=2, Locals=1, Args_size=1
- 0: getstatic #61; //Field scala/Console$.MODULE$:Lscala/Console$;
- 3: ldc #63; //String Hello, delayed world.
- 5: invokevirtual #67; //Method scala/Console$.println:(Ljava/lang/Object;)V
- 8: return
+ 0: getstatic #61; //Field scala/Console$.MODULE$:Lscala/Console$;
+ 3: ldc #63; //String Hello, delayed world.
+ 5: invokevirtual #67; //Method scala/Console$.println:(Ljava/lang/Object;)V
+ 8: return
LocalVariableTable:
Start Length Slot Name Signature
0 9 0 this LMyApp$;
-}
scala>
#partest !java6
diff --git a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
index dfe3dae270..6c6fe2d515 100644
--- a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
+++ b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
@@ -5,8 +5,13 @@ object Test extends JavapTest {
|:javap -fun disktest/Foo.class
""".stripMargin
- override def yah(res: Seq[String]) = {
- def filtered = res filter (_ contains "public final class disktest.Foo")
- 1 == filtered.size
- }
+ override def yah(res: Seq[String]) =
+ // It's currently unknown why this test fails on Avian with
+ // “Failed: No anonfuns found.”, skip it for now. See SI-7630.
+ if (scala.tools.partest.utils.Properties.isAvian)
+ true
+ else {
+ def filtered = res filter (_ contains "public final class disktest.Foo")
+ 1 == filtered.size
+ }
}
diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala
new file mode 100644
index 0000000000..bbf46f2f19
--- /dev/null
+++ b/test/files/run/repl-trim-stack-trace.scala
@@ -0,0 +1,33 @@
+
+import scala.tools.partest.SessionTest
+
+// SI-7740
+object Test extends SessionTest {
+ def session =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def f = throw new Exception("Uh-oh")
+f: Nothing
+
+scala> f
+java.lang.Exception: Uh-oh
+ at .f(<console>:7)
+
+scala> def f = throw new Exception("")
+f: Nothing
+
+scala> f
+java.lang.Exception:
+ at .f(<console>:7)
+
+scala> def f = throw new Exception
+f: Nothing
+
+scala> f
+java.lang.Exception
+ at .f(<console>:7)
+
+scala> """
+
+}
diff --git a/test/files/run/stream_length.check b/test/files/run/stream_length.check
index d1068f3247..e4350aa741 100644
--- a/test/files/run/stream_length.check
+++ b/test/files/run/stream_length.check
@@ -1,6 +1,5 @@
#partest !avian
Length: 970299
-
#partest avian
!!!TEST SKIPPED!!!
See SI-7600 for further information.
diff --git a/test/files/run/string-extractor.check b/test/files/run/string-extractor.check
new file mode 100644
index 0000000000..47f3722c86
--- /dev/null
+++ b/test/files/run/string-extractor.check
@@ -0,0 +1,9 @@
+by
+BY
+oTheClown
+nope
+1: ob
+2: obby
+2: OBBY
+3: BOBO
+3: TomTomTheClown
diff --git a/test/files/run/string-extractor.scala b/test/files/run/string-extractor.scala
new file mode 100644
index 0000000000..c0fe911ff3
--- /dev/null
+++ b/test/files/run/string-extractor.scala
@@ -0,0 +1,60 @@
+final class StringExtract(val s: String) extends AnyVal {
+ def isEmpty = (s eq null) || (s == "")
+ def get = this
+ def length = s.length
+ def lengthCompare(n: Int) = s.length compare n
+ def apply(idx: Int): Char = s charAt idx
+ def head: Char = s charAt 0
+ def tail: String = s drop 1
+ def drop(n: Int): StringExtract = new StringExtract(s drop n)
+
+ override def toString = s
+}
+
+final class ThreeStringExtract(val s: String) extends AnyVal {
+ def isEmpty = (s eq null) || (s == "")
+ def get: (List[Int], Double, ThreeStringExtract) = ((s.length :: Nil, s.length.toDouble, this))
+ def length = s.length
+ def lengthCompare(n: Int) = s.length compare n
+ def apply(idx: Int): Char = s charAt idx
+ def head: Char = s charAt 0
+ def tail: String = s drop 1
+ def drop(n: Int): ThreeStringExtract = new ThreeStringExtract(s drop n)
+
+ override def toString = s
+}
+
+
+object Bippy {
+ def unapplySeq(x: Any): StringExtract = new StringExtract("" + x)
+}
+object TripleBippy {
+ def unapplySeq(x: Any): ThreeStringExtract = new ThreeStringExtract("" + x)
+}
+
+object Test {
+ def f(x: Any) = x match {
+ case Bippy('B' | 'b', 'O' | 'o', 'B' | 'b', xs @ _*) => xs
+ case _ => "nope"
+ }
+
+ def g(x: Any): String = x match {
+ case TripleBippy(3 :: Nil, 3.0, 'b', chars @ _*) => "1: " + chars
+ case TripleBippy(5 :: Nil, 5.0, 'b' | 'B', chars @ _*) => "2: " + chars
+ case TripleBippy(_, _, chars @ _*) => "3: " + chars
+ case _ => "nope"
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f("Bobby"))
+ println(f("BOBBY"))
+ println(f("BoBoTheClown"))
+ println(f("TomTomTheClown"))
+
+ println(g("bob"))
+ println(g("bobby"))
+ println(g("BOBBY"))
+ println(g("BOBO"))
+ println(g("TomTomTheClown"))
+ }
+}
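
Bippy and TripleBippy rely on the name-based extractor protocol: unapplySeq may return any type that offers isEmpty/get, and the value returned by get supplies the positional and sequence accessors (apply, lengthCompare, drop, ...) that StringExtract defines above. A hand-written sketch of roughly what the first case in f checks, using only those members:

object ManualBippy {
  def f(x: Any): Any = {
    val r = Bippy.unapplySeq(x)
    def either(c: Char, lo: Char, up: Char) = c == lo || c == up
    if (!r.isEmpty && r.get.lengthCompare(3) >= 0 &&
        either(r.get(0), 'b', 'B') && either(r.get(1), 'o', 'O') && either(r.get(2), 'b', 'B'))
      r.get.drop(3)                          // what xs @ _* binds to
    else
      "nope"
  }
}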
diff --git a/test/files/run/t5903a.check b/test/files/run/t5903a.check
new file mode 100644
index 0000000000..ce6efd812d
--- /dev/null
+++ b/test/files/run/t5903a.check
@@ -0,0 +1 @@
+(SomeTree,SomeTree)
diff --git a/test/files/run/t5903a.flags b/test/files/run/t5903a.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903a.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903a/Macros_1.scala b/test/files/run/t5903a/Macros_1.scala
new file mode 100644
index 0000000000..e82be0fc68
--- /dev/null
+++ b/test/files/run/t5903a/Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+ implicit class QuasiquoteInterpolation(c: StringContext) {
+ object nq {
+ def unapply(t: Tree) = macro QuasiquoteMacros.unapplyImpl
+ }
+ }
+}
+
+object QuasiquoteMacros {
+ def unapplyImpl(c: Context)(t: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = this
+ def _1 = SomeTree
+ def _2 = SomeTree
+ def unapply(t: Tree) = this
+ }.unapply($t)
+ """
+ }
+}
diff --git a/test/files/run/t5903a/Test_2.scala b/test/files/run/t5903a/Test_2.scala
new file mode 100644
index 0000000000..3a0b68b568
--- /dev/null
+++ b/test/files/run/t5903a/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import NewQuasiquotes._
+ SomeTree match {
+ case nq"$x + $y" => println((x, y))
+ }
+}
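
The anonymous class quoted in unapplyImpl is an ordinary name-based extractor result; the -Xlog-reflective-calls flag is there because its members are reached through a structural type. The same shape can be written by hand (a sketch, without macros):

object nqByHand {
  def unapply(t: Tree) = new {
    def isEmpty = false
    def get     = this
    def _1      = SomeTree
    def _2      = SomeTree
  }
}
// SomeTree match { case nqByHand(x, y) => println((x, y)) }  // prints (SomeTree,SomeTree)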
diff --git a/test/files/run/t5903b.check b/test/files/run/t5903b.check
new file mode 100644
index 0000000000..75891bc672
--- /dev/null
+++ b/test/files/run/t5903b.check
@@ -0,0 +1 @@
+oops
diff --git a/test/files/run/t5903b.flags b/test/files/run/t5903b.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903b.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903b/Macros_1.scala b/test/files/run/t5903b/Macros_1.scala
new file mode 100644
index 0000000000..c0124850b8
--- /dev/null
+++ b/test/files/run/t5903b/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = this
+ def _1 = 2
+ def unapply(x: Int) = this
+ override def toString = "oops"
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/t5903b/Test_2.scala b/test/files/run/t5903b/Test_2.scala
new file mode 100644
index 0000000000..0f6f80d327
--- /dev/null
+++ b/test/files/run/t5903b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 2 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/run/t5903c.check b/test/files/run/t5903c.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/run/t5903c.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5903c.flags b/test/files/run/t5903c.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903c.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903c/Macros_1.scala b/test/files/run/t5903c/Macros_1.scala
new file mode 100644
index 0000000000..f8baa2275b
--- /dev/null
+++ b/test/files/run/t5903c/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = 2
+ def unapply(x: Int) = this
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/t5903c/Test_2.scala b/test/files/run/t5903c/Test_2.scala
new file mode 100644
index 0000000000..0f6f80d327
--- /dev/null
+++ b/test/files/run/t5903c/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 2 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/run/t5903d.check b/test/files/run/t5903d.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/run/t5903d.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/t5903d.flags b/test/files/run/t5903d.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903d.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903d/Macros_1.scala b/test/files/run/t5903d/Macros_1.scala
new file mode 100644
index 0000000000..88d714e17b
--- /dev/null
+++ b/test/files/run/t5903d/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply(x: Int) = macro Macros.unapplyImpl
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ class Match(x: Int) {
+ def isEmpty = false
+ def get = x
+ }
+ def unapply(x: Int) = new Match(x)
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/t5903d/Test_2.scala b/test/files/run/t5903d/Test_2.scala
new file mode 100644
index 0000000000..95c717a9d8
--- /dev/null
+++ b/test/files/run/t5903d/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 42 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/run/t5923a/Macros_1.scala b/test/files/run/t5923a/Macros_1.scala
index 6d21362c4d..97076eb102 100644
--- a/test/files/run/t5923a/Macros_1.scala
+++ b/test/files/run/t5923a/Macros_1.scala
@@ -7,8 +7,46 @@ object C {
}
object Macros {
- def impl[T: c.WeakTypeTag](c: Context) = {
+ def impl[T](c: Context)(ttag: c.WeakTypeTag[T]) = {
import c.universe._
- reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+ val ttag0 = ttag;
+ {
+ // When we're expanding implicitly[C[Nothing]], the type inferencer will see
+ // that foo[T] returns C[T] and that we request an implicit of type C[Nothing].
+ //
+ // Then the type inferencer will try to match C[T] against C[Nothing] and infer everything it can infer
+ // from that match, but not more (e.g. if we were returning Iso[T, U] and the type we were looking at was Iso[Foo, L],
+ // we wouldn't want U to be auto-inferred to Nothing, as it usually happens with normal methods,
+ // but would rather want it to remain unknown, so that our macro could take a stab at inferring it:
+ // see the comments in this commit for more information).
+ //
+ // Equipped with common sense, in our case of C[T] and C[Nothing] we would expect T to be inferred as Nothing, and then we
+ // would expect T in the corresponding macro invocation to be Nothing. Unfortunately it is not that simple.
+ //
+ // Internally the type inferencer uses Nothing as a dummy value, which stands for "don't know how to
+ // infer this type parameter". In the Iso example, matching Iso[T, U] against Iso[Foo, L] would result in
+ // T being inferred as Foo and U being inferred as Nothing (!!). Then the type inferencer will think:
+ // "Aha! U ended up being Nothing. This means that I failed to infer it,
+ // therefore the result of my work is: T -> Foo, U -> still unknown".
+ //
+ // That's all very good and works very well until Nothing is a genuine result of type inference,
+ // as in our original example of inferring T in C[T] from C[Nothing]. In that case, the inferencer becomes confused
+ // and here in the macro implementation we get weakTypeOf[T] equal to some dummy type carrying a type parameter
+ // instead of Nothing.
+ //
+ // This eccentric behavior of the type inferencer is a long-standing problem in scalac,
+ // so the best one can do for now until it's fixed is to work around, manually converting
+ // suspicious T's into Nothings. Of course, this means that we would have to approximate,
+ // because there's no way to know whether having T here stands for a failed attempt to infer Nothing
+ // or for a failed attempt to infer anything, but at least we're in full control of making the best
+ // of this sad situation.
+ implicit def ttag: WeakTypeTag[T] = {
+ val tpe = ttag0.tpe
+ val sym = tpe.typeSymbol.asType
+ if (sym.isParameter && !sym.isSkolem) TypeTag.Nothing.asInstanceOf[TypeTag[T]]
+ else ttag0
+ }
+ reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+ }
}
} \ No newline at end of file
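
To make the workaround concrete, here is a self-contained sketch of an implicit materializer that applies the same guard; the Show trait and its names are illustrative, and only the tag adjustment mirrors the code above:

import scala.language.experimental.macros
import scala.reflect.macros.Context

trait Show[T] { def show: String }

object Show {
  implicit def materialize[T]: Show[T] = macro impl[T]

  def impl[T](c: Context)(ttag0: c.WeakTypeTag[T]): c.Expr[Show[T]] = {
    import c.universe._
    // Same guard as above: a bare, non-skolem type parameter at this point means
    // the inferencer really did infer Nothing, so substitute the Nothing tag.
    implicit val ttag: WeakTypeTag[T] = {
      val sym = ttag0.tpe.typeSymbol.asType
      if (sym.isParameter && !sym.isSkolem) TypeTag.Nothing.asInstanceOf[TypeTag[T]]
      else ttag0
    }
    reify(new Show[T] { def show = c.literal(weakTypeOf[T].toString).splice })
  }
}
// With the guard in place, implicitly[Show[Nothing]].show reports "Nothing"
// instead of a leftover dummy type parameter.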
diff --git a/test/files/run/t5923c.check b/test/files/run/t5923c.check
new file mode 100644
index 0000000000..bed7429108
--- /dev/null
+++ b/test/files/run/t5923c.check
@@ -0,0 +1 @@
+(23,foo,true)
diff --git a/test/files/run/t5923c/Macros_1.scala b/test/files/run/t5923c/Macros_1.scala
new file mode 100644
index 0000000000..0b7a3399e2
--- /dev/null
+++ b/test/files/run/t5923c/Macros_1.scala
@@ -0,0 +1,39 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+trait Iso[T, U] {
+ def to(t : T) : U
+ // def from(u : U) : T
+}
+
+object Iso {
+ implicit def materializeIso[T, U]: Iso[T, U] = macro impl[T, U]
+ def impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Context): c.Expr[Iso[T, U]] = {
+ import c.universe._
+ import definitions._
+ import Flag._
+
+ val sym = c.weakTypeOf[T].typeSymbol
+ if (!sym.isClass || !sym.asClass.isCaseClass) c.abort(c.enclosingPosition, s"$sym is not a case class")
+ val fields = sym.typeSignature.declarations.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+
+ def mkTpt() = {
+ val core = Ident(TupleClass(fields.length) orElse UnitClass)
+ if (fields.length == 0) core
+ else AppliedTypeTree(core, fields map (f => TypeTree(f.typeSignature)))
+ }
+
+ def mkFrom() = {
+ if (fields.length == 0) Literal(Constant(Unit))
+ else Apply(Ident(newTermName("Tuple" + fields.length)), fields map (f => Select(Ident(newTermName("f")), newTermName(f.name.toString.trim))))
+ }
+
+ val evidenceClass = ClassDef(Modifiers(FINAL), newTypeName("$anon"), List(), Template(
+ List(AppliedTypeTree(Ident(newTypeName("Iso")), List(Ident(sym), mkTpt()))),
+ emptyValDef,
+ List(
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+ DefDef(Modifiers(), newTermName("to"), List(), List(List(ValDef(Modifiers(PARAM), newTermName("f"), Ident(sym), EmptyTree))), TypeTree(), mkFrom()))))
+ c.Expr[Iso[T, U]](Block(List(evidenceClass), Apply(Select(New(Ident(newTypeName("$anon"))), nme.CONSTRUCTOR), List())))
+ }
+}
diff --git a/test/files/run/t5923c/Test_2.scala b/test/files/run/t5923c/Test_2.scala
new file mode 100644
index 0000000000..a00f4ed7db
--- /dev/null
+++ b/test/files/run/t5923c/Test_2.scala
@@ -0,0 +1,12 @@
+// see the comments for macroExpandApply.onDelayed for an explanation of what's tested here
+object Test extends App {
+ case class Foo(i: Int, s: String, b: Boolean)
+ def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+
+ {
+ val equiv = foo(Foo(23, "foo", true))
+ def typed[T](t: => T) {}
+ typed[(Int, String, Boolean)](equiv)
+ println(equiv)
+ }
+} \ No newline at end of file
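
For Test_2's Foo, the evidence that materializeIso assembles corresponds to this hand-written instance (a sketch of the generated shape, not compiler output):

object HandWrittenIso {
  case class Foo(i: Int, s: String, b: Boolean)

  implicit val fooIso: Iso[Foo, (Int, String, Boolean)] =
    new Iso[Foo, (Int, String, Boolean)] {
      def to(f: Foo) = (f.i, f.s, f.b)     // a Tuple3 of the case accessors, as mkFrom() builds
    }

  def demo = implicitly[Iso[Foo, (Int, String, Boolean)]].to(Foo(23, "foo", true))  // (23,foo,true)
}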
diff --git a/test/files/run/macro-toplevel-companion-a.check b/test/files/run/t5923d.check
index e69de29bb2..e69de29bb2 100644
--- a/test/files/run/macro-toplevel-companion-a.check
+++ b/test/files/run/t5923d.check
diff --git a/test/files/run/t5923d/Macros_1.scala b/test/files/run/t5923d/Macros_1.scala
new file mode 100644
index 0000000000..f32d1af704
--- /dev/null
+++ b/test/files/run/t5923d/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+trait MappedRow
+trait RowMapper[T <: MappedRow]
+object RowMapper {
+ implicit def mapper[T <: MappedRow]: RowMapper[T] = macro impl[T]
+ def impl[T <: MappedRow : c.WeakTypeTag](c: Context) = c.universe.reify(new RowMapper[T]{})
+} \ No newline at end of file
diff --git a/test/files/run/t5923d/Test_2.scala b/test/files/run/t5923d/Test_2.scala
new file mode 100644
index 0000000000..6be10227c2
--- /dev/null
+++ b/test/files/run/t5923d/Test_2.scala
@@ -0,0 +1,7 @@
+class RowA extends MappedRow
+class RowB extends MappedRow
+
+object Test extends App {
+ implicitly[RowMapper[RowA]]
+ implicitly[RowMapper[RowB]]
+} \ No newline at end of file
diff --git a/test/files/run/t6331b.scala b/test/files/run/t6331b.scala
index 3e09965ee8..3a560ea64b 100644
--- a/test/files/run/t6331b.scala
+++ b/test/files/run/t6331b.scala
@@ -1,4 +1,4 @@
-import scala.tools.partest.trace
+import scala.tools.partest.Util.trace
import scala.util.control.Exception.allCatch
diff --git a/test/files/run/t6392b.check b/test/files/run/t6392b.check
index 2afc48495f..1ccfced1c6 100644
--- a/test/files/run/t6392b.check
+++ b/test/files/run/t6392b.check
@@ -1 +1 @@
-ModuleDef(Modifiers(), TermName("C"), Template(List(Select(Ident(scala#PK), TypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(TypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
+ModuleDef(Modifiers(), TermName("C")#MOD, Template(List(Select(Ident(scala#PK), TypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR#PCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(TypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
diff --git a/test/files/run/t6507.check b/test/files/run/t6507.check
new file mode 100644
index 0000000000..336db0aeaf
--- /dev/null
+++ b/test/files/run/t6507.check
@@ -0,0 +1,26 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> :silent
+Switched off result printing.
+
+scala> class A { override def toString() = { println("!"); "A" } }
+
+scala> val a = new A
+
+scala> var b: A = new A
+
+scala> b = new A
+
+scala> new A
+
+scala> :silent
+Switched on result printing.
+
+scala> res0
+!
+res1: A = A
+
+scala>
diff --git a/test/files/run/t6507.scala b/test/files/run/t6507.scala
new file mode 100644
index 0000000000..25f0a73e04
--- /dev/null
+++ b/test/files/run/t6507.scala
@@ -0,0 +1,14 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+:silent
+class A { override def toString() = { println("!"); "A" } }
+val a = new A
+var b: A = new A
+b = new A
+new A
+:silent
+res0
+"""
+}
diff --git a/test/files/run/t6989.check b/test/files/run/t6989.check
index 8943792115..43d4bbaf02 100644
--- a/test/files/run/t6989.check
+++ b/test/files/run/t6989.check
@@ -101,6 +101,12 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
+sym = constructor $PrivateJavaClass, signature = ()JavaClass_1.this.$PrivateJavaClass, owner = class $PrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = value this$0, signature = foo.JavaClass_1, owner = class $PrivateJavaClass
isPrivate = false
isProtected = false
@@ -119,6 +125,12 @@ isProtected = true
isPublic = false
privateWithin = package foo
============
+sym = constructor $ProtectedJavaClass, signature = ()JavaClass_1.this.$ProtectedJavaClass, owner = class $ProtectedJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = value this$0, signature = foo.JavaClass_1, owner = class $ProtectedJavaClass
isPrivate = false
isProtected = false
@@ -173,6 +185,12 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
+sym = constructor PrivateStaticJavaClass, signature = ()foo.JavaClass_1.PrivateStaticJavaClass, owner = class PrivateStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = object PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = object JavaClass_1
isPrivate = true
isProtected = false
@@ -185,6 +203,12 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
+sym = constructor ProtectedStaticJavaClass, signature = ()foo.JavaClass_1.ProtectedStaticJavaClass, owner = class ProtectedStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = object ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = object JavaClass_1
isPrivate = true
isProtected = false
diff --git a/test/files/run/t7214.scala b/test/files/run/t7214.scala
index ff1ea8082d..15c2c24fa0 100644
--- a/test/files/run/t7214.scala
+++ b/test/files/run/t7214.scala
@@ -25,7 +25,7 @@ class Crash {
def unapply(a: Alias): Option[Any] = None
}
(t: Any) match {
- case Extractor() =>
+ case Extractor(_) =>
case _ =>
}
diff --git a/test/files/run/t7265.scala b/test/files/run/t7265.scala
deleted file mode 100644
index c556930303..0000000000
--- a/test/files/run/t7265.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-import scala.util.Properties._
-
-object Test extends App {
-
- setProp("java.specification.version", "1.7")
-
- assert( isJavaAtLeast("1.5"))
- assert( isJavaAtLeast("1.6"))
- assert( isJavaAtLeast("1.7"))
- assert(!isJavaAtLeast("1.8"))
- assert(!isJavaAtLeast("1.71"))
-
- failing(isJavaAtLeast("1.a"))
- failing(isJavaAtLeast("1"))
- failing(isJavaAtLeast(""))
- failing(isJavaAtLeast("."))
- failing(isJavaAtLeast(".5"))
- failing(isJavaAtLeast("1.7.1"))
-
- def failing(u: =>Unit) = try {
- u
- assert(false, "Expected Exception")
- } catch {
- case _: NumberFormatException =>
- }
-}
diff --git a/test/files/run/t7331a.check b/test/files/run/t7331a.check
new file mode 100644
index 0000000000..a59b400344
--- /dev/null
+++ b/test/files/run/t7331a.check
@@ -0,0 +1,2 @@
+source-<toolbox>,line-1,offset=0
+2 \ No newline at end of file
diff --git a/test/files/run/t7331a.scala b/test/files/run/t7331a.scala
new file mode 100644
index 0000000000..1851945e63
--- /dev/null
+++ b/test/files/run/t7331a.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val tree = tb.parse("x")
+ println(tree.pos)
+ println(tree.pos.source.content.length)
+} \ No newline at end of file
diff --git a/test/files/run/t7331b.check b/test/files/run/t7331b.check
new file mode 100644
index 0000000000..7034a95a3f
--- /dev/null
+++ b/test/files/run/t7331b.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+')' expected but eof found.
diff --git a/test/files/run/t7331b.scala b/test/files/run/t7331b.scala
new file mode 100644
index 0000000000..052656d11b
--- /dev/null
+++ b/test/files/run/t7331b.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ try tb.parse("f(x")
+ catch {
+ case ToolBoxError(msg, _) => println(msg)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t7331c.check b/test/files/run/t7331c.check
new file mode 100644
index 0000000000..fd3ac1d9ef
--- /dev/null
+++ b/test/files/run/t7331c.check
@@ -0,0 +1,3 @@
+ClassDef(Modifiers(), TypeName("C"), List(), Template(List(Select(Ident(scala), TypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))))))
+source-<toolbox>,line-1,offset=6
+NoPosition
diff --git a/test/files/run/t7331c.scala b/test/files/run/t7331c.scala
new file mode 100644
index 0000000000..75873afcd0
--- /dev/null
+++ b/test/files/run/t7331c.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val tree = tb.parse("class C").asInstanceOf[ClassDef]
+ println(showRaw(tree))
+ println(tree.pos)
+ println(tree.impl.self.pos)
+} \ No newline at end of file
diff --git a/test/files/run/t7407.check b/test/files/run/t7407.check
new file mode 100644
index 0000000000..e965047ad7
--- /dev/null
+++ b/test/files/run/t7407.check
@@ -0,0 +1 @@
+Hello
diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags
new file mode 100644
index 0000000000..c8547a27dc
--- /dev/null
+++ b/test/files/run/t7407.flags
@@ -0,0 +1 @@
+-Ynooptimise -Ybackend:GenBCode
diff --git a/test/files/run/t7407.scala b/test/files/run/t7407.scala
new file mode 100644
index 0000000000..cf67602126
--- /dev/null
+++ b/test/files/run/t7407.scala
@@ -0,0 +1,11 @@
+// SI-7407
+object Test {
+
+ def main(args: Array[String]) { println(foo) }
+
+ def foo: String = {
+ try return "Hello" finally 10 match {case x => ()}
+ }
+
+}
+
diff --git a/test/files/run/t7407b.check b/test/files/run/t7407b.check
new file mode 100644
index 0000000000..f30294447b
--- /dev/null
+++ b/test/files/run/t7407b.check
@@ -0,0 +1,2 @@
+Hello
+abc
diff --git a/test/files/run/t7407b.flags b/test/files/run/t7407b.flags
new file mode 100644
index 0000000000..c8547a27dc
--- /dev/null
+++ b/test/files/run/t7407b.flags
@@ -0,0 +1 @@
+-Ynooptimise -Ybackend:GenBCode
diff --git a/test/files/run/t7407b.scala b/test/files/run/t7407b.scala
new file mode 100644
index 0000000000..b0c00878b5
--- /dev/null
+++ b/test/files/run/t7407b.scala
@@ -0,0 +1,20 @@
+object Test {
+
+ def main(args: Array[String]) {
+ println(foo(true))
+ println(foo(false))
+ }
+
+ def foo(b: Boolean): String = {
+ try {
+ if(b)
+ return "Hello"
+ else
+ "abc"
+ } finally {
+ 10 match {case x => ()}
+ }
+ }
+
+}
+
diff --git a/test/files/run/t7510.check b/test/files/run/t7510.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t7510.check
diff --git a/test/files/run/t7510/Ann_1.java b/test/files/run/t7510/Ann_1.java
new file mode 100644
index 0000000000..c8c5b2035f
--- /dev/null
+++ b/test/files/run/t7510/Ann_1.java
@@ -0,0 +1,4 @@
+package foo;
+
+public @interface Ann_1 {
+} \ No newline at end of file
diff --git a/test/files/run/t7510/Test_2.scala b/test/files/run/t7510/Test_2.scala
new file mode 100644
index 0000000000..7d7a95e0f2
--- /dev/null
+++ b/test/files/run/t7510/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ tb.compile(tb.parse("@foo.Ann_1 class C"))
+}
+
diff --git a/test/files/run/t7715.check b/test/files/run/t7715.check
new file mode 100644
index 0000000000..592d7fe2ea
--- /dev/null
+++ b/test/files/run/t7715.check
@@ -0,0 +1,3 @@
+6
+4
+4
diff --git a/test/files/run/t7715.scala b/test/files/run/t7715.scala
new file mode 100644
index 0000000000..0ad3913016
--- /dev/null
+++ b/test/files/run/t7715.scala
@@ -0,0 +1,24 @@
+
+import PartialFunction.cond
+import util._
+
+object Test extends App {
+
+ object I { def unapply(x: String): Option[Int] = Try(x.toInt).toOption }
+ implicit class RX(val sc: StringContext) {
+ def rx = sc.parts.mkString("(.+)").r
+ }
+
+ Console println ("2 by 4" match {
+ case rx"${I(a)} by ${I(b)}" => a+b
+ case _ => -1
+ })
+ Console println ("2 by 4" match {
+ case rx"${_} by ${I(b)}" => b // pattern placeholder
+ case _ => -1
+ })
+ Console println ("2 by 4" match {
+ case rx"$_ by ${I(b)}" => b // is permitted this way, too
+ case _ => -1
+ })
+}
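
The rx interpolator glues the literal parts together with "(.+)" and turns the result into a Regex, so the interpolated pattern is ordinary regex extraction with the I extractor applied to each captured group. The first match, spelled out by hand (a sketch):

object RxByHand {
  import scala.util.Try
  object I { def unapply(x: String): Option[Int] = Try(x.toInt).toOption }

  val r = "(.+) by (.+)".r                 // what rx"${...} by ${...}" builds above

  def demo = "2 by 4" match {
    case r(I(a), I(b)) => a + b            // 6, as in the check file
    case _             => -1
  }
}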
diff --git a/test/files/run/t7763.scala b/test/files/run/t7763.scala
new file mode 100644
index 0000000000..638077e64a
--- /dev/null
+++ b/test/files/run/t7763.scala
@@ -0,0 +1,20 @@
+object Test {
+ class A; class B
+ def main(args: Array[String]) {
+ def noExpectedType() {
+ a().asInstanceOf[B] // cast elided!
+ }
+ def withExpectedType(): B = {
+ a().asInstanceOf[B]
+ }
+ def test(a: => Any) = try {
+ a
+ sys.error("no CCE!")
+ } catch {case _: ClassCastException => }
+
+ test(noExpectedType())
+ test(withExpectedType())
+ }
+
+ def a(): Object = new A
+}
diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check
index 10384ac46e..7607921856 100644
--- a/test/files/run/tailcalls.check
+++ b/test/files/run/tailcalls.check
@@ -52,29 +52,28 @@ test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
test FancyTailCalls.differentInstance was successful
test PolyObject.tramp was successful
-
#partest avian
-test Object .f was successful
-test Final .f was successful
-test Class .f was successful
+test Object .f was successful
+test Final .f was successful
+test Class .f was successful
test SubClass .f was successful
-test Sealed .f was successful
+test Sealed .f was successful
test SubSealed.f was successful
-test O .f was successful
-test c .f was successful
-test O.O .f was successful
-test O.c .f was successful
-test c.O .f was successful
-test c.c .f was successful
-test O.O.O .f was successful
-test O.O.c .f was successful
-test O.c.O .f was successful
-test O.c.c .f was successful
-test c.O.O .f was successful
-test c.O.c .f was successful
-test c.c.O .f was successful
-test c.c.c .f was successful
+test O .f was successful
+test c .f was successful
+test O.O .f was successful
+test O.c .f was successful
+test c.O .f was successful
+test c.c .f was successful
+test O.O.O .f was successful
+test O.O.c .f was successful
+test O.c.O .f was successful
+test O.c.c .f was successful
+test c.O.O .f was successful
+test c.O.c .f was successful
+test c.c.O .f was successful
+test c.c.c .f was successful
test O.O.O.O.f was successful
test O.O.O.c.f was successful
test O.O.c.O.f was successful
@@ -106,4 +105,4 @@ test TailCall.b1 was successful
test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
test FancyTailCalls.differentInstance was successful
-test PolyObject.tramp was successful \ No newline at end of file
+test PolyObject.tramp was successful
diff --git a/test/files/run/toolbox_current_run_compiles.check b/test/files/run/toolbox_current_run_compiles.check
new file mode 100644
index 0000000000..da29283aaa
--- /dev/null
+++ b/test/files/run/toolbox_current_run_compiles.check
@@ -0,0 +1,2 @@
+true
+false
diff --git a/test/files/run/toolbox_current_run_compiles.scala b/test/files/run/toolbox_current_run_compiles.scala
new file mode 100644
index 0000000000..b48c998e64
--- /dev/null
+++ b/test/files/run/toolbox_current_run_compiles.scala
@@ -0,0 +1,28 @@
+package pkg {
+ import scala.reflect.macros.Context
+ import scala.language.experimental.macros
+
+ object Macros {
+ def impl[T: c.WeakTypeTag](c: Context) = {
+ import c.universe._
+ val sym = c.weakTypeOf[T].typeSymbol
+ val g = c.universe.asInstanceOf[scala.tools.nsc.Global]
+ c.Expr[Boolean](Literal(Constant(g.currentRun.compiles(sym.asInstanceOf[g.Symbol]))))
+ }
+ def compiles[T] = macro impl[T]
+ }
+}
+
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val cm = ru.runtimeMirror(getClass.getClassLoader)
+ val toolbox = cm.mkToolBox()
+ toolbox.eval(toolbox.parse("""{
+ class C
+ println(pkg.Macros.compiles[C])
+ println(pkg.Macros.compiles[Object])
+ }"""))
+} \ No newline at end of file
diff --git a/test/files/run/value-class-extractor-2.check b/test/files/run/value-class-extractor-2.check
new file mode 100644
index 0000000000..5903b996b6
--- /dev/null
+++ b/test/files/run/value-class-extractor-2.check
@@ -0,0 +1,8 @@
+String
+List
+Int
+Something else
+String
+List
+Int
+Something else
diff --git a/test/files/run/value-class-extractor-2.scala b/test/files/run/value-class-extractor-2.scala
new file mode 100644
index 0000000000..d776c35eda
--- /dev/null
+++ b/test/files/run/value-class-extractor-2.scala
@@ -0,0 +1,108 @@
+final class Opt[+A >: Null](val value: A) extends AnyVal {
+ def get: A = value
+ def isEmpty = value == null
+}
+object Opt {
+ final val None = new Opt[Null](null)
+ def apply[A >: Null](value: A): Opt[A] = if (value == null) None else new Opt[A](value)
+}
+
+object ValueOpt {
+ // public java.lang.String unapply(java.lang.Object);
+ // 0: aload_1
+ // 1: instanceof #16 // class java/lang/String
+ // 4: ifeq 21
+ // 7: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 10: astore_2
+ // 11: ldc #23 // String String
+ // 13: checkcast #16 // class java/lang/String
+ // 16: astore 5
+ // 18: goto 71
+ // 21: aload_1
+ // 22: instanceof #25 // class scala/collection/immutable/List
+ // 25: ifeq 42
+ // 28: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 31: astore_3
+ // 32: ldc #27 // String List
+ // 34: checkcast #16 // class java/lang/String
+ // 37: astore 5
+ // 39: goto 71
+ // 42: aload_1
+ // 43: instanceof #29 // class java/lang/Integer
+ // 46: ifeq 64
+ // 49: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 52: astore 4
+ // 54: ldc #31 // String Int
+ // 56: checkcast #16 // class java/lang/String
+ // 59: astore 5
+ // 61: goto 71
+ // 64: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 67: pop
+ // 68: aconst_null
+ // 69: astore 5
+ // 71: aload 5
+ // 73: areturn
+ def unapply(x: Any): Opt[String] = x match {
+ case _: String => Opt("String")
+ case _: List[_] => Opt("List")
+ case _: Int => Opt("Int")
+ case _ => Opt.None
+ }
+}
+object RegularOpt {
+ // public scala.Option<java.lang.String> unapply(java.lang.Object);
+ // 0: aload_1
+ // 1: instanceof #16 // class java/lang/String
+ // 4: ifeq 20
+ // 7: new #18 // class scala/Some
+ // 10: dup
+ // 11: ldc #20 // String String
+ // 13: invokespecial #23 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 16: astore_2
+ // 17: goto 64
+ // 20: aload_1
+ // 21: instanceof #25 // class scala/collection/immutable/List
+ // 24: ifeq 40
+ // 27: new #18 // class scala/Some
+ // 30: dup
+ // 31: ldc #27 // String List
+ // 33: invokespecial #23 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 36: astore_2
+ // 37: goto 64
+ // 40: aload_1
+ // 41: instanceof #29 // class java/lang/Integer
+ // 44: ifeq 60
+ // 47: new #18 // class scala/Some
+ // 50: dup
+ // 51: ldc #31 // String Int
+ // 53: invokespecial #23 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 56: astore_2
+ // 57: goto 64
+ // 60: getstatic #36 // Field scala/None$.MODULE$:Lscala/None$;
+ // 63: astore_2
+ // 64: aload_2
+ // 65: areturn
+ def unapply(x: Any): Option[String] = x match {
+ case _: String => Some("String")
+ case _: List[_] => Some("List")
+ case _: Int => Some("Int")
+ case _ => None
+ }
+}
+
+object Test {
+ def f(x: Any) = x match {
+ case ValueOpt(s) => s
+ case _ => "Something else"
+ }
+ def g(x: Any) = x match {
+ case RegularOpt(s) => s
+ case _ => "Something else"
+ }
+ val xs = List("abc", Nil, 5, Test)
+
+ def main(args: Array[String]): Unit = {
+ xs map f foreach println
+ xs map g foreach println
+ }
+}
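
Because Opt is a value class whose result only needs isEmpty/get, the success path in f allocates no Some, which is what the two bytecode listings above contrast. What the match reduces to, written out by hand (a sketch):

object ValueOptByHand {
  def f(x: Any): String = {
    val r = ValueOpt.unapply(x)            // an Opt[String]: at runtime just a String or null
    if (!r.isEmpty) r.get else "Something else"
  }

  def demo = List("abc", Nil, 5, ValueOptByHand) map f   // List(String, List, Int, Something else)
}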
diff --git a/test/files/run/value-class-extractor-seq.check b/test/files/run/value-class-extractor-seq.check
new file mode 100644
index 0000000000..84552a7aa5
--- /dev/null
+++ b/test/files/run/value-class-extractor-seq.check
@@ -0,0 +1,3 @@
+Bip(1, 2, 3)
+Bip(1, 2, c @ Array(3, 4, 5): _*)
+class [I
diff --git a/test/files/run/value-class-extractor-seq.scala b/test/files/run/value-class-extractor-seq.scala
new file mode 100644
index 0000000000..f17a5314f2
--- /dev/null
+++ b/test/files/run/value-class-extractor-seq.scala
@@ -0,0 +1,59 @@
+import scala.runtime.ScalaRunTime.stringOf
+
+final class ArrayOpt[T](val xs: Array[T]) extends AnyVal {
+ def isEmpty = xs == null
+ def get = xs
+}
+
+object Bip {
+ def mkInts(xs: Array[Short]) = xs map (_.toInt)
+ def unapplySeq(x: Any): ArrayOpt[Int] = x match {
+ case xs: Array[Int] => new ArrayOpt(xs)
+ case xs: Array[Short] => new ArrayOpt(mkInts(xs))
+ case _ => new ArrayOpt(null)
+ }
+ // public int[] unapplySeq(java.lang.Object);
+ // 0: aload_1
+ // 1: astore_2
+ // 2: aload_2
+ // 3: instanceof #52 // class "[I"
+ // 6: ifeq 20
+ // 9: aload_2
+ // 10: checkcast #52 // class "[I"
+ // 13: astore_3
+ // 14: aload_3
+ // 15: astore 4
+ // 17: goto 47
+ // 20: aload_2
+ // 21: instanceof #58 // class "[S"
+ // 24: ifeq 44
+ // 27: aload_2
+ // 28: checkcast #58 // class "[S"
+ // 31: astore 5
+ // 33: aload_0
+ // 34: aload 5
+ // 36: invokevirtual #60 // Method mkInts:([S)[I
+ // 39: astore 4
+ // 41: goto 47
+ // 44: aconst_null
+ // 45: astore 4
+ // 47: aload 4
+ // 49: areturn
+}
+
+object Test {
+ def f(x: Any) = x match {
+ case Bip(a, b, c) => s"Bip($a, $b, $c)"
+ case Bip(a, b, c @ _*) => s"Bip($a, $b, c @ ${stringOf(c)}: _*)"
+ case _ => "" + x.getClass
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f(Array[Int](1,2,3)))
+ println(f(Array[Int](1,2,3,4,5)))
+ println(f(Array[Int](1)))
+ }
+ // Bip(1, 2, 3)
+ // Bip(1, 2, c @ [I@782be20e: _*)
+ // class [I
+}
diff --git a/test/files/run/value-class-extractor.check b/test/files/run/value-class-extractor.check
new file mode 100644
index 0000000000..e16447118c
--- /dev/null
+++ b/test/files/run/value-class-extractor.check
@@ -0,0 +1,9 @@
+'a'
+'b'
+'c'
+NoChar
+Some(a)
+Some(b)
+Some(c)
+None
+9
diff --git a/test/files/run/value-class-extractor.scala b/test/files/run/value-class-extractor.scala
new file mode 100644
index 0000000000..3eaffa0c23
--- /dev/null
+++ b/test/files/run/value-class-extractor.scala
@@ -0,0 +1,91 @@
+final class NonNullChar(val get: Char) extends AnyVal {
+ def isEmpty = get == 0.toChar
+ override def toString = if (isEmpty) "NoChar" else s"'$get'"
+}
+object NonNullChar {
+ @inline final val None = new NonNullChar(0.toChar)
+}
+
+final class SomeProduct extends Product3[String, Int, List[String]] {
+ def canEqual(x: Any) = x.isInstanceOf[SomeProduct]
+ def _1 = "abc"
+ def _2 = 5
+ def _3 = List("bippy")
+ def isEmpty = false
+ def get = this
+}
+object SomeProduct {
+ def unapply(x: SomeProduct) = x
+}
+
+object Test {
+ def prod(x: SomeProduct): Int = x match {
+ case SomeProduct(x, y, z) => x.length + y + z.length
+ case _ => -1
+ }
+
+ def f(x: Char): NonNullChar = x match {
+ case 'a' => new NonNullChar('a')
+ case 'b' => new NonNullChar('b')
+ case 'c' => new NonNullChar('c')
+ case _ => NonNullChar.None
+ }
+ // public char f(char);
+ // 0: iload_1
+ // 1: tableswitch { // 97 to 99
+ // 97: 47
+ // 98: 42
+ // 99: 37
+ // default: 28
+ // }
+ // 28: getstatic #19 // Field NonNullChar$.MODULE$:LNonNullChar$;
+ // 31: invokevirtual #23 // Method NonNullChar$.None:()C
+ // 34: goto 49
+ // 37: bipush 99
+ // 39: goto 49
+ // 42: bipush 98
+ // 44: goto 49
+ // 47: bipush 97
+ // 49: ireturn
+ def g(x: Char): Option[Char] = x match {
+ case 'a' => Some('a')
+ case 'b' => Some('b')
+ case 'c' => Some('c')
+ case _ => None
+ }
+ // public scala.Option<java.lang.Object> g(char);
+ // 0: iload_1
+ // 1: tableswitch { // 97 to 99
+ // 97: 64
+ // 98: 49
+ // 99: 34
+ // default: 28
+ // }
+ // 28: getstatic #33 // Field scala/None$.MODULE$:Lscala/None$;
+ // 31: goto 76
+ // 34: new #35 // class scala/Some
+ // 37: dup
+ // 38: bipush 99
+ // 40: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+ // 43: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 46: goto 76
+ // 49: new #35 // class scala/Some
+ // 52: dup
+ // 53: bipush 98
+ // 55: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+ // 58: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 61: goto 76
+ // 64: new #35 // class scala/Some
+ // 67: dup
+ // 68: bipush 97
+ // 70: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+ // 73: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 76: areturn
+ def main(args: Array[String]): Unit = {
+ "abcd" foreach (ch => println(f(ch)))
+ "abcd" foreach (ch => println(g(ch)))
+ println(prod(new SomeProduct))
+ }
+}
+
+
diff --git a/test/files/scalacheck/CheckCollections.scala b/test/files/scalacheck/CheckCollections.scala
new file mode 100644
index 0000000000..108040b900
--- /dev/null
+++ b/test/files/scalacheck/CheckCollections.scala
@@ -0,0 +1,59 @@
+import org.scalacheck.{ ConsoleReporter, Properties }
+import org.scalacheck.Prop._
+
+import scala.reflect.internal.util.Collections._
+
+object Test extends Properties("reflect.internal.util.Collections") {
+ def map2ConserveOld[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
+ if (xs.isEmpty || ys.isEmpty) xs
+ else {
+ val x1 = f(xs.head, ys.head)
+ val xs1 = map2Conserve(xs.tail, ys.tail)(f)
+ if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
+ else x1 :: xs1
+ }
+
+ val testfun: (String, Int) => String = { case(x, y) =>
+ x.toLowerCase + y.toString
+ }
+ val testid: (String, Int) => String = { case (x, y) => x }
+
+ val prop1_map2Conserve = forAll { (xs: List[String], ys: List[Int]) =>
+ val res = map2Conserve(xs, ys)(testid)
+ res eq xs
+ }
+
+ val prop2_map2Conserve = forAll { (xs: List[String], ys: List[Int]) =>
+ map2Conserve(xs, ys)(testid) == map2ConserveOld(xs, ys)(testid) &&
+ map2Conserve(xs, ys)(testfun) == map2ConserveOld(xs, ys)(testfun)
+ }
+
+ def checkStackOverflow() {
+ var xs: List[String] = Nil
+ var ys: List[Int] = Nil
+ for (i <- 0 until 250000) {
+ xs = "X" :: xs
+ ys = 1 :: ys
+ }
+ map2Conserve(xs, ys){ case(x, y) => x.toLowerCase + y.toString }
+ }
+
+
+ val tests = List(
+ ("map2Conserve(identity)", prop1_map2Conserve),
+ ("map2Conserve == old impl", prop2_map2Conserve)
+ )
+
+ checkStackOverflow()
+
+ for {
+ (label, prop) <- tests
+ } property(label) = prop
+
+ import org.scalacheck.{ Test => STest }
+
+ def runTests() =
+ STest.checkProperties(
+ STest.Params(testCallback = ConsoleReporter(0)), this)
+
+}
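
prop1 pins down the "conserve" contract: when the mapping function returns every element unchanged, map2Conserve must return the original list instance rather than a copy. A small illustration, assuming the same compiler-internal classpath as this test:

object Map2ConserveDemo {
  import scala.reflect.internal.util.Collections.map2Conserve

  def demo(): Unit = {
    val xs = List("A", "B", "C")
    val ys = List(1, 2, 3)
    println(map2Conserve(xs, ys)((x, _) => x) eq xs)        // true: nothing changed, list reused
    println(map2Conserve(xs, ys)((x, y) => x + y) eq xs)    // false: elements changed, new list
  }
}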
diff --git a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
index 03f8aa58d3..23b6a5fbdb 100644
--- a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
+++ b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
@@ -91,7 +91,7 @@ trait ArbitraryTreesAndNames {
yield CompoundTypeTree(templ)
def genDefDef(size: Int) =
- for(mods <- genModifiers; name <- genName;
+ for(mods <- genModifiers; name <- genTermName;
tpt <- genTree(size -1); rhs <- genTree(size - 1);
tparams <- smallList(size, genTypeDef(size - 1));
vparamss <- smallList(size, smallList(size, genValDef(size - 1))))
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala
index 044a332a04..b9e69e0e88 100644
--- a/test/files/scalacheck/quasiquotes/ErrorProps.scala
+++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala
@@ -188,12 +188,6 @@ object ErrorProps extends QuasiquoteProperties("errors") {
val q"$m1 $m2 def foo" = EmptyTree
""")
- property("can't parse more than one casedef") = fails(
- "Can't parse more than one casedef, consider generating a match tree instead",
- """
- cq"1 => 2 case 3 => 5"
- """)
-
// // Make sure a nice error is reported in this case
// { import Flag._; val mods = NoMods; q"lazy $mods val x: Int" }
} \ No newline at end of file
diff --git a/test/files/scalap/abstractClass.check b/test/files/scalap/abstractClass.check
index ef1daac23d..95e80ac3c9 100644
--- a/test/files/scalap/abstractClass.check
+++ b/test/files/scalap/abstractClass.check
@@ -1,4 +1,4 @@
abstract class AbstractClass extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo : scala.Predef.String
+ def foo: scala.Predef.String
}
diff --git a/test/files/scalap/abstractMethod.check b/test/files/scalap/abstractMethod.check
index 40fa02d408..0d0b1b7421 100644
--- a/test/files/scalap/abstractMethod.check
+++ b/test/files/scalap/abstractMethod.check
@@ -1,5 +1,5 @@
trait AbstractMethod extends scala.AnyRef {
- def $init$() : scala.Unit = { /* compiled code */ }
- def arity : scala.Int
- def isCool : scala.Boolean = { /* compiled code */ }
+ def $init$(): scala.Unit = { /* compiled code */ }
+ def arity: scala.Int
+ def isCool: scala.Boolean = { /* compiled code */ }
}
diff --git a/test/files/scalap/caseClass.check b/test/files/scalap/caseClass.check
index 7d7aa4fd8f..51ad90d9b2 100644
--- a/test/files/scalap/caseClass.check
+++ b/test/files/scalap/caseClass.check
@@ -1,20 +1,20 @@
-case class CaseClass[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends scala.AnyRef with scala.Product with scala.Serializable {
- val i : A = { /* compiled code */ }
- val s : scala.Predef.String = { /* compiled code */ }
- def foo : scala.Int = { /* compiled code */ }
- def copy[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
- override def productPrefix : java.lang.String = { /* compiled code */ }
- def productArity : scala.Int = { /* compiled code */ }
- def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
- override def productIterator : scala.collection.Iterator[scala.Any] = { /* compiled code */ }
- def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
- override def hashCode() : scala.Int = { /* compiled code */ }
- override def toString() : java.lang.String = { /* compiled code */ }
- override def equals(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
+case class CaseClass[A <: scala.Seq[scala.Int]](i: A, s: scala.Predef.String) extends scala.AnyRef with scala.Product with scala.Serializable {
+ val i: A = { /* compiled code */ }
+ val s: scala.Predef.String = { /* compiled code */ }
+ def foo: scala.Int = { /* compiled code */ }
+ def copy[A <: scala.Seq[scala.Int]](i: A, s: scala.Predef.String): CaseClass[A] = { /* compiled code */ }
+ override def productPrefix: java.lang.String = { /* compiled code */ }
+ def productArity: scala.Int = { /* compiled code */ }
+ def productElement(x$1: scala.Int): scala.Any = { /* compiled code */ }
+ override def productIterator: scala.collection.Iterator[scala.Any] = { /* compiled code */ }
+ def canEqual(x$1: scala.Any): scala.Boolean = { /* compiled code */ }
+ override def hashCode(): scala.Int = { /* compiled code */ }
+ override def toString(): java.lang.String = { /* compiled code */ }
+ override def equals(x$1: scala.Any): scala.Boolean = { /* compiled code */ }
}
object CaseClass extends scala.AnyRef with scala.Serializable {
def this() = { /* compiled code */ }
- final override def toString() : java.lang.String = { /* compiled code */ }
- def apply[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
- def unapply[A <: scala.Seq[scala.Int]](x$0 : CaseClass[A]) : scala.Option[scala.Tuple2[A, scala.Predef.String]] = { /* compiled code */ }
+ final override def toString(): java.lang.String = { /* compiled code */ }
+ def apply[A <: scala.Seq[scala.Int]](i: A, s: scala.Predef.String): CaseClass[A] = { /* compiled code */ }
+ def unapply[A <: scala.Seq[scala.Int]](x$0: CaseClass[A]): scala.Option[scala.Tuple2[A, scala.Predef.String]] = { /* compiled code */ }
}
diff --git a/test/files/scalap/caseObject.check b/test/files/scalap/caseObject.check
index 867a4b2162..a342e5ff1a 100644
--- a/test/files/scalap/caseObject.check
+++ b/test/files/scalap/caseObject.check
@@ -1,10 +1,10 @@
case object CaseObject extends scala.AnyRef with scala.Product with scala.Serializable {
- def bar : scala.Int = { /* compiled code */ }
- override def productPrefix : java.lang.String = { /* compiled code */ }
- def productArity : scala.Int = { /* compiled code */ }
- def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
- override def productIterator : scala.collection.Iterator[scala.Any] = { /* compiled code */ }
- def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
- override def hashCode() : scala.Int = { /* compiled code */ }
- override def toString() : java.lang.String = { /* compiled code */ }
+ def bar: scala.Int = { /* compiled code */ }
+ override def productPrefix: java.lang.String = { /* compiled code */ }
+ def productArity: scala.Int = { /* compiled code */ }
+ def productElement(x$1: scala.Int): scala.Any = { /* compiled code */ }
+ override def productIterator: scala.collection.Iterator[scala.Any] = { /* compiled code */ }
+ def canEqual(x$1: scala.Any): scala.Boolean = { /* compiled code */ }
+ override def hashCode(): scala.Int = { /* compiled code */ }
+ override def toString(): java.lang.String = { /* compiled code */ }
}
diff --git a/test/files/scalap/cbnParam.check b/test/files/scalap/cbnParam.check
index 52ecb6ae66..abe01297b9 100644
--- a/test/files/scalap/cbnParam.check
+++ b/test/files/scalap/cbnParam.check
@@ -1,3 +1,3 @@
class CbnParam extends scala.AnyRef {
- def this(s : => scala.Predef.String) = { /* compiled code */ }
+ def this(s: => scala.Predef.String) = { /* compiled code */ }
}
diff --git a/test/files/scalap/classPrivate.check b/test/files/scalap/classPrivate.check
index ab2d40cdaf..cf0ffe0cb2 100644
--- a/test/files/scalap/classPrivate.check
+++ b/test/files/scalap/classPrivate.check
@@ -1,10 +1,10 @@
class ClassPrivate extends scala.AnyRef {
def this() = { /* compiled code */ }
- def baz : scala.Int = { /* compiled code */ }
+ def baz: scala.Int = { /* compiled code */ }
class Outer extends scala.AnyRef {
def this() = { /* compiled code */ }
- private[ClassPrivate] def qux : scala.Int = { /* compiled code */ }
+ private[ClassPrivate] def qux: scala.Int = { /* compiled code */ }
}
- protected def quux : scala.Int = { /* compiled code */ }
- private[ClassPrivate] def bar : scala.Int = { /* compiled code */ }
+ protected def quux: scala.Int = { /* compiled code */ }
+ private[ClassPrivate] def bar: scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/classWithExistential.check b/test/files/scalap/classWithExistential.check
index caee3fd6de..7df6bfb765 100644
--- a/test/files/scalap/classWithExistential.check
+++ b/test/files/scalap/classWithExistential.check
@@ -1,4 +1,4 @@
class ClassWithExistential extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo[A, B] : scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
+ def foo[A, B]: scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
}
diff --git a/test/files/scalap/classWithSelfAnnotation.check b/test/files/scalap/classWithSelfAnnotation.check
index 82bbd9e8df..7a1c206e33 100644
--- a/test/files/scalap/classWithSelfAnnotation.check
+++ b/test/files/scalap/classWithSelfAnnotation.check
@@ -1,5 +1,5 @@
class ClassWithSelfAnnotation extends scala.AnyRef {
- this : ClassWithSelfAnnotation with java.lang.CharSequence =>
+ this: ClassWithSelfAnnotation with java.lang.CharSequence =>
def this() = { /* compiled code */ }
- def foo : scala.Int = { /* compiled code */ }
+ def foo: scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/covariantParam.check b/test/files/scalap/covariantParam.check
index f7a3c98966..85b1400fce 100644
--- a/test/files/scalap/covariantParam.check
+++ b/test/files/scalap/covariantParam.check
@@ -1,4 +1,4 @@
class CovariantParam[+A] extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo[A](a : A) : scala.Int = { /* compiled code */ }
+ def foo[A](a: A): scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/defaultParameter.check b/test/files/scalap/defaultParameter.check
index 0c775ea7b5..4e244d18f1 100644
--- a/test/files/scalap/defaultParameter.check
+++ b/test/files/scalap/defaultParameter.check
@@ -1,3 +1,3 @@
trait DefaultParameter extends scala.AnyRef {
- def foo(s : scala.Predef.String) : scala.Unit
+ def foo(s: scala.Predef.String): scala.Unit
}
diff --git a/test/files/scalap/implicitParam.check b/test/files/scalap/implicitParam.check
index a2cfd6092d..46e995652e 100644
--- a/test/files/scalap/implicitParam.check
+++ b/test/files/scalap/implicitParam.check
@@ -1,4 +1,4 @@
class ImplicitParam extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo(i : scala.Int)(implicit f : scala.Float, d : scala.Double) : scala.Int = { /* compiled code */ }
+ def foo(i: scala.Int)(implicit f: scala.Float, d: scala.Double): scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/packageObject.check b/test/files/scalap/packageObject.check
index 5732d92958..d1d0bbf122 100644
--- a/test/files/scalap/packageObject.check
+++ b/test/files/scalap/packageObject.check
@@ -1,5 +1,5 @@
package object PackageObject extends scala.AnyRef {
def this() = { /* compiled code */ }
type A = scala.Predef.String
- def foo(i : scala.Int) : scala.Int = { /* compiled code */ }
+ def foo(i: scala.Int): scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/paramClauses.check b/test/files/scalap/paramClauses.check
index 3a141e8faf..11c5e4bbd6 100644
--- a/test/files/scalap/paramClauses.check
+++ b/test/files/scalap/paramClauses.check
@@ -1,4 +1,4 @@
class ParamClauses extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo(i : scala.Int)(s : scala.Predef.String)(t : scala.Double) : scala.Int = { /* compiled code */ }
+ def foo(i: scala.Int)(s: scala.Predef.String)(t: scala.Double): scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/paramNames.check b/test/files/scalap/paramNames.check
index 85e37f858d..836b3d0e7b 100644
--- a/test/files/scalap/paramNames.check
+++ b/test/files/scalap/paramNames.check
@@ -1,4 +1,4 @@
class ParamNames extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo(s : => scala.Seq[scala.Int], s2 : => scala.Seq[scala.Any]) : scala.Unit = { /* compiled code */ }
+ def foo(s: => scala.Seq[scala.Int], s2: => scala.Seq[scala.Any]): scala.Unit = { /* compiled code */ }
}
diff --git a/test/files/scalap/sequenceParam.check b/test/files/scalap/sequenceParam.check
index 142d92fea3..f7bf83f6b2 100644
--- a/test/files/scalap/sequenceParam.check
+++ b/test/files/scalap/sequenceParam.check
@@ -1,3 +1,3 @@
class SequenceParam extends scala.AnyRef {
- def this(s : scala.Predef.String, i : scala.Int*) = { /* compiled code */ }
+ def this(s: scala.Predef.String, i: scala.Int*) = { /* compiled code */ }
}
diff --git a/test/files/scalap/simpleClass.check b/test/files/scalap/simpleClass.check
index 4fdf25d1cf..4675cbf665 100644
--- a/test/files/scalap/simpleClass.check
+++ b/test/files/scalap/simpleClass.check
@@ -1,4 +1,4 @@
class SimpleClass extends scala.AnyRef {
def this() = { /* compiled code */ }
- def foo : scala.Int = { /* compiled code */ }
+ def foo: scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/traitObject.check b/test/files/scalap/traitObject.check
index 104ba14f1a..f7ae4fd2f4 100644
--- a/test/files/scalap/traitObject.check
+++ b/test/files/scalap/traitObject.check
@@ -1,8 +1,8 @@
trait TraitObject extends scala.AnyRef {
- def $init$() : scala.Unit = { /* compiled code */ }
- def foo : scala.Int = { /* compiled code */ }
+ def $init$(): scala.Unit = { /* compiled code */ }
+ def foo: scala.Int = { /* compiled code */ }
}
object TraitObject extends scala.AnyRef {
def this() = { /* compiled code */ }
- def bar : scala.Int = { /* compiled code */ }
+ def bar: scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/typeAnnotations.check b/test/files/scalap/typeAnnotations.check
index 407b0235c6..cba69f8e41 100644
--- a/test/files/scalap/typeAnnotations.check
+++ b/test/files/scalap/typeAnnotations.check
@@ -1,8 +1,8 @@
abstract class TypeAnnotations[@scala.specialized R] extends scala.AnyRef {
def this() = { /* compiled code */ }
@scala.specialized
- val x : scala.Int = { /* compiled code */ }
+ val x: scala.Int = { /* compiled code */ }
@scala.specialized
type T
- def compose[@scala.specialized A](x : A, y : R) : A = { /* compiled code */ }
+ def compose[@scala.specialized A](x: A, y: R): A = { /* compiled code */ }
}
diff --git a/test/files/scalap/valAndVar.check b/test/files/scalap/valAndVar.check
index e940da9801..98eae5192d 100644
--- a/test/files/scalap/valAndVar.check
+++ b/test/files/scalap/valAndVar.check
@@ -1,5 +1,5 @@
class ValAndVar extends scala.AnyRef {
def this() = { /* compiled code */ }
- val foo : java.lang.String = { /* compiled code */ }
- var bar : scala.Int = { /* compiled code */ }
+ val foo: java.lang.String = { /* compiled code */ }
+ var bar: scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/wildcardType.check b/test/files/scalap/wildcardType.check
index e43261db32..6ea696647e 100644
--- a/test/files/scalap/wildcardType.check
+++ b/test/files/scalap/wildcardType.check
@@ -1,3 +1,3 @@
class WildcardType extends scala.AnyRef {
- def this(f : scala.Function1[scala.Int, _]) = { /* compiled code */ }
+ def this(f: scala.Function1[scala.Int, _]) = { /* compiled code */ }
}
diff --git a/test/junit/scala/reflect/io/ZipArchiveTest.scala b/test/junit/scala/reflect/io/ZipArchiveTest.scala
new file mode 100644
index 0000000000..1bcd06f5a7
--- /dev/null
+++ b/test/junit/scala/reflect/io/ZipArchiveTest.scala
@@ -0,0 +1,37 @@
+package scala.reflect.io
+
+import java.io.{IOException, File => JFile}
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class ZipArchiveTest {
+
+ @Test
+ def corruptZip {
+ val f = JFile.createTempFile("test", ".jar")
+ val fza = new FileZipArchive(f)
+ try {
+ fza.iterator
+ } catch {
+ case x: IOException =>
+ assertTrue(x.getMessage, x.getMessage.contains(f.getPath))
+ } finally {
+ f.delete()
+ }
+ }
+
+ @Test
+ def missingFile {
+ val f = new JFile("xxx.does.not.exist")
+ val fza = new FileZipArchive(f)
+ try {
+ fza.iterator
+ } catch {
+ case x: IOException =>
+ assertTrue(x.getMessage, x.getMessage.contains(f.getPath))
+ }
+ }
+}
diff --git a/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
new file mode 100644
index 0000000000..355771bf04
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/CannotHaveAttrsTest.scala
@@ -0,0 +1,67 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.tools.testing.AssertUtil.assertThrows
+import scala.reflect.internal.util.OffsetPosition
+
+@RunWith(classOf[JUnit4])
+class CannotHaveAttrsTest {
+ object symbolTable extends SymbolTableForUnitTesting {
+ object CHA extends CannotHaveAttrs {
+ def canEqual(that: Any): Boolean = ???
+ def productArity: Int = ???
+ def productElement(n: Int): Any = ???
+ }
+ val attrlessTrees = List(CHA, EmptyTree, emptyValDef, pendingSuperCall)
+ }
+ import symbolTable._
+
+ @Test
+ def canHaveAttrsIsFalse =
+ attrlessTrees.foreach { t =>
+ assertFalse(t.canHaveAttrs)
+ }
+
+ @Test
+ def defaultPosAssignment =
+ attrlessTrees.foreach { t =>
+ assertEquals(t.pos, NoPosition)
+ t.pos = NoPosition
+ assertEquals(t.pos, NoPosition)
+ t.setPos(NoPosition)
+ assertEquals(t.pos, NoPosition)
+ }
+
+ @Test
+ def defaultTpeAssignment =
+ attrlessTrees.foreach { t =>
+ assertEquals(t.tpe, NoType)
+ t.tpe = NoType
+ assertEquals(t.tpe, NoType)
+ t.setType(NoType)
+ assertEquals(t.tpe, NoType)
+ }
+
+ @Test
+ def nonDefaultPosAssignmentFails = {
+ val pos = new OffsetPosition(null, 0)
+ attrlessTrees.foreach { t =>
+ assertThrows[IllegalArgumentException] { t.pos = pos }
+ assertThrows[IllegalArgumentException] { t.setPos(pos) }
+ }
+ }
+
+ @Test
+ def nonDefaultTpeAssignmentFails = {
+ val tpe = typeOf[Int]
+ attrlessTrees.foreach { t =>
+ assertThrows[IllegalArgumentException] { t.tpe = tpe }
+ assertThrows[IllegalArgumentException] { t.setType(tpe) }
+ }
+ }
+}
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
new file mode 100644
index 0000000000..285e87e3b2
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -0,0 +1,89 @@
+package scala.tools.nsc
+package symtab
+
+import scala.reflect.internal.{Phase, NoPhase, SomePhase}
+import scala.tools.util.PathResolver
+import util.ClassPath
+import io.AbstractFile
+
+/**
+ * A complete SymbolTable implementation designed to be used in JUnit tests.
+ *
+ * It enables the `usejavacp` setting so that the classpath of the JUnit
+ * runner is used as the symbol table's classpath.
+ *
+ * This class implements enough logic to make it possible to
+ * initialize definitions and inspect symbols.
+ */
+class SymbolTableForUnitTesting extends SymbolTable {
+ // Members declared in scala.reflect.api.Trees
+ override def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
+ override def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+ trait TreeCopier extends InternalTreeCopierOps
+ // these should be mocks
+ class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier
+ class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier
+
+ override def isCompilerUniverse: Boolean = true
+ def classPath = new PathResolver(settings).result
+
+ object platform extends backend.Platform {
+ val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+ lazy val loaders: SymbolTableForUnitTesting.this.loaders.type = SymbolTableForUnitTesting.this.loaders
+ def platformPhases: List[SubComponent] = Nil
+ val classPath: ClassPath[AbstractFile] = new PathResolver(settings).result
+ def doLoad(cls: ClassPath[AbstractFile]#ClassRep): Boolean = true
+ def isMaybeBoxed(sym: Symbol): Boolean = ???
+ def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
+ def externalEquals: Symbol = ???
+ def updateClassPath(subst: Map[ClassPath[AbstractFile], ClassPath[AbstractFile]]): Unit = ???
+ }
+
+ object loaders extends symtab.SymbolLoaders {
+ val symbolTable: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+ lazy val platform: symbolTable.platform.type = symbolTable.platform
+ def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol =
+ sym.info.member(name)
+ protected override def compileLate(srcfile: AbstractFile): Unit =
+ sys.error(s"We do not expect compileLate to be called in SymbolTableTest. The srcfile passed in is $srcfile")
+ }
+
+ class GlobalMirror extends Roots(NoSymbol) {
+ val universe: SymbolTableForUnitTesting.this.type = SymbolTableForUnitTesting.this
+ def rootLoader: LazyType = new loaders.PackageLoader(classPath)
+ override def toString = "compiler mirror"
+ }
+
+ lazy val rootMirror: Mirror = {
+ val rm = new GlobalMirror
+ rm.init()
+ rm.asInstanceOf[Mirror]
+ }
+
+ def settings: Settings = {
+ val s = new Settings
+ // initialize classpath using java classpath
+ s.usejavacp.value = true
+ s
+ }
+
+ // Members declared in scala.reflect.internal.Required
+ def picklerPhase: scala.reflect.internal.Phase = SomePhase
+
+ // Members declared in scala.reflect.internal.SymbolTable
+ def currentRunId: Int = 1
+ def log(msg: => AnyRef): Unit = println(msg)
+ def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
+ val phases: Seq[Phase] = List(NoPhase, SomePhase)
+ val phaseWithId: Array[Phase] = {
+ val maxId = phases.map(_.id).max
+ val phasesArray = Array.ofDim[Phase](maxId+1)
+ phases foreach { phase =>
+ phasesArray(phase.id) = phase
+ }
+ phasesArray
+ }
+ lazy val treeInfo: scala.reflect.internal.TreeInfo{val global: SymbolTableForUnitTesting.this.type} = ???
+
+ phase = SomePhase
+}
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
new file mode 100644
index 0000000000..11e955a4bb
--- /dev/null
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableTest.scala
@@ -0,0 +1,47 @@
+package scala.tools.nsc
+package symtab
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class SymbolTableTest {
+ object symbolTable extends SymbolTableForUnitTesting
+
+ @Test
+ def initDefinitions = {
+ symbolTable.definitions.init()
+ }
+
+ @Test
+ def basicSubTypeCheck = {
+ symbolTable.definitions.init()
+ val listClassTpe = symbolTable.definitions.ListClass.tpe
+ val seqClassTpe = symbolTable.definitions.SeqClass.tpe
+ assertTrue("List should be subclass of Seq", listClassTpe <:< seqClassTpe)
+ }
+
+ /**
+   * Demonstrates how one can create symbols and types completely
+   * from scratch and perform a subtype check.
+ */
+ @Test
+ def customClassesSubTypeCheck: Unit = {
+ import symbolTable._
+ symbolTable.definitions.init()
+ val rootClass = symbolTable.rootMirror.RootClass
+ val fooSymbol = rootClass.newClassSymbol("Foo": TypeName, NoPosition, 0)
+ val fooType = new ClassInfoType(Nil, EmptyScope, fooSymbol)
+ fooSymbol.info = fooType
+ val barSymbol = rootClass.newClassSymbol("Bar": TypeName, NoPosition, 0)
+ val fooTypeRef = TypeRef(fooSymbol.owner.tpe, fooSymbol, Nil)
+ val barType = new ClassInfoType(List(fooTypeRef), EmptyScope, barSymbol)
+ barSymbol.info = barType
+ assertTrue("Bar should be subclass of Foo", barSymbol.tpe <:< fooSymbol.tpe)
+ assertFalse("Foo should be a superclass of Foo", fooSymbol.tpe <:< barSymbol.tpe)
+ }
+
+}
diff --git a/test/junit/scala/tools/testing/AssertThrowsTest.scala b/test/junit/scala/tools/testing/AssertThrowsTest.scala
new file mode 100644
index 0000000000..a70519e63c
--- /dev/null
+++ b/test/junit/scala/tools/testing/AssertThrowsTest.scala
@@ -0,0 +1,34 @@
+package scala.tools
+package testing
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import AssertUtil.assertThrows
+
+@RunWith(classOf[JUnit4])
+class AssertThrowsTest {
+ class Foo extends Exception
+ class SubFoo extends Foo
+ class Bar extends Exception
+
+ @Test
+ def catchFoo = assertThrows[Foo] { throw new Foo }
+
+ @Test
+ def catchSubclass = assertThrows[Foo] { throw new SubFoo }
+
+ @Test
+ def rethrowBar =
+ assertTrue("exception wasn't rethrown", {
+ try {
+ assertThrows[Foo] { throw new Bar }
+ false
+ } catch {
+ case bar: Bar => true
+ case e: Throwable => fail(s"expected Bar but got $e"); false
+ }
+ })
+
+} \ No newline at end of file
diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala
new file mode 100644
index 0000000000..9efac64a97
--- /dev/null
+++ b/test/junit/scala/tools/testing/AssertUtil.scala
@@ -0,0 +1,19 @@
+package scala.tools
+package testing
+
+/** This module contains additional higher-level assert statements
+ * that are ultimately based on junit.Assert primitives.
+ */
+object AssertUtil {
+  /** Checks whether an exception of type T (or a subclass) is thrown during the evaluation of f.
+   * Any other exception is re-thrown rather than swallowed; non-Exception throwables are never caught.
+ */
+ def assertThrows[T <: Exception](f: => Any)(implicit manifest: Manifest[T]): Unit =
+ try f
+ catch {
+ case e: Exception =>
+ val clazz = manifest.erasure.asInstanceOf[Class[T]]
+ if (!clazz.isAssignableFrom(e.getClass))
+ throw e
+ }
+} \ No newline at end of file
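
For reference, a minimal usage sketch of the helper above outside of a JUnit test (the surrounding object and main method are made up; it assumes AssertUtil from this patch is on the classpath): the by-name block is evaluated, and a thrown exception is accepted only if its class is assignable to the requested type, otherwise it propagates.

import scala.tools.testing.AssertUtil.assertThrows

object AssertThrowsUsage {
  def main(args: Array[String]): Unit = {
    assertThrows[ArithmeticException] { 1 / 0 }            // passes: expected exception type is thrown
    assertThrows[NumberFormatException] { "oops".toInt }   // passes: NumberFormatException is thrown

    // A mismatched expectation re-throws the original exception:
    try {
      assertThrows[NumberFormatException] { throw new IllegalStateException("boom") }
    } catch {
      case e: IllegalStateException => println(s"re-thrown as expected: ${e.getMessage}")
    }
  }
}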
diff --git a/test/junit/scala/util/matching/regextract-char.scala b/test/junit/scala/util/matching/regextract-char.scala
new file mode 100644
index 0000000000..50fdcd9d46
--- /dev/null
+++ b/test/junit/scala/util/matching/regextract-char.scala
@@ -0,0 +1,58 @@
+
+package scala.util.matching
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import PartialFunction._
+
+/** A Regex can match a Char.
+ * If the pattern includes a group,
+ * the extractor always yields a single char.
+ */
+@RunWith(classOf[JUnit4])
+class CharRegexTest {
+ implicit class Averrable(val b: Boolean) /*extends AnyVal*/ {
+ def yes = assert(b)
+ def no = assert(!b)
+ }
+ val c: Char = 'c' // "cat"(0)
+ val d: Char = 'D' // "Dog"(0)
+
+ @Test def comparesGroupCorrectly(): Unit = {
+ val r = """(\p{Lower})""".r
+ cond(c) { case r(x) => true } .yes
+ cond(c) { case r(_) => true } .yes
+ cond(c) { case r(_*) => true } .yes
+ cond(c) { case r() => true } .no
+
+ cond(d) { case r(x) => true } .no
+ cond(d) { case r(_) => true } .no
+ cond(d) { case r(_*) => true } .no
+ cond(d) { case r() => true } .no
+ }
+
+ @Test def comparesNoGroupCorrectly(): Unit = {
+ val rnc = """\p{Lower}""".r
+ cond(c) { case rnc(x) => true } .no
+ cond(c) { case rnc(_) => true } .no
+ cond(c) { case rnc(_*) => true } .yes
+ cond(c) { case rnc() => true } .yes
+
+ cond(d) { case rnc(x) => true } .no
+ cond(d) { case rnc(_) => true } .no
+ cond(d) { case rnc(_*) => true } .no
+ cond(d) { case rnc() => true } .no
+ }
+
+ @Test(expected = classOf[MatchError])
+ def failCorrectly(): Unit = {
+ val headAndTail = """(\p{Lower})([a-z]+)""".r
+ val n = "cat"(0) match {
+ case headAndTail(ht @ _*) => ht.size
+ }
+ assert(false, s"Match size $n")
+ }
+}
diff --git a/test/pending/junit/scala/util/t7265.scala b/test/junit/scala/util/t7265.scala
index 3b8fa80dbe..71f085d21d 100644
--- a/test/pending/junit/scala/util/t7265.scala
+++ b/test/junit/scala/util/t7265.scala
@@ -30,6 +30,7 @@ class SpecVersionTest {
assert(sut isJavaAtLeast "1.6")
assert(sut isJavaAtLeast "1.7")
assert(!(sut isJavaAtLeast "1.8"))
+ assert(!(sut isJavaAtLeast "1.71"))
}
@Test(expected = classOf[NumberFormatException])
def badVersion(): Unit = {
@@ -40,6 +41,18 @@ class SpecVersionTest {
sut isJavaAtLeast "1"
}
@Test(expected = classOf[NumberFormatException])
+ def noVersion(): Unit = {
+ sut isJavaAtLeast ""
+ }
+ @Test(expected = classOf[NumberFormatException])
+ def dotOnly(): Unit = {
+ sut isJavaAtLeast "."
+ }
+ @Test(expected = classOf[NumberFormatException])
+ def leadingDot(): Unit = {
+ sut isJavaAtLeast ".5"
+ }
+ @Test(expected = classOf[NumberFormatException])
def notASpec(): Unit = {
sut isJavaAtLeast "1.7.1"
}
diff --git a/test/partest b/test/partest
index 0259cdb791..f396459c6d 100755
--- a/test/partest
+++ b/test/partest
@@ -1,6 +1,5 @@
#!/usr/bin/env bash
#
-
##############################################################################
# Scala test runner 2.10.0
##############################################################################
@@ -11,6 +10,17 @@
# PARTICULAR PURPOSE.
##############################################################################
+findScalaHome () {
+ # see SI-2092 and SI-5792
+ local source="${BASH_SOURCE[0]}"
+ while [ -h "$source" ] ; do
+ local linked="$(readlink "$source")"
+ local dir="$( cd -P $(dirname "$source") && cd -P $(dirname "$linked") && pwd )"
+ source="$dir/$(basename "$linked")"
+ done
+  ( cd -P "$(dirname "$source")/.." > /dev/null && pwd )
+}
+
# Use tput to detect color-capable terminal.
term_colors=$(tput colors 2>/dev/null)
if [[ $? == 0 ]] && [[ $term_colors -gt 2 ]]; then
@@ -29,52 +39,47 @@ case "`uname`" in
esac
# Finding the root folder for this Scala distribution
-SOURCE=$0;
-SCRIPT=`basename "$SOURCE"`;
-while [ -h "$SOURCE" ]; do
- SCRIPT=`basename "$SOURCE"`;
- LOOKUP=`ls -ld "$SOURCE"`;
- TARGET=`expr "$LOOKUP" : '.*-> \(.*\)$'`;
- if expr "${TARGET:-.}/" : '/.*/$' > /dev/null; then
- SOURCE=${TARGET:-.};
- else
- SOURCE=`dirname "$SOURCE"`/${TARGET:-.};
- fi;
-done;
-
-# see #2092
-SCALA_HOME=`dirname "$SOURCE"`
-SCALA_HOME=`cd "$SCALA_HOME"; pwd -P`
-SCALA_HOME=`cd "$SCALA_HOME"/..; pwd`
+SCALA_HOME="$(findScalaHome)"
if $cygwin; then
SCALA_HOME=`cygpath --windows --short-name "$SCALA_HOME"`
SCALA_HOME=`cygpath --unix "$SCALA_HOME"`
fi
-# Constructing the extension classpath
-EXT_CLASSPATH=""
-if [ -z "$EXT_CLASSPATH" ] ; then
- if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
- for ext in "$SCALA_HOME"/lib/* ; do
- if [ -z "$EXT_CLASSPATH" ] ; then
- EXT_CLASSPATH="$ext"
- else
- EXT_CLASSPATH="$EXT_CLASSPATH:$ext"
- fi
- done
- elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
- for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
- ext="$SCALA_HOME/build/pack/lib/$lib.jar"
- if [ -z "$EXT_CLASSPATH" ] ; then
- EXT_CLASSPATH="$ext"
- else
- EXT_CLASSPATH="$EXT_CLASSPATH:$ext"
- fi
- done
+# Let ant construct the classpath used to run partest (downloading partest from maven if necessary)
+# PARTEST_CLASSPATH=""
+if [ -z "$PARTEST_CLASSPATH" ] ; then
+ if [ ! -f "$SCALA_HOME/build/pack/partest.properties" ] ; then
+ (cd "$SCALA_HOME" && ant -q test.suite.init) # builds pack, downloads partest and writes classpath to build/pack/partest.properties
fi
+
+ PARTEST_CLASSPATH=$( cat "$SCALA_HOME/build/pack/partest.properties" | grep partest.classpath | sed -e 's/\\:/:/g' | cut -f2- -d= )
+
+ # sanity check, disabled to save time
+ # $( javap -classpath $PARTEST_CLASSPATH scala.tools.partest.nest.NestRunner &> /dev/null ) || unset PARTEST_CLASSPATH
fi
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+# if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
+# for ext in "$SCALA_HOME"/lib/* ; do
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+# PARTEST_CLASSPATH="$ext"
+# else
+# PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
+# fi
+# done
+# elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
+# for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
+# ext="$SCALA_HOME/build/pack/lib/$lib.jar"
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+# PARTEST_CLASSPATH="$ext"
+# else
+# PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
+# fi
+# done
+# fi
+# fi
+
# Locate a javac command
# Try: JAVA_HOME, sibling to specific JAVACMD, or PATH
# Don't fail if there is no javac, since not all tests require it.
@@ -107,7 +112,7 @@ if $cygwin; then
JAVAC_CMD=`cygpath --$format "$JAVAC_CMD"`
fi
SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
- EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
+ PARTEST_CLASSPATH=`cygpath --path --$format "$PARTEST_CLASSPATH"`
fi
# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
@@ -125,14 +130,17 @@ if [ ! -z "${PARTEST_DEBUG}" ] ; then
partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}"
fi
+# Note that variables which may intentionally be empty must not
+# be quoted: otherwise an empty string will be passed as a command-line
+# argument, and java will treat it as the class to run.
"${JAVACMD:=java}" \
- $JAVA_OPTS -cp "$EXT_CLASSPATH" \
+ $JAVA_OPTS -cp "$PARTEST_CLASSPATH" \
${partestDebugStr} \
- "$color_opts" \
+ ${color_opts} \
-Dfile.encoding=UTF-8 \
-Dscala.home="${SCALA_HOME}" \
-Dpartest.javacmd="${JAVACMD}" \
-Dpartest.java_opts="${JAVA_OPTS}" \
-Dpartest.scalac_opts="${SCALAC_OPTS}" \
-Dpartest.javac_cmd="${JAVAC_CMD}" \
- scala.tools.partest.nest.NestRunner "$@"
+ scala.tools.partest.nest.ConsoleRunner "$@"
diff --git a/test/pending/neg/t6680a.scala b/test/pending/neg/t6680a.scala
new file mode 100644
index 0000000000..745334b1cd
--- /dev/null
+++ b/test/pending/neg/t6680a.scala
@@ -0,0 +1,13 @@
+case class Cell[A](var x: A)
+object Test {
+ def f1(x: Any) = x match { case y @ Cell(_) => y } // Inferred type is Cell[Any]
+ // def f2(x: Cell[_]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[_]
+ // def f3[A](x: Cell[A]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[A]
+
+ def main(args: Array[String]): Unit = {
+ // val x = new Cell(1)
+ // val y = f1(x)
+ // y.x = "abc"
+ // println(x.x + 1)
+ }
+} \ No newline at end of file
diff --git a/test/pending/neg/t6680b.check b/test/pending/neg/t6680b.check
new file mode 100644
index 0000000000..a16812d91d
--- /dev/null
+++ b/test/pending/neg/t6680b.check
@@ -0,0 +1,6 @@
+t6680b.scala:8: error: type mismatch;
+ found : String("not what you\'d expect")
+ required: ?Hidden1 where type ?Hidden1 (this is a GADT skolem)
+ case Concrete(f) => f("not what you'd expect")
+ ^
+one error found
diff --git a/test/pending/neg/t6680b.scala b/test/pending/neg/t6680b.scala
new file mode 100644
index 0000000000..e9f6468315
--- /dev/null
+++ b/test/pending/neg/t6680b.scala
@@ -0,0 +1,10 @@
+trait Super[+A]
+// `Hidden` must occur in both variance positions (covariant/contravariant) for the sneakiness to work
+// this way type inference will infer Any for `Hidden` and `A` in the pattern below
+case class Concrete[Hidden, +A](havoc: Hidden => Hidden) extends Super[A]
+
+object Test extends App {
+ (Concrete((x: Int) => x): Super[Any]) match {
+ case Concrete(f) => f("not what you'd expect")
+ }
+} \ No newline at end of file
diff --git a/test/pending/neg/t6680c.scala b/test/pending/neg/t6680c.scala
new file mode 100644
index 0000000000..f69663a71b
--- /dev/null
+++ b/test/pending/neg/t6680c.scala
@@ -0,0 +1,17 @@
+package s
+
+trait Stream[+A]
+case class Unfold[S,+A](s: S, f: S => Option[(A,S)]) extends Stream[A]
+
+object Stream {
+ def fromList[A](a: List[A]): Stream[A] =
+ Unfold(a, (l:List[A]) => l.headOption.map((_,l.tail)))
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val res = Stream.fromList(List(1,2,3,4))
+
+ res match { case Unfold(s, f) => f("a string!") }
+ }
+}
diff --git a/test/pending/pos/t7778/Foo_1.java b/test/pending/pos/t7778/Foo_1.java
new file mode 100644
index 0000000000..65431ffd46
--- /dev/null
+++ b/test/pending/pos/t7778/Foo_1.java
@@ -0,0 +1,6 @@
+import java.util.concurrent.Callable;
+
+public abstract class Foo_1<T> implements Callable<Foo_1<Object>.Inner> {
+ public abstract class Inner {
+ }
+}
diff --git a/test/pending/pos/t7778/Test_2.scala b/test/pending/pos/t7778/Test_2.scala
new file mode 100644
index 0000000000..306303a99e
--- /dev/null
+++ b/test/pending/pos/t7778/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ null: Foo_1[_]
+}
diff --git a/test/pending/run/t7733.check b/test/pending/run/t7733.check
new file mode 100644
index 0000000000..19765bd501
--- /dev/null
+++ b/test/pending/run/t7733.check
@@ -0,0 +1 @@
+null
diff --git a/test/pending/run/t7733/Separate_1.scala b/test/pending/run/t7733/Separate_1.scala
new file mode 100644
index 0000000000..a326ecd53e
--- /dev/null
+++ b/test/pending/run/t7733/Separate_1.scala
@@ -0,0 +1,5 @@
+package test
+
+class Separate {
+ for (i <- 1 to 10) println(i)
+} \ No newline at end of file
diff --git a/test/pending/run/t7733/Test_2.scala b/test/pending/run/t7733/Test_2.scala
new file mode 100644
index 0000000000..28358574ec
--- /dev/null
+++ b/test/pending/run/t7733/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val code = tb.parse("{ val x: test.Separate$$anonfun$1 = null; x }")
+ println(tb.eval(code))
+} \ No newline at end of file
diff --git a/tools/partest-ack b/tools/partest-ack
index f7d5063292..551f92684f 100755
--- a/tools/partest-ack
+++ b/tools/partest-ack
@@ -2,18 +2,29 @@
#
# wrapper around partest for fine-grained test selection via ack
-declare quiet failed update partest_debug file_regex partest_args ack_args cotouched
+declare quiet failed update partest_debug
+declare cotouched since sortCommand
+declare -a ack_args partest_args scalac_args
+
+base="$(cd "$(dirname "$0")"/.. && pwd)"
+cd "$base" || { echo "Could not change to base directory $base" && exit 1; }
+filesdir="test/files"
+sortCommand="sort -u"
+
+# We have to enumerate the good test dirs, since partest chokes and fails
+# on the continuations, bench, etc. tests.
+pathRegex="$filesdir/(pos|neg|jvm|run|scalap|presentation)/[^/.]+([.]scala)?\$"
[[ $# -gt 0 ]] || {
cat <<EOM
Usage: $0 <regex> [-dfquvp] [ack options]
- -d pass --debug to partest
-f pass --failed to partest
- -q DON'T pass --show-log and --show-diff to partest
+ -q pass --terse to partest
-u pass --update-check to partest
- -v pass --verbose to partest
-p <path> select tests appearing in commits where <path> was also modified
+ -s <time> select tests touched since <time> (git format, e.g. 1.month.ago)
+ -r run tests in random order
Given a regular expression (and optionally, any arguments accepted by ack)
runs all the tests for which any associated file matches the regex. Associated
@@ -24,118 +35,107 @@ You must have ack installed: http://betterthangrep.com/ack-standalone
Examples:
- > tools/partest-ack monad
- % tests-with-matching-paths ... 2
- % tests-with-matching-code ... 2
- # 4 tests to run.
+ > tools/partest-ack 'case (class|object) Baz'
+ % testsWithMatchingPaths ... 0
+ % testsWithMatchingCode ... 3
+ # 3 tests to run.
+
+ > tools/partest-ack -s 12.hours.ago
+ % testsTouchedSinceGitTime ... 33
+ # 33 tests to run.
> tools/partest-ack -p src/library/scala/Enumeration.scala
- % tests-modified-in-same-commit ... 84
- # 84 tests to run.
+ % testsModifiedInSameCommit ... 80
+ # 80 tests to run.
> tools/partest-ack -f
% tests-which-failed ... 42
# 42 tests to run.
+
+ > tools/partest-ack "kinds of the type arguments"
+ % testsWithMatchingPaths ... 0
+ % testsWithMatchingCode ... 6
+ # 6 tests to run.
EOM
exit 0
}
-# The leading : in :achs suppresses some errors. Each letter is a valid
-# option. If an option takes an argument, a colon follows it, e.g.
-# it would be :ach:s if -h took an argument.
-while getopts :fuvdp: opt; do
+while getopts :fuvdrp:s: opt; do
case $opt in
- d) partest_debug=true && partest_args="$partest_args --debug" ;;
- f) failed=true && partest_args="$partest_args --failed" ;;
+ f) failed=true && partest_args+=" --failed" ;;
p) cotouched="$cotouched $OPTARG" ;;
- q) quiet=true ;;
- u) partest_args="$partest_args --update-check" ;;
- v) partest_args="$partest_args --verbose" ;;
- :) echo "Option -$OPTARG requires an argument." >&2 ;; # this case is called for a missing option argument
- *) echo "Unrecognized argument $OPTARG" ;; # this is the catch-all implying an unknown option
+ r) sortCommand="randomSort" ;;
+ s) since="$OPTARG" ;;
+ q) partest_args+=" --terse" ;;
+ u) partest_args+=" --update-check" ;;
+ v) partest_args+=" --verbose" ;;
+ :) echo "Option -$OPTARG requires an argument." >&2 ;;
+ *) ack_args+="-$OPTARG" ;; # don't drop unknown args, assume they're for ack
esac
done
shift $((OPTIND-1))
-file_regex="$1"
-ack_args="$*"
+ack_args=( "${ack_args[@]}" "$@" )
-tests () {
- find test/files -mindepth 2 -maxdepth 2 -name '*.scala' -o -type d
-}
+# Echo the argument only if it matches our idea of a test and exists.
+isPath () { [[ "$1" =~ $pathRegex ]] && [[ -e "$1" ]]; }
-pathsToTests () {
- for path in $(perl -pe 's#^(test/files/[^/]+/[^/.]+).*$#$1#'); do
- if [[ -d "$path" ]]; then
- echo "$path"
- elif [[ -f "$path.scala" ]]; then
- echo "$path.scala"
- fi
- done | sort -u
-}
-
-tests-with-matching-paths() {
- local re="$1"
- for p in $(find test/files -type f); do
- [[ $p =~ $re ]] && echo "$p"
+# Filter stdin down to actual test paths.
+asTestPaths () {
+ while read p; do
+ p1="${p%.*}"
+ isPath "$p1" && echo "$p1"
+ isPath "$p1.scala" && echo "$p1.scala"
done
}
-tests-which-failed () {
- for f in $(find test/files -name '*.log'); do
- echo ${f%-*}
- done
-}
-
-tests-modified-in-same-commit() {
- [[ $# -gt 0 ]] && \
- for rev in $(git rev-list HEAD -- "$@"); do
- git --no-pager show --pretty="format:" --name-only "$rev" -- test/files
- done
-}
-
-tests-with-matching-code() {
- ack --noenv --text --files-with-matches "$@" -- test/files
-}
+# These methods all just create paths which may or may not be tests;
+# all are filtered through "asTestPaths", which limits the output to actual tests.
+regexPathTests () { find "$filesdir" | ack --noenv "$@"; }
+failedTests () { for p in $(find "$filesdir" -name '*.log'); do p1=${p%.log} && p2=${p1%-*} && echo "$p2"; done; }
+sinceTests() { git log --since="$@" --name-only --pretty="format:" -- "$filesdir"; }
+regexCodeTests () { ack --noenv --text --files-with-matches "$@" -- "$filesdir"; }
+sameCommitTests() { for rev in $(git rev-list HEAD -- "$@"); do git --no-pager show --pretty="format:" --name-only "$rev" -- "$filesdir"; done; }
countStdout () {
local -i count=0
while read line; do
- printf "$line\n"
- count+=1
+ printf "$line\n" && count+=1
done
printf >&2 " $count\n"
}
+randomSort () {
+ sort -u | while read line; do echo "$RANDOM $line"; done | sort | sed -E 's/^[0-9]+ //'
+}
+
testRun () {
- printf >&2 "%% %-30s ... " "$1"
- "$@" | pathsToTests | countStdout
+ local description="$1" && shift
+ printf >&2 "%% tests %-25s ... " "$description"
+ "$@" | asTestPaths | sort -u | countStdout | egrep -v '^[ ]*$'
}
allMatches() {
- [[ -n $file_regex ]] && testRun tests-with-matching-paths $file_regex
- [[ -n $cotouched ]] && testRun tests-modified-in-same-commit $cotouched
- [[ -n $ack_args ]] && testRun tests-with-matching-code $ack_args
- [[ -n $failed ]] && testRun tests-which-failed
+ [[ -n $ack_args ]] && testRun "with matching paths" regexPathTests "${ack_args[@]}"
+ [[ -n $ack_args ]] && testRun "with matching code" regexCodeTests "${ack_args[@]}"
+ [[ -n $cotouched ]] && testRun "modified in same commit" sameCommitTests $cotouched
+ [[ -n $since ]] && testRun "modified since time" sinceTests "$since"
+ [[ -n $failed ]] && testRun "failed on last run" failedTests
}
-paths=$(allMatches | sort -u)
-[[ -n $quiet ]] || partest_args="--show-diff --show-log $partest_args"
+paths=$(allMatches | $sortCommand)
-if [[ -z $paths ]] && [[ -z $failed ]]; then
- echo >&2 "No matching tests."
-else
- count=$(echo $(echo "$paths" | wc -w))
+[[ -z $paths ]] && [[ -z $failed ]] && echo >&2 "No matching tests." && exit 0;
- # Output a command line which will re-run these same tests.
- echo "# $count tests to run."
- printf "%-52s %s\n" "test/partest $partest_args" "\\"
- for path in $paths; do
- printf " %-50s %s\n" "$path" "\\"
- done
- echo ' ""'
+count=$(echo $(echo "$paths" | wc -w))
+[[ "$count" -eq 0 ]] && echo >&2 "No tests to run." && exit 0;
+
+# Output a command line which will re-run these same tests.
+echo "# $count tests to run."
+printf "%-52s %s\n" "$base/test/partest ${partest_args[@]}" "\\"
+for path in $paths; do printf " %-50s %s\n" "$path" "\\"; done
+echo ' ""'
- test/partest $partest_args $paths
-fi
+test/partest "${partest_args[@]}" $paths
diff --git a/versions.properties b/versions.properties
new file mode 100644
index 0000000000..044c57bb0f
--- /dev/null
+++ b/versions.properties
@@ -0,0 +1,7 @@
+starr.version=2.11.0-M4
+
+# the below is used for depending on dependencies like partest
+scala.binary.version=2.11.0-M4
+partest.version.number=1.0-RC4
+scala-xml.version.number=1.0-RC2
+scala-parser-combinators.version.number=1.0-RC1
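
Since versions.properties is a plain Java properties file, any tool in the build can read it with java.util.Properties. A small sketch (the key names come from the file above; the object, main method, and relative path are assumptions for the example):

import java.io.FileInputStream
import java.util.Properties

object VersionsProperties {
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    val in = new FileInputStream("versions.properties")
    try props.load(in) finally in.close()

    // '#' lines are comments; everything else is a simple key=value pair.
    println(props.getProperty("starr.version"))            // 2.11.0-M4
    println(props.getProperty("partest.version.number"))   // 1.0-RC4
  }
}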