Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/Scheduler.scala | 1
-rw-r--r--  src/actors/scala/actors/remote/TcpService.scala | 2
-rw-r--r--  src/build/dbuild-meta-json-gen.scala | 11
-rw-r--r--  src/build/maven/maven-deploy.xml | 1
-rw-r--r--  src/build/maven/scala-partest-pom.xml | 62
-rw-r--r--  src/build/pack.xml | 5
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Validators.scala | 8
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Context.scala | 1
-rw-r--r--  src/compiler/scala/reflect/macros/contexts/Synthetics.scala | 66
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala | 3
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala | 2
-rw-r--r--  src/compiler/scala/reflect/macros/util/Helpers.scala | 7
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 20
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/Settings.scala | 2
-rw-r--r--  src/compiler/scala/tools/cmd/CommandLine.scala | 6
-rw-r--r--  src/compiler/scala/tools/cmd/CommandLineParser.scala | 10
-rw-r--r--  src/compiler/scala/tools/cmd/Opt.scala | 8
-rw-r--r--  src/compiler/scala/tools/cmd/Reference.scala | 22
-rw-r--r--  src/compiler/scala/tools/cmd/gen/AnyVals.scala | 107
-rw-r--r--  src/compiler/scala/tools/cmd/gen/Codegen.scala | 6
-rw-r--r--  src/compiler/scala/tools/cmd/package.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 185
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/io/package.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 49
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala | 82
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 674
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala | 86
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 130
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala | 475
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 55
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 518
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 74
-rw-r--r--  src/compiler/scala/tools/nsc/util/package.scala | 9
-rw-r--r--  src/compiler/scala/tools/reflect/MacroImplementations.scala | 3
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 10
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala | 94
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala | 21
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala | 2
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 4
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 1
-rw-r--r--  src/eclipse/partest/.classpath | 6
-rw-r--r--  src/eclipse/partest/.project | 10
-rw-r--r--  src/intellij/partest.iml.SAMPLE | 10
-rw-r--r--  src/library/scala/Boolean.scala | 42
-rw-r--r--  src/library/scala/Byte.scala | 336
-rw-r--r--  src/library/scala/Char.scala | 336
-rw-r--r--  src/library/scala/Double.scala | 325
-rw-r--r--  src/library/scala/Float.scala | 330
-rw-r--r--  src/library/scala/Int.scala | 336
-rw-r--r--  src/library/scala/Long.scala | 336
-rw-r--r--  src/library/scala/Short.scala | 336
-rw-r--r--  src/library/scala/StringContext.scala | 3
-rw-r--r--  src/library/scala/Unit.scala | 9
-rw-r--r--  src/library/scala/annotation/compileTimeOnly.scala | 22
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/LazyCombiner.scala | 1
-rw-r--r--  src/library/scala/concurrent/Lock.scala | 1
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 40
-rw-r--r--  src/library/scala/util/Properties.scala | 7
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 38
-rw-r--r--  src/partest-extras/scala/tools/partest/ASMConverters.scala (renamed from src/partest/scala/tools/partest/ASMConverters.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/AsmNode.scala (renamed from src/partest/scala/tools/partest/AsmNode.scala) | 7
-rw-r--r--  src/partest-extras/scala/tools/partest/BytecodeTest.scala (renamed from src/partest/scala/tools/partest/BytecodeTest.scala) | 19
-rw-r--r--  src/partest-extras/scala/tools/partest/JavapTest.scala (renamed from src/partest/scala/tools/partest/JavapTest.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/ReplTest.scala (renamed from src/partest/scala/tools/partest/ReplTest.scala) | 5
-rw-r--r--  src/partest-extras/scala/tools/partest/SigTest.scala (renamed from src/partest/scala/tools/partest/SigTest.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/Util.scala | 52
-rw-r--r--  src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala (renamed from src/partest/scala/tools/partest/instrumented/Instrumentation.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/instrumented/Profiler.java | 82
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java | 49
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF (renamed from src/partest/scala/tools/partest/javaagent/MANIFEST.MF) | 0
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java (renamed from src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java) | 4
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java | 25
-rw-r--r--  src/partest/README | 31
-rw-r--r--  src/partest/scala/tools/partest/CompilerTest.scala | 60
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 128
-rw-r--r--  src/partest/scala/tools/partest/IcodeTest.scala | 43
-rw-r--r--  src/partest/scala/tools/partest/MemoryTest.scala | 38
-rw-r--r--  src/partest/scala/tools/partest/PartestDefaults.scala | 28
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 207
-rw-r--r--  src/partest/scala/tools/partest/SecurityTest.scala | 19
-rw-r--r--  src/partest/scala/tools/partest/StoreReporterDirectTest.scala | 15
-rw-r--r--  src/partest/scala/tools/partest/TestKinds.scala | 66
-rw-r--r--  src/partest/scala/tools/partest/TestState.scala | 65
-rw-r--r--  src/partest/scala/tools/partest/TestUtil.scala | 38
-rw-r--r--  src/partest/scala/tools/partest/antlib.xml | 4
-rw-r--r--  src/partest/scala/tools/partest/instrumented/Profiler.java | 82
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ASMTransformer.java | 49
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ProfilingAgent.java | 25
-rw-r--r--  src/partest/scala/tools/partest/nest/AntRunner.scala | 30
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleFileManager.scala | 189
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 224
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala | 54
-rw-r--r--  src/partest/scala/tools/partest/nest/DirectCompiler.scala | 105
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 165
-rw-r--r--  src/partest/scala/tools/partest/nest/NestRunner.scala | 15
-rw-r--r--  src/partest/scala/tools/partest/nest/NestUI.scala | 182
-rw-r--r--  src/partest/scala/tools/partest/nest/PathSettings.scala | 88
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 99
-rw-r--r--  src/partest/scala/tools/partest/nest/Runner.scala | 894
-rw-r--r--  src/partest/scala/tools/partest/nest/SBTRunner.scala | 85
-rw-r--r--  src/partest/scala/tools/partest/nest/StreamCapture.scala | 53
-rw-r--r--  src/partest/scala/tools/partest/package.scala | 241
-rw-r--r--  src/partest/scala/tools/partest/utils/Properties.scala | 18
-rw-r--r--  src/reflect/scala/reflect/api/Exprs.scala | 5
-rw-r--r--  src/reflect/scala/reflect/api/Mirrors.scala | 12
-rw-r--r--  src/reflect/scala/reflect/api/Names.scala | 4
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 112
-rw-r--r--  src/reflect/scala/reflect/internal/Kinds.scala | 22
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 14
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 41
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/TypeDebugging.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 24
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala | 32
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/package.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/transform/Erasure.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/util/Collections.scala | 36
-rw-r--r--  src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala | 4
-rw-r--r--  src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/util/TriState.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/util/WeakHashSet.scala | 31
-rw-r--r--  src/reflect/scala/reflect/macros/Context.scala | 3
-rw-r--r--  src/reflect/scala/reflect/macros/Synthetics.scala | 107
-rw-r--r--  src/reflect/scala/reflect/runtime/ReflectSetup.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ExprTyper.scala | 26
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 147
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala | 4
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala | 8
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Naming.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Power.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Results.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/package.scala | 4
-rw-r--r--  src/scalacheck/org/scalacheck/Arbitrary.scala | 447
-rw-r--r--  src/scalacheck/org/scalacheck/Arg.scala | 20
-rw-r--r--  src/scalacheck/org/scalacheck/Commands.scala | 148
-rw-r--r--  src/scalacheck/org/scalacheck/ConsoleReporter.scala | 52
-rw-r--r--  src/scalacheck/org/scalacheck/Gen.scala | 542
-rw-r--r--  src/scalacheck/org/scalacheck/Pretty.scala | 127
-rw-r--r--  src/scalacheck/org/scalacheck/Prop.scala | 818
-rw-r--r--  src/scalacheck/org/scalacheck/Properties.scala | 96
-rw-r--r--  src/scalacheck/org/scalacheck/ScalaCheckFramework.scala | 92
-rw-r--r--  src/scalacheck/org/scalacheck/Shrink.scala | 208
-rw-r--r--  src/scalacheck/org/scalacheck/Test.scala | 392
-rw-r--r--  src/scalacheck/org/scalacheck/util/Buildable.scala | 63
-rw-r--r--  src/scalacheck/org/scalacheck/util/CmdLineParser.scala | 101
-rw-r--r--  src/scalacheck/org/scalacheck/util/FreqMap.scala | 65
-rw-r--r--  src/scalacheck/org/scalacheck/util/StdRand.scala | 12
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala | 6
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala | 1
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 9
-rw-r--r--  src/scalap/scala/tools/scalap/Arguments.scala | 1
-rw-r--r--  src/scalap/scala/tools/scalap/ByteArrayReader.scala | 13
-rw-r--r--  src/scalap/scala/tools/scalap/Classfile.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/Classfiles.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/CodeWriter.scala | 8
-rw-r--r--  src/scalap/scala/tools/scalap/Decode.scala | 30
-rw-r--r--  src/scalap/scala/tools/scalap/JavaWriter.scala | 4
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 18
-rw-r--r--  src/scalap/scala/tools/scalap/MetaParser.scala | 7
-rw-r--r--  src/scalap/scala/tools/scalap/Properties.scala | 3
-rw-r--r--  src/scalap/scala/tools/scalap/rules/Memoisable.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala) | 23
-rw-r--r--  src/scalap/scala/tools/scalap/rules/Result.scala | 69
-rw-r--r--  src/scalap/scala/tools/scalap/rules/Rule.scala | 172
-rw-r--r--  src/scalap/scala/tools/scalap/rules/Rules.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/Rules.scala) | 62
-rw-r--r--  src/scalap/scala/tools/scalap/rules/SeqRule.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala) | 57
-rw-r--r--  src/scalap/scala/tools/scalap/rules/package.scala | 6
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/ClassFileParser.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala) | 93
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/Flags.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala) | 7
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/ScalaSig.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala) | 83
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/ScalaSigPrinter.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala) | 51
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/SourceFileAttributeParser.scala (renamed from src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala) | 12
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/Symbol.scala | 70
-rw-r--r--  src/scalap/scala/tools/scalap/scalasig/Type.scala | 22
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Result.scala | 72
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Rule.scala | 177
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala | 73
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala | 25
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala | 19
-rwxr-xr-x  src/xml/scala/xml/Elem.scala | 1
216 files changed, 3554 insertions, 10908 deletions
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index 5b5b4a946d..67c8e5cd10 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -9,7 +9,6 @@
package scala.actors
-import java.util.concurrent._
import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolScheduler, ThreadPoolConfig}
/**
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index ad78ff784c..75e36b2738 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -67,7 +67,7 @@ object TcpService {
timeout =>
try {
val to = timeout.toInt
- Debug.info("Using socket timeout $to")
+ Debug.info(s"Using socket timeout $to")
Some(to)
} catch {
case e: NumberFormatException =>
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
index 42214dd191..73eee8ac3a 100644
--- a/src/build/dbuild-meta-json-gen.scala
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -1,6 +1,6 @@
// use this script to generate dbuild-meta.json
// make sure the version is specified correctly,
-// update the dependency structura and
+// update the dependency structure and
// check out distributed-build and run `sbt console`:
// TODO: also generate build.xml and eclipse config from a similar data-structure
@@ -40,15 +40,6 @@ val meta =
Seq(ProjectRef("scala-parser-combinators", "org.scala-lang")),
Seq(ProjectRef("scala-library", "org.scala-lang"))),
- Project("scalacheck", "org.scala-lang",
- Seq(ProjectRef("scalacheck", "org.scala-lang")),
- Seq(ProjectRef("scala-library", "org.scala-lang"), ProjectRef("scala-actors", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
-
- Project("scala-partest", "org.scala-lang",
- Seq(ProjectRef("scala-partest", "org.scala-lang")),
- Seq(ProjectRef("scala-compiler", "org.scala-lang"), // TODO: refine to scala-repl
- ProjectRef("scalap", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scalacheck", "org.scala-lang"))),
-
Project("scaladoc", "org.scala-lang",
Seq(ProjectRef("scaladoc", "org.scala-lang")),
Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 84a12066f5..f52a7888ce 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -110,7 +110,6 @@
<deploy-one name="scala-library" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-xml" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-parser-combinators" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-partest" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-reflect" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-swing" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scalap" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
deleted file mode 100644
index ac05f242d5..0000000000
--- a/src/build/maven/scala-partest-pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-partest</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Parallel Test Framework</name>
- <description>testing framework for the Scala compiler.</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 6b6579ce12..fa030300ac 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -158,7 +158,6 @@ MAIN DISTRIBUTION PACKAGING
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
<mvn-copy-lib mvn.artifact.name="scala-actors"/>
- <mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -210,10 +209,6 @@ MAIN DISTRIBUTION PACKAGING
basedir="${build-docs.dir}/scalap">
<include name="**/*"/>
</jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
- basedir="${build-docs.dir}/partest">
- <include name="**/*"/>
- </jar>
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
index 32c6da8007..2e82e34bd9 100644
--- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -2,7 +2,6 @@ package scala.reflect.macros
package compiler
import scala.tools.nsc.Global
-import scala.reflect.macros.contexts.Context
abstract class DefaultMacroCompiler extends Resolvers
with Validators
@@ -11,7 +10,6 @@ abstract class DefaultMacroCompiler extends Resolvers
import global._
val typer: global.analyzer.Typer
- private implicit val context0 = typer.context
val context = typer.context
val macroDdef: DefDef
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
index 60cfc94a23..af17fd87c0 100644
--- a/src/compiler/scala/reflect/macros/compiler/Validators.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -11,8 +11,6 @@ trait Validators {
import global._
import analyzer._
import definitions._
- import treeInfo._
- import typer.infer._
def validateMacroImplRef() = {
sanityCheck()
@@ -83,7 +81,11 @@ trait Validators {
// Technically this can be just an alias to MethodType, but promoting it to a first-class entity
// provides better encapsulation and convenient syntax for pattern matching.
- private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type)
+ private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type) {
+ private def tparams_s = if (tparams.isEmpty) "" else tparams.map(_.defString).mkString("[", ", ", "]")
+ private def paramss_s = paramss map (ps => ps.map(s => s"${s.name}: ${s.tpe_*}").mkString("(", ", ", ")")) mkString ""
+ override def toString = "MacroImplSig(" + tparams_s + paramss_s + ret + ")"
+ }
/** An actual macro implementation signature extracted from a macro implementation method.
*
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala
index bd1d7d5248..1355a839d9 100644
--- a/src/compiler/scala/reflect/macros/contexts/Context.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Context.scala
@@ -14,7 +14,6 @@ abstract class Context extends scala.reflect.macros.Context
with Parsers
with Evals
with ExprUtils
- with Synthetics
with Traces {
val universe: Global
diff --git a/src/compiler/scala/reflect/macros/contexts/Synthetics.scala b/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
deleted file mode 100644
index ada16a8113..0000000000
--- a/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.reflect.macros
-package contexts
-
-import scala.reflect.internal.Flags._
-import scala.reflect.internal.util.BatchSourceFile
-import scala.reflect.io.VirtualFile
-
-trait Synthetics {
- self: Context =>
-
- import global._
- import mirror.wrapMissing
-
- // getClassIfDefined and getModuleIfDefined cannot be used here
- // because they don't work for stuff declared in the empty package
- // (as specified in SLS, code inside non-empty packages cannot see
- // declarations from the empty package, so compiler internals
- // default to ignoring contents of the empty package)
- // to the contrast, staticModule and staticClass are designed
- // to be a part of the reflection API and, therefore, they
- // correctly resolve all names
- private def topLevelSymbol(name: Name): Symbol = wrapMissing {
- if (name.isTermName) mirror.staticModule(name.toString)
- else mirror.staticClass(name.toString)
- }
-
- def topLevelDef(name: Name): Tree =
- enclosingRun.units.toList.map(_.body).flatMap {
- // it's okay to check `stat.symbol` here, because currently macros expand strictly after namer
- // which means that by the earliest time one can call this method all top-level definitions will have already been entered
- case PackageDef(_, stats) => stats filter (stat => stat.symbol != NoSymbol && stat.symbol == topLevelSymbol(name))
- case _ => Nil // should never happen, but better be safe than sorry
- }.headOption getOrElse EmptyTree
-
- def topLevelRef(name: Name): Tree = {
- if (topLevelDef(name).nonEmpty) gen.mkUnattributedRef(name)
- else EmptyTree
- }
-
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: universe.ImplDef): RefTree =
- introduceTopLevel(packagePrototype, List(definition)).head
-
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: universe.ImplDef*): List[RefTree] =
- introduceTopLevel(packagePrototype, definitions.toList)
-
- private def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: List[universe.ImplDef]): List[RefTree] = {
- val code @ PackageDef(pid, _) = implicitly[PackageSpec[T]].mkPackageDef(packagePrototype, definitions)
- universe.currentRun.compileLate(code)
- definitions map (definition => Select(pid, definition.name))
- }
-
- protected def mkPackageDef(name: String, stats: List[Tree]) = gen.mkPackageDef(name, stats)
-
- protected def mkPackageDef(name: TermName, stats: List[Tree]) = gen.mkPackageDef(name.toString, stats)
-
- protected def mkPackageDef(tree: RefTree, stats: List[Tree]) = PackageDef(tree, stats)
-
- protected def mkPackageDef(sym: Symbol, stats: List[Tree]) = {
- assert(sym hasFlag PACKAGE, s"expected a package or package class symbol, found: $sym")
- gen.mkPackageDef(sym.fullName.toString, stats)
- }
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
index 3ef11fad9d..450cb4d9ea 100644
--- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
@@ -10,8 +10,6 @@ trait JavaReflectionRuntimes {
trait JavaReflectionResolvers {
self: MacroRuntimeResolver =>
- import global._
-
def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
val implClass = Class.forName(className, true, classLoader)
val implMeths = implClass.getDeclaredMethods.find(_.getName == methName)
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
index 0f89163803..ffdbe11151 100644
--- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -1,11 +1,8 @@
package scala.reflect.macros
package runtime
-import scala.collection.mutable.{Map => MutableMap}
import scala.reflect.internal.Flags._
import scala.reflect.runtime.ReflectionUtils
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.util.AbstractFileClassLoader
trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes {
self: scala.tools.nsc.typechecker.Analyzer =>
diff --git a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
index 1999e525ff..50f64310f8 100644
--- a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
@@ -9,8 +9,6 @@ trait ScalaReflectionRuntimes {
trait ScalaReflectionResolvers {
self: MacroRuntimeResolver =>
- import global._
-
def resolveScalaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
val macroMirror: ru.JavaMirror = ru.runtimeMirror(classLoader)
val implContainerSym = macroMirror.classSymbol(Class.forName(className, true, classLoader))
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index 9b7680717e..f12582a3a1 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -23,7 +23,7 @@ trait Helpers {
* or to streamline creation of the list of macro arguments.
*/
def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
- val treeInfo.MacroImplReference(isBundle, owner, macroImpl, _) = macroImplRef
+ val treeInfo.MacroImplReference(isBundle, _, macroImpl, _) = macroImplRef
val paramss = macroImpl.paramss
if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
val rc =
@@ -44,11 +44,6 @@ trait Helpers {
if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
}
- private def dealiasAndRewrap(tp: Type)(fn: Type => Type): Type = {
- if (isRepeatedParamType(tp)) scalaRepeatedType(fn(tp.typeArgs.head.dealias))
- else fn(tp.dealias)
- }
-
/** Increases metalevel of the type, i.e. transforms:
* * T to c.Expr[T]
*
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 7578def687..7610df67dc 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -8,6 +8,7 @@ trait Reshape {
import global._
import definitions._
+ import treeInfo.Unapplied
/**
* Rolls back certain changes that were introduced during typechecking of the reifee.
@@ -65,22 +66,9 @@ trait Reshape {
case block @ Block(stats, expr) =>
val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
Block(stats1, expr).copyAttrs(block)
- case unapply @ UnApply(fun, args) =>
- def extractExtractor(tree: Tree): Tree = {
- val Apply(fun, args) = tree
- args match {
- case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
- val Select(extractor, flavor) = fun
- assert(flavor == nme.unapply || flavor == nme.unapplySeq)
- extractor
- case _ =>
- extractExtractor(fun)
- }
- }
-
+ case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
if (reifyDebug) println("unapplying unapply: " + tree)
- val fun1 = extractExtractor(fun)
- Apply(fun1, args).copyAttrs(unapply)
+ Apply(fun, args).copyAttrs(unapply)
case _ =>
tree
}
@@ -256,7 +244,7 @@ trait Reshape {
val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
- ValDef(mods2, name1.toTermName, tpt0, extractRhs(rhs0))
+ ValDef(mods2, name1, tpt0, extractRhs(rhs0))
}
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index 4cbc03d8d4..a86af73fe3 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -93,7 +93,7 @@ class Settings {
case _ => false
}
- override lazy val hashCode: Int = Seq(
+ override lazy val hashCode: Int = Seq[Any](
gBf,
uncheckedBf,
classpathBf,
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index e44752eb6e..781cc564cb 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -24,13 +24,13 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
val Terminator = "--"
val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
- def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+ def mapForUnary(opt: String) = Map(fromOpt(opt) -> ValueForUnaryOption)
def errorFn(msg: String) = println(msg)
/** argMap is option -> argument (or "" if it is a unary argument)
* residualArgs are what is left after removing the options and their args.
*/
- lazy val (argMap, residualArgs) = {
+ lazy val (argMap, residualArgs): (Map[String, String], List[String]) = {
val residualBuffer = new ListBuffer[String]
def loop(args: List[String]): Map[String, String] = {
@@ -72,7 +72,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
if (x2 == Terminator) mapForUnary(x1) ++ residual(xs)
else if (isUnaryOption(x1)) mapForUnary(x1) ++ loop(args.tail)
- else if (isBinaryOption(x1)) Map(x1 -> x2) ++ loop(xs)
+ else if (isBinaryOption(x1)) Map(fromOpt(x1) -> x2) ++ loop(xs)
else if (isUnknown(x1)) loop(args.tail)
else residual(List(x1)) ++ loop(args.tail)
}
diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala
index ef55178594..6132eff557 100644
--- a/src/compiler/scala/tools/cmd/CommandLineParser.scala
+++ b/src/compiler/scala/tools/cmd/CommandLineParser.scala
@@ -40,16 +40,16 @@ object CommandLineParser {
// parse `in` for an argument, return it and the remainder of the input (or an error message)
// (argument may be in single/double quotes, taking escaping into account, quotes are stripped)
private def argument(in: String): Either[String, (String, String)] = in match {
- case DoubleQuoted(arg, rest) => Right(arg, rest)
- case SingleQuoted(arg, rest) => Right(arg, rest)
- case Word(arg, rest) => Right(arg, rest)
- case _ => Left("Illegal argument: "+ in)
+ case DoubleQuoted(arg, rest) => Right((arg, rest))
+ case SingleQuoted(arg, rest) => Right((arg, rest))
+ case Word(arg, rest) => Right((arg, rest))
+ case _ => Left(s"Illegal argument: $in")
}
// parse a list of whitespace-separated arguments (ignoring whitespace in quoted arguments)
@tailrec private def commandLine(in: String, accum: List[String] = Nil): Either[String, (List[String], String)] = {
val trimmed = in.trim
- if (trimmed.isEmpty) Right(accum.reverse, "")
+ if (trimmed.isEmpty) Right((accum.reverse, ""))
else argument(trimmed) match {
case Right((arg, next)) =>
(next span Character.isWhitespace) match {
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
index 2c193128f1..df3d0c4462 100644
--- a/src/compiler/scala/tools/cmd/Opt.scala
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -26,10 +26,10 @@ object Opt {
trait Implicit {
def name: String
def programInfo: Info
- protected def opt = toOpt(name)
+ protected def opt = fromOpt(name)
def --? : Boolean // --opt is set
- def --> (body: => Unit): Unit // if --opt is set, execute body
+ def --> (body: => Unit): Boolean // if --opt is set, execute body
def --| : Option[String] // --opt <arg: String> is optional, result is Option[String]
def --^[T: FromString] : Option[T] // --opt <arg: T> is optional, result is Option[T]
@@ -51,7 +51,7 @@ object Opt {
import options._
def --? = { addUnary(opt) ; false }
- def --> (body: => Unit) = { addUnary(opt) }
+ def --> (body: => Unit) = { addUnary(opt) ; false }
def --| = { addBinary(opt) ; None }
def --^[T: FromString] = { addBinary(opt) ; None }
@@ -65,7 +65,7 @@ object Opt {
class Instance(val programInfo: Info, val parsed: CommandLine, val name: String) extends Implicit with Error {
def --? = parsed isSet opt
- def --> (body: => Unit) = if (parsed isSet opt) body
+ def --> (body: => Unit) = { val isSet = parsed isSet opt ; if (isSet) body ; isSet }
def --| = parsed get opt
def --^[T: FromString] = {
val fs = implicitly[FromString[T]]
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index ec2a414065..62b6c893cf 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -23,13 +23,13 @@ trait Reference extends Spec {
def helpMsg = options.helpMsg
def propertyArgs: List[String] = Nil
- def isUnaryOption(s: String) = unary contains toOpt(s)
- def isBinaryOption(s: String) = binary contains toOpt(s)
- def isExpandOption(s: String) = expansionMap contains toOpt(s)
+ def isUnaryOption(s: String) = unary contains fromOpt(s)
+ def isBinaryOption(s: String) = binary contains fromOpt(s)
+ def isExpandOption(s: String) = expansionMap contains fromOpt(s)
- def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
+ def expandArg(arg: String): List[String] = expansionMap.getOrElse(fromOpt(arg), List(arg))
- protected def help(str: => String) = addHelp(() => str)
+ protected def help(str: => String): Unit = addHelp(() => str)
type ThisCommandLine <: CommandLine
@@ -53,20 +53,20 @@ object Reference {
def helpFormatStr = " %-" + longestArg + "s %s"
def defaultFormatStr = (" " * (longestArg + 7)) + "%s"
- def addUnary(s: String) = _unary +:= s
- def addBinary(s: String) = _binary +:= s
+ def addUnary(s: String): Unit = _unary +:= s
+ def addBinary(s: String): Unit = _binary +:= s
def addExpand(opt: String, expanded: List[String]) =
_expand += (opt -> expanded)
- def mapHelp(g: String => String) = {
+ def mapHelp(g: String => String): Unit = {
val idx = _help.length - 1
val f = _help(idx)
_help(idx) = () => g(f())
}
- def addHelp(f: () => String) = _help += f
+ def addHelp(f: () => String): Unit = _help += f
def addHelpAlias(f: () => String) = mapHelp { s =>
val str = "alias for '%s'" format f()
def noHelp = (helpFormatStr.format("", "")).length == s.length
@@ -74,13 +74,13 @@ object Reference {
s + str2
}
- def addHelpDefault(f: () => String) = mapHelp { s =>
+ def addHelpDefault(f: () => String): Unit = mapHelp { s =>
val str = "(default: %s)" format f()
if (s.length + str.length < MaxLine) s + " " + str
else defaultFormatStr.format(s, str)
}
- def addHelpEnvDefault(name: String) = mapHelp { s =>
+ def addHelpEnvDefault(name: String): Unit = mapHelp { s =>
val line1 = "%s (default: %s)".format(s, name)
val envNow = envOrNone(name) map ("'" + _ + "'") getOrElse "unset"
val line2 = defaultFormatStr.format("Currently " + envNow)
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 7e01afac2b..842851b4f6 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -6,24 +6,23 @@
package scala.tools.cmd
package gen
-/** Code generation of the AnyVal types and their companions.
- */
+/** Code generation of the AnyVal types and their companions. */
trait AnyValReps {
self: AnyVals =>
- sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
+ sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String)
+ extends AnyValRep(name,repr,javaEquiv) {
case class Op(op : String, doc : String)
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
- """implicit def @javaequiv@2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
+ s"implicit def @javaequiv@2${to.javaEquiv}(x: @name@): ${to.name} = x.to${to.name}"
)
}
- def coercionCommentExtra = ""
- def coercionComment = """
-/** Language mandated coercions from @name@ to "wider" types.%s
- */""".format(coercionCommentExtra)
+ def coercionComment =
+"""/** Language mandated coercions from @name@ to "wider" types. */
+import scala.language.implicitConversions"""
def implicitCoercions: List[String] = {
val coercions = this match {
@@ -41,12 +40,8 @@ trait AnyValReps {
def isCardinal: Boolean = isIntegerType(this)
def unaryOps = {
val ops = List(
- Op("+", "/**\n" +
- " * Returns this value, unmodified.\n" +
- " */"),
- Op("-", "/**\n" +
- " * Returns the negation of this value.\n" +
- " */"))
+ Op("+", "/** Returns this value, unmodified. */"),
+ Op("-", "/** Returns the negation of this value. */"))
if(isCardinal)
Op("~", "/**\n" +
@@ -95,7 +90,7 @@ trait AnyValReps {
" */"))
else Nil
- def shiftOps =
+ def shiftOps =
if (isCardinal)
List(
Op("<<", "/**\n" +
@@ -127,20 +122,20 @@ trait AnyValReps {
" */"))
else Nil
- def comparisonOps = List(
- Op("==", "/**\n * Returns `true` if this value is equal to x, `false` otherwise.\n */"),
- Op("!=", "/**\n * Returns `true` if this value is not equal to x, `false` otherwise.\n */"),
- Op("<", "/**\n * Returns `true` if this value is less than x, `false` otherwise.\n */"),
- Op("<=", "/**\n * Returns `true` if this value is less than or equal to x, `false` otherwise.\n */"),
- Op(">", "/**\n * Returns `true` if this value is greater than x, `false` otherwise.\n */"),
- Op(">=", "/**\n * Returns `true` if this value is greater than or equal to x, `false` otherwise.\n */"))
+ def comparisonOps = List(
+ Op("==", "/** Returns `true` if this value is equal to x, `false` otherwise. */"),
+ Op("!=", "/** Returns `true` if this value is not equal to x, `false` otherwise. */"),
+ Op("<", "/** Returns `true` if this value is less than x, `false` otherwise. */"),
+ Op("<=", "/** Returns `true` if this value is less than or equal to x, `false` otherwise. */"),
+ Op(">", "/** Returns `true` if this value is greater than x, `false` otherwise. */"),
+ Op(">=", "/** Returns `true` if this value is greater than or equal to x, `false` otherwise. */"))
def otherOps = List(
- Op("+", "/**\n * Returns the sum of this value and `x`.\n */"),
- Op("-", "/**\n * Returns the difference of this value and `x`.\n */"),
- Op("*", "/**\n * Returns the product of this value and `x`.\n */"),
- Op("/", "/**\n * Returns the quotient of this value and `x`.\n */"),
- Op("%", "/**\n * Returns the remainder of the division of this value by `x`.\n */"))
+ Op("+", "/** Returns the sum of this value and `x`. */"),
+ Op("-", "/** Returns the difference of this value and `x`. */"),
+ Op("*", "/** Returns the product of this value and `x`. */"),
+ Op("/", "/** Returns the quotient of this value and `x`. */"),
+ Op("%", "/** Returns the remainder of the division of this value by `x`. */"))
// Given two numeric value types S and T , the operation type of S and T is defined as follows:
// If both S and T are subrange types then the operation type of S and T is Int.
@@ -278,8 +273,7 @@ trait AnyValReps {
}
trait AnyValTemplates {
- def headerTemplate = ("""
-/* __ *\
+ def headerTemplate = """/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
@@ -287,12 +281,13 @@ trait AnyValTemplates {
** |/ **
\* */
-%s
-package scala
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
-import scala.language.implicitConversions
+package scala
-""".trim.format(timestampString) + "\n\n")
+"""
def classDocTemplate = ("""
/** `@name@`@representation@ (equivalent to Java's `@javaequiv@` primitive type) is a
@@ -304,8 +299,6 @@ import scala.language.implicitConversions
*/
""".trim + "\n")
- def timestampString = "// DO NOT EDIT, CHANGES WILL BE LOST.\n"
-
def allCompanions = """
/** Transform a value type into a boxed reference type.
*@boxRunTimeDoc@
@@ -324,20 +317,17 @@ def box(x: @name@): @boxed@ = @boxImpl@
*/
def unbox(x: java.lang.Object): @name@ = @unboxImpl@
-/** The String representation of the scala.@name@ companion object.
- */
+/** The String representation of the scala.@name@ companion object. */
override def toString = "object scala.@name@"
"""
def nonUnitCompanions = "" // todo
def cardinalCompanion = """
-/** The smallest value representable as a @name@.
- */
+/** The smallest value representable as a @name@. */
final val MinValue = @boxed@.MIN_VALUE
-/** The largest value representable as a @name@.
- */
+/** The largest value representable as a @name@. */
final val MaxValue = @boxed@.MAX_VALUE
"""
@@ -372,18 +362,16 @@ class AnyVals extends AnyValReps with AnyValTemplates {
object D extends AnyValNum("Double", Some("64-bit IEEE-754 floating point number"), "double")
object Z extends AnyValRep("Boolean", None, "boolean") {
def classLines = """
-/**
- * Negates a Boolean expression.
- *
- * - `!a` results in `false` if and only if `a` evaluates to `true` and
- * - `!a` results in `true` if and only if `a` evaluates to `false`.
- *
- * @return the negated expression
- */
+/** Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
def unary_! : Boolean
-/**
- * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
*
* `a == b` returns `true` if and only if
* - `a` and `b` are `true` or
@@ -400,8 +388,7 @@ def ==(x: Boolean): Boolean
*/
def !=(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a || b` returns `true` if and only if
* - `a` is `true` or
@@ -414,8 +401,7 @@ def !=(x: Boolean): Boolean
*/
def ||(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a && b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -430,8 +416,7 @@ def &&(x: Boolean): Boolean
// def ||(x: => Boolean): Boolean
// def &&(x: => Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a | b` returns `true` if and only if
* - `a` is `true` or
@@ -442,8 +427,7 @@ def &&(x: Boolean): Boolean
*/
def |(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+/** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a & b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -452,8 +436,7 @@ def |(x: Boolean): Boolean
*/
def &(x: Boolean): Boolean
-/**
- * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+/** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
*
* `a ^ b` returns `true` if and only if
* - `a` is `true` and `b` is `false` or
@@ -499,5 +482,3 @@ override def getClass(): Class[Boolean] = null
def make() = values map (x => (x.name, x.make()))
}
-
-object AnyVals extends AnyVals { }
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index b49322ab4a..c3aa527ef2 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -6,11 +6,9 @@
package scala.tools.cmd
package gen
-import scala.language.postfixOps
-
class Codegen(args: List[String]) extends {
val parsed = CodegenSpec(args: _*)
-} with CodegenSpec with Instance { }
+} with CodegenSpec with Instance
object Codegen {
def echo(msg: String) = Console println msg
@@ -31,7 +29,7 @@ object Codegen {
val av = new AnyVals { }
av.make() foreach { case (name, code ) =>
- val file = out / (name + ".scala") toFile;
+ val file = (out / (name + ".scala")).toFile
echo("Writing: " + file)
file writeAll code
}
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index 7d67fa738b..9754becf10 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -13,19 +13,19 @@ package object cmd {
implicit def implicitConversions = scala.language.implicitConversions
implicit def postfixOps = scala.language.postfixOps
- private[cmd] def debug(msg: String) = println(msg)
+ private[cmd] def debug(msg: String): Unit = println(msg)
def runAndExit(body: => Unit): Nothing = {
body
sys.exit(0)
}
- def toOpt(s: String) = if (s startsWith "--") s else "--" + s
- def fromOpt(s: String) = s stripPrefix "--"
- def toArgs(line: String) = CommandLineParser tokenize line
- def fromArgs(args: List[String]) = args mkString " "
+ def toOpt(s: String): String = if (s startsWith "--") s else "--" + s
+ def fromOpt(s: String): String = s stripPrefix "--"
+ def toArgs(line: String): List[String] = CommandLineParser tokenize line
+ def fromArgs(args: List[String]): String = args mkString " "
- def stripQuotes(s: String) = {
+ def stripQuotes(s: String): String = {
def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
if (List('"', '\'') exists isQuotedBy) s.tail.init else s
}
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index b52e6fdf57..f7437e4e6c 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -57,8 +57,8 @@ trait CompilationUnits { self: Global =>
// SBT compatibility (SI-6875)
//
// imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
- // Main contains a call to a macro, which calls c.introduceTopLevel to define a mock for Foo
- // c.introduceTopLevel creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
+ // Main contains a call to a macro, which calls compileLate to define a mock for Foo
+ // compileLate creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
// and macro expansion instantiates FooMock. the stage is now set. let's see what happens next.
//
// without this workaround in scalac or without being patched itself, sbt will think that
@@ -91,7 +91,7 @@ trait CompilationUnits { self: Global =>
debuglog(s"removing synthetic $sym from $self")
map -= sym
}
- def get(sym: Symbol): Option[Tree] = logResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
+ def get(sym: Symbol): Option[Tree] = debuglogResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
map get sym
}
def keys: Iterable[Symbol] = map.keys
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index a6c69091c5..3f2d759a6d 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -220,12 +220,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
- override def inform(msg: String) = reporter.echo(msg)
- override def globalError(msg: String) = reporter.error(NoPosition, msg)
- override def warning(msg: String) =
- if (settings.fatalWarnings) globalError(msg)
- else reporter.warning(NoPosition, msg)
+ def error(msg: String) = globalError(msg)
+
+ override def inform(msg: String) = inform(NoPosition, msg)
+ override def globalError(msg: String) = globalError(NoPosition, msg)
+ override def warning(msg: String) = warning(NoPosition, msg)
+
+ def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
+ def warning(pos: Position, msg: String) = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)
+ def inform(pos: Position, msg: String) = reporter.echo(pos, msg)
// Getting in front of Predef's asserts to supplement with more info.
// This has the happy side effect of masking the one argument forms
@@ -258,17 +261,22 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.debug)
body
}
+
+ override protected def isDeveloper = settings.developer || super.isDeveloper
+
/** This is for WARNINGS which should reach the ears of scala developers
* whenever they occur, but are not useful for normal users. They should
* be precise, explanatory, and infrequent. Please don't use this as a
* logging mechanism. !!! is prefixed to all messages issued via this route
* to make them visually distinct.
*/
- @inline final override def devWarning(msg: => String) {
- if (settings.developer || settings.debug)
- warning("!!! " + msg)
+ @inline final override def devWarning(msg: => String): Unit = devWarning(NoPosition, msg)
+ @inline final def devWarning(pos: Position, msg: => String) {
+ def pos_s = if (pos eq NoPosition) "" else s" [@ $pos]"
+ if (isDeveloper)
+ warning(pos, "!!! " + msg)
else
- log("!!! " + msg) // such warnings always at least logged
+ log(s"!!!$pos_s $msg") // such warnings always at least logged
}
def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
@@ -1107,7 +1115,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
"symbol owners" -> ownerChainString(sym),
"call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
)
- ("\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
+ ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
}
catch { case _: Exception | _: TypeError => errorMessage }
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index b17de9b9d5..d7a32c3be0 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -83,6 +83,7 @@ trait TreeDSL {
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
+ def INT_- (other: Tree) = fn(target, getMember(IntClass, nme.MINUS), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
@@ -187,7 +188,7 @@ trait TreeDSL {
def vparamss: List[List[ValDef]]
type ResultTreeType = DefDef
- def mkTree(rhs: Tree): DefDef = DefDef(mods, name, tparams, vparamss, tpt, rhs)
+ def mkTree(rhs: Tree): DefDef = DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
}
class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 641ab9c279..381ffb1ed9 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -66,10 +66,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
*/
def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
- assert(
- mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
- ((mmap(vparamss)(_.symbol), sym))
- )
+ assert(mforall(vparamss)(_.symbol.owner == sym), (mmap(vparamss)(_.symbol), sym))
ClassDef(sym,
gen.mkTemplate(sym.info.parents map TypeTree,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index eb924a811b..94270e4cf3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -334,22 +334,27 @@ self =>
def parseStartRule: () => Tree
- /** This is the general parse entry point.
- */
- def parse(): Tree = {
- val t = parseStartRule()
+ def parseRule[T](rule: this.type => T): T = {
+ val t = rule(this)
accept(EOF)
t
}
+ /** This is the general parse entry point.
+ */
+ def parse(): Tree = parseRule(_.parseStartRule())
+
+ /** This is alternative entry point for repl, script runner, toolbox and quasiquotes.
+ */
+ def parseStats(): List[Tree] = parseRule(_.templateStats())
+
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
* swaddled in Trees until the AST is equivalent to the one returned
* by compilationUnit().
*/
def scriptBody(): Tree = {
- val stmts = templateStats()
- accept(EOF)
+ val stmts = parseStats()
def mainModuleName = newTermName(settings.script.value)
/* If there is only a single object template in the file and it has a
@@ -563,8 +568,8 @@ self =>
and
}
- def expectedMsg(token: Int): String =
- token2string(token) + " expected but " +token2string(in.token) + " found."
+ def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
+ def expectedMsg(token: Int): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
def accept(token: Int): Int = {
@@ -627,6 +632,8 @@ self =>
def isAnnotation: Boolean = in.token == AT
+ def isCaseDefStart: Boolean = in.token == CASE
+
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -1137,32 +1144,70 @@ self =>
})
}
- private def interpolatedString(inPattern: Boolean): Tree = atPos(in.offset) {
- val start = in.offset
- val interpolator = in.name
+ /** Handle placeholder syntax.
+ * If evaluating the tree produces placeholders, then make it a function.
+ */
+ private def withPlaceholders(tree: =>Tree, isAny: Boolean): Tree = {
+ val savedPlaceholderParams = placeholderParams
+ placeholderParams = List()
+ var res = tree
+ if (placeholderParams.nonEmpty && !isWildcard(res)) {
+ res = atPos(res.pos)(Function(placeholderParams.reverse, res))
+ if (isAny) placeholderParams foreach (_.tpt match {
+ case tpt @ TypeTree() => tpt setType definitions.AnyTpe
+ case _ => // some ascription
+ })
+ placeholderParams = List()
+ }
+ placeholderParams = placeholderParams ::: savedPlaceholderParams
+ res
+ }
- val partsBuf = new ListBuffer[Tree]
- val exprBuf = new ListBuffer[Tree]
+ /** Consume a USCORE and create a fresh synthetic placeholder param. */
+ private def freshPlaceholder(): Tree = {
+ val start = in.offset
+ val pname = freshName("x$")
in.nextToken()
- while (in.token == STRINGPART) {
- partsBuf += literal()
- exprBuf += (
- if (inPattern) dropAnyBraces(pattern())
- else in.token match {
- case IDENTIFIER => atPos(in.offset)(Ident(ident()))
- case LBRACE => expr()
- case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
- case _ => syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected", skipIt = true)(EmptyTree)
- }
- )
- }
- if (in.token == STRINGLIT) partsBuf += literal()
+ val id = atPos(start)(Ident(pname))
+ val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
+ placeholderParams = param :: placeholderParams
+ id
+ }
+
+ private def interpolatedString(inPattern: Boolean): Tree = {
+ def errpolation() = syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected",
+ skipIt = true)(EmptyTree)
+ // Like Swiss cheese, with holes
+ def stringCheese: Tree = atPos(in.offset) {
+ val start = in.offset
+ val interpolator = in.name
- val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
- val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
- t2 setPos t2.pos.makeTransparent
- val t3 = Select(t2, interpolator) setPos t2.pos
- atPos(start) { Apply(t3, exprBuf.toList) }
+ val partsBuf = new ListBuffer[Tree]
+ val exprBuf = new ListBuffer[Tree]
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ partsBuf += literal()
+ exprBuf += (
+ if (inPattern) dropAnyBraces(pattern())
+ else in.token match {
+ case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+ //case USCORE => freshPlaceholder() // ifonly etapolation
+ case LBRACE => expr() // dropAnyBraces(expr0(Local))
+ case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+ case _ => errpolation()
+ }
+ )
+ }
+ if (in.token == STRINGLIT) partsBuf += literal()
+
+ val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+ val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+ t2 setPos t2.pos.makeTransparent
+ val t3 = Select(t2, interpolator) setPos t2.pos
+ atPos(start) { Apply(t3, exprBuf.toList) }
+ }
+ if (inPattern) stringCheese
+ else withPlaceholders(stringCheese, isAny = true) // strinterpolator params are Any* by definition
}
/* ------------- NEW LINES ------------------------------------------------- */
@@ -1260,18 +1305,7 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = {
- val savedPlaceholderParams = placeholderParams
- placeholderParams = List()
- var res = expr0(location)
- if (!placeholderParams.isEmpty && !isWildcard(res)) {
- res = atPos(res.pos){ Function(placeholderParams.reverse, res) }
- placeholderParams = List()
- }
- placeholderParams = placeholderParams ::: savedPlaceholderParams
- res
- }
-
+ def expr(location: Int): Tree = withPlaceholders(expr0(location), isAny = false)
def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
@@ -1298,7 +1332,7 @@ self =>
in.nextToken()
if (in.token != LBRACE) catchFromExpr()
else inBracesOrNil {
- if (in.token == CASE) caseClauses()
+ if (isCaseDefStart) caseClauses()
else catchFromExpr()
}
}
@@ -1520,13 +1554,7 @@ self =>
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
path(thisOK = true, typeOK = false)
case USCORE =>
- val start = in.offset
- val pname = freshName("x$")
- in.nextToken()
- val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ gen.mkSyntheticParam(pname.toTermName) }
- placeholderParams = param :: placeholderParams
- id
+ freshPlaceholder()
case LPAREN =>
atPos(in.offset)(makeParens(commaSeparated(expr())))
case LBRACE =>
@@ -1613,7 +1641,7 @@ self =>
*/
def blockExpr(): Tree = atPos(in.offset) {
inBraces {
- if (in.token == CASE) Match(EmptyTree, caseClauses())
+ if (isCaseDefStart) Match(EmptyTree, caseClauses())
else block()
}
}
@@ -2542,7 +2570,7 @@ self =>
}
expr()
}
- DefDef(newmods, name, tparams, vparamss, restype, rhs)
+ DefDef(newmods, name.toTermName, tparams, vparamss, restype, rhs)
}
signalParseProgress(result.pos)
result
@@ -2605,7 +2633,7 @@ self =>
case EQUALS =>
in.nextToken()
TypeDef(mods, name, tparams, typ())
- case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
+ case t if t == SUPERTYPE || t == SUBTYPE || t == COMMA || t == RBRACE || isStatSep(t) =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
@@ -2665,7 +2693,7 @@ self =>
syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", skipIt = false)
classContextBounds = List()
}
- val constrAnnots = constructorAnnotations()
+ val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
@@ -2906,27 +2934,14 @@ self =>
stats.toList
}
- /** Informal - for the repl and other direct parser accessors.
- */
- def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
- case Nil => EmptyTree.asList
- case stats => stats
- }
-
/** {{{
- * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
- * TemplateStat ::= Import
- * | Annotations Modifiers Def
- * | Annotations Modifiers Dcl
- * | Expr1
- * | super ArgumentExprs {ArgumentExprs}
- * |
+ * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStats
* }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
var self: ValDef = emptyValDef
- val stats = new ListBuffer[Tree]
+ var firstOpt: Option[Tree] = None
if (isExprIntro) {
in.flushDoc
val first = expr(InTemplate) // @S: first statement is potentially converted so cannot be stubbed.
@@ -2943,10 +2958,25 @@ self =>
}
in.nextToken()
} else {
- stats += first
+ firstOpt = Some(first)
acceptStatSepOpt()
}
}
+ (self, firstOpt ++: templateStats())
+ }
+
+ /** {{{
+ * TemplateStats ::= TemplateStat {semi TemplateStat}
+ * TemplateStat ::= Import
+ * | Annotations Modifiers Def
+ * | Annotations Modifiers Dcl
+ * | Expr1
+ * | super ArgumentExprs {ArgumentExprs}
+ * |
+ * }}}
+ */
+ def templateStats(): List[Tree] = {
+ val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
if (in.token == IMPORT) {
in.flushDoc
@@ -2961,7 +2991,14 @@ self =>
}
acceptStatSepOpt()
}
- (self, stats.toList)
+ stats.toList
+ }
+
+ /** Informal - for the repl and other direct parser accessors.
+ */
+ def templateStatsCompat(): List[Tree] = templateStats() match {
+ case Nil => EmptyTree.asList
+ case stats => stats
}
/** {{{
@@ -3026,14 +3063,14 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && in.token != CASE) {
+ while (!isStatSeqEnd && !isCaseDefStart) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSep()
}
else if (isExprIntro) {
stats += statement(InBlock)
- if (in.token != RBRACE && in.token != CASE) acceptStatSep()
+ if (in.token != RBRACE && !isCaseDefStart) acceptStatSep()
}
else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
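
The withPlaceholders/freshPlaceholder refactoring above centralizes underscore handling: each `_` becomes a fresh synthetic parameter, and once the enclosing expression is parsed the collected parameters are wrapped into a Function. At the source level this is the familiar shorthand; a small self-contained illustration:

object PlaceholderDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3)

    // each `_` is parsed as a fresh placeholder parameter (x$1, x$2, ...)
    // and the enclosing expression is wrapped into a Function over them
    val incremented = xs.map(_ + 1)    // equivalent to xs.map(x$1 => x$1 + 1)
    val total       = xs.reduce(_ + _) // equivalent to xs.reduce((x$1, x$2) => x$1 + x$2)

    println(incremented)               // List(2, 3, 4)
    println(total)                     // 6
  }
}
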
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 2a8412b105..6957f85689 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -741,6 +741,10 @@ trait Scanners extends ScannersCommon {
finishStringPart()
nextRawChar()
next.token = LBRACE
+ } else if (ch == '_') {
+ finishStringPart()
+ nextRawChar()
+ next.token = USCORE
} else if (Character.isUnicodeIdentifierStart(ch)) {
finishStringPart()
do {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 666f19851d..ed694023d7 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -39,7 +39,7 @@ abstract class TreeBuilder {
* x becomes x @ _
* x: T becomes x @ (_: T)
*/
- private object patvarTransformer extends Transformer {
+ object patvarTransformer extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case Ident(name) if (treeInfo.isVarPattern(tree) && name != nme.WILDCARD) =>
atPos(tree.pos)(Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))))
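
For reference, the desugaring performed by patvarTransformer (now exposed outside TreeBuilder) is the ordinary variable-pattern rule: a lower-case identifier in a pattern is a binder, and a typed binder is a bind over a typed wildcard. Seen from the source level:

object PatVarDemo {
  def main(args: Array[String]): Unit = {
    val value: Any = 42

    val a = value match {
      case x => x                 // parsed as: case x @ _ => x
    }
    val b = value match {
      case n @ (_: Int) => n      // what `case n: Int => n` expands to
      case _            => -1
    }
    println((a, b))               // (42,42)
  }
}
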
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 56191cc981..09095879bf 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -78,10 +78,10 @@ abstract class Inliners extends SubComponent {
assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
", most likely this reveals the TFA at fault (receiver and callee don't match).")
if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
+ else sym.overridingSymbol(clazz) orElse (
+ if (sym.owner.isTrait) sym
+ else lookup(clazz.superClass)
+ )
}
if (needsLookup) {
val concreteMethod = lookup(clazz)
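
The rewrite above (and the similar one in Mixin.scala further down) replaces an explicit NoSymbol match with Symbol's orElse, which behaves like Option.orElse over the NoSymbol sentinel. A standalone sketch of the idiom with stand-in types:

object OrElseSketch {
  final case class Sym(name: String) {
    def exists: Boolean     = this ne NoSym
    def orElse(alt: => Sym) = if (exists) this else alt
  }
  val NoSym = Sym("<none>")     // sentinel, like the compiler's NoSymbol

  def overridingIn(clazz: String): Sym =
    if (clazz == "Sub") Sym("Sub.m") else NoSym

  def main(args: Array[String]): Unit = {
    // before: overridingIn(c) match { case NoSym => fallback; case s => s }
    // after:  overridingIn(c) orElse fallback
    println(overridingIn("Sub")   orElse Sym("Base.m"))   // Sym(Sub.m)
    println(overridingIn("Other") orElse Sym("Base.m"))   // Sym(Base.m)
  }
}
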
diff --git a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala b/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
deleted file mode 100644
index 98c3d27202..0000000000
--- a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.util.concurrent._
-
-class DaemonThreadFactory extends ThreadFactory {
- def newThread(r: Runnable): Thread = {
- val thread = new Thread(r)
- thread setDaemon true
- thread
- }
-}
-
-object DaemonThreadFactory {
- def newPool() = Executors.newCachedThreadPool(new DaemonThreadFactory)
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 0b2db115fb..5f2f90c284 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -5,8 +5,6 @@
package scala.tools.nsc
-import java.util.concurrent.{ Future, Callable }
-import java.util.{ Timer, TimerTask }
import scala.language.implicitConversions
package object io {
@@ -29,16 +27,4 @@ package object io {
type JFile = java.io.File
implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m)
- private lazy val daemonThreadPool = DaemonThreadFactory.newPool()
-
- def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
- def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
- def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
-
- def newThread(f: Thread => Unit)(body: => Unit): Thread = {
- val thread = new Thread(runnable(body))
- f(thread)
- thread.start
- thread
- }
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 2a799acbc7..239ecb4f8a 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -510,7 +510,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (inInterface) mods1 |= Flags.DEFERRED
List {
atPos(pos) {
- DefDef(mods1, name, tparams, List(vparams), rtpt, body)
+ DefDef(mods1, name.toTermName, tparams, List(vparams), rtpt, body)
}
}
} else {
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index c6ea6b23e5..14e3f5b642 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -74,7 +74,6 @@ abstract class ClassfileParser {
def srcfile = srcfile0
private def optimized = settings.optimise.value
- private def currentIsTopLevel = !(currentClass.decodedName containsChar '$')
// u1, u2, and u4 are what these data types are called in the JVM spec.
// They are an unsigned byte, unsigned char, and unsigned int respectively.
@@ -349,7 +348,7 @@ abstract class ClassfileParser {
/** Throws an exception signaling a bad tag at given address. */
protected def errorBadTag(start: Int) =
- abort("bad constant pool tag ${in.buf(start)} at byte $start")
+ abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
}
private def loadClassSymbol(name: Name): Symbol = {
@@ -660,14 +659,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0, tp)
- logResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
+ debuglogResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
}
// isMonomorphicType is false if the info is incomplete, as it usually is here
// so have to check unsafeTypeParams.isEmpty before worrying about raw type case below,
// or we'll create a boatload of needless existentials.
else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
// raw type - existentially quantify all type parameters
- else logResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
+ else debuglogResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
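
The errorBadTag fix above is worth a second look: the original string lacked the s prefix, so ${in.buf(start)} and $start were emitted literally instead of being interpolated. The difference in plain Scala:

object InterpolatorDemo {
  def main(args: Array[String]): Unit = {
    val tag   = 99
    val start = 7
    println("bad constant pool tag ${tag} at byte $start")    // printed verbatim, dollars and all
    println(s"bad constant pool tag ${tag} at byte $start")   // bad constant pool tag 99 at byte 7
  }
}
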
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 0dcf4d00b7..d9d08dde1e 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -367,29 +367,3 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
}
}
-/*
- val ensureNoEscapes = new TypeTraverser {
- def ensureNoEscape(sym: Symbol) {
- if (sym.hasFlag(PRIVATE)) {
- var o = currentOwner;
- while (o != NoSymbol && o != sym.owner && !o.isLocal && !o.hasFlag(PRIVATE))
- o = o.owner
- if (o == sym.owner) sym.makeNotPrivate(base);
- }
- }
- def traverse(t: Type): TypeTraverser = {
- t match {
- case TypeRef(qual, sym, args) =>
- ensureNoEscape(sym)
- mapOver(t)
- case ClassInfoType(parents, decls, clazz) =>
- parents foreach { p => traverse; () }
- traverse(t.typeOfThis)
- case _ =>
- mapOver(t)
- }
- this
- }
- }
-
-*/
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 0f65b11e9b..0a66ba8a32 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -525,8 +525,7 @@ abstract class Erasure extends AddInterfaces
private def isDifferentErasedValueType(tpe: Type, other: Type) =
isErasedValueType(tpe) && (tpe ne other)
- private def isPrimitiveValueMember(sym: Symbol) =
- sym != NoSymbol && isPrimitiveValueClass(sym.owner)
+ private def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
@inline private def box(tree: Tree, target: => String): Tree = {
val result = box1(tree)
@@ -706,7 +705,8 @@ abstract class Erasure extends AddInterfaces
// }
typed(untyped)
}
- } else tree
+ } else qual1
+
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_isInstanceOf =>
targ.tpe match {
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index ce495ca8ca..515fa66cfa 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -489,7 +489,7 @@ abstract class LambdaLift extends InfoTransform {
treeCopy.Assign(tree, qual, rhs)
case Ident(name) =>
val tree1 =
- if (sym != NoSymbol && sym.isTerm && !sym.isLabel)
+ if (sym.isTerm && !sym.isLabel)
if (sym.isMethod)
atPos(tree.pos)(memberRef(sym))
else if (sym.isLocal && !isSameOwnerEnclosure(sym))
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 1c44e86aca..3ec4d16bf5 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -734,10 +734,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym
}
- if (sym ne NoSymbol)
- sym
- else
- createBitmap
+ sym orElse createBitmap
}
def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e2ce2743f7..16c803e2e8 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -64,7 +64,6 @@ abstract class UnCurry extends InfoTransform
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var needTryLift = false
- private var inPattern = false
private var inConstructorFlag = 0L
private val byNameArgs = mutable.HashSet[Tree]()
private val noApply = mutable.HashSet[Tree]()
@@ -79,12 +78,6 @@ abstract class UnCurry extends InfoTransform
@inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
f(newMembers.remove(owner).getOrElse(Nil).toList)
- @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
- inPattern = value
- try body
- finally inPattern = !value
- }
-
private def newFunction0(body: Tree): Tree = {
val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
@@ -119,16 +112,6 @@ abstract class UnCurry extends InfoTransform
&& (isByName(tree.symbol))
)
- /** Uncurry a type of a tree node.
- * This function is sensitive to whether or not we are in a pattern -- when in a pattern
- * additional parameter sections of a case class are skipped.
- */
- def uncurryTreeType(tp: Type): Type = tp match {
- case MethodType(params, MethodType(params1, restpe)) if inPattern =>
- uncurryTreeType(MethodType(params, restpe))
- case _ =>
- uncurry(tp)
- }
// ------- Handling non-local returns -------------------------------------------------
@@ -327,7 +310,7 @@ abstract class UnCurry extends InfoTransform
}
else {
def mkArray = mkArrayValue(args drop (formals.length - 1), varargsElemType)
- if (isJava || inPattern) mkArray
+ if (isJava) mkArray
else if (args.isEmpty) gen.mkNil // avoid needlessly double-wrapping an empty argument list
else arrayToSequence(mkArray, varargsElemType)
}
@@ -474,10 +457,10 @@ abstract class UnCurry extends InfoTransform
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = withInPattern(value = false)(transform(fn))
+ val fn1 = transform(fn)
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, localTyper.expectedPatternTypes(fn, args))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -510,7 +493,7 @@ abstract class UnCurry extends InfoTransform
else super.transform(tree)
case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(value = true)(transform(pat))
+ val pat1 = transform(pat)
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case fun @ Function(_, _) =>
@@ -532,7 +515,7 @@ abstract class UnCurry extends InfoTransform
}
)
assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
- result setType uncurryTreeType(result.tpe)
+ result modifyType uncurry
}
def postTransform(tree: Tree): Tree = exitingUncurry {
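
Context for the varargs branch above (`if (isJava) mkArray else ... arrayToSequence`): a Scala-defined repeated parameter receives its arguments as a Seq, whereas a Java varargs method receives an actual array, which is why only the Java case keeps the bare array. At the source level:

object VarargsDemo {
  def scalaVarargs(xs: Int*): Int = xs.sum           // xs arrives as a Seq[Int]

  def main(args: Array[String]): Unit = {
    println(scalaVarargs(1, 2, 3))                   // 6: the compiler packages the args as a Seq
    println(scalaVarargs(Array(1, 2, 3): _*))        // 6: an explicit sequence argument
    println(java.util.Arrays.asList(1, 2, 3))        // [1, 2, 3]: Java varargs take an array under the hood
  }
}
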
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 069484ff65..45aa1106f0 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -13,7 +13,6 @@ import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
import scala.reflect.internal.util.HashSet
-
trait Logic extends Debugging {
import PatternMatchingStats._
@@ -494,7 +493,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
- import global.definitions.{AnyClass, UnitClass}
+ import global.definitions._
// all our variables range over types
@@ -549,7 +548,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
def tp: Type
def wideTp: Type
- def isAny = wideTp.typeSymbol == AnyClass
+ def isAny = wideTp =:= AnyTpe
def isValue: Boolean //= tp.isStable
// note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
@@ -564,6 +563,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// (At least conceptually: `true` is an instance of class `Boolean`)
private def widenToClass(tp: Type): Type =
if (tp.typeSymbol.isClass) tp
+ else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp)
else tp.baseType(tp.baseClasses.head)
object TypeConst extends TypeConstExtractor {
@@ -606,7 +606,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (tp.isInstanceOf[SingletonType]) tp
else p match {
case Literal(c) =>
- if (c.tpe.typeSymbol == UnitClass) c.tpe
+ if (c.tpe =:= UnitTpe) c.tpe
else ConstantType(c)
case Ident(_) if p.symbol.isStable =>
// for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index f089c8f5a5..8feb87210e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -21,7 +21,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// unfortunately this is not true in general:
// SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
- val tpValue = tp.typeSymbol.isPrimitiveValueClass
+ val tpValue = isPrimitiveValueType(tp)
// pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
// (and the subtype is respectively a value type or not a value type)
@@ -59,17 +59,20 @@ trait TreeAndTypeAnalysis extends Debugging {
debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
None
case sym =>
- val subclasses = (
- sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- debug.patmat("enum sealed -- subclasses: "+ ((sym, subclasses)))
+ sym.sealedDescendants.toList
+ sortBy (_.sealedSortName)
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ )
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
+
+ Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
// valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- val validSubTypes = (subclasses flatMap {sym =>
+ subclasses flatMap { sym =>
// have to filter out children which cannot match: see ticket #3683 for an example
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
@@ -81,9 +84,8 @@ trait TreeAndTypeAnalysis extends Debugging {
// debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
- })
- debug.patmat("enum sealed "+ ((tp, tpApprox)) + " as "+ validSubTypes)
- Some(validSubTypes)
+ }
+ })
}
// approximate a type to the static type that is fully checkable at run time,
@@ -104,10 +106,7 @@ trait TreeAndTypeAnalysis extends Debugging {
mapOver(tp)
}
}
-
- val res = typeArgsToWildcardsExceptArray(tp)
- debug.patmat("checkable "+((tp, res)))
- res
+ debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp))
}
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
@@ -136,20 +135,17 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
var currId = 0
}
case class Test(prop: Prop, treeMaker: TreeMaker) {
- // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ // private val reusedBy = new mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
// reusedBy += later
later.reuses = Some(this)
}
-
val id = { Test.currId += 1; Test.currId}
- override def toString =
- "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ override def toString = s"T${id}C($prop)"
}
-
class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
override def uniqueNonNullProp(p: Tree): Prop = True
}
@@ -158,9 +154,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
class TreeMakersToProps(val root: Symbol) {
prepareNewAnalysis() // reset hash consing for Var and Const
- private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
- private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
- private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+ private[this] val uniqueEqualityProps = new mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new mutable.HashMap[(Tree, Type), Eq]
def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
@@ -222,7 +218,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
// so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ t.isInstanceOf[Ident] && t.symbol.exists && pointsToBound(f)
}
val (boundFrom, boundTo) = boundSubst.unzip
val (unboundFrom, unboundTo) = unboundSubst.unzip
@@ -624,9 +620,9 @@ trait MatchAnalysis extends MatchApproximation {
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
- private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
- private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
- private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+ private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = ctor.safeOwner
+ private lazy val caseFieldAccs = cls.caseFieldAccessors
def addField(symbol: Symbol, assign: VariableAssignment) {
// SI-7669 Only register this field if if this class contains it.
@@ -686,8 +682,7 @@ trait MatchAnalysis extends MatchApproximation {
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
case _ => NoExample
}
- debug.patmat("described as: "+ res)
- res
+ debug.patmatResult("described as")(res)
}
override def toString = toCounterExample().toString
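
Several hunks above fold the "compute, log, return" pattern into debug.patmatResult (and the ClassfileParser hunk earlier switches to debuglogResult in the same spirit). The shape of such a helper, sketched standalone with illustrative names:

object LogResultSketch {
  def logResult[T](label: String)(result: T): T = {
    println(s"$label: $result")   // log the labelled value ...
    result                        // ... and hand it back unchanged
  }

  def main(args: Array[String]): Unit = {
    // before: val res = compute(); debug("checkable " + res); res
    // after:  logResult("checkable")(compute())
    val doubled = logResult("checkable")(List(1, 2, 3).map(_ * 2))
    println(doubled.sum)          // 12
  }
}
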
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 1e4c56529c..cf74f0fb11 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -62,45 +62,44 @@ trait MatchCodeGen extends Interface {
def codegen: AbsCodegen
abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+
+ // Right now this blindly calls drop on the result of the unapplySeq
+ // unless it verifiably has no drop method (this is the case in particular
+ // with Array.) You should not actually have to write a method called drop
+ // for name-based matching, but this was an expedient route for the basics.
+ def drop(tgt: Tree)(n: Int): Tree = {
+ def callDirect = fn(tgt, nme.drop, LIT(n))
+ def callRuntime = Apply(REF(traversableDropMethod), tgt :: LIT(n) :: Nil)
+ def needsRuntime = (tgt.tpe ne null) && (typeOfMemberNamedDrop(tgt.tpe) == NoType)
+
+ if (needsRuntime) callRuntime else callDirect
+ }
+
+ // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder)
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant(()))
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
+ def mkZero(tp: Type): Tree = gen.mkConstantZero(tp) match {
+ case Constant(null) => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ case const => Literal(const)
}
}
}
trait PureMatchMonadInterface extends MatchMonadInterface {
val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
import CODE._
def _match(n: Name): SelectStart = matchStrategy DOT n
- private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ // TODO: error message
+ private lazy val oneType = typer.typedOperator(_match(vpmName.one)).tpe
+ override def pureType(tp: Type): Type = firstParamType(appliedType(oneType, tp :: Nil))
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
@@ -132,13 +131,7 @@ trait MatchCodeGen extends Interface {
}
}
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with MatchMonadInterface {
override def codegen: AbsCodegen = optimizedCodegen
// when we know we're targeting Option, do some inlining the optimizer won't do
@@ -154,9 +147,8 @@ trait MatchCodeGen extends Interface {
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = newSynthCaseLabel("matchEnd")
val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- matchEnd setInfo MethodType(List(matchRes), restpe)
+ val matchEnd = newSynthCaseLabel("matchEnd") setInfo MethodType(List(matchRes), restpe)
def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
var _currCase = newCaseSym
@@ -168,23 +160,22 @@ trait MatchCodeGen extends Interface {
LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
}
-
// must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
val catchAllDef = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ val scrutRef = scrutSym.fold(EmptyTree: Tree)(REF) // for alternatives
LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
} toList // at most 1 element
// scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ val scrutDef = scrutSym.fold(List[Tree]())(sym => (VAL(sym) === scrut) :: Nil) // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
Block(
scrutDef ++ caseDefs ++ catchAllDef,
LabelDef(matchEnd, List(matchRes), REF(matchRes))
@@ -206,15 +197,14 @@ trait MatchCodeGen extends Interface {
// next: MatchMonad[U]
// returns MatchMonad[U]
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
+ val prevSym = freshSym(prev.pos, prev.tpe, "o")
BLOCK(
VAL(prevSym) === prev,
// must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ ifThenElseZero(
+ NOT(prevSym DOT vpmName.isEmpty),
+ Substitution(b, prevSym DOT vpmName.get)(next)
+ )
)
}
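
The mkZero rewrite above delegates the per-primitive zoo to gen.mkConstantZero and keeps only the null-cast fallback for reference types. What the removed match computed, as a standalone sketch over plain values (the real code builds Literal trees, of course):

object ZeroSketch {
  def zeroOf(tpe: String): Any = tpe match {
    case "Unit"    => ()
    case "Boolean" => false
    case "Byte"    => 0.toByte
    case "Short"   => 0.toShort
    case "Int"     => 0
    case "Long"    => 0L
    case "Float"   => 0.0f
    case "Double"  => 0.0d
    case "Char"    => 0.toChar
    case _         => null        // reference types: the codegen casts null to the target type
  }

  def main(args: Array[String]): Unit = {
    println(zeroOf("Int"))        // 0
    println(zeroOf("String"))     // null
  }
}
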
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
new file mode 100644
index 0000000000..0d08120e43
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
@@ -0,0 +1,37 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+/** Segregating this super hacky CPS code. */
+trait MatchCps {
+ self: PatternMatching =>
+
+ import global._
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private object CpsSymbols {
+ private def cpsSymbol(name: String) = rootMirror.getClassIfDefined(s"scala.util.continuations.$name")
+
+ val MarkerCPSAdaptPlus = cpsSymbol("cpsPlus")
+ val MarkerCPSAdaptMinus = cpsSymbol("cpsMinus")
+ val MarkerCPSSynth = cpsSymbol("cpsSynth")
+ val MarkerCPSTypes = cpsSymbol("cpsParam")
+ val stripTriggerCPSAnns = Set[Symbol](MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ val strippedCPSAnns = stripTriggerCPSAnns + MarkerCPSTypes
+
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ def removeCPSFromPt(pt: Type): Type = (
+ if (MarkerCPSAdaptPlus.exists && (stripTriggerCPSAnns exists pt.hasAnnotation))
+ pt filterAnnotations (ann => !(strippedCPSAnns exists ann.matches))
+ else
+ pt
+ )
+ }
+ def removeCPSFromPt(pt: Type): Type = CpsSymbols removeCPSFromPt pt
+}
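
The new MatchCps trait packages the CPS-annotation stripping behind removeCPSFromPt: if any trigger marker is present on the expected type, the whole CPS annotation family is filtered out, otherwise the type is left alone. A minimal model of that logic, with annotations reduced to plain strings purely for illustration:

object StripCpsSketch {
  final case class Tpe(name: String, annotations: List[String]) {
    def hasAnnotation(a: String)                = annotations contains a
    def filterAnnotations(p: String => Boolean) = copy(annotations = annotations filter p)
  }

  private val stripTrigger = Set("cpsSynth", "cpsMinus", "cpsPlus")
  private val stripped     = stripTrigger + "cpsParam"

  def removeCPSFromPt(pt: Tpe): Tpe =
    if (stripTrigger exists pt.hasAnnotation)
      pt.filterAnnotations(ann => !(stripped contains ann))
    else pt

  def main(args: Array[String]): Unit = {
    println(removeCPSFromPt(Tpe("Int", List("cpsSynth", "cpsParam", "other"))))  // keeps only "other"
    println(removeCPSFromPt(Tpe("Int", List("deprecated"))))                     // unchanged
  }
}
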
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index 9854e4ef62..ec45789687 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -210,7 +210,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
// }
//// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ trait SwitchEmission extends TreeMakers with MatchMonadInterface {
import treeInfo.isGuardedCase
abstract class SwitchMaker {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index fcee142932..75335f7920 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -12,86 +12,165 @@ import scala.reflect.internal.util.Statistics
/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
*/
-trait MatchTranslation { self: PatternMatching =>
+trait MatchTranslation {
+ self: PatternMatching =>
+
import PatternMatchingStats._
import global._
import definitions._
import global.analyzer.{ErrorUtils, formalTypes}
+ import treeInfo.{ WildcardStarArg, Unapplied, isStar, unbind }
+ import CODE._
+
+ // Always map repeated params to sequences
+ private def setVarInfo(sym: Symbol, info: Type) =
+ sym setInfo debug.patmatResult(s"changing ${sym.defString} to")(repeatedToSeq(info))
+
+ private def hasSym(t: Tree) = t.symbol != null && t.symbol != NoSymbol
- trait MatchTranslator extends TreeMakers {
+ trait MatchTranslator extends TreeMakers with TreeMakerWarnings {
import typer.context
- // Why is it so difficult to say "here's a name and a context, give me any
- // matching symbol in scope" ? I am sure this code is wrong, but attempts to
- // use the scopes of the contexts in the enclosing context chain discover
- // nothing. How to associate a name with a symbol would would be a wonderful
- // linkage for which to establish a canonical acquisition mechanism.
- def matchingSymbolInScope(pat: Tree): Symbol = {
- def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
- case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
- case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
- case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
- case _ => NoSymbol
- }
- pat match {
- case Bind(name, _) =>
- context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
- res orElse declarationOfName(ctx.owner.rawInfo, name))
- case _ => NoSymbol
+ object SymbolBound {
+ def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+ case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr)
+ case _ => None
}
}
- // Issue better warnings than "unreachable code" when people mis-use
- // variable patterns thinking they bind to existing identifiers.
- //
- // Possible TODO: more deeply nested variable patterns, like
- // case (a, b) => 1 ; case (c, d) => 2
- // However this is a pain (at least the way I'm going about it)
- // and I have to think these detailed errors are primarily useful
- // for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(cases: List[CaseDef]) {
- // A string describing the first variable pattern
- var vpat: String = null
- // Using an iterator so we can recognize the last case
- val it = cases.iterator
-
- def addendum(pat: Tree) = {
- matchingSymbolInScope(pat) match {
- case NoSymbol => ""
- case sym =>
- val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
- s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match {
+ case SymbolBound(sym, expr) => BoundTree(setVarInfo(sym, pt), expr)
+ case _ => BoundTree(setVarInfo(freshSym(tree.pos, prefix = "p"), pt), tree)
+ }
+
+ final case class BoundTree(binder: Symbol, tree: Tree) {
+ private lazy val extractor = ExtractorCall(tree)
+
+ def pos = tree.pos
+ def tpe = binder.info.dealiasWiden // the type of the variable bound to the pattern
+ def pt = unbound match {
+ case Star(tpt) => this glbWith seqType(tpt.tpe)
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
+ def repeatedType = unbound match {
+ case Star(tpt) => tpt.tpe
+ case _ => NoType
+ }
+ def glbWith(other: Type) = glb(tpe :: other :: Nil).normalize
+
+ object SymbolAndTypeBound {
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ case SymbolBound(sym, SymbolAndTypeBound(_, tpe)) => Some(sym -> tpe)
+ case TypeBound(tpe) => Some(binder -> tpe)
+ case _ => None
}
}
- while (it.hasNext) {
- val cdef = it.next()
- // If a default case has been seen, then every succeeding case is unreachable.
- if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
- // If this is a default case and more cases follow, warn about this one so
- // we have a reason to mention its pattern variable name and any corresponding
- // symbol in scope. Errors will follow from the remaining cases, at least
- // once we make the above warning an error.
- else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
- val vpatName = cdef.pat match {
- case Bind(name, _) => s" '$name'"
- case _ => ""
- }
- vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ object TypeBound {
+ def unapply(tree: Tree): Option[Type] = unbind(tree) match {
+ case Typed(Ident(_), _) if tree.tpe != null => Some(tree.tpe)
+ case _ => None
}
}
+
+ private def rebindTo(pattern: Tree) = BoundTree(binder, pattern)
+ private def step(treeMakers: TreeMaker*)(subpatterns: BoundTree*): TranslationStep = TranslationStep(treeMakers.toList, subpatterns.toList)
+
+ private def bindingStep(sub: Symbol, subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern))
+ private def equalityTestStep() = step(EqualityTestTreeMaker(binder, tree, pos))()
+ private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, glbWith(subPt))(pos))()
+ private def alternativesStep(alts: List[Tree]) = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))()
+ private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate())
+ private def noStep() = step()()
+
+ private def unsupportedPatternMsg = sm"""
+ |unsupported pattern: ${tree.shortClass} / $this (this is a scalac bug.)
+ |""".trim
+
+ // example check: List[Int] <:< ::[Int]
+ private def extractorStep(): TranslationStep = {
+ import extractor.{ paramType, treeMaker }
+ if (!extractor.isTyped)
+ ErrorUtils.issueNormalTypeError(tree, "Could not typecheck extractor call: "+ extractor)(context)
+
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ lazy val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true)
+ // check whether typetest implies binder is not null,
+ // even though the eventual null check will be on typeTest.nextBinder
+ // it'll be equal to binder casted to paramType anyway (and the type test is on binder)
+ def extraction: TreeMaker = treeMaker(typeTest.nextBinder, typeTest impliesBinderNonNull binder, pos)
+
+ // paramType = the type expected by the unapply
+ // TODO: paramType may contain unbound type params (run/t2800, run/t3530)
+ val makers = (
+ // Statically conforms to paramType
+ if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil
+ else typeTest :: extraction :: Nil
+ )
+ step(makers: _*)(extractor.subBoundTrees: _*)
+ }
+
+ // Summary of translation cases. I moved the excerpts from the specification further below so all
+ // the logic can be seen at once.
+ //
+ // [1] skip wildcard trees -- no point in checking them
+ // [2] extractor and constructor patterns
+ // [3] replace subpatBinder by patBinder, as if the Bind was not there.
+ // It must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type,
+ // this is not guaranteed until we cast
+ // [4] typed patterns - a typed pattern never has any subtrees
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ // [5] literal and stable id patterns
+ // [6] pattern alternatives
+ // [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
+ // don't fail here though (or should we?)
+ def nextStep(): TranslationStep = tree match {
+ case WildcardPattern() => noStep()
+ case _: UnApply | _: Apply => extractorStep()
+ case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
+ case TypeBound(tpe) => typeTestStep(binder, tpe)
+ case SymbolBound(sym, expr) => bindingStep(sym, expr)
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep()
+ case Alternative(alts) => alternativesStep(alts)
+ case _ => context.unit.error(pos, unsupportedPatternMsg) ; noStep()
+ }
+ def translate(): List[TreeMaker] = nextStep() merge (_.translate())
+
+ private def setInfo(paramType: Type): Boolean = {
+ devWarning(s"resetting info of $this to $paramType")
+ setVarInfo(binder, paramType)
+ true
+ }
+ // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having
+ // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary
+ // because apparently patBinder may have an unfortunate type (.decls don't have the case field
+ // accessors) TODO: get to the bottom of this -- I assume it happens when type checking
+ // infers a weird type for an unapply call. By going back to the parameterType for the
+ // extractor call we get a saner type, so let's just do that for now.
+ def ensureConformsTo(paramType: Type): Boolean = (
+ (tpe =:= paramType)
+ || (tpe <:< paramType) && setInfo(paramType)
+ )
+
+ private def concreteType = tpe.bounds.hi
+ private def unbound = unbind(tree)
+ private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
+ private def at_s = unbound match {
+ case WildcardPattern() => ""
+ case pat => s" @ $pat"
+ }
+ override def toString = s"${binder.name}: $tpe_s$at_s"
}
- // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
- private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
- private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
- private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
- private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
- private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
- private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
- private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ final case class TranslationStep(makers: List[TreeMaker], subpatterns: List[BoundTree]) {
+ def merge(f: BoundTree => List[TreeMaker]): List[TreeMaker] = makers ::: (subpatterns flatMap f)
+ override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")")
+ }
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -107,10 +186,8 @@ trait MatchTranslation { self: PatternMatching =>
val Match(selector, cases) = match_
val (nonSyntheticCases, defaultOverride) = cases match {
- case init :+ last if treeInfo isSyntheticDefaultCase last =>
- (init, Some(((scrut: Tree) => last.body)))
- case _ =>
- (cases, None)
+ case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((scrut: Tree) => last.body)))
+ case _ => (cases, None)
}
checkMatchVariablePatterns(nonSyntheticCases)
@@ -127,18 +204,11 @@ trait MatchTranslation { self: PatternMatching =>
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
- val origPt = match_.tpe
// when one of the internal cps-type-state annotations is present, strip all CPS annotations
- // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
- // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
- val ptUnCPS =
- if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
- removeCPSAdaptAnnotations(origPt)
- else origPt
-
+ val origPt = removeCPSFromPt(match_.tpe)
// relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
// pt is the skolemized version
- val pt = repeatedToSeq(ptUnCPS)
+ val pt = repeatedToSeq(origPt)
// val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -183,7 +253,7 @@ trait MatchTranslation { self: PatternMatching =>
CaseDef(
Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(REF(exSym))))
)
})
}
@@ -191,8 +261,6 @@ trait MatchTranslation { self: PatternMatching =>
typer.typedCases(catches, ThrowableTpe, WildcardType)
}
-
-
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
* 2) the combination of the extractor calls using `flatMap`.
@@ -221,166 +289,12 @@ trait MatchTranslation { self: PatternMatching =>
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+ val CaseDef(pattern, guard, body) = caseDef
+ translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
}
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- debug.patmat("translateExtractorPattern checking parameter type: "+ ((patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType)))
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
- debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
- b setInfo tp
- }
-
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
- val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
- if (patBinder.info.widen <:< extractor.paramType) {
- // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
- // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
- // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
- // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
- /* TODO: uncomment when `settings.developer` and `devWarning` become available
- if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
- devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
- */
- (Nil, patBinder setInfo extractor.paramType, false)
- } else {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
- // check whether typetest implies patBinder is not null,
- // even though the eventual null check will be on patBinderOrCasted
- // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
- (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
- }
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
- case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
- /* A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
- The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
- and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
- The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
- A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
- noFurtherSubPats()
- }
-
- /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- */
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
- /* A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- */
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /* 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- */
- case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
-
- case _ =>
- typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
+ def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate()
def translateGuard(guard: Tree): List[TreeMaker] =
if (guard == EmptyTree) Nil
@@ -395,28 +309,87 @@ trait MatchTranslation { self: PatternMatching =>
def translateBody(body: Tree, matchPt: Type): TreeMaker =
BodyTreeMaker(body, matchPt)
+ // Some notes from the specification
+
+ /*A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected
+ types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the
+ instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the
+ expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn)
+ where each element pattern pi matches the corresponding value vi.
+ A special case arises when c’s formal parameter types end in a repeated parameter.
+ This is further discussed in (§8.1.9).
+ **/
+
+ /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ */
+
+ /* A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ */
+
+ /* 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ */
+
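
The pattern forms quoted above, in user code (an illustrative example, not part of this change):

    case class Point(x: Int, y: Int)
    val Origin = Point(0, 0)

    def classify(a: Any): String = a match {
      case Origin       => "the origin"            // stable identifier pattern (8.1.5)
      case Point(0, y)  => s"on the y-axis at $y"  // constructor pattern
      case p: Point     => s"some other point $p"  // typed pattern
      case n @ (_: Int) => s"an integer: $n"       // pattern binder x@p
      case 'x'          => "the literal 'x'"       // literal pattern (8.1.4)
      case _            => "something else"
    }
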
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ // TODO: check unargs == args
+ def apply(tree: Tree): ExtractorCall = tree match {
+ case UnApply(unfun, args) => new ExtractorCallRegular(unfun, args) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class
+ }
}
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
+ abstract class ExtractorCall {
+ def fun: Tree
+ def args: List[Tree]
- // everything okay, captain?
- def isTyped : Boolean
+ val nbSubPats = args.length
+ val starLength = if (hasStar) 1 else 0
+ val nonStarLength = args.length - starLength
+ // everything okay, captain?
+ def isTyped: Boolean
def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ private def hasStar = nbSubPats > 0 && isStar(args.last)
+ private def isNonEmptySeq = nbSubPats > 0 && isSeq
+
+ /** This is special cased so that a single pattern will accept any extractor
+ * result, even if it's a tuple (SI-6675)
+ */
+ def isSingle = nbSubPats == 1 && !isSeq
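
The SI-6675 special case in user terms: a single sub-pattern swallows the whole extractor result, tuple or not (hypothetical extractor, for illustration only):

    object Pair {
      def unapply(s: String): Option[(Int, Int)] = s.split(',') match {
        case Array(a, b) => Some((a.trim.toInt, b.trim.toInt))
        case _           => None
      }
    }

    def sum(s: String) = s match {
      case Pair(x, y) => x + y        // two sub-patterns: the tuple is unpacked
      case _          => 0
    }
    def whole(s: String) = s match {
      case Pair(p)    => p._1 + p._2  // isSingle: p is bound to the whole (Int, Int)
      case _          => 0
    }
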
// to which type should the previous binder be casted?
def paramType : Type
+ protected def rawSubPatTypes: List[Type]
+ protected def resultType: Type
+
/** Create the TreeMaker that embodies this extractor call
*
* `binder` has been casted to `paramType` if necessary
@@ -427,79 +400,91 @@ trait MatchTranslation { self: PatternMatching =>
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
+ // must set infos to `subPatTypes`, which are provided by extractor's result,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ def subPatBinders = subBoundTrees map (_.binder)
+ lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree
// never store these in local variables (for PreserveSubPatBinders)
- lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
- case (b, PatternBoundToUnderscore()) => b
- }.toSet
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
- if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
- else formalTypes(formalsWithRepeated, nbSubPats)
- } else rawSubPatTypes
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
+
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ private def nonStarSubPatTypes = formalTypes(rawInit :+ repeatedType, nonStarLength)
+
+ def subPatTypes: List[Type] = (
+ if (rawSubPatTypes.isEmpty || !isSeq) rawSubPatTypes
+ else if (hasStar) nonStarSubPatTypes :+ sequenceType
+ else nonStarSubPatTypes
+ )
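
For example, with a Seq-based extractor such as List.unapplySeq, the expected sub-pattern types come out as the element type for each fixed position, plus the sequence type for a trailing star (a sketch, not part of the change):

    def sum(xs: List[Int]): Int = xs match {
      case List(a, b, c)         => a + b + c        // no star: Int, Int, Int
      case List(a, b, rest @ _*) => a + b + rest.sum // hasStar: Int, Int, then Seq[Int] for `rest`
      case _                     => 0
    }
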
+
+ private def rawGet = typeOfMemberNamedGetOrSelf(resultType)
+ private def emptySub = rawSubPatTypes.isEmpty
+ private def rawInit = rawSubPatTypes dropRight 1
+ protected def sequenceType = typeOfLastSelectorOrSelf(rawGet)
+ protected def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
+ protected def repeatedType = scalaRepeatedType(elementType)
+
+ // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected def firstIndexingBinder = rawSubPatTypes.length - 1
+ protected def lastIndexingBinder = nbSubPats - 1 - starLength
+ protected def expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+
+ private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
+ private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder))
+ private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder))(expectedLength) :: Nil
+
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1)
protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
+ // the trees that select the subpatterns on the extractor's result,
+ // referenced by `binder`
protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
+ def lastTrees: List[Tree] = (
+ if (!hasStar) Nil
+ else if (expectedLength == 0) seqTree(binder) :: Nil
+ else genDrop(binder, expectedLength)
+ )
// this error-condition has already been checked by checkStarPatOK:
// if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+
+ // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ // [3] the last one -- if the last subpattern is a sequence wildcard:
+ // drop the prefix (indexed by the refs on the preceding line), return the remainder
+ ( productElemsToN(binder, firstIndexingBinder)
+ ++ genTake(binder, expectedLength)
+ ++ lastTrees
+ ).toList
}
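
A rough rendering of the selections produced for, say, an unapplySeq result of type (String, Seq[Int]) (faked here by a local value `r`) matched against sub-patterns ending in a star:

    val r: (String, Seq[Int]) = ("xs", Seq(1, 2, 3, 4))
    val s    = r._1         // [1] productElemsToN: the non-seq tuple elements before the Seq
    val a    = r._2(0)      // [2] genTake: indexed accesses for the fixed sub-patterns
    val b    = r._2(1)
    val rest = r._2.drop(2) // [3] genDrop: the star sub-pattern receives the remainder
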
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
+ if (isNonEmptySeq) subPatRefsSeq(binder) else productElemsToN(binder, nbSubPats)
+
+ private def compareInts(t1: Tree, t2: Tree) =
+ gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- checkedLength map { expectedLength => import CODE._
+ checkedLength map { expectedLength =>
// `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+ // ...if binder has a lengthCompare method, otherwise
+ // `scala.math.signum(binder.length - expectedLength)`
+ def checkExpectedLength = sequenceType member nme.lengthCompare match {
+ case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength))
+ case lencmp => (seqTree(binder) DOT lencmp)(LIT(expectedLength))
+ }
// the comparison to perform
// when the last subpattern is a wildcard-star the expectedLength is but a lower bound
// (otherwise equality is required)
def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
+ if (hasStar) _ INT_>= _
+ else _ INT_== _
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
(seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
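
In plain Scala, the guard amounts to the following check (assuming the sequence type has a lengthCompare member, as scala.Seq does; otherwise the signum fallback above is used):

    def lengthOk(seq: Seq[Any], expectedLength: Int, hasStar: Boolean): Boolean =
      (seq != null) && {
        val cmp = seq.lengthCompare(expectedLength)
        if (hasStar) cmp >= 0 else cmp == 0  // star pattern: expectedLength is only a lower bound
      }
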
@@ -507,33 +492,29 @@ trait MatchTranslation { self: PatternMatching =>
def checkedLength: Option[Int] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
+ if (!isSeq || expectedLength < starLength) None
else Some(expectedLength)
-
}
// TODO: to be called when there's a def unapplyProd(x: T): U
// U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
// for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ class ExtractorCallProd(val fun: Tree, val args: List[Tree]) extends ExtractorCall {
// TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
/*override def equals(x$1: Any): Boolean = ...
val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
*/
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+
private def constructorTp = fun.tpe
def isTyped = fun.isTyped
// to which type should the previous binder be casted?
def paramType = constructorTp.finalResultType
+ def resultType = fun.tpe.finalResultType
+
+ def isSeq = isVarArgTypes(rawSubPatTypes)
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
/** Create the TreeMaker that embodies this extractor call
@@ -547,34 +528,36 @@ trait MatchTranslation { self: PatternMatching =>
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
// make an exception for classes under the scala package as they should be well-behaved,
// to optimize matching on List
- val mutableBinders =
+ val mutableBinders = (
if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
(paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
+ )
// checks binder ne null before chaining to the next extractor
ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors = binder.caseFieldAccessors
if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ override def toString() = s"ExtractorCallProd($fun:${fun.tpe} / ${fun.symbol} / args=$args)"
}
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall {
+ val Unapplied(fun) = extractorCallIncludingDummy
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
+ def tpe = fun.tpe
+ def paramType = firstParamType(tpe)
def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
+ def isTyped = (tpe ne NoType) && fun.isTyped && (resultInMonad ne ErrorType)
+ def isSeq = fun.symbol.name == nme.unapplySeq
+ def isBool = resultType =:= BooleanTpe
/** Create the TreeMaker that embodies this extractor call
*
@@ -587,49 +570,56 @@ trait MatchTranslation { self: PatternMatching =>
* Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
*/
def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ // the extractor call (applied to the binder bound by the flatMap corresponding
+ // to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
+ // wrong when isSeq, and resultInMonad should always be correct since it comes
+ // directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad))
+
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
+ subPatBinders,
+ subPatRefs(binder),
+ isBool,
+ checkedLength,
+ patBinderOrCasted,
+ ignoredSubPatBinders
+ )
}
override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
+ if (firstIndexingBinder == 0) REF(binder)
else super.seqTree(binder)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ if (isSingle) REF(binder) :: Nil // special case for extractors
else super.subPatRefs(binder)
protected def spliceApply(binder: Symbol): Tree = {
object splice extends Transformer {
override def transform(t: Tree) = t match {
case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
- case _ => super.transform(t)
+ treeCopy.Apply(t, x, (REF(binder) setPos i.pos) :: Nil)
+ case _ =>
+ super.transform(t)
}
}
- splice.transform(extractorCallIncludingDummy)
+ splice transform extractorCallIncludingDummy
}
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitTpe
- else matchMonadResult(resultType)
- }
+ // what's the extractor's result type in the monad? It is the type of its nullary member `get`.
+ protected lazy val resultInMonad: Type = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
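
Two hypothetical extractors to make the distinction concrete:

    object Email {
      // result type Option[(String, String)]: resultInMonad is the type of `get`, i.e. (String, String)
      def unapply(s: String): Option[(String, String)] = s.split("@") match {
        case Array(user, domain) => Some((user, domain))
        case _                   => None
      }
    }
    object NonEmpty {
      // Boolean extractor: there is no `get`, so resultInMonad is Unit
      def unapply(s: String): Boolean = s.nonEmpty
    }
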
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(!isSeq && nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
+ protected lazy val rawSubPatTypes = (
+ if (isBool) Nil
+ else if (isSingle) resultInMonad :: Nil // don't go looking for selectors if we only expect one pattern
+ else typesOfSelectorsOrSelf(resultInMonad)
+ )
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ override def toString() = s"ExtractorCallRegular($fun: $tpe / ${fun.symbol})"
}
/** A conservative approximation of which patterns do not discern anything.
@@ -638,10 +628,9 @@ trait MatchTranslation { self: PatternMatching =>
object WildcardPattern {
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
case Star(WildcardPattern()) => true
case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case Alternative(ps) => ps forall unapply
case EmptyTree => true
case _ => false
}
@@ -651,7 +640,7 @@ trait MatchTranslation { self: PatternMatching =>
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Alternative(ps) => ps forall unapply
case Typed(PatternBoundToUnderscore(), _) => true
case _ => false
}
@@ -659,9 +648,8 @@ trait MatchTranslation { self: PatternMatching =>
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
+ case t@Bind(n, p) if t.hasExistingSymbol => Some((t.symbol, p)) // pos/t2429 does not satisfy these conditions
+ case _ => None
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index baccdcf544..942aa80c34 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -201,6 +201,16 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def extraStoredBinders: Set[Symbol] = Set()
+ debug.patmat(s"""
+ |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
+ | $subPatBinders
+ | $subPatRefs
+ | $extractorReturnsBoolean
+ | $checkedLength
+ | $prevBinder
+ | $ignoredSubPatBinders
+ |}""".stripMargin)
+
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
case Some(cond) =>
@@ -426,7 +436,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case _ if testedBinder.info.widen <:< expectedTp =>
// if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
// since the types conform, no further checking is required
- if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ if (isPrimitiveValueType(expectedTp)) tru
// have to test outer and non-null only when it's a reference type
else if (expectedTp <:< AnyRefTpe) {
// do non-null check first to ensure we won't select outer on null
@@ -587,9 +597,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain)))
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
+ d.symbol.moduleClass andAlso (_.owner = currentOwner)
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)))
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
new file mode 100644
index 0000000000..a7d7680db1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -0,0 +1,86 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+trait MatchWarnings {
+ self: PatternMatching =>
+
+ import global._
+
+ trait TreeMakerWarnings {
+ self: MatchTranslator =>
+
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope"? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+ // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ private def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next()
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+ }
+} \ No newline at end of file
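
In user terms, the cases this method flags look roughly like the following (the exact wording comes from the code above):

    class Example {
      val intTag = 1
      def describe(tag: Int): String = tag match {
        case intTag => "the tag?"  // binds a fresh `intTag`: "patterns after a variable pattern cannot match"
        case 2      => "two"       // flagged as unreachable code due to the variable pattern above
      }
      def describeFixed(tag: Int): String = tag match {
        case `intTag` => "the tag" // backticks match against the existing member, as the addendum suggests
        case _        => "something else"
      }
    }
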
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index 63834ae51e..a4944caa2b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -40,10 +40,12 @@ trait PatternMatching extends Transform with TypingTransformers
with MatchTranslation
with MatchTreeMaking
with MatchCodeGen
+ with MatchCps
with ScalaLogic
with Solving
with MatchAnalysis
- with MatchOptimization {
+ with MatchOptimization
+ with MatchWarnings {
import global._
val phaseName: String = "patmat"
@@ -94,12 +96,17 @@ trait Debugging {
// TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
object debug {
val printPatmat = global.settings.Ypatmatdebug.value
- @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ @inline final def patmat(s: => String) = if (printPatmat) Console.err.println(s)
+ @inline final def patmatResult[T](s: => String)(result: T): T = {
+ if (printPatmat) Console.err.println(s + ": " + result)
+ result
+ }
}
}
trait Interface extends ast.TreeDSL {
- import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import global._
+ import definitions._
import analyzer.Typer
// 2.10/2.11 compatibility
@@ -166,6 +173,10 @@ trait Interface extends ast.TreeDSL {
trait MatchMonadInterface {
val typer: Typer
val matchOwner = typer.context.owner
+ def pureType(tp: Type): Type = tp
+
+ // Extracting from the monad: tp == { def get: T }, result == T
+ def matchMonadResult(tp: Type) = typeOfMemberNamedGet(tp)
def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
def reportMissingCases(pos: Position, counterExamples: List[String]) = {
@@ -175,16 +186,6 @@ trait Interface extends ast.TreeDSL {
typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
}
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 31a31df764..67c5666f66 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -246,8 +246,8 @@ trait Checkable {
uncheckedOk(P0) || (P0.widen match {
case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
case RefinedType(_, decls) if !decls.isEmpty => false
- case p =>
- new CheckabilityChecker(AnyTpe, p) isCheckable
+ case RefinedType(parents, _) => parents forall isCheckable
+ case p => new CheckabilityChecker(AnyTpe, p) isCheckable
})
)
@@ -273,6 +273,8 @@ trait Checkable {
// Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
case RefinedType(_, decls) if !decls.isEmpty =>
getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+ case RefinedType(parents, _) =>
+ parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy))
case _ =>
val checker = new CheckabilityChecker(X, P)
log(checker.summaryString)
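
The new RefinedType branches cover compound type patterns with no refinement members, which are now checked parent by parent (an illustrative sketch):

    trait Marker

    def isMarkedString(x: Any): Boolean = x match {
      // `String with Marker` is a RefinedType with empty decls: each parent is checked on its own
      case _: String with Marker => true
      case _                     => false
    }
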
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 81f5545695..1f4d5cbac2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -517,6 +517,9 @@ trait ContextErrors {
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
+ def WrongShapeExtractorExpansion(fun: Tree) =
+ NormalTypeError(fun, "extractor macros can only expand into extractor calls")
+
def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
@@ -593,7 +596,12 @@ trait ContextErrors {
}
def CaseClassConstructorError(tree: Tree) = {
- issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ val baseMessage = tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method"
+ val addendum = directUnapplyMember(tree.symbol.info) match {
+ case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list"
+ case _ => ""
+ }
+ issueNormalTypeError(tree, baseMessage + addendum)
setError(tree)
}
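
A hypothetical object that trips the new addendum: its unapply exists, but it carries a second, non-implicit parameter list:

    class Version(val repr: String)
    object Version {
      def unapply(v: Version)(radix: Int): Option[Int] =
        scala.util.Try(Integer.parseInt(v.repr, radix)).toOption
    }

    // (new Version("ff")) match { case Version(n) => n }   // does not compile; roughly:
    //   error: object Version is not a case class constructor, nor does it have an unapply/unapplySeq method
    //   Note: def unapply(v: Version)(radix: Int): Option[Int] exists in object Version, but it cannot
    //   be used as an extractor due to its second non-implicit parameter list
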
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 60641d6752..86a0d33737 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -269,7 +269,7 @@ trait Contexts { self: Analyzer =>
/** The next enclosing context (potentially `this`) that is owned by a class or method */
def enclClassOrMethod: Context =
- if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+ if (!owner.exists || owner.isClass || owner.isMethod) this
else outer.enclClassOrMethod
/** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
@@ -653,13 +653,8 @@ trait Contexts { self: Analyzer =>
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
- def accessWithinLinked(ab: Symbol) = {
- val linked = ab.linkedClassOfClass
- // don't have access if there is no linked class
- // (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
- // since `accessWithin(NoSymbol) == true` whatever the symbol)
- (linked ne NoSymbol) && accessWithin(linked)
- }
+ // don't have access if there is no linked class (so exclude linkedClass=NoSymbol)
+ def accessWithinLinked(ab: Symbol) = ab.linkedClassOfClass.fold(false)(accessWithin)
/* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
@@ -957,7 +952,7 @@ trait Contexts { self: Analyzer =>
// 2) sym.owner is inherited by the correct package object class
// We try to establish 1) by inspecting the owners directly, and then we try
// to rule out 2), and only if both those fail do we resort to looking in the info.
- !sym.isPackage && (sym.owner ne NoSymbol) && (
+ !sym.isPackage && sym.owner.exists && (
if (sym.owner.isPackageObjectClass)
sym.owner.owner == pkgClass
else
@@ -1194,7 +1189,7 @@ trait Contexts { self: Analyzer =>
override final def imports = impInfo :: super.imports
override final def firstImport = Some(impInfo)
override final def isRootImport = !tree.pos.isDefined
- override final def toString = s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
+ override final def toString = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
}
/** A buffer for warnings and errors that are accumulated during speculative type checking. */
@@ -1340,6 +1335,7 @@ trait Contexts { self: Analyzer =>
}
object ContextMode {
+ import scala.language.implicitConversions
private implicit def liftIntBitsToContextState(bits: Int): ContextMode = apply(bits)
def apply(bits: Int): ContextMode = new ContextMode(bits)
final val NOmode: ContextMode = 0
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 0a2628b482..396f3407f3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -74,22 +74,19 @@ abstract class Duplicators extends Analyzer {
override def mapOver(tpe: Type): Type = tpe match {
case TypeRef(NoPrefix, sym, args) if sym.isTypeParameterOrSkolem =>
- var sym1 = context.scope.lookup(sym.name)
- if (sym1 eq NoSymbol) {
- // try harder (look in outer scopes)
- // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
- BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
- case SilentResultValue(t) =>
- sym1 = t.symbol
- debuglog("fixed by trying harder: "+((sym, sym1, context)))
- case _ =>
- }
- }
-// assert(sym1 ne NoSymbol, tpe)
- if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
- debuglog("fixing " + sym + " -> " + sym1)
+ val sym1 = (
+ context.scope lookup sym.name orElse {
+ // try harder (look in outer scopes)
+ // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but
+ // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
+ BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol)
+ } filter (_ ne sym)
+ )
+ if (sym1.exists) {
+ debuglog(s"fixing $sym -> $sym1")
typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
- } else super.mapOver(tpe)
+ }
+ else super.mapOver(tpe)
case TypeRef(pre, sym, args) =>
val newsym = updateSym(sym)
@@ -157,7 +154,7 @@ abstract class Duplicators extends Analyzer {
case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
invalidSyms(vdef.symbol) = vdef
- val newowner = if (owner != NoSymbol) owner else context.owner
+ val newowner = owner orElse context.owner
val newsym = vdef.symbol.cloneSymbol(newowner)
newsym.setInfo(fixType(vdef.symbol.info))
vdef.symbol = newsym
@@ -362,12 +359,11 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol.safeOwner == AnyClass)
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
- }
+
val ntree = castType(tree, pt)
- val res = super.typed(ntree, mode, pt)
- res
+ super.typed(ntree, mode, pt)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 100112fec1..4265efc839 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -60,6 +60,8 @@ trait Implicits {
* @return A search result
*/
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = {
+ // Note that the isInvalidConversionTarget check seems to make more sense right here, before all the
+ // work is performed, than at the point where it presently exists.
val shouldPrint = printTypings && !context.undetparams.isEmpty
val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null
val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null
@@ -813,7 +815,7 @@ trait Implicits {
if (search.isDivergent && countdown > 0) {
countdown -= 1
implicitSym = i.sym
- log("discarding divergent implicit ${implicitSym} during implicit search")
+ log(s"discarding divergent implicit $implicitSym during implicit search")
SearchFailure
} else search
}
@@ -1335,12 +1337,18 @@ trait Implicits {
}
}
if (result.isSuccess && isView) {
+ def maybeInvalidConversionError(msg: String) {
+ // We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError"
+ // which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690.
+ if (context.ambiguousErrors)
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg))
+ }
if (isInvalidConversionTarget(pt)) {
- context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, "the result type of an implicit conversion must be more specific than AnyRef"))
+ maybeInvalidConversionError("the result type of an implicit conversion must be more specific than AnyRef")
result = SearchFailure
}
else if (isInvalidConversionSource(pt)) {
- context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, "an expression of type Null is ineligible for implicit conversion"))
+ maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion")
result = SearchFailure
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 06892053fa..50d88d7c4d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -25,18 +25,27 @@ trait Infer extends Checkable {
/** The formal parameter types corresponding to `formals`.
* If `formals` has a repeated last parameter, a list of
- * (nargs - params.length + 1) copies of its type is returned.
- * By-name types are replaced with their underlying type.
+ * (numArgs - numFormals + 1) copies of its type is appended
+ * to the other formals. By-name types are replaced with their
+ * underlying type.
*
* @param removeByName allows keeping ByName parameters. Used in NamesDefaults.
* @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
*/
- def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
- val formals1 = if (removeByName) formals mapConserve dropByName else formals
- if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.dealiasWiden.typeArgs.head
- formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
- } else formals1
+ def formalTypes(formals: List[Type], numArgs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
+ val numFormals = formals.length
+ val formals1 = if (removeByName) formals mapConserve dropByName else formals
+ val expandLast = (
+ (removeRepeated || numFormals != numArgs)
+ && isVarArgTypes(formals1)
+ )
+ def lastType = formals1.last.dealiasWiden.typeArgs.head
+ def expanded(n: Int) = (1 to n).toList map (_ => lastType)
+
+ if (expandLast)
+ formals1.init ::: expanded(numArgs - numFormals + 1)
+ else
+ formals1
}
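
A plain-Scala sketch of the expansion, with strings standing in for Types (illustration only, not the compiler's representation):

    def expandFormals(formals: List[String], numArgs: Int): List[String] = formals match {
      case init :+ last if last.endsWith("*") =>
        // replace the repeated formal by (numArgs - numFormals + 1) copies of its element type
        init ::: List.fill(numArgs - formals.length + 1)(last.stripSuffix("*"))
      case _ => formals
    }

    // e.g., in the REPL:
    //   expandFormals(List("Int", "String*"), 4)  // List(Int, String, String, String)
    //   expandFormals(List("Int", "String*"), 1)  // List(Int) -- zero copies
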
/** Sorts the alternatives according to the given comparison function.
@@ -67,96 +76,6 @@ trait Infer extends Checkable {
override def complete(sym: Symbol) = ()
}
- /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
- * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
- * unapply[Seq] call is assumed to have result type `resTp`.
- *
- * `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
- *
- * @param nbSubPats The number of arguments to the extractor pattern
- * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
- * bind patterns, is a Tuple pattern, in which case it is the number of
- * elements. Used to issue warnings about binding a `TupleN` to a single value.
- * @throws TypeError when the unapply[Seq] definition is ill-typed
- * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
- *
- * This is the spec currently implemented -- TODO: update it.
- *
- * 8.1.8 ExtractorPatterns
- *
- * An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
- * However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
- *
- * An `unapply` method with result type `R` in an object `x` matches the
- * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
- * - `n = 0` and `R =:= Boolean`, or
- * - `n = 1` and `R <:< Option[T]`, for some type `T`.
- * The argument pattern `p1` is typed in turn with expected type `T`.
- * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
- * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
- * typed with expected types `T_1, ..., T_n`.
- *
- * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
- * if it takes exactly one argument and its result type is of the form `Option[S]`,
- * where either:
- * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
- * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
- *
- * The argument patterns `p_1, ..., p_n` are typed with expected types
- * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
- *
- */
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
- unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
- val isUnapplySeq = unappSym.name == nme.unapplySeq
- val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
-
- def seqToRepeatedChecked(tp: Type) = {
- val toRepeated = seqToRepeated(tp)
- if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
- else toRepeated
- }
-
- // empty list --> error, otherwise length == 1
- lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
- // empty list --> not a ProductN, otherwise product element types
- def productArgs = getProductArgs(optionArgs.head)
-
- val formals =
- // convert Seq[T] to the special repeated argument type
- // so below we can use formalTypes to expand formals to correspond to the number of actuals
- if (isUnapplySeq) {
- if (optionArgs.nonEmpty)
- productArgs match {
- case Nil => List(seqToRepeatedChecked(optionArgs.head))
- case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
- }
- else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
- } else {
- if (booleanExtractor && nbSubPats == 0) Nil
- else if (optionArgs.nonEmpty)
- if (nbSubPats == 1) {
- val productArity = productArgs.size
- if (productArity > 1 && productArity != effectiveNbSubPats && settings.lint)
- global.currentUnit.warning(pos,
- s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
- optionArgs
- }
- // TODO: update spec to reflect we allow any ProductN, not just TupleN
- else productArgs
- else
- throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
- }
-
- // for unapplySeq, replace last vararg by as many instances as required by nbSubPats
- val formalsExpanded =
- if (isUnapplySeq && formals.nonEmpty) formalTypes(formals, nbSubPats)
- else formals
-
- if (formalsExpanded.lengthCompare(nbSubPats) != 0) (null, null)
- else (formals, formalsExpanded)
- }
-
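
The extractor shapes described in the doc comment above, as user-level definitions (illustrative names):

    object IsEven   { def unapply(n: Int): Boolean = n % 2 == 0 }          // n = 0, R =:= Boolean
    object Half     { def unapply(n: Int): Option[Int] =                   // n = 1, R <:< Option[T]
                        if (n % 2 == 0) Some(n / 2) else None }
    object AtDomain { def unapply(s: String): Option[(String, String)] =   // n = 2, Option[Product2[...]]
                        s.split("@") match { case Array(u, d) => Some((u, d)); case _ => None } }
    object Words    { def unapplySeq(s: String): Option[Seq[String]] =     // unapplySeq: Option[Seq[U]]
                        Some(s.split(" ").toSeq) }

    def firstWord(s: String): String = s match {
      case Words(first, _*) => first
      case _                => ""
    }
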
/** A fresh type variable with given type parameter as origin.
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
@@ -700,7 +619,7 @@ trait Infer extends Checkable {
tp nonPrivateMember nme.apply match {
case NoSymbol => tp
case sym if !sym.isOverloaded && sym.isPublic => OverloadedType(tp, sym.alternatives)
- case sym => OverloadedType(tp, sym filter (_.isPublic) alternatives)
+ case sym => OverloadedType(tp, sym.filter(_.isPublic).alternatives)
}
}
@@ -1190,6 +1109,17 @@ trait Infer extends Checkable {
val tparam = tvar.origin.typeSymbol
val TypeBounds(lo0, hi0) = tparam.info.bounds
val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+ val enclCase = context.enclosingCaseDef
+
+ log("\n" + sm"""
+ |-----
+ | enclCase: ${enclCase.tree}
+ | saved: ${enclCase.savedTypeBounds}
+ | tparam: ${tparam.shortSymbolClass}
+ | def_s: ${tparam.defString}
+ | seen_s: ${tparam.defStringSeenAs(tb)}
+ |-----
+ """.trim)
if (lo1 <:< hi1) {
if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
@@ -1197,7 +1127,7 @@ trait Infer extends Checkable {
else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
else {
- context.enclosingCaseDef pushTypeBounds tparam
+ enclCase pushTypeBounds tparam
tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 6b9537e27d..b3675d6a82 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -589,18 +589,23 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
/** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
* @see MacroExpander
*/
- def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, pt: Type) = {
+ def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = {
object expander extends TermMacroExpander(APPLY_ROLE, typer, expandee, mode, pt) {
override def onSuccess(expanded: Tree) = {
// prematurely annotate the tree with a macro expansion attachment
// so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
linkExpandeeAndExpanded(expandee, expanded)
- var expectedTpe = expandee.tpe
- if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
+ // approximation is necessary for whitebox macros to guide type inference
+ // read more in the comments for onDelayed below
+ def approximate(tp: Type) = {
+ val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol }
+ deriveTypeWithWildcards(undetparams)(tp)
+ }
+ val macroPtApprox = approximate(if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe)
// `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
// therefore we need to re-enable the conversions back temporarily
- if (macroDebugVerbose) println(s"typecheck #1 (against expectedTpe = $expectedTpe): $expanded")
- val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, expectedTpe))
+ if (macroDebugVerbose) println(s"typecheck #1 (against macroPtApprox = $macroPtApprox): $expanded")
+ val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, macroPtApprox))
if (expanded1.isErrorTyped) {
if (macroDebugVerbose) println(s"typecheck #1 has failed: ${typer.context.reportBuffer.errors}")
expanded1
@@ -612,6 +617,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
}
}
override def onDelayed(delayed: Tree) = {
+ // =========== THE SITUATION ===========
+ //
// If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
// then there are two possible situations we're in:
// 1) We're in POLYmode, when the typer tests the waters wrt type inference
@@ -627,12 +634,43 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
// the undetermined type params. Therefore we need to do something ourselves or otherwise this
// expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
// is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
- // but sometimes, if the inferencer lacks information, it will be forced to approximate. This prevents
- // an important class of macros, fundep materializers, from working, which I perceive is a problem we need to solve.
- // For details see SI-7470.
+ // but sometimes, if the inferencer lacks information, it will be forced to approximate.
+ //
+ // =========== THE PROBLEM ===========
+ //
+ // Consider the following example (thanks, Miles!):
+ //
+ // Iso represents an isomorphism between two datatypes:
+ // 1) An arbitrary one (e.g. a random case class)
+ // 2) A uniform representation for all datatypes (e.g. an HList)
+ //
+ // trait Iso[T, U] {
+ // def to(t : T) : U
+ // def from(u : U) : T
+ // }
+ // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
+ //
+ // case class Foo(i: Int, s: String, b: Boolean)
+ // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+ // foo(Foo(23, "foo", true))
+ //
+ // In the snippet above, even though we know that there's a fundep going from T to U
+ // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+ // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
+ // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
+ //
+ // =========== THE SOLUTION ===========
+ //
+ // To give materializers a chance to say their word before vanilla inference kicks in,
+ // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo)
+ // and then trigger macro expansion with the undetermined type parameters still there.
+ // Thanks to that the materializer can take a look at what's going on and react accordingly.
val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
- if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
- else delayed
+ if (shouldInstantiate) {
+ forced += delayed
+ typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), pt, keepNothings = false)
+ macroExpandApply(typer, delayed, mode, pt)
+ } else delayed
}
}
expander(expandee)
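To make the fundep scenario in the comment above concrete without involving macros, here is a minimal, self-contained Scala sketch. The names (Iso, Foo, FundepDemo) echo the comment; a hand-written implicit instance stands in for what the whitebox materializer would synthesize, and it is that instance which fixes L instead of letting inference collapse it to Nothing.

    trait Iso[T, U] {
      def to(t: T): U
      def from(u: U): T
    }

    case class Foo(i: Int, s: String)

    object Iso {
      // Plays the role of the materializer's output: it is what determines U from T.
      implicit val fooIso: Iso[Foo, (Int, String)] = new Iso[Foo, (Int, String)] {
        def to(t: Foo): (Int, String) = (t.i, t.s)
        def from(u: (Int, String)): Foo = Foo(u._1, u._2)
      }
    }

    object FundepDemo {
      def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)

      def main(args: Array[String]): Unit = {
        // L is fixed to (Int, String) by the implicit, not inferred as Nothing.
        val repr: (Int, String) = foo(Foo(23, "foo"))
        println(repr)
      }
    }

With a generic materializeIso[T, U] macro in place of fooIso, the change above is what gives the macro a chance to compute U before vanilla inference closes over it.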
@@ -750,10 +788,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
+ private val forced = perRunCaches.newWeakSet[Tree]
private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]()
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
- delayed.get(expandee).getOrElse {
+ if (forced(expandee)) scala.collection.mutable.Set[Int]()
+ else delayed.getOrElse(expandee, {
val calculated = scala.collection.mutable.Set[Symbol]()
expandee foreach (sub => {
def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
@@ -762,7 +802,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
})
macroLogVerbose("calculateUndetparams: %s".format(calculated))
calculated map (_.id)
- }
+ })
private val undetparams = perRunCaches.newSet[Int]()
def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
undetparams ++= newUndets map (_.id)
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 546186479f..3a5845c8ca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -25,7 +25,7 @@ trait MethodSynthesis {
type TT[T] = ru.TypeTag[T]
type CT[T] = ClassTag[T]
- def ValOrDefDef(sym: Symbol, body: Tree) =
+ def newValOrDefDef(sym: Symbol, body: Tree) =
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
@@ -67,7 +67,7 @@ trait MethodSynthesis {
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
- localTyper typed ValOrDefDef(method, f(method))
+ localTyper typed newValOrDefDef(method, f(method))
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
val name1 = name.toTermName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index cac6bd2ef2..2bb2cc1ab4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -390,9 +390,7 @@ trait Namers extends MethodSynthesis {
* has been defined in a separate file.
*/
private def validateCompanionDefs(tree: ImplDef) {
- val sym = tree.symbol
- if (sym eq NoSymbol) return
-
+ val sym = tree.symbol orElse { return }
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name
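The one-liner above relies on Symbol#orElse taking its alternative by name, so the `return` inside the block performs a non-local return out of validateCompanionDefs when the symbol is NoSymbol. A standalone sketch of the same idiom, with hypothetical firstDefined/describe names:

    object OrElseReturnDemo {
      // Takes its fallback by name, like Symbol#orElse takes its alternative.
      def firstDefined(a: Option[String], b: => Option[String]): Option[String] =
        if (a.isDefined) a else b

      def describe(a: Option[String]): String = {
        // When `a` is empty, the by-name block runs and returns from describe itself.
        val value = firstDefined(a, { return "nothing to describe" })
        s"value: ${value.get}"
      }

      def main(args: Array[String]): Unit = {
        println(describe(Some("x")))  // value: x
        println(describe(None))       // nothing to describe
      }
    }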
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
new file mode 100644
index 0000000000..7120aeaaa6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -0,0 +1,475 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package typechecker
+
+import scala.collection.mutable
+import symtab.Flags
+import Mode._
+
+ /**
+ *
+ * A pattern match such as
+ *
+ * x match { case Foo(a, b) => ...}
+ *
+ * might match an instance of any of the following definitions of Foo.
+ * Note the analogous treatment between case classes and unapplies.
+ *
+ * case class Foo(xs: Int*)
+ * case class Foo(a: Int, xs: Int*)
+ * case class Foo(a: Int, b: Int)
+ * case class Foo(a: Int, b: Int, xs: Int*)
+ *
+ * object Foo { def unapplySeq(x: Any): Option[Seq[Int]] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Seq[Int])] }
+ * object Foo { def unapply(x: Any): Option[(Int, Int)] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] }
+ */
+
+trait PatternTypers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ private object FixedAndRepeatedTypes {
+ def unapply(types: List[Type]) = types match {
+ case init :+ last if isRepeatedParamType(last) => Some((init, dropRepeated(last)))
+ case _ => Some((types, NoType))
+ }
+ }
+
+ // when true:
+ // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
+ // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
+ // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
+ protected def newPatternMatching = true // presently overridden in the presentation compiler
+
+ trait PatternTyper {
+ self: Typer =>
+
+ import TyperErrorGen._
+ import infer._
+
+ private def unit = context.unit
+
+ // If the tree's symbol's type does not define an extractor, maybe the tree's type does.
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call
+ // (rather than something that looks like a constructor call.) (for now, this only happens
+ // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+ // more commonplace)
+ private def hasUnapplyMember(tpe: Type): Boolean = reallyExists(unapplyMember(tpe))
+ private def hasUnapplyMember(sym: Symbol): Boolean = hasUnapplyMember(sym.tpe_*)
+ private def hasUnapplyMember(fun: Tree): Boolean = hasUnapplyMember(fun.symbol) || hasUnapplyMember(fun.tpe)
+
+ // ad-hoc overloading resolution to deal with unapplies and case class constructors
+ // If some but not all alternatives survive filtering the tree's symbol with `p`,
+ // then update the tree's symbol and type to exclude the filtered out alternatives.
+ private def inPlaceAdHocOverloadingResolution(fun: Tree)(p: Symbol => Boolean): Tree = fun.symbol filter p match {
+ case sym if sym.exists && (sym ne fun.symbol) => fun setSymbol sym modifyType (tp => filterOverloadedAlts(tp)(p))
+ case _ => fun
+ }
+ private def filterOverloadedAlts(tpe: Type)(p: Symbol => Boolean): Type = tpe match {
+ case OverloadedType(pre, alts) => overloadedType(pre, alts filter p)
+ case tp => tp
+ }
+
+ def typedConstructorPattern(fun0: Tree, pt: Type) = {
+ // Do some ad-hoc overloading resolution and update the tree's symbol and type
+ // do not update the symbol if the tree's symbol's type does not define an unapply member
+ // (e.g. since it's some method that returns an object with an unapply member)
+ val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+ def caseClass = fun.tpe.typeSymbol.linkedClassOfClass
+
+ // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
+ // A case class with 23+ params has no unapply method.
+ // A case class constructor may be overloaded with unapply methods in the companion.
+ if (caseClass.isCase && !unapplyMember(fun.tpe).isOverloaded)
+ convertToCaseConstructor(fun, caseClass, pt)
+ else if (hasUnapplyMember(fun))
+ fun
+ else
+ CaseClassConstructorError(fun)
+ }
+
+ def expectedPatternTypes(fun: Tree, args: List[Tree]): List[Type] =
+ newExtractorShape(fun, args).expectedPatternTypes
+
+ def typedPatternArgs(fun: Tree, args: List[Tree], mode: Mode): List[Tree] =
+ typedArgsForFormals(args, newExtractorShape(fun, args).formals, mode)
+
+ def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = {
+ def typedArgWithFormal(arg: Tree, pt: Type) = {
+ val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode
+ typedArg(arg, mode, newMode, dropByName(pt))
+ }
+ val FixedAndRepeatedTypes(fixed, elem) = formals
+ val front = (args, fixed).zipped map typedArgWithFormal
+ def rest = context withinStarPatterns (args drop front.length map (typedArgWithFormal(_, elem)))
+
+ elem match {
+ case NoType => front
+ case _ => front ::: rest
+ }
+ }
+
+ private def boundedArrayType(bound: Type): Type = {
+ val tparam = context.owner freshExistential "" setInfo (TypeBounds upper bound)
+ newExistentialType(tparam :: Nil, arrayType(tparam.tpe_*))
+ }
+
+ protected def typedStarInPattern(tree: Tree, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val exprTyped = typed(expr, mode)
+ val baseClass = exprTyped.tpe.typeSymbol match {
+ case ArrayClass => ArrayClass
+ case _ => SeqClass
+ }
+ val starType = baseClass match {
+ case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt)
+ case ArrayClass => boundedArrayType(pt)
+ case _ => seqType(pt)
+ }
+ val exprAdapted = adapt(exprTyped, mode, starType)
+ exprAdapted.tpe baseType baseClass match {
+ case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+ case _ => setError(tree)
+ }
+ }
+
+ protected def typedInPattern(tree: Typed, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val tptTyped = typedType(tpt, mode)
+ val tpe = tptTyped.tpe
+ val exprTyped = typed(expr, mode, tpe.deconst)
+ val extractor = extractorForUncheckedType(tpt.pos, tpe)
+
+ val canRemedy = tpe match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy)
+ val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType
+
+ extractor match {
+ case EmptyTree => treeTyped
+ case _ => wrapClassTagUnapply(treeTyped, extractor, tpe)
+ }
+ }
+
+ def newExtractorShape(tree: Tree): ExtractorShape = tree match {
+ case Apply(fun, args) => ExtractorShape(fun, args)
+ case UnApply(fun, args) => ExtractorShape(fun, args)
+ }
+ def newExtractorShape(fun: Tree, args: List[Tree]): ExtractorShape = ExtractorShape(fun, args)
+
+ case class CaseClassInfo(clazz: Symbol, classType: Type) {
+ def constructor = clazz.primaryConstructor
+ def constructorType = classType.prefix memberType clazz memberType constructor
+ def paramTypes = constructorType.paramTypes
+ def accessors = clazz.caseFieldAccessors
+ def accessorTypes = accessors map (m => (classType memberType m).finalResultType)
+ // def inverted = MethodType(clazz :: Nil, tupleType(accessorTypes))
+ }
+ object NoCaseClassInfo extends CaseClassInfo(NoSymbol, NoType) {
+ override def toString = "NoCaseClassInfo"
+ }
+
+ case class UnapplyMethodInfo(unapply: Symbol, tpe: Type) {
+ def name = unapply.name
+ def isUnapplySeq = name == nme.unapplySeq
+ def unapplyType = tpe memberType method
+ def resultType = tpe.finalResultType
+ def method = unapplyMember(tpe)
+ def paramType = firstParamType(unapplyType)
+ def rawGet = if (isBool) UnitTpe else typeOfMemberNamedGetOrSelf(resultType)
+ def rawTypes = if (isBool) Nil else typesOfSelectorsOrSelf(rawGet)
+ def rawArity = rawTypes.size
+ def isBool = resultType =:= BooleanTpe // aka "Tuple0" or "Option[Unit]"
+ def isNothing = rawGet =:= NothingTpe
+ def isCase = method.isCase
+ }
+
+ object NoUnapplyMethodInfo extends UnapplyMethodInfo(NoSymbol, NoType) {
+ override def toString = "NoUnapplyMethodInfo"
+ }
+
+ case class ExtractorShape(fun: Tree, args: List[Tree]) {
+ def pos = fun.pos
+ private def symbol = fun.symbol
+ private def tpe = fun.tpe
+
+ val ccInfo = tpe.typeSymbol.linkedClassOfClass match {
+ case clazz if clazz.isCase => CaseClassInfo(clazz, tpe)
+ case _ => NoCaseClassInfo
+ }
+ val exInfo = UnapplyMethodInfo(symbol, tpe)
+ import exInfo.{ rawGet, rawTypes, isUnapplySeq }
+
+ override def toString = s"ExtractorShape($fun, $args)"
+
+ def unapplyMethod = exInfo.method
+ def unapplyType = exInfo.unapplyType
+ def unapplyParamType = exInfo.paramType
+ def caseClass = ccInfo.clazz
+ def enclClass = symbol.enclClass
+
+ // TODO - merge these. The difference between these two methods is that expectedPatternTypes
+ // expands the list of types so it is the same length as the number of patterns, whereas formals
+ // leaves the varargs type unexpanded.
+ def formals = (
+ if (isUnapplySeq) productTypes :+ varargsType
+ else if (elementArity == 0) productTypes
+ else if (isSingle) squishIntoOne()
+ else wrongArity(patternFixedArity)
+ )
+ def expectedPatternTypes = elementArity match {
+ case 0 => productTypes
+ case _ if elementArity > 0 && isUnapplySeq => productTypes ::: elementTypes
+ case _ if productArity > 1 && patternFixedArity == 1 => squishIntoOne()
+ case _ => wrongArity(patternFixedArity)
+ }
+
+ def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
+
+ private def hasBogusExtractor = directUnapplyMember(tpe).exists && !unapplyMethod.exists
+ private def expectedArity = "" + productArity + ( if (isUnapplySeq) "+" else "")
+ private def wrongArityMsg(n: Int) = (
+ if (hasBogusExtractor) s"$enclClass does not define a valid extractor method"
+ else s"wrong number of patterns for $enclClass offering $rawTypes_s: expected $expectedArity, found $n"
+ )
+ private def rawTypes_s = rawTypes match {
+ case Nil => "()"
+ case tp :: Nil => "" + tp
+ case tps => tps.mkString("(", ", ", ")")
+ }
+
+ private def err(msg: String) = { unit.error(pos, msg) ; throw new TypeError(msg) }
+ private def wrongArity(n: Int) = err(wrongArityMsg(n))
+
+ def squishIntoOne() = {
+ if (settings.lint)
+ unit.warning(pos, s"$enclClass expects $expectedArity patterns to hold $rawGet but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+
+ rawGet :: Nil
+ }
+ // elementArity is the number of non-sequence patterns minus the
+ // number of non-sequence product elements returned by the extractor.
+ // If it is zero, there is a perfect match between those parts, and
+ // if there is a wildcard star it will match any sequence.
+ // If it is positive, there are more patterns than products,
+ // so a sequence will have to fill in the elements. If it is negative,
+ // there are more products than patterns, which is a compile time error.
+ def elementArity = patternFixedArity - productArity
+ def patternFixedArity = treeInfo effectivePatternArity args
+ def productArity = productTypes.size
+ def isSingle = !isUnapplySeq && (patternFixedArity == 1)
+
+ def productTypes = if (isUnapplySeq) rawTypes dropRight 1 else rawTypes
+ def elementTypes = List.fill(elementArity)(elementType)
+ def varargsType = scalaRepeatedType(elementType)
+ }
+
+ private class VariantToSkolemMap extends TypeMap(trackVariance = true) {
+ private val skolemBuffer = mutable.ListBuffer[TypeSymbol]()
+
+ def skolems = try skolemBuffer.toList finally skolemBuffer.clear()
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+ case tp @ TypeRef(NoPrefix, tpSym, Nil) if tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ if (variance.isInvariant) {
+ // if (variance.isInvariant) tpSym.tpeHK.bounds
+ devWarning(s"variantToSkolem skipping rewrite of $tpSym due to invariance")
+ return tp
+ }
+ val bounds = (
+ if (variance.isPositive) TypeBounds.upper(tpSym.tpeHK)
+ else TypeBounds.lower(tpSym.tpeHK)
+ )
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ skolemBuffer += skolem
+ skolem.tpe_*
+ case tp1 => tp1
+ }
+ }
+ /*
+ * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+ * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+ *
+ * Consider the following example:
+ *
+ * class AbsWrapperCov[+A]
+ * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+ *
+ * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+ * wrapped // : Wrapped[_ <: T]
+ * }
+ *
+ * this method should type check if and only if Wrapped is covariant in its type parameter
+ *
+ * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+ *
+ * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+ * we can simply replace skolems that represent method type parameters as seen from the method's body
+ * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+ * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+ *
+ * see test/files/../t5189*.scala
+ */
+ private def convertToCaseConstructor(tree: Tree, caseClass: Symbol, pt: Type): Tree = {
+ val variantToSkolem = new VariantToSkolemMap
+ val caseConstructorType = tree.tpe.prefix memberType caseClass memberType caseClass.primaryConstructor
+ val tree1 = TypeTree(caseConstructorType) setOriginal tree
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
+ val freeVars = variantToSkolem.skolems
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ log(sm"""|convert to case constructor {
+ | tree: $tree: ${tree.tpe}
+ | ptSafe: $ptSafe
+ | context.tree: ${context.tree}: ${context.tree.tpe}
+ |}""".trim)
+
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, caseClass.typeParams, ptSafe)
+
+ // simplify types without losing safety,
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+ val extrapolator = new ExistentialExtrapolation(freeVars)
+ def extrapolate(tp: Type) = extrapolator extrapolate tp
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 modifyType {
+ case MethodType(ctorArgs, restpe) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ copyMethodType(tree1.tpe, ctorArgs map (_ modifyInfo extrapolate), extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type
+ case tp => tp
+ }
+ }
+
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ def freshArgType(tp: Type): Type = tp match {
+ case MethodType(param :: _, _) => param.tpe
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(polyType)
+ case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType
+ case _ => UnapplyWithSingleArgError(fun) ; ErrorType
+ }
+ val shape = newExtractorShape(fun, args)
+ import shape.{ unapplyParamType, unapplyType, unapplyMethod }
+
+ def extractor = extractorForUncheckedType(shape.pos, unapplyParamType)
+ def canRemedy = unapplyParamType match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ def freshUnapplyArgType(): Type = {
+ val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
+ val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy)
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ pattp.substSym(freeVars, skolems)
+ }
+
+ val unapplyArg = (
+ context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo (
+ if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt
+ else freshUnapplyArgType()
+ )
+ )
+ // clearing the type is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapplyMethod), Ident(unapplyArg) :: Nil))
+
+ def makeTypedUnApply() = {
+ // the greatest lower bound (roughly, the intersection) of the expected type and the inferred type of the argument to unapply
+ val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
+ val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
+ val args1 = typedPatternArgs(fun1, args, mode)
+ val result = UnApply(fun1, args1) setPos tree.pos setType glbType
+
+ if (wrapInTypeTest)
+ wrapClassTagUnapply(result, extractor, glbType)
+ else
+ result
+ }
+
+ if (fun1.tpe.isErroneous)
+ duplErrTree
+ else if (unapplyMethod.isMacro && !fun1.isInstanceOf[Apply])
+ duplErrorTree(WrongShapeExtractorExpansion(tree))
+ else
+ makeTypedUnApply()
+ }
+
+ def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
+ // TODO: disable when in unchecked match
+ // we don't create a new Context for a Match, so find the CaseDef,
+ // then go out one level and navigate back to the match that has this case
+ val args = List(uncheckedPattern)
+ val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
+ // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
+ // and re-typechecks of the target of the unapply call in PATTERNmode,
+ // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
+ // but an arbitrary tree as is the case here
+ val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
+
+ log(sm"""
+ |wrapClassTagUnapply {
+ | pattern: $uncheckedPattern
+ | extract: $classTagExtractor
+ | pt: $pt
+ | res: $res
+ |}""".trim)
+
+ res
+ }
+
+ // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
+ // return the corresponding extractor (an instance of ClassTag[`pt`])
+ def extractorForUncheckedType(pos: Position, pt: Type): Tree = {
+ if (isPastTyper || (pt eq NoType)) EmptyTree else {
+ pt match {
+ case RefinedType(parents, decls) if !decls.isEmpty || (parents exists isUncheckable) => return EmptyTree
+ case _ =>
+ }
+ // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
+ // but at least make a proper type before passing it elsewhere
+ val pt1 = pt.dealiasWiden match {
+ case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+ case pt1 => pt1
+ }
+ if (isCheckable(pt1)) EmptyTree
+ else resolveClassTag(pos, pt1) match {
+ case tree if unapplyMember(tree.tpe).exists => tree
+ case _ => devWarning(s"Cannot create runtime type test for $pt1") ; EmptyTree
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
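The doc comment at the top of the new file and the elementArity notes in ExtractorShape describe how one two-subpattern case clause can be served by extractors of several shapes. A standalone sketch with hypothetical FixedPair/SeqOnly/OnePlusSeq objects (not compiler code) that all accept a two-element pattern:

    object ExtractorShapesDemo {
      // Fixed arity two: an Option of a pair.
      object FixedPair { def unapply(p: (Int, Int)): Option[(Int, Int)] = Some(p) }
      // Pure sequence extractor: every subpattern is drawn from the Seq.
      object SeqOnly { def unapplySeq(xs: List[Int]): Option[Seq[Int]] = Some(xs) }
      // One fixed element plus a sequence holding the rest.
      object OnePlusSeq {
        def unapplySeq(xs: List[Int]): Option[(Int, Seq[Int])] = xs match {
          case h :: t => Some((h, t))
          case Nil    => None
        }
      }

      def main(args: Array[String]): Unit = {
        // productArity 2, elementArity 0: both subpatterns come from the product.
        (1, 2) match { case FixedPair(a, b) => println(s"fixed: $a $b"); case _ => }
        // productArity 0, elementArity 2: both subpatterns are drawn from the sequence.
        List(1, 2) match { case SeqOnly(a, b) => println(s"seq: $a $b"); case _ => }
        // productArity 1, elementArity 1: one fixed element, one taken from the sequence.
        List(1, 2) match { case OnePlusSeq(a, b) => println(s"mixed: $a $b"); case _ => }
      }
    }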
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 1b6963b598..5929cab1d1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -113,6 +113,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
+ @inline final def savingInPattern[A](body: => A): A = {
+ val saved = inPattern
+ try body finally inPattern = saved
+ }
+
var checkedCombinations = Set[List[Type]]()
// only one overloaded alternative is allowed to define default arguments
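The point of the new savingInPattern helper over the hand-rolled save/assign/restore it replaces further down (the CaseDef and LabelDef cases) is that try/finally restores the flag even when the body throws, e.g. on a TypeError. A standalone sketch of that property, with hypothetical names:

    object SavingFlagDemo {
      private var inPattern = false

      @inline final def savingInPattern[A](body: => A): A = {
        val saved = inPattern
        try body finally inPattern = saved
      }

      def main(args: Array[String]): Unit = {
        try savingInPattern { inPattern = true; throw new RuntimeException("boom") }
        catch { case _: RuntimeException => () }
        assert(!inPattern, "flag is restored even though the body threw")
        println(inPattern)  // false
      }
    }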
@@ -211,7 +216,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
// Delaying calling memberType as long as possible
- if (inherited ne NoSymbol) {
+ if (inherited.exists) {
val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
@@ -1371,6 +1376,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
}
+ private def checkByNameRightAssociativeDef(tree: DefDef) {
+ tree match {
+ case DefDef(_, name, _, params :: _, _, _) =>
+ if (settings.lint && !treeInfo.isLeftAssoc(name.decodedName) && params.exists(p => isByName(p.symbol)))
+ unit.warning(tree.pos,
+ "by-name parameters will be evaluated eagerly when called as a right-associative infix operator. For more details, see SI-1980.")
+ case _ =>
+ }
+ }
+
/** Check that a deprecated val or def does not override a
* concrete, non-deprecated method. If it does, then
* deprecation is meaningless.
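For readers unfamiliar with SI-1980: the new lint warning targets the evaluation-order surprise sketched below, using a hypothetical Sink class. Under the compilers this diff targets (and through 2.12), a right-associative call desugars by first storing the left operand in a temporary, so a by-name parameter is still evaluated eagerly.

    object RightAssocByNameDemo {
      class Sink {
        // By-name parameter on a right-associative (colon-ending) operator.
        def +=:(msg: => String): Sink = this
      }

      def main(args: Array[String]): Unit = {
        val sink = new Sink
        def expensive(): String = { println("evaluated!"); "message" }
        // Desugars roughly to { val tmp = expensive(); sink.+=:(tmp) }, so
        // "evaluated!" prints immediately despite the by-name parameter (SI-1980).
        expensive() +=: sink
      }
    }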
@@ -1414,6 +1429,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
def applyChecks(annots: List[AnnotationInfo]) = {
+ annots foreach (annot => checkCompileTimeOnly(annot.atp.typeSymbol, annot.pos))
checkAnnotations(annots map (_.atp), tree)
transformTrees(annots flatMap (_.args))
}
@@ -1516,7 +1532,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkCompileTimeOnly(sym, tree.pos)
checkDelayedInitSelect(qual, sym, tree.pos)
- if (sym eq NoSymbol)
+ if (!sym.exists)
devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
else if (sym.hasLocalFlag)
varianceValidator.checkForEscape(sym, currentClass)
@@ -1594,6 +1610,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
checkAccessibilityOfReferencedTypes(tree)
}
+ tree match {
+ case dd: DefDef => checkByNameRightAssociativeDef(dd)
+ case _ =>
+ }
tree
case Template(parents, self, body) =>
@@ -1648,6 +1668,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
case Ident(name) =>
+ checkCompileTimeOnly(tree.symbol, tree.pos)
transformCaseApply(tree,
if (name != nme.WILDCARD && name != tpnme.WILDCARD_STAR) {
assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug
@@ -1667,19 +1688,33 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ => tree
}
+
// skip refchecks in patterns....
result = result match {
case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
+ val pat1 = savingInPattern {
+ inPattern = true
+ transform(pat)
+ }
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
- val old = inPattern
- inPattern = true
- val res = deriveLabelDef(result)(transform)
- inPattern = old
- res
+ savingInPattern {
+ inPattern = true
+ deriveLabelDef(result)(transform)
+ }
+ case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) =>
+ savingInPattern {
+ // SI-7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals
+ // that we are in the user-supplied code in the case body.
+ //
+ // Relies on the translation of:
+ // (null: Any) match { case x: List[_] => x; x.reverse; case _ => }
+ // to:
+ // <synthetic> val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]);
+ // matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply.
+ inPattern = false
+ super.transform(result)
+ }
case _ =>
super.transform(result)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 6933b10a0a..12d6bb2e6a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -269,8 +269,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
&& sym.enclClass != currentClass
&& !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
&& !sym.owner.isTrait
- && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass
+ && qual.symbol.info.member(sym.name).exists
&& !needsProtectedAccessor(sym, tree.pos)
)
if (shouldEnsureAccessor) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index b4a37f9943..0c5f798c23 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -143,7 +143,7 @@ trait TypeDiagnostics {
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if ((sym eq null) || (sym eq NoSymbol)) {
+ if (!tree.hasExistingSymbol) {
if (isTyperInPattern) patternMessage
else exprMessage
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index d2ff47626d..cccd0949a2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -26,7 +26,7 @@ import Mode._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Adaptations with Tags with TypersTracking {
+trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers {
self: Analyzer =>
import global._
@@ -36,7 +36,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
final def forArgMode(fun: Tree, mode: Mode) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
- // printResult(s"forArgMode($fun, $mode) gets SCCmode")(mode | SCCmode)
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
// Also used to cache imports type-checked by namer.
@@ -63,6 +62,10 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
sealed abstract class SilentResult[+T] {
+ @inline final def fold[U](none: => U)(f: T => U): U = this match {
+ case SilentResultValue(value) => f(value)
+ case _ => none
+ }
@inline final def map[U](f: T => U): SilentResult[U] = this match {
case SilentResultValue(value) => SilentResultValue(f(value))
case x: SilentTypeError => x
@@ -90,13 +93,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
private final val InterpolatorCodeRegex = """\$\{.*?\}""".r
private final val InterpolatorIdentRegex = """\$\w+""".r
- // when true:
- // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
- // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
- protected def newPatternMatching = true // presently overridden in the presentation compiler
-
- abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed }
import TyperErrorGen._
@@ -912,121 +909,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
}
- /*
- * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
- * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
- *
- * Consider the following example:
- *
- * class AbsWrapperCov[+A]
- * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
- *
- * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
- * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
- * wrapped // : Wrapped[_ <: T]
- * }
- *
- * this method should type check if and only if Wrapped is covariant in its type parameter
- *
- * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
- * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
- * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
- *
- * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
- * we can simply replace skolems that represent method type parameters as seen from the method's body
- * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
- * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
- *
- * see test/files/../t5189*.scala
- */
- def adaptConstrPattern(): Tree = { // (5)
- def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
- val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
- // if the tree's symbol's type does not define an extractor, maybe the tree's type does.
- // this is the case when we encounter an arbitrary tree as the target of an unapply call
- // (rather than something that looks like a constructor call.) (for now, this only happens
- // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
- // more common place)
- val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
- def convertToCaseConstructor(clazz: Symbol): TypeTree = {
- // convert synthetic unapply of case class to case class constructor
- val prefix = tree.tpe.prefix
- val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
- .setOriginal(tree)
-
- val skolems = new mutable.ListBuffer[TypeSymbol]
- object variantToSkolem extends TypeMap(trackVariance = true) {
- def apply(tp: Type) = mapOver(tp) match {
- // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
- case TypeRef(NoPrefix, tpSym, Nil) if !variance.isInvariant && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
- // must initialize or tpSym.tpe might see random type params!!
- // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
- // TODO: why is that??
- tpSym.initialize
- val bounds = if (variance.isPositive) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- // origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
- skolems += skolem
- skolem.tpe
- case tp1 => tp1
- }
- }
-
- // have to open up the existential and put the skolems in scope
- // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
- val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
- val freeVars = skolems.toList
-
- // use "tree" for the context, not context.tree: don't make another CaseDef context,
- // as instantiateTypeVar's bounds would end up there
- val ctorContext = context.makeNewScope(tree, context.owner)
- freeVars foreach ctorContext.scope.enter
- newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
-
- // simplify types without losing safety,
- // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
- val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
- val extrapolated = tree1.tpe match {
- case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
- ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
- copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
- case tp => tp
- }
-
- // once the containing CaseDef has been type checked (see typedCase),
- // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
- tree1 setType extrapolated
- }
-
- if (extractor != NoSymbol) {
- // if we did some ad-hoc overloading resolution, update the tree's symbol
- // do not update the symbol if the tree's symbol's type does not define an unapply member
- // (e.g. since it's some method that returns an object with an unapply member)
- if (overloadedExtractorOfObject != NoSymbol)
- tree setSymbol overloadedExtractorOfObject
-
- tree.tpe match {
- case OverloadedType(pre, alts) => tree setType overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
- case _ =>
- }
- val unapply = unapplyMember(extractor.tpe)
- val clazz = unapplyParameterType(unapply)
-
- if (unapply.isCase && clazz.isCase) {
- convertToCaseConstructor(clazz)
- } else {
- tree
- }
- } else {
- val clazz = tree.tpe.typeSymbol.linkedClassOfClass
- if (clazz.isCase)
- convertToCaseConstructor(clazz)
- else
- CaseClassConstructorError(tree)
- }
- }
-
def insertApply(): Tree = {
assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
@@ -1213,7 +1095,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree))
macroExpandApply(this, tree, mode, pt)
else if (mode.typingConstructorPattern)
- adaptConstrPattern()
+ typedConstructorPattern(tree, pt)
else if (shouldInsertApply(tree))
insertApply()
else if (hasUndetsInMonoMode) { // (9)
@@ -2495,7 +2377,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
// list, so substitute the final result type of the method, i.e. the type
// of the case class.
if (pat1.tpe.paramSectionCount > 0)
- pat1 setType pat1.tpe.finalResultType
+ pat1 modifyType (_.finalResultType)
for (bind @ Bind(name, _) <- cdef.pat)
if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
@@ -2510,8 +2392,10 @@ trait Typers extends Adaptations with Tags with TypersTracking {
// insert a cast if something typechecked under the GADT constraints,
// but not in real life (i.e., now that we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
- if (isFullyDefined(pt) && !(body1.tpe <:< pt))
+ if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
+ log(s"Adding cast to pattern because ${body1.tpe} does not conform to expected type $pt")
body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
+ }
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
@@ -3026,32 +2910,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
def typedArgs(args: List[Tree], mode: Mode) =
args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
- /** Type trees in `args0` against corresponding expected type in `adapted0`.
- *
- * The mode in which each argument is typed is derived from `mode` and
- * whether the arg was originally by-name or var-arg (need `formals0` for that)
- * the default is by-val, of course.
- *
- * (docs reverse-engineered -- AM)
- */
- def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
- if (args.isEmpty || adapted.isEmpty) Nil
- else {
- // No formals left or * indicates varargs.
- val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
- val isByName = formals.nonEmpty && isByNameParamType(formals.head)
- def typedMode = if (isByName) mode.onlySticky else mode.onlySticky | BYVALmode
- def body = typedArg(args.head, mode, typedMode, adapted.head)
- def arg1 = if (isVarArgs) context.withinStarPatterns(body) else body
-
- // formals may be empty, so don't call tail
- arg1 :: loop(args.tail, formals drop 1, adapted.tail)
- }
- }
- loop(args0, formals0, adapted0)
- }
-
/** Does function need to be instantiated, because a missing parameter
* in an argument closure overlaps with an uninstantiated formal?
*/
@@ -3288,22 +3146,20 @@ trait Typers extends Adaptations with Tags with TypersTracking {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
def handleMonomorphicCall: Tree = {
- // In order for checkDead not to be misled by the unfortunate special
- // case of AnyRef#synchronized (which is implemented with signature T => T
- // but behaves as if it were (=> T) => T) we need to know what is the actual
- // target of a call. Since this information is no longer available from
- // typedArg, it is recorded here.
- val args1 =
- // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
- // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
- // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
- // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
- // casting breaks SI-6145,
- // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
- if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+ // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+ // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+ // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+ // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+ // casting breaks SI-6145,
+ // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+ def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol)
+
+ val args1 = (
+ if (noExpectedType)
typedArgs(args, forArgMode(fun, mode))
else
- typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+ typedArgsForFormals(args, paramTypes, forArgMode(fun, mode))
+ )
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
@@ -3387,129 +3243,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
}
- def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
- def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
- def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
-
- val otpe = fun.tpe
-
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
-
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
-
- val uncheckedTypeExtractor =
- if (unappType.paramTypes.nonEmpty)
- extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
- else None
-
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println(s"UNAPP: need to typetest, arg: ${arg.tpe} unappType: $unappType")
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
-
- val typer1 = newTyper(unapplyContext)
- val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
-
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg setType pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
-
- // clearing the type is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapp), List(arg)))
-
- if (fun1.tpe.isErroneous) duplErrTree
- else {
- val resTp = fun1.tpe.finalResultType.dealiasWiden
- val nbSubPats = args.length
- val (formals, formalsExpanded) =
- extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
- if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
- else {
- val args1 = typedArgs(args, mode, formals, formalsExpanded)
- val pt1 = ensureFullyDefined(pt) // SI-1048
- val itype = glb(List(pt1, arg.tpe))
- arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
- val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
-
- // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
- // skip if the unapply's type is not a method type with (at least, but really it should be exactly) one argument
- // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
- if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
- else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- }
- }
- }
-
- def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
- // TODO: disable when in unchecked match
- // we don't create a new Context for a Match, so find the CaseDef, then go out one level and navigate back to the match that has this case
- // val thisCase = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- // val unchecked = thisCase.outer.tree.collect{case Match(selector, cases) if cases contains thisCase => selector} match {
- // case List(Typed(_, tpt)) if tpt.tpe hasAnnotation UncheckedClass => true
- // case t => println("outer tree: "+ (t, thisCase, thisCase.outer.tree)); false
- // }
- // println("wrapClassTagUnapply"+ (!isPastTyper && infer.containsUnchecked(pt), pt, uncheckedPattern))
- // println("wrapClassTagUnapply: "+ extractor)
- // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true))
-
- val args = List(uncheckedPattern)
- val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
- // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
- // and re-typechecks of the target of the unapply call in PATTERNmode,
- // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
- // but an arbitrary tree as is the case here
- doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
- }
-
- // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
- // return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
- // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
- // but at least make a proper type before passing it elsewhere
- val pt1 = pt.dealiasWiden match {
- case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
- case pt1 => pt1
- }
- pt1 match {
- // if at least one of the types in an intersection is checkable, use the checkable ones
- // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
- // Coll is an abstract type, but SeqLike of course is not
- case RefinedType(ps, _) if ps.length > 1 && (ps exists infer.isCheckable) =>
- None
-
- case ptCheckable if infer isUncheckable ptCheckable =>
- val classTagExtractor = resolveClassTag(pos, ptCheckable)
-
- if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
- Some(classTagExtractor)
- else None
-
- case _ => None
- }
- }
-
/**
* Convert an annotation constructor call into an AnnotationInfo.
*/
@@ -3757,11 +3490,15 @@ trait Typers extends Adaptations with Tags with TypersTracking {
/** convert local symbols and skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
- def defines(tree: Tree, sym: Symbol) =
- sym.isExistentialSkolem && sym.unpackLocation == tree ||
- tree.isDef && tree.symbol == sym
- def isVisibleParameter(sym: Symbol) =
- sym.isParameter && (sym.owner == owner) && (sym.isType || !owner.isAnonymousFunction)
+ def defines(tree: Tree, sym: Symbol) = (
+ sym.isExistentialSkolem && sym.unpackLocation == tree
+ || tree.isDef && tree.symbol == sym
+ )
+ def isVisibleParameter(sym: Symbol) = (
+ sym.isParameter
+ && (sym.owner == owner)
+ && (sym.isType || !owner.isAnonymousFunction)
+ )
def containsDef(owner: Symbol, sym: Symbol): Boolean =
(!sym.hasPackageFlag) && {
var o = sym.owner
@@ -4992,14 +4729,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
-
- def typedUnApply(tree: UnApply) = {
- val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
- val args1 = map2(tree.args, tpes)(typedPattern)
- treeCopy.UnApply(tree, fun1, args1) setType pt
- }
-
def issueTryWarnings(tree: Try): Try = {
def checkForCatchAll(cdef: CaseDef) {
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
@@ -5051,61 +4780,28 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
def typedTyped(tree: Typed) = {
- val expr = tree.expr
- val tpt = tree.tpt
- tpt match {
- case Function(List(), EmptyTree) =>
- // find out whether the programmer is trying to eta-expand a macro def
- // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
- // that typecheck must not trigger macro expansions, so we explicitly prohibit them
- // however we cannot do `context.withMacrosDisabled`
- // because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
- exprTyped match {
- case macroDef if treeInfo.isMacroApplication(macroDef) =>
- MacroEtaError(exprTyped)
- case _ =>
- typedEta(checkDead(exprTyped))
- }
-
- case t if treeInfo isWildcardStarType t =>
- val exprTyped = typed(expr, mode.onlySticky)
- def subArrayType(pt: Type) =
- if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
- else {
- val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- newExistentialType(List(tparam), arrayType(tparam.tpe))
- }
-
- val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
- case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
- }
- exprAdapted.tpe.baseType(baseClass) match {
- case TypeRef(_, _, List(elemtp)) =>
- treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
- case _ =>
- setError(tree)
+ if (treeInfo isWildcardStarType tree.tpt)
+ typedStarInPattern(tree, mode.onlySticky, pt)
+ else if (mode.inPatternMode)
+ typedInPattern(tree, mode.onlySticky, pt)
+ else tree match {
+ // find out whether the programmer is trying to eta-expand a macro def
+ // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+ // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+ // however we cannot do `context.withMacrosDisabled`
+ // because `expr` might contain nested macro calls (see SI-6673)
+ //
+ // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
+ // which means trailing underscore.
+ case Typed(expr, Function(Nil, EmptyTree)) =>
+ typed1(suppressMacroExpansion(expr), mode, pt) match {
+ case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
+ case exprTyped => typedEta(checkDead(exprTyped))
}
-
- case _ =>
- val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst)
- val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
-
- if (mode.inPatternMode) {
- val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
- // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- val ptDefined = ensureFullyDefined(pt) // FIXME this is probably redundant now that we don't dropExistenial in pattern mode.
- val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
- treeTyped setType ownType
-
- uncheckedTypeExtractor match {
- case None => treeTyped
- case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
- }
- } else
- treeTyped setType tptTyped.tpe
+ case Typed(expr, tpt) =>
+ val tpt1 = typedType(tpt, mode) // type the ascribed type first
+ val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type
+ treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe
}
}
@@ -5241,11 +4937,13 @@ trait Typers extends Adaptations with Tags with TypersTracking {
case _ => tree
}
}
- else
+ else {
// we should get here only when something before failed
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
+ devWarning(tree.pos, s"Assigning Any type to TypeTree because tree.original is null: tree is $tree/${System.identityHashCode(tree)}, sym=${tree.symbol}, tpe=${tree.tpe}")
tree setType AnyTpe
+ }
}
def typedFunction(fun: Function) = {
if (fun.symbol == NoSymbol)
@@ -5254,52 +4952,80 @@ trait Typers extends Adaptations with Tags with TypersTracking {
typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
}
- // begin typed1
- //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
- tree match {
- case tree: Ident => typedIdentOrWildcard(tree)
- case tree: Select => typedSelectOrSuperCall(tree)
- case tree: Apply => typedApply(tree)
+ // Trees only allowed during pattern mode.
+ def typedInPatternMode(tree: Tree): Tree = tree match {
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case _ => abort(s"unexpected tree in pattern mode: ${tree.getClass}\n$tree")
+ }
+
+ def typedTypTree(tree: TypTree): Tree = tree match {
case tree: TypeTree => typedTypeTree(tree)
- case tree: Literal => typedLiteral(tree)
- case tree: This => typedThis(tree)
- case tree: ValDef => typedValDef(tree)
- case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
- case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
- case tree: If => typedIf(tree)
- case tree: TypeApply => typedTypeApply(tree)
case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
- case tree: Bind => typedBind(tree)
- case tree: Function => typedFunction(tree)
- case tree: Match => typedVirtualizedMatch(tree)
- case tree: New => typedNew(tree)
- case tree: Assign => typedAssign(tree.lhs, tree.rhs)
- case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
- case tree: Super => typedSuper(tree)
case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
- case tree: Typed => typedTyped(tree)
- case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
- case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
- case tree: TypeDef => typedTypeDef(tree)
- case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
- case tree: PackageDef => typedPackageDef(tree)
- case tree: DocDef => typedDocDef(tree, mode, pt)
- case tree: Annotated => typedAnnotated(tree)
case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
- case tree: Return => typedReturn(tree)
- case tree: Try => typedTry(tree)
- case tree: Throw => typedThrow(tree)
- case tree: Alternative => typedAlternative(tree)
- case tree: Star => typedStar(tree)
- case tree: UnApply => typedUnApply(tree)
- case tree: ArrayValue => typedArrayValue(tree)
- case tree: ApplyDynamic => typedApplyDynamic(tree)
- case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree")
+ }
+
+ def typedMemberDef(tree: MemberDef): Tree = tree match {
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case _ => abort(s"unexpected member def: ${tree.getClass}\n$tree")
+ }
+
+ // Trees not allowed during pattern mode.
+ def typedOutsidePatternMode(tree: Tree): Tree = tree match {
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: DocDef => typedDocDef(tree, mode, pt)
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ }
+
+ // Trees allowed in or out of pattern mode.
+ def typedInAnyMode(tree: Tree): Tree = tree match {
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: This => typedThis(tree) // SI-6104
+ case tree: UnApply => abort(s"unexpected UnApply $tree") // turns out UnApply never reaches here
+ case _ =>
+ if (mode.inPatternMode)
+ typedInPatternMode(tree)
+ else
+ typedOutsidePatternMode(tree)
+ }
+
+ // begin typed1
+ tree match {
+ case tree: TypTree => typedTypTree(tree)
+ case tree: MemberDef => typedMemberDef(tree)
+ case _ => typedInAnyMode(tree)
}
}
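
The `typed1` dispatch above is now grouped by tree category: type-representing trees, member definitions, pattern-only trees, non-pattern trees, and trees legal in any mode. A minimal standalone sketch of that dispatch-by-category shape in plain Scala (not compiler internals; all names below are illustrative):

    sealed trait Node
    sealed trait TypeNode   extends Node
    sealed trait MemberNode extends Node
    final case class TypeRefN(name: String) extends TypeNode
    final case class ValDefN(name: String)  extends MemberNode
    final case class IdentN(name: String)   extends Node
    final case class StarN(elem: Node)      extends Node // pattern-only

    object Dispatch {
      private def typedType(t: TypeNode): String     = s"typed type $t"
      private def typedMember(m: MemberNode): String = s"typed member $m"
      private def typedAnyMode(n: Node, inPattern: Boolean): String = n match {
        case IdentN(name)          => s"typed ident $name"
        case StarN(_) if inPattern => "typed star"
        case other                 => sys.error(s"unexpected tree: $other")
      }
      // Mirrors the structure of typed1: category-specific handlers first,
      // then a mode-sensitive fallback for trees allowed in any mode.
      def typed(n: Node, inPattern: Boolean = false): String = n match {
        case t: TypeNode   => typedType(t)
        case m: MemberNode => typedMember(m)
        case _             => typedAnyMode(n, inPattern)
      }
    }
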
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 47c859bb5c..5049fec65b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -12,8 +12,7 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Unapplies extends ast.TreeDSL
-{
+trait Unapplies extends ast.TreeDSL {
self: Analyzer =>
import global._
@@ -21,7 +20,8 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
- private val unapplyParamName = nme.x_0
+ private def unapplyParamName = nme.x_0
+ private def caseMods = Modifiers(SYNTHETIC | CASE)
  // In the typeCompleter (templateSig) of a case class (resp. its module),
  // synthetic `copy` (resp. `apply`, `unapply`) methods are added. To compute
@@ -30,39 +30,17 @@ trait Unapplies extends ast.TreeDSL
// moduleClass symbol of the companion module.
class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
- /** returns type list for return type of the extraction
- * @see extractorFormalTypes
+ /** Returns unapply or unapplySeq if available, without further checks.
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
- assert(ufn.isMethod, ufn)
- val nbSubPats = args.length
- //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
- ufn.name match {
- case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
- if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
- else formals
- case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
- }
- }
+ def directUnapplyMember(tp: Type): Symbol = (tp member nme.unapply) orElse (tp member nme.unapplySeq)
- /** returns unapply or unapplySeq if available */
- def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
- case NoSymbol => tp member nme.unapplySeq
- case unapp => unapp
- }
+ /** Filters out unapplies with multiple (non-implicit) parameter lists,
+ * as they cannot be used as extractors
+ */
+ def unapplyMember(tp: Type): Symbol = directUnapplyMember(tp) filter (sym => !hasMultipleNonImplicitParamLists(sym))
object ExtractorType {
- def unapply(tp: Type): Option[Symbol] = {
- val member = unapplyMember(tp)
- if (member.exists) Some(member) else None
- }
- }
-
- /** returns unapply member's parameter type. */
- def unapplyParameterType(extractor: Symbol) = extractor.tpe.params match {
- case p :: Nil => p.tpe.typeSymbol
- case _ => NoSymbol
+ def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption
}
def copyUntyped[T <: Tree](tree: T): T =
@@ -93,25 +71,19 @@ trait Unapplies extends ast.TreeDSL
*/
private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
def caseFieldAccessorValue(selector: ValDef): Tree = {
- val accessorName = selector.name
- val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
- case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
- }
- privateLocalParamAccessor match {
- case None =>
- // Selecting by name seems to be the most straight forward way here to
- // avoid forcing the symbol of the case class in order to list the accessors.
- val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
- Ident(param) DOT maybeRenamedAccessorName
- case Some(sym) =>
- // But, that gives a misleading error message in neg/t1422.scala, where a case
- // class has an illegal private[this] parameter. We can detect this by checking
- // the modifiers on the param accessors.
- //
- // We just generate a call to that param accessor here, which gives us an inaccessible
- // symbol error, as before.
- Ident(param) DOT sym
+ // Selecting by name seems to be the most straightforward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ def selectByName = Ident(param) DOT caseAccessorName(caseclazz.symbol, selector.name)
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ def localAccessor = caseclazz.impl.body find {
+ case t @ ValOrDefDef(mods, selector.name, _, _) => mods.isPrivateLocal
+ case _ => false
}
+ localAccessor.fold(selectByName)(Ident(param) DOT _.symbol)
}
// Working with trees, rather than symbols, to avoid cycles like SI-5082
@@ -153,8 +125,6 @@ trait Unapplies extends ast.TreeDSL
gen.mkTemplate(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
}
- private val caseMods = Modifiers(SYNTHETIC | CASE)
-
/** The apply method corresponding to a case class
*/
def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
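
`unapplyMember` now rejects `unapply`/`unapplySeq` members that take more than one non-implicit parameter list, since such methods cannot serve as extractors. A hedged illustration in plain Scala (`Even`, `Curried`, and `Demo` are made-up names, not part of the patch):

    object Even {
      def unapply(n: Int): Boolean = n % 2 == 0        // an ordinary boolean extractor
    }
    object Curried {
      // Legal as a method, but the extra parameter list means it is not usable
      // in a pattern such as `case Curried(s) => ...`, which is why members
      // like this are filtered out of unapplyMember.
      def unapply(n: Int)(radix: Int): Option[String] =
        Some(java.lang.Integer.toString(n, radix))
    }

    object Demo extends App {
      42 match {
        case Even() => println("even")
        case _      => println("odd")
      }
    }
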
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 752aac5c8c..ea3c9d8dde 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -8,6 +8,7 @@ package tools
package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
+import scala.compat.Platform.EOL
package object util {
@@ -78,6 +79,14 @@ package object util {
s"$clazz$msg @ $frame"
}
+ def stackTracePrefixString(ex: Throwable)(p: StackTraceElement => Boolean): String = {
+ val frames = ex.getStackTrace takeWhile p map (" at " + _)
+ val msg = ex.getMessage match { case null => "" ; case s => s": $s" }
+ val clazz = ex.getClass.getName
+
+ s"$clazz$msg" +: frames mkString EOL
+ }
+
lazy val trace = new SimpleTracer(System.out)
@deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
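
A hedged usage sketch of the new `stackTracePrefixString` helper (assuming it is imported from the `scala.tools.nsc.util` package object shown above): keep only the leading frames that belong to compiler code, dropping everything after the first frame outside it.

    import scala.tools.nsc.util.stackTracePrefixString

    def compilerOnlyTrace(ex: Throwable): String =
      stackTracePrefixString(ex)(frame => frame.getClassName startsWith "scala.tools.nsc")
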
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 8e1bcb5f87..4e3761454d 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -94,7 +94,8 @@ abstract class MacroImplementations {
def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
- def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx,
+ "conversions must follow a splice; use %% for literal %, %n for newline")
// STEP 1: handle argument conversion
// 1) "...${smth}" => okay, equivalent to "...${smth}%s"
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index afaca3396c..8d2f200e99 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -269,17 +269,13 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
def parse(code: String): Tree = {
- val run = new Run
reporter.reset()
- val wrappedCode = "object wrapper {" + EOL + code + EOL + "}"
- val file = new BatchSourceFile("<toolbox>", wrappedCode)
+ val file = new BatchSourceFile("<toolbox>", code)
val unit = new CompilationUnit(file)
- phase = run.parserPhase
- val parser = newUnitParser(unit)
- val wrappedTree = parser.parse()
+ val parsed = newUnitParser(unit).parseStats()
throwIfErrors()
- val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
parsed match {
+ case Nil => EmptyTree
case expr :: Nil => expr
case stats :+ expr => Block(stats, expr)
}
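
`parse` no longer wraps the code in a synthetic `object wrapper`; it feeds the source straight to `parseStats()` and maps the result as shown. A hedged sketch of standard toolbox usage that exercises this path:

    import scala.reflect.runtime.currentMirror
    import scala.tools.reflect.ToolBox

    object ParseDemo extends App {
      val tb = currentMirror.mkToolBox()
      println(tb.parse(""))                 // Nil           => EmptyTree
      println(tb.parse("1 + 1"))            // expr :: Nil   => the expression itself
      println(tb.parse("val x = 1; x + 1")) // stats :+ expr => Block(stats, expr)
    }
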
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 9a6ba56c18..18a806e5ff 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -17,32 +17,38 @@ trait Parsers { self: Quasiquotes =>
abstract class Parser extends {
val global: self.global.type = self.global
} with ScalaParser {
- /** Wraps given code to obtain a desired parser mode.
- * This way we can just re-use standard parser entry point.
- */
- def wrapCode(code: String): String =
- s"object wrapper { self => $EOL $code $EOL }"
-
- def unwrapTree(wrappedTree: Tree): Tree = {
- val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
- parsed match {
- case tree :: Nil => tree
- case stats :+ tree => Block(stats, tree)
- }
- }
-
def parse(code: String): Tree = {
try {
- val wrapped = wrapCode(code)
- debug(s"wrapped code\n=${wrapped}\n")
- val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, wrapped)
- val tree = new QuasiquoteParser(file).parse()
- unwrapTree(tree)
+ val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, code)
+ new QuasiquoteParser(file).parseRule(entryPoint)
} catch {
- case mi: MalformedInput => c.abort(c.macroApplication.pos, s"syntax error: ${mi.msg}")
+ case mi: MalformedInput => c.abort(correspondingPosition(mi.offset), mi.msg)
+ }
+ }
+
+ def correspondingPosition(offset: Int): Position = {
+ val posMapList = posMap.toList
+ def containsOffset(start: Int, end: Int) = start <= offset && offset <= end
+ def fallbackPosition = posMapList match {
+ case (pos1, (start1, end1)) :: _ if start1 > offset => pos1
+ case _ :+ ((pos2, (start2, end2))) if offset > end2 => pos2.withPoint(pos2.point + (end2 - start2))
}
+ posMapList.sliding(2).collect {
+ case (pos1, (start1, end1)) :: _ if containsOffset(start1, end1) => (pos1, offset - start1)
+ case (pos1, (_, end1)) :: (_, (start2, _)) :: _ if containsOffset(end1, start2) => (pos1, end1)
+ case _ :: (pos2, (start2, end2)) :: _ if containsOffset(start2, end2) => (pos2, offset - start2)
+ }.map { case (pos, offset) =>
+ pos.withPoint(pos.point + offset)
+ }.toList.headOption.getOrElse(fallbackPosition)
}
+ override def token2string(token: Int): String = token match {
+ case EOF => "end of quote"
+ case _ => super.token2string(token)
+ }
+
+ def entryPoint: QuasiquoteParser => Tree
+
class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) {
override val treeBuilder = new ParserTreeBuilder {
// q"(..$xs)"
@@ -73,9 +79,11 @@ trait Parsers { self: Quasiquotes =>
} else
super.caseClause()
- def isHole = isIdent && holeMap.contains(in.name)
+ def isHole: Boolean = isIdent && holeMap.contains(in.name)
- override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+ override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+
+ override def isCaseDefStart: Boolean = super.isCaseDefStart || (in.token == EOF)
override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
@@ -85,6 +93,12 @@ trait Parsers { self: Quasiquotes =>
override def isDclIntro: Boolean = super.isDclIntro || (isHole && lookingAhead { isDclIntro })
+ override def isStatSep(token: Int) = token == EOF || super.isStatSep(token)
+
+ override def expectedMsg(token: Int): String =
+ if (isHole) expectedMsgTemplate(token2string(token), "splicee")
+ else super.expectedMsg(token)
+
// $mods def foo
// $mods T
override def readAnnots(annot: => Tree): List[Tree] = in.token match {
@@ -101,34 +115,26 @@ trait Parsers { self: Quasiquotes =>
}
}
- object TermParser extends Parser
-
- object CaseParser extends Parser {
- override def wrapCode(code: String) = super.wrapCode("something match { case " + code + " }")
-
- override def unwrapTree(wrappedTree: Tree): Tree = {
- val Match(_, head :: tail) = super.unwrapTree(wrappedTree)
- if (tail.nonEmpty)
- c.abort(c.macroApplication.pos, "Can't parse more than one casedef, consider generating a match tree instead")
- head
+ object TermParser extends Parser {
+ def entryPoint = _.templateStats() match {
+ case Nil => EmptyTree
+ case tree :: Nil => tree
+ case stats :+ tree => Block(stats, tree)
}
}
- object PatternParser extends Parser {
- override def wrapCode(code: String) = super.wrapCode("something match { case " + code + " => }")
-
- override def unwrapTree(wrappedTree: Tree): Tree = {
- val Match(_, List(CaseDef(pat, _, _))) = super.unwrapTree(wrappedTree)
- pat
- }
+ object TypeParser extends Parser {
+ def entryPoint = _.typ()
}
- object TypeParser extends Parser {
- override def wrapCode(code: String) = super.wrapCode("type T = " + code)
+ object CaseParser extends Parser {
+ def entryPoint = _.caseClause()
+ }
- override def unwrapTree(wrappedTree: Tree): Tree = {
- val TypeDef(_, _, _, rhs) = super.unwrapTree(wrappedTree)
- rhs
+ object PatternParser extends Parser {
+ def entryPoint = { parser =>
+ val pat = parser.noSeq.pattern1()
+ parser.treeBuilder.patvarTransformer.transform(pat)
}
}
} \ No newline at end of file
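
Each quasiquote flavor now parses through its own entry point instead of wrapping the snippet in dummy code and unwrapping the result. The corresponding user-level interpolators (standard `scala.reflect` quasiquotes):

    import scala.reflect.runtime.universe._

    val term = q"val x = 1; x + 1"    // TermParser:    templateStats()
    val tpe  = tq"List[Int]"          // TypeParser:    typ()
    val cse  = cq"Some(v) => v"       // CaseParser:    caseClause()
    val pat  = pq"Some(value)"        // PatternParser: noSeq.pattern1()
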
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index b680c25f76..b3ac1e293a 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -17,18 +17,31 @@ trait Placeholders { self: Quasiquotes =>
// Step 1: Transform Scala source with holes into vanilla Scala source
lazy val holeMap = new HoleMap()
+ lazy val posMap = mutable.ListMap[Position, (Int, Int)]()
lazy val code = {
val sb = new StringBuilder()
val sessionSuffix = randomUUID().toString.replace("-", "").substring(0, 8) + "$"
- foreach2(args, parts.init) { (tree, p) =>
- val (part, cardinality) = parseDots(p)
+ def appendPart(value: String, pos: Position) = {
+ val start = sb.length
+ sb.append(value)
+ val end = sb.length
+ posMap += pos -> (start, end)
+ }
+
+ def appendHole(tree: Tree, cardinality: Cardinality) = {
val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
- sb.append(part)
sb.append(placeholderName)
holeMap(placeholderName) = Hole(tree, cardinality)
}
- sb.append(parts.last)
+
+ foreach2(args, parts.init) { case (tree, (p, pos)) =>
+ val (part, cardinality) = parseDots(p)
+ appendPart(part, pos)
+ appendHole(tree, cardinality)
+ }
+ val (p, pos) = parts.last
+ appendPart(p, pos)
sb.toString
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index fe954e0bfd..ee99a5e280 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -17,7 +17,7 @@ abstract class Quasiquotes extends Parsers
lazy val (universe: Tree, args, parts, parse, reify) = c.macroApplication match {
case Apply(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), args0) =>
val parts1 = parts0.map {
- case Literal(Constant(s: String)) => s
+ case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
}
val reify0 = method0 match {
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index bdd6a02043..82f2c5dc74 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -8,8 +8,8 @@ package tools
package util
import scala.tools.reflect.WrappedProperties.AccessControl
-import scala.tools.nsc.{ Settings, GenericRunnerSettings }
-import scala.tools.nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
+import scala.tools.nsc.{ Settings }
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index f260ee4093..3963447de3 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -396,7 +396,6 @@ abstract class CPSAnnotationChecker extends CPSUtils {
* for a tree. All this should do is add annotations. */
override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
- import scala.util.control._
if (!cpsEnabled) {
val report = try hasCpsParamTypes(tpe) catch { case _: MissingRequirementError => false }
if (report)
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 462cbb9c94..5a4448e01a 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -1,14 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" path="partest"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scalap"/>
+ <classpathentry kind="src" path="partest-extras"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M4/1.0-RC3/scala-partest_2.11.0-M4-1.0-RC3.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="output" path="build-quick-partest"/>
+ <classpathentry kind="output" path="build-quick-partest-extras"/>
</classpath>
diff --git a/src/eclipse/partest/.project b/src/eclipse/partest/.project
index 45c24332ba..5f52d4bf8f 100644
--- a/src/eclipse/partest/.project
+++ b/src/eclipse/partest/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>partest</name>
+ <name>partest-extras</name>
<comment></comment>
<projects>
</projects>
@@ -17,9 +17,9 @@
</natures>
<linkedResources>
<link>
- <name>build-quick-partest</name>
+ <name>build-quick-partest-extras</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/build/quick/classes/partest</locationURI>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/partest-extras</locationURI>
</link>
<link>
<name>lib</name>
@@ -27,9 +27,9 @@
<locationURI>SCALA_BASEDIR/lib</locationURI>
</link>
<link>
- <name>partest</name>
+ <name>partest-extras</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/src/partest</locationURI>
+ <locationURI>SCALA_BASEDIR/src/partest-extras</locationURI>
</link>
</linkedResources>
</projectDescription>
diff --git a/src/intellij/partest.iml.SAMPLE b/src/intellij/partest.iml.SAMPLE
index 5b8cfa3f38..893236b621 100644
--- a/src/intellij/partest.iml.SAMPLE
+++ b/src/intellij/partest.iml.SAMPLE
@@ -12,17 +12,11 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../partest">
- <sourceFolder url="file://$MODULE_DIR$/../partest" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../partest-extras">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-extras" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="xml" />
- <orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="actors" />
- <orderEntry type="module" module-name="scalap" />
- <orderEntry type="module" module-name="compiler" />
<orderEntry type="library" name="partest-deps" level="project" />
<orderEntry type="module" module-name="repl" />
</component>
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index ddd11257c6..53b4fb2af2 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
* represented by an object in the underlying runtime system.
@@ -20,18 +20,16 @@ import scala.language.implicitConversions
* which provides useful non-primitive operations.
*/
final abstract class Boolean private extends AnyVal {
- /**
- * Negates a Boolean expression.
- *
- * - `!a` results in `false` if and only if `a` evaluates to `true` and
- * - `!a` results in `true` if and only if `a` evaluates to `false`.
- *
- * @return the negated expression
- */
+ /** Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
def unary_! : Boolean
- /**
- * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+ /** Compares two Boolean expressions and returns `true` if they evaluate to the same value.
*
* `a == b` returns `true` if and only if
* - `a` and `b` are `true` or
@@ -48,8 +46,7 @@ final abstract class Boolean private extends AnyVal {
*/
def !=(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a || b` returns `true` if and only if
* - `a` is `true` or
@@ -62,8 +59,7 @@ final abstract class Boolean private extends AnyVal {
*/
def ||(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a && b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -78,8 +74,7 @@ final abstract class Boolean private extends AnyVal {
// def ||(x: => Boolean): Boolean
// def &&(x: => Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
*
* `a | b` returns `true` if and only if
* - `a` is `true` or
@@ -90,8 +85,7 @@ final abstract class Boolean private extends AnyVal {
*/
def |(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ /** Compares two Boolean expressions and returns `true` if both of them evaluate to true.
*
* `a & b` returns `true` if and only if
* - `a` and `b` are `true`.
@@ -100,8 +94,7 @@ final abstract class Boolean private extends AnyVal {
*/
def &(x: Boolean): Boolean
- /**
- * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+ /** Compares two Boolean expressions and returns `true` if they evaluate to a different value.
*
* `a ^ b` returns `true` if and only if
* - `a` is `true` and `b` is `false` or
@@ -135,8 +128,7 @@ object Boolean extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Boolean = x.asInstanceOf[java.lang.Boolean].booleanValue()
- /** The String representation of the scala.Boolean companion object.
- */
+ /** The String representation of the scala.Boolean companion object. */
override def toString = "object scala.Boolean"
}
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index 2510e859c0..413231c0d1 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Byte`, a 8-bit signed integer (equivalent to Java's `byte` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Byte` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Byte private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Byte private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Byte private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Byte] = null
}
object Byte extends AnyValCompanion {
- /** The smallest value representable as a Byte.
- */
+ /** The smallest value representable as a Byte. */
final val MinValue = java.lang.Byte.MIN_VALUE
- /** The largest value representable as a Byte.
- */
+ /** The largest value representable as a Byte. */
final val MaxValue = java.lang.Byte.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Byte extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Byte = x.asInstanceOf[java.lang.Byte].byteValue()
- /** The String representation of the scala.Byte companion object.
- */
+ /** The String representation of the scala.Byte companion object. */
override def toString = "object scala.Byte"
-
- /** Language mandated coercions from Byte to "wider" types.
- */
+ /** Language mandated coercions from Byte to "wider" types. */
+ import scala.language.implicitConversions
implicit def byte2short(x: Byte): Short = x.toShort
implicit def byte2int(x: Byte): Int = x.toInt
implicit def byte2long(x: Byte): Long = x.toLong
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 1c9a2ba44f..ec2d48c181 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Char` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Char private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Char private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Char private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Char] = null
}
object Char extends AnyValCompanion {
- /** The smallest value representable as a Char.
- */
+ /** The smallest value representable as a Char. */
final val MinValue = java.lang.Character.MIN_VALUE
- /** The largest value representable as a Char.
- */
+ /** The largest value representable as a Char. */
final val MaxValue = java.lang.Character.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Char extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Char = x.asInstanceOf[java.lang.Character].charValue()
- /** The String representation of the scala.Char companion object.
- */
+ /** The String representation of the scala.Char companion object. */
override def toString = "object scala.Char"
-
- /** Language mandated coercions from Char to "wider" types.
- */
+ /** Language mandated coercions from Char to "wider" types. */
+ import scala.language.implicitConversions
implicit def char2int(x: Char): Int = x.toInt
implicit def char2long(x: Char): Long = x.toLong
implicit def char2float(x: Char): Float = x.toFloat
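
For orientation only: the hunk above moves `import scala.language.implicitConversions` next to the coercions it documents. A rough sketch of how the widenings shown here and the `Int`-typed arithmetic described by the collapsed Scaladoc behave; the object name below is invented for illustration and is not part of this patch.

object CharWideningSketch {
  def main(args: Array[String]): Unit = {
    val c: Char = 'A'            // code point 65
    val i: Int = c               // widened, as char2int documents
    val l: Long = c              // char2long
    val f: Float = c             // char2float
    println(s"$i $l $f")         // prints: 65 65 65.0

    // Arithmetic on Char yields Int, matching `def +(x: Int): Int` above.
    val sum: Int = c + 1         // 66
    println(sum)
  }
}
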
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index ce081bbec1..a58fa3ed25 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Double` are not
* represented by an object in the underlying runtime system.
@@ -28,334 +28,176 @@ final abstract class Double private extends AnyVal {
def toFloat: Float
def toDouble: Double
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Double
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Double
def +(x: String): String
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Double
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Double] = null
@@ -401,8 +243,7 @@ object Double extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Double = x.asInstanceOf[java.lang.Double].doubleValue()
- /** The String representation of the scala.Double companion object.
- */
+ /** The String representation of the scala.Double companion object. */
override def toString = "object scala.Double"
}
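
For reference, the `unbox` shown in this hunk round-trips with the companion's `box`, and `%` on `Double` is the remainder of truncating division, so its sign follows the dividend. A minimal sketch; the demo object name is illustrative only.

object DoubleCompanionSketch {
  def main(args: Array[String]): Unit = {
    // box/unbox mirror the companion methods shown in the diff above.
    val boxed: java.lang.Object = Double.box(3.5)
    val unboxed: Double = Double.unbox(boxed)
    println(unboxed)             // 3.5

    // Remainder of truncating division, as with Java's %.
    println(5.5 % 2.0)           // 1.5
    println(-5.5 % 2.0)          // -1.5
  }
}
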
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index 4ff2d509b8..3c59057a8d 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Float` are not
* represented by an object in the underlying runtime system.
@@ -28,334 +28,176 @@ final abstract class Float private extends AnyVal {
def toFloat: Float
def toDouble: Double
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Float
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Float
def +(x: String): String
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Float] = null
@@ -401,12 +243,10 @@ object Float extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Float = x.asInstanceOf[java.lang.Float].floatValue()
- /** The String representation of the scala.Float companion object.
- */
+ /** The String representation of the scala.Float companion object. */
override def toString = "object scala.Float"
-
- /** Language mandated coercions from Float to "wider" types.
- */
+ /** Language mandated coercions from Float to "wider" types. */
+ import scala.language.implicitConversions
implicit def float2double(x: Float): Double = x.toDouble
}
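
The only coercion kept in `Float`'s companion is `float2double`, and mixed `Float`/`Double` arithmetic widens to `Double`, as the `def +(x: Double): Double` signature above indicates. A minimal sketch under those assumptions; the object name is invented.

object FloatWideningSketch {
  def main(args: Array[String]): Unit = {
    val f: Float = 1.5f
    val d: Double = f            // float2double applies
    println(d)                   // 1.5

    val sum: Double = f + 2.0    // Float + Double widens to Double
    println(sum)                 // 3.5
  }
}
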
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index 6a27195b10..72e5ebf81b 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Int` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Int private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Int private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Int private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Int] = null
}
object Int extends AnyValCompanion {
- /** The smallest value representable as a Int.
- */
+ /** The smallest value representable as an Int. */
final val MinValue = java.lang.Integer.MIN_VALUE
- /** The largest value representable as a Int.
- */
+ /** The largest value representable as an Int. */
final val MaxValue = java.lang.Integer.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Int extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Int = x.asInstanceOf[java.lang.Integer].intValue()
- /** The String representation of the scala.Int companion object.
- */
+ /** The String representation of the scala.Int companion object. */
override def toString = "object scala.Int"
-
- /** Language mandated coercions from Int to "wider" types.
- */
+ /** Language mandated coercions from Int to "wider" types. */
+ import scala.language.implicitConversions
implicit def int2long(x: Int): Long = x.toLong
implicit def int2float(x: Int): Float = x.toFloat
implicit def int2double(x: Int): Double = x.toDouble
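
As with the other companions, `Int.MinValue`/`Int.MaxValue` simply delegate to `java.lang.Integer`, and the three coercions above widen an `Int` without an explicit conversion call. A short sketch under those assumptions; the object name is illustrative only.

object IntCompanionSketch {
  def main(args: Array[String]): Unit = {
    println(Int.MinValue)        // -2147483648
    println(Int.MaxValue)        //  2147483647

    // int2long is exact; int2float can round large magnitudes.
    val l: Long = Int.MaxValue
    val f: Float = Int.MaxValue
    println(l)                   // 2147483647
    println(f)                   // ~2.14748365E9 (rounded)
  }
}
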
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 4d369ae010..1bd0fe88b1 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Long` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Long private extends AnyVal {
* }}}
*/
def unary_~ : Long
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Long
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Long
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Long private extends AnyVal {
*/
def >>(x: Long): Long
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Long private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Long] = null
}
object Long extends AnyValCompanion {
- /** The smallest value representable as a Long.
- */
+ /** The smallest value representable as a Long. */
final val MinValue = java.lang.Long.MIN_VALUE
- /** The largest value representable as a Long.
- */
+ /** The largest value representable as a Long. */
final val MaxValue = java.lang.Long.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Long extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Long = x.asInstanceOf[java.lang.Long].longValue()
- /** The String representation of the scala.Long companion object.
- */
+ /** The String representation of the scala.Long companion object. */
override def toString = "object scala.Long"
-
- /** Language mandated coercions from Long to "wider" types.
- */
+ /** Language mandated coercions from Long to "wider" types. */
+ import scala.language.implicitConversions
implicit def long2float(x: Long): Float = x.toFloat
implicit def long2double(x: Long): Double = x.toDouble
}
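A minimal usage sketch (illustration only, not part of this commit) of the coercions defined just above: assigning a Long where a Float or Double is expected applies long2float / long2double implicitly.

  val n: Long = 42L
  val d: Double = n   // widened via Long.long2double
  val f: Float  = n   // widened via Long.long2float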
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 4f91c51550..36b9ec4df9 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Short` are not
* represented by an object in the underlying runtime system.
@@ -37,13 +37,9 @@ final abstract class Short private extends AnyVal {
* }}}
*/
def unary_~ : Int
- /**
- * Returns this value, unmodified.
- */
+ /** Returns this value, unmodified. */
def unary_+ : Int
- /**
- * Returns the negation of this value.
- */
+ /** Returns the negation of this value. */
def unary_- : Int
def +(x: String): String
@@ -105,178 +101,94 @@ final abstract class Short private extends AnyVal {
*/
def >>(x: Long): Int
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Byte): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Short): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Char): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Int): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Long): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Float): Boolean
- /**
- * Returns `true` if this value is equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is equal to x, `false` otherwise. */
def ==(x: Double): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Byte): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Short): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Char): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Int): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Long): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Float): Boolean
- /**
- * Returns `true` if this value is not equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is not equal to x, `false` otherwise. */
def !=(x: Double): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Short): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Char): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Int): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Long): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Float): Boolean
- /**
- * Returns `true` if this value is less than x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than x, `false` otherwise. */
def <(x: Double): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Byte): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Short): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Char): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Int): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Long): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Float): Boolean
- /**
- * Returns `true` if this value is less than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is less than or equal to x, `false` otherwise. */
def <=(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than x, `false` otherwise. */
def >(x: Double): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Byte): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Short): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Char): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Int): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Long): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Float): Boolean
- /**
- * Returns `true` if this value is greater than or equal to x, `false` otherwise.
- */
+ /** Returns `true` if this value is greater than or equal to x, `false` otherwise. */
def >=(x: Double): Boolean
/**
@@ -447,161 +359,89 @@ final abstract class Short private extends AnyVal {
*/
def ^(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Byte): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Short): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Char): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Int): Int
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Long): Long
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Float): Float
- /**
- * Returns the sum of this value and `x`.
- */
+ /** Returns the sum of this value and `x`. */
def +(x: Double): Double
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Byte): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Short): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Char): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Int): Int
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Long): Long
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Float): Float
- /**
- * Returns the difference of this value and `x`.
- */
+ /** Returns the difference of this value and `x`. */
def -(x: Double): Double
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Byte): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Short): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Char): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Int): Int
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Long): Long
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Float): Float
- /**
- * Returns the product of this value and `x`.
- */
+ /** Returns the product of this value and `x`. */
def *(x: Double): Double
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Byte): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Short): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Char): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Int): Int
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Long): Long
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Float): Float
- /**
- * Returns the quotient of this value and `x`.
- */
+ /** Returns the quotient of this value and `x`. */
def /(x: Double): Double
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Byte): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Short): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Char): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Int): Int
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Long): Long
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Float): Float
- /**
- * Returns the remainder of the division of this value by `x`.
- */
+ /** Returns the remainder of the division of this value by `x`. */
def %(x: Double): Double
override def getClass(): Class[Short] = null
}
object Short extends AnyValCompanion {
- /** The smallest value representable as a Short.
- */
+ /** The smallest value representable as a Short. */
final val MinValue = java.lang.Short.MIN_VALUE
- /** The largest value representable as a Short.
- */
+ /** The largest value representable as a Short. */
final val MaxValue = java.lang.Short.MAX_VALUE
/** Transform a value type into a boxed reference type.
@@ -625,12 +465,10 @@ object Short extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Short = x.asInstanceOf[java.lang.Short].shortValue()
- /** The String representation of the scala.Short companion object.
- */
+ /** The String representation of the scala.Short companion object. */
override def toString = "object scala.Short"
-
- /** Language mandated coercions from Short to "wider" types.
- */
+ /** Language mandated coercions from Short to "wider" types. */
+ import scala.language.implicitConversions
implicit def short2int(x: Short): Int = x.toInt
implicit def short2long(x: Short): Long = x.toLong
implicit def short2float(x: Short): Float = x.toFloat
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 42fb2f36e8..70f95750da 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -59,7 +59,8 @@ case class StringContext(parts: String*) {
*/
def checkLengths(args: Seq[Any]): Unit =
if (parts.length != args.length + 1)
- throw new IllegalArgumentException("wrong number of arguments for interpolated string")
+ throw new IllegalArgumentException("wrong number of arguments ("+ args.length
+ +") for interpolated string with "+ parts.length +" parts")
/** The simple string interpolator.
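A minimal sketch of when the improved message above fires: `checkLengths` requires `parts.length == args.length + 1`, so the two counts it now reports come straight from those values.

  val sc = StringContext("Hello, ", "!")
  sc.checkLengths(Seq("world"))  // ok: 2 parts, 1 arg
  sc.checkLengths(Seq())         // throws: wrong number of arguments (0) for interpolated string with 2 parts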
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index 0e59a184d1..018ad24a99 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-// DO NOT EDIT, CHANGES WILL BE LOST.
+// DO NOT EDIT, CHANGES WILL BE LOST
+// This auto-generated code can be modified in scala.tools.cmd.gen.
+// Afterwards, running tools/codegen-anyvals regenerates this source file.
package scala
-import scala.language.implicitConversions
-
/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
* `Unit`, `()`, and it is not represented by any object in the underlying
@@ -41,8 +41,7 @@ object Unit extends AnyValCompanion {
*/
def unbox(x: java.lang.Object): Unit = ()
- /** The String representation of the scala.Unit companion object.
- */
+ /** The String representation of the scala.Unit companion object. */
override def toString = "object scala.Unit"
}
diff --git a/src/library/scala/annotation/compileTimeOnly.scala b/src/library/scala/annotation/compileTimeOnly.scala
new file mode 100644
index 0000000000..942e9cad8c
--- /dev/null
+++ b/src/library/scala/annotation/compileTimeOnly.scala
@@ -0,0 +1,22 @@
+package scala.annotation
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates that an annottee should not be referred to after
+ * type checking (which includes macro expansion).
+ *
+ * Examples of potential use:
+ * 1) The annottee can only appear in the arguments of some other macro
+ * that will eliminate it from the AST during expansion.
+ * 2) The annottee is a macro and should have been expanded away,
+ * so if it hasn't, something has gone wrong.
+ * (Comes in handy to provide better support for new macro flavors,
+ * e.g. macro annotations, that can't be expanded by the vanilla compiler).
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.11.0
+ */
+@getter @setter @beanGetter @beanSetter @companionClass @companionMethod
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
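A minimal sketch of the intended usage (the `Holes.hole` name and the message wording are illustrative, not from this commit): a definition that only makes sense as input to a macro carries the annotation, so any reference that survives type checking is reported with that message.

  import scala.annotation.compileTimeOnly

  object Holes {
    @compileTimeOnly("`hole` may only appear as an argument to a macro; it should have been expanded away")
    def hole[T]: T = sys.error("unreachable: removed during macro expansion")
  }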
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index c01694960c..b11368acdf 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -161,6 +161,8 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @inheritdoc
*/
@inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
+ // Note to developers: there exists a duplication between this function and `reflect.internal.util.Collections#map2Conserve`.
+ // If any successful optimizations or other changes are made here, please mirror them there too.
@tailrec
def loop(mapped: ListBuffer[B], unchanged: List[A], pending: List[A]): List[B] =
if (pending.isEmpty) {
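For reference, a small illustration (not part of the commit) of the behaviour both duplicated implementations must preserve: `mapConserve` returns the receiver itself when the function leaves every element reference-equal, and only builds a new list otherwise.

  val xs: List[String] = List("a", "b", "c")
  assert(xs.mapConserve(x => x) eq xs)                         // unchanged: same instance, no allocation
  assert(xs.mapConserve(_.toUpperCase) == List("A", "B", "C")) // changed: a fresh list is built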
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index cc25b5b4b2..5ab2bb81c6 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -30,6 +30,7 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
def result: To = allocateAndCopy
def clear() = { chain.clear() }
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
+ import language.existentials // FIXME: See SI-7750
if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
newLazyCombiner(chain ++= that.chain)
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 4b8139702f..1c00c0e91f 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -14,6 +14,7 @@ package scala.concurrent
*
* @author Martin Odersky
* @version 1.0, 10/03/2003
+ * @deprecated("Use java.util.concurrent.locks.Lock", "2.11.0")
*/
class Lock {
var available = true
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index c049de3a28..315f56bd4e 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -12,7 +12,7 @@ package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
-import scala.collection.generic.{ Sorted }
+import scala.collection.generic.{ Sorted, IsTraversableLike }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
@@ -48,6 +48,10 @@ object ScalaRunTime {
names.toSet
}
+ // A helper method to make my life in the pattern matcher a lot easier.
+ def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr =
+ traversable conversion coll drop num
+
/** Return the class object representing an array with element class `clazz`.
*/
def arrayClass(clazz: jClass[_]): jClass[_] = {
@@ -267,7 +271,18 @@ object ScalaRunTime {
}
def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
- def isXmlClass(x: AnyRef) = packageOf(x) startsWith "scala.xml."
+
+ // We use reflection because the scala.xml package might not be available
+ def isSubClassOf(potentialSubClass: Class[_], ofClass: String) =
+ try {
+ val classLoader = potentialSubClass.getClassLoader
+ val clazz = Class.forName(ofClass, /*initialize =*/ false, classLoader)
+ clazz.isAssignableFrom(potentialSubClass)
+ } catch {
+ case cnfe: ClassNotFoundException => false
+ }
+ def isXmlNode(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.Node")
+ def isXmlMetaData(potentialSubClass: Class[_]) = isSubClassOf(potentialSubClass, "scala.xml.MetaData")
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
@@ -279,11 +294,12 @@ object ScalaRunTime {
case _: StringLike[_] => true
// Don't want to evaluate any elements in a view
case _: TraversableView[_, _] => true
- // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] -> catch those and more by isXmlClass(x)
+ // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData]
+ // -> catch those by isXmlNode and isXmlMetaData.
// Don't want to a) traverse infinity or b) be overly helpful with people's custom
// collections which may have useful toString methods - ticket #3710
// or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
- case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlClass(x)
+ case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) || isXmlNode(x.getClass) || isXmlMetaData(x.getClass)
// Otherwise, nothing could possibly go wrong
case _ => false
}
@@ -324,7 +340,7 @@ object ScalaRunTime {
// to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes.
try inner(arg)
catch {
- case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
+ case _: UnsupportedOperationException | _: AssertionError => "" + arg
}
}
@@ -335,20 +351,6 @@ object ScalaRunTime {
nl + s + "\n"
}
- private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
- if (sys.props contains "scala.debug.zip") {
- val xs = coll1.toIndexedSeq
- val ys = coll2.toIndexedSeq
- if (xs.length != ys.length) {
- Console.err.println(
- "Mismatched zip in " + what + ":\n" +
- " this: " + xs.mkString(", ") + "\n" +
- " that: " + ys.mkString(", ")
- )
- (new Exception).getStackTrace.drop(2).take(10).foreach(println)
- }
- }
- }
def box[T](clazz: jClass[T]): jClass[_] = clazz match {
case java.lang.Byte.TYPE => classOf[java.lang.Byte]
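The `isSubClassOf` helper added above keeps `scala.xml` optional by naming the class as a plain string; a standalone sketch of the same pattern (the `extendsClassNamed` name is illustrative):

  def extendsClassNamed(candidate: Class[_], className: String): Boolean =
    try {
      // initialize = false: we only need the Class object, not its static initializers
      val clazz = Class.forName(className, /*initialize =*/ false, candidate.getClassLoader)
      clazz.isAssignableFrom(candidate)
    } catch {
      case _: ClassNotFoundException => false  // the named class is simply absent from the classpath
    }

  extendsClassNamed(classOf[List[_]], "scala.collection.immutable.Seq")  // true
  extendsClassNamed(classOf[List[_]], "scala.xml.Node")                  // false when scala.xml is absent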
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 02c461f3c6..8b63a73638 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -131,10 +131,9 @@ private[scala] trait PropertiesTrait {
def javaVmName = propOrEmpty("java.vm.name")
def javaVmVendor = propOrEmpty("java.vm.vendor")
def javaVmVersion = propOrEmpty("java.vm.version")
- // this property must remain less-well-known until 2.11
- private def javaSpecVersion = propOrEmpty("java.specification.version")
- //private def javaSpecVendor = propOrEmpty("java.specification.vendor")
- //private def javaSpecName = propOrEmpty("java.specification.name")
+ def javaSpecVersion = propOrEmpty("java.specification.version")
+ def javaSpecVendor = propOrEmpty("java.specification.vendor")
+ def javaSpecName = propOrEmpty("java.specification.name")
def osName = propOrEmpty("os.name")
def scalaHome = propOrEmpty("scala.home")
def tmpDir = propOrEmpty("java.io.tmpdir")
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 8eac0a2520..439b30e714 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -194,6 +194,44 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends
else None
}
+ /** Tries to match the String representation of a [[scala.Char]].
+ * If the match succeeds, the result is the first matching
+ * group if any groups are defined, or an empty Sequence otherwise.
+ *
+ * For example:
+ *
+ * {{{
+ * val cat = "cat"
+ * // the case must consume the group to match
+ * val r = """(\p{Lower})""".r
+ * cat(0) match { case r(x) => true }
+ * cat(0) match { case r(_) => true }
+ * cat(0) match { case r(_*) => true }
+ * cat(0) match { case r() => true } // no match
+ *
+ * // there is no group to extract
+ * val r = """\p{Lower}""".r
+ * cat(0) match { case r(x) => true } // no match
+ * cat(0) match { case r(_) => true } // no match
+ * cat(0) match { case r(_*) => true } // matches
+ * cat(0) match { case r() => true } // matches
+ *
+ * // even if there are multiple groups, only one is returned
+ * val r = """((.))""".r
+ * cat(0) match { case r(_) => true } // matches
+ * cat(0) match { case r(_,_) => true } // no match
+ * }}}
+ *
+ * @param c The Char to match
+ * @return The match
+ */
+ def unapplySeq(c: Char): Option[Seq[Char]] = {
+ val m = pattern matcher c.toString
+ if (runMatcher(m)) {
+ if (m.groupCount > 0) Some(m group 1) else Some(Nil)
+ } else None
+ }
+
/** Tries to match on a [[scala.util.matching.Regex.Match]].
* A previously failed match results in None.
* If a successful match was made against the current pattern, then that result is used.
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
index d618e086f4..d618e086f4 100644
--- a/src/partest/scala/tools/partest/ASMConverters.scala
+++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala
diff --git a/src/partest/scala/tools/partest/AsmNode.scala b/src/partest-extras/scala/tools/partest/AsmNode.scala
index d181436676..e6a91498d1 100644
--- a/src/partest/scala/tools/partest/AsmNode.scala
+++ b/src/partest-extras/scala/tools/partest/AsmNode.scala
@@ -16,10 +16,11 @@ sealed trait AsmNode[+T] {
def visibleAnnotations: List[AnnotationNode]
def invisibleAnnotations: List[AnnotationNode]
def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
+ def erasedCharacteristics = f"$name%15s $desc%-30s$accessString"
- private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
- private def sigString = if (signature == null) "" else " " + signature
- override def toString = characteristics
+ private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
+ private def sigString = if (signature == null) "" else " " + signature
+ override def toString = characteristics
}
object AsmNode {
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 172fa29189..7650a892fd 100644
--- a/src/partest/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -48,19 +48,30 @@ abstract class BytecodeTest extends ASMConverters {
// descriptors and generic signatures? Method bodies are not considered, and
// the names of the classes containing the methods are substituted so they do
// not appear as differences.
- def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode): Boolean = {
+ def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.characteristics)
+
+ // Same as sameMethodAndFieldSignatures, but ignoring generic signatures.
+ // This allows for methods which receive the same descriptor but differing
+ // generic signatures. In particular, this happens with value classes,
+ // which get a generic signature where a method written in terms of the
+ // underlying values does not.
+ def sameMethodAndFieldDescriptors(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.erasedCharacteristics)
+
+ private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[_] => String): Boolean = {
val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
val name1 = clazzA.name
val name2 = clazzB.name
if (ms1.length != ms2.length) {
- println("Different member counts in $name1 and $name2")
+ println(s"Different member counts in $name1 and $name2")
false
}
else (ms1, ms2).zipped forall { (m1, m2) =>
- val c1 = m1.characteristics
- val c2 = m2.characteristics.replaceAllLiterally(name2, name1)
+ val c1 = f(m1)
+ val c2 = f(m2).replaceAllLiterally(name2, name1)
if (c1 == c2)
println(s"[ok] $m1")
else
diff --git a/src/partest/scala/tools/partest/JavapTest.scala b/src/partest-extras/scala/tools/partest/JavapTest.scala
index 3cb3dc6ca8..3cb3dc6ca8 100644
--- a/src/partest/scala/tools/partest/JavapTest.scala
+++ b/src/partest-extras/scala/tools/partest/JavapTest.scala
diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index 7381b8af54..7cc2dd39a9 100644
--- a/src/partest/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -7,7 +7,6 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
-import scala.tools.partest.nest.FileUtil
import java.lang.reflect.{ Method => JMethod, Field => JField }
/** A trait for testing repl code. It drops the first line
@@ -31,7 +30,7 @@ abstract class ReplTest extends DirectTest {
def show() = eval() foreach println
}
-abstract class SessionTest extends ReplTest with FileUtil {
+abstract class SessionTest extends ReplTest {
def session: String
override final def code = expected filter (_.startsWith(prompt)) map (_.drop(prompt.length)) mkString "\n"
def expected = session.stripMargin.lines.toList
@@ -39,6 +38,6 @@ abstract class SessionTest extends ReplTest with FileUtil {
override def show() = {
val out = eval().toList
if (out.size != expected.size) Console println s"Expected ${expected.size} lines, got ${out.size}"
- if (out != expected) Console print compareContents(expected, out, "expected", "actual")
+ if (out != expected) Console print nest.FileManager.compareContents(expected, out, "expected", "actual")
}
}
diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest-extras/scala/tools/partest/SigTest.scala
index fe233a4fb5..fe233a4fb5 100644
--- a/src/partest/scala/tools/partest/SigTest.scala
+++ b/src/partest-extras/scala/tools/partest/SigTest.scala
diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala
new file mode 100644
index 0000000000..114658b0cd
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/Util.scala
@@ -0,0 +1,52 @@
+package scala.tools.partest
+
+import scala.language.experimental.macros
+
+object Util {
+ /**
+ * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+ * {{{
+ * trace> "".isEmpty
+ * res: Boolean = true
+ *
+ * }}}
+ *
+ * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+ * test code in a string.
+ */
+ def trace[A](a: A) = macro traceImpl[A]
+
+ import scala.reflect.macros.Context
+ def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+ import c.universe._
+ import definitions._
+
+ // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
+ // because this impairs reflection refactorings
+ //
+ // val exprCode = c.literal(show(a.tree))
+ // val exprType = c.literal(show(a.actualType))
+ // reify {
+ // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+ // a.splice
+ // }
+
+ c.Expr(Block(
+ List(Apply(
+ Select(Ident(PredefModule), TermName("println")),
+ List(Apply(
+ Select(Apply(
+ Select(Ident(ScalaPackage), TermName("StringContext")),
+ List(
+ Literal(Constant("trace> ")),
+ Literal(Constant("\\nres: ")),
+ Literal(Constant(" = ")),
+ Literal(Constant("\\n")))),
+ TermName("s")),
+ List(
+ Literal(Constant(show(a.tree))),
+ Literal(Constant(show(a.actualType))),
+ a.tree))))),
+ a.tree))
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
index 18dd740208..18dd740208 100644
--- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
+++ b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
new file mode 100644
index 0000000000..d6b62e1d9e
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
@@ -0,0 +1,82 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A simple profiler class that counts method invocations. It is used by the byte-code instrumentation, which inserts
+ * a call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented method.
+ *
+ * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It's made public only
+ * because it must be universally accessible for instrumentation needs. If you want to profile your test, use
+ * {@link Instrumentation} instead.
+ */
+public class Profiler {
+
+ private static boolean isProfiling = false;
+ private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
+
+ static public class MethodCallTrace {
+ final String className;
+ final String methodName;
+ final String methodDescriptor;
+
+ public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
+ this.className = className;
+ this.methodName = methodName;
+ this.methodDescriptor = methodDescriptor;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof MethodCallTrace)) {
+ return false;
+ } else {
+ MethodCallTrace that = (MethodCallTrace) obj;
+ return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
+ }
+ }
+ @Override
+ public int hashCode() {
+ return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
+ }
+ }
+
+ public static void startProfiling() {
+ isProfiling = true;
+ }
+
+ public static void stopProfiling() {
+ isProfiling = false;
+ }
+
+ public static boolean isProfiling() {
+ return isProfiling;
+ }
+
+ public static void resetProfiling() {
+ counts = new HashMap<MethodCallTrace, Integer>();
+ }
+
+ public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
+ if (isProfiling) {
+ MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
+ Integer counter = counts.get(trace);
+ if (counter == null) {
+ counts.put(trace, 1);
+ } else {
+ counts.put(trace, counter+1);
+ }
+ }
+ }
+
+ public static Map<MethodCallTrace, Integer> getStatistics() {
+ return new HashMap<MethodCallTrace, Integer>(counts);
+ }
+
+}
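A small Scala sketch of driving the API above by hand (tests normally go through the Instrumentation wrapper, as the comment says; `runInstrumentedCode` is a placeholder): counts accumulate via `methodCalled` only while profiling is on, and `getStatistics` hands back a snapshot.

  import scala.tools.partest.instrumented.Profiler

  Profiler.startProfiling()
  runInstrumentedCode()            // placeholder for whatever code the agent has instrumented
  Profiler.stopProfiling()

  val stats = Profiler.getStatistics()  // snapshot map of MethodCallTrace -> invocation count
  println(stats.size() + " distinct instrumented methods were called")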
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
new file mode 100644
index 0000000000..86f5e64516
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
@@ -0,0 +1,49 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.ClassFileTransformer;
+import java.security.ProtectionDomain;
+
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassWriter;
+
+public class ASMTransformer implements ClassFileTransformer {
+
+ private boolean shouldTransform(String className) {
+ return
+ // do not instrument instrumentation logic (in order to avoid infinite recursion)
+ !className.startsWith("scala/tools/partest/instrumented/") &&
+ !className.startsWith("scala/tools/partest/javaagent/") &&
+ // we instrument all classes from empty package
+ (!className.contains("/") ||
+ // we instrument all classes from scala package
+ className.startsWith("scala/") ||
+ // we instrument all classes from `instrumented` package
+ className.startsWith("instrumented/"));
+ }
+
+ public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+ if (shouldTransform(className)) {
+ ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+ @Override protected String getCommonSuperClass(final String type1, final String type2) {
+ // Since we are not recomputing the stack frame map, this should never be called. We override this method because
+ // the default implementation uses reflection and might try to load the class that we are
+ // currently processing, which leads to weird results like swallowed exceptions and classes not being
+ // transformed.
+ throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+ ") while transforming " + className);
+ }
+ };
+ ProfilerVisitor visitor = new ProfilerVisitor(writer);
+ ClassReader reader = new ClassReader(classfileBuffer);
+ reader.accept(visitor, 0);
+ return writer.toByteArray();
+ } else {
+ return classfileBuffer;
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
index be0fee46a2..be0fee46a2 100644
--- a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
index 8306327b14..b1b100fbb0 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -10,7 +10,7 @@ import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
public class ProfilerVisitor extends ClassVisitor implements Opcodes {
-
+
private static String profilerClass = "scala/tools/partest/instrumented/Profiler";
public ProfilerVisitor(final ClassVisitor cv) {
@@ -53,7 +53,7 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes {
"(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
}
}
- return mv;
+ return mv;
}
}
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
new file mode 100644
index 0000000000..819a5cc39b
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -0,0 +1,25 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.Instrumentation;
+import java.lang.instrument.UnmodifiableClassException;
+
+/**
+ * Profiling agent that instruments byte-code to insert calls to
+ * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
+ * by using ASM library for byte-code manipulation.
+ */
+public class ProfilingAgent {
+ public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
+ // NOTE: we are adding a transformer that won't be applied to classes that are already loaded.
+ // This should be ok because premain is executed before main, so the Scala library
+ // and the test case itself won't have been loaded yet. We rely here on the fact that ASMTransformer does
+ // not depend on the Scala library. In case our assumptions are wrong we can always insert a call to
+ // inst.retransformClasses.
+ inst.addTransformer(new ASMTransformer(), false);
+ }
+}
diff --git a/src/partest/README b/src/partest/README
deleted file mode 100644
index 17594dbb1e..0000000000
--- a/src/partest/README
+++ /dev/null
@@ -1,31 +0,0 @@
-How partest chooses the compiler / library:
-
- * ''-Dpartest.build=build/four-pack'' -> will search for libraries in
- ''lib'' directory of given path
- * ''--pack'' -> will set ''partest.build=build/pack'', and run all tests.
- add ''--[kind]'' to run a selected set of tests.
- * auto detection:
- - partest.build property -> ''bin'' / ''lib'' directories
- - distribution (''dists/latest'')
- - supersabbus pack (''build/pack'')
- - sabbus quick (''build/quick'')
- - installed dist (test files in ''misc/scala-test/files'')
-
-How partest choses test files: the test files must be accessible from
-the directory on which partest is run. So the test files must be either
-at:
- * ./test/files
- * ./files (cwd is "test")
- * ./misc/scala-test/files (installed scala distribution)
-
-Other arguments:
- * --pos next files test a compilation success
- * --neg next files test a compilation failure
- * --run next files test the interpreter and all backends
- * --jvm next files test the JVM backend
- * --res next files test the resident compiler
- * --shootout next files are shootout tests
- * --script next files test the script runner
- * ''-Dpartest.scalac_opts=...'' -> add compiler options
- * ''--verbose'' -> print verbose messages
- * ''-Dpartest.debug=true'' -> print debug messages
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
deleted file mode 100644
index df4a81dee2..0000000000
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.nsc._
-
-/** For testing compiler internals directly.
- * Each source code string in "sources" will be compiled, and
- * the check function will be called with the source code and the
- * resulting CompilationUnit. The check implementation should
- * test for what it wants to test and fail (via assert or other
- * exception) if it is not happy.
- */
-abstract class CompilerTest extends DirectTest {
- def check(source: String, unit: global.CompilationUnit): Unit
-
- lazy val global: Global = newCompiler()
- lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
- import global._
- import definitions.{ compilerTypeFromTag }
-
- override def extraSettings = "-usejavacp -d " + testOutput.path
-
- def show() = (sources, units).zipped foreach check
-
- // Override at least one of these...
- def code = ""
- def sources: List[String] = List(code)
-
- // Utility functions
- class MkType(sym: Symbol) {
- def apply[M](implicit t: ru.TypeTag[M]): Type =
- if (sym eq NoSymbol) NoType
- else appliedType(sym, compilerTypeFromTag(t))
- }
- implicit def mkMkType(sym: Symbol) = new MkType(sym)
-
- def allMembers(root: Symbol): List[Symbol] = {
- def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
- val latest = roots flatMap (_.info.members) filterNot (seen contains _)
- if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
- else loop(seen ++ latest, latest)
- }
- loop(Set(), List(root))
- }
-
- class SymsInPackage(pkgName: String) {
- def pkg = rootMirror.getPackage(pkgName)
- def classes = allMembers(pkg) filter (_.isClass)
- def modules = allMembers(pkg) filter (_.isModule)
- def symbols = classes ++ terms filterNot (_ eq NoSymbol)
- def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
- def tparams = classes flatMap (_.info.typeParams)
- def tpes = symbols map (_.tpe) distinct
- }
-}
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
deleted file mode 100644
index 2e6c3baa02..0000000000
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.nsc._
-import settings.ScalaVersion
-import util.{ SourceFile, BatchSourceFile }
-import reporters.{Reporter, ConsoleReporter}
-import scala.tools.cmd.CommandLineParser
-
-/** A class for testing code which is embedded as a string.
- * It allows for more complete control over settings, compiler
- * configuration, sequence of events, etc. than does partest.
- */
-abstract class DirectTest extends App {
- // The program being tested in some fashion
- def code: String
- // produce the output to be compared against a checkfile
- def show(): Unit
-
- // the test file or dir, and output directory
- def testPath = SFile(sys.props("partest.test-path"))
- def testOutput = Directory(sys.props("partest.output"))
-
- // override to add additional settings with strings
- def extraSettings: String = ""
- // a default Settings object
- def settings: Settings = newSettings(CommandLineParser tokenize extraSettings)
- // a custom Settings object
- def newSettings(args: List[String]) = {
- val s = new Settings
- val allArgs = args ++ (CommandLineParser tokenize debugSettings)
- log("newSettings: allArgs = " + allArgs)
- s processArguments (allArgs, true)
- s
- }
- // new compiler
- def newCompiler(args: String*): Global = {
- val settings = newSettings((CommandLineParser tokenize ("-d \"" + testOutput.path + "\" " + extraSettings)) ++ args.toList)
- newCompiler(settings)
- }
-
- def newCompiler(settings: Settings): Global = Global(settings, reporter(settings))
-
- def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
-
- private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
- codes.toList.zipWithIndex map {
- case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
- }
-
- def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
- def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
-
- def compileString(global: Global)(sourceCode: String): Boolean = {
- withRun(global)(_ compileSources newSources(sourceCode))
- !global.reporter.hasErrors
- }
-
- def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
- sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
- }
-
- def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
- withRun(global) { run =>
- run compileSources files
- run.units.toList
- }
- }
-
- def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
- val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
- if (global.reporter.hasErrors) {
- global.reporter.flush()
- sys.error("Compilation failure.")
- }
- units
- }
-
- def withRun[T](global: Global)(f: global.Run => T): T = {
- global.reporter.reset()
- f(new global.Run)
- }
-
- // compile the code, optionally first adding to the settings
- def compile(args: String*) = compileString(newCompiler(args: _*))(code)
-
- /** Constructor/main body **/
- try show()
- catch { case t: Exception => println(t.getMessage) ; t.printStackTrace ; sys.exit(1) }
-
- /** Debugger interest only below this line **/
- protected def isDebug = (sys.props contains "partest.debug") || (sys.env contains "PARTEST_DEBUG")
- protected def debugSettings = sys.props.getOrElse("partest.debug.settings", "")
-
- final def log(msg: => Any) {
- if (isDebug) Console.err println msg
- }
-
- /**
- * Run a test only if the current java version is at least the version specified.
- */
- def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
-
- class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
- val javaVersion = System.getProperty("java.specification.version")
-
- // the "ScalaVersion" class parses Java specification versions just fine
- val requiredJavaVersion = ScalaVersion(version)
- val executingJavaVersion = ScalaVersion(javaVersion)
- val shouldRun = executingJavaVersion >= requiredJavaVersion
- val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
-
- def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
-
- /*
- * If the current Java version is at least 'version' then 'yesRun' is evaluated,
- * otherwise 'fallback' is evaluated.
- */
- def otherwise(fallback: =>A): A = {
- logInfo()
- if (shouldRun) yesRun else fallback
- }
- }
-}
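For reference, a minimal sketch of a test that would have extended the DirectTest class deleted above; the embedded source and the -usejavacp setting are illustrative, only the abstract members come from the class itself.

    import scala.tools.partest.DirectTest

    object Test extends DirectTest {
      override def extraSettings = "-usejavacp"
      // the program under test, embedded as a string
      def code = "class C { def f = 42 }"
      // compile() returns true when the embedded source compiles without errors
      def show(): Unit = println(if (compile()) "compiled" else "failed")
    }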
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
deleted file mode 100644
index b12ec0de61..0000000000
--- a/src/partest/scala/tools/partest/IcodeTest.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.partest.nest.FileUtil.compareContents
-
-/** A trait for testing icode. All you need is this in a
- * partest source file:
- * {{{
- * object Test extends IcodeTest
- * }}}
- * And then the generated output will be the icode for everything
- * in that file. See source for possible customizations.
- */
-abstract class IcodeTest extends DirectTest {
- // override to check icode at a different point.
- def printIcodeAfterPhase = "icode"
- // override to use source code other than the file being tested.
- def code = testPath.slurp()
-
- override def extraSettings: String = "-usejavacp -Xprint-icode:" + printIcodeAfterPhase
-
- // Compile, read in all the *.icode files, delete them, and return their contents
- def collectIcode(args: String*): List[String] = {
- compile("-d" :: testOutput.path :: args.toList : _*)
- val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode")
-
- try icodeFiles sortBy (_.name) flatMap (f => f.lines.toList)
- finally icodeFiles foreach (f => f.delete())
- }
-
- // Default show() compiles the code with and without optimization and
- // outputs the diff.
- def show() {
- val lines1 = collectIcode("")
- val lines2 = collectIcode("-optimise")
-
- println(compareContents(lines1, lines2))
- }
-}
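A sketch of the customizations mentioned in the comment above; the phase name and the source snippet are illustrative.

    import scala.tools.partest.IcodeTest

    object Test extends IcodeTest {
      // inspect icode after dead-code elimination rather than right after the icode phase
      override def printIcodeAfterPhase = "dce"
      // test a fixed snippet instead of slurping the partest source file itself
      override def code = "class C { def f(b: Boolean) = if (b) 1 else 2 }"
    }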
diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala
deleted file mode 100644
index 58d25d2f01..0000000000
--- a/src/partest/scala/tools/partest/MemoryTest.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-abstract class MemoryTest {
- def maxDelta: Double
- def calcsPerIter: Int
- def calc(): Unit
-
- def main(args: Array[String]) {
- val rt = Runtime.getRuntime()
- def memUsage() = {
- import java.lang.management._
- import scala.collection.JavaConverters._
- val pools = ManagementFactory.getMemoryPoolMXBeans.asScala
- pools.map(_.getUsage.getUsed).sum / 1000000d
- }
-
- val history = scala.collection.mutable.ListBuffer[Double]()
- def stressTestIter() = {
- var i = 0
- while (i < calcsPerIter) { calc(); i += 1 }
- 1 to 5 foreach (_ => rt.gc())
- history += memUsage
- }
-
- 1 to 5 foreach (_ => stressTestIter())
- val reference = memUsage()
- 1 to 5 foreach (_ => stressTestIter())
- 1 to 5 foreach (_ => rt.gc())
- val result = memUsage()
- history += result
-
- val delta = result - reference
- if (delta > maxDelta) {
- println("FAILED")
- history foreach (mb => println(mb + " Mb"))
- }
- }
-}
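A hypothetical concrete MemoryTest; the thresholds and the allocation in calc() are made up, only the abstract members come from the class above.

    import scala.tools.partest.MemoryTest

    object Test extends MemoryTest {
      def maxDelta = 5.0        // allowed growth between checkpoints, in MB
      def calcsPerIter = 100    // calc() invocations per stress iteration
      def calc(): Unit = {
        // the workload whose memory behaviour is being measured
        List.fill(100)(new Array[Byte](1024))
        ()
      }
    }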
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
deleted file mode 100644
index 8478edeb4d..0000000000
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.tools
-package partest
-
-import scala.concurrent.duration.Duration
-import scala.tools.nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import java.lang.Runtime.{ getRuntime => runtime }
-
-object PartestDefaults {
-
- def testRootName = propOrNone("partest.root")
- def srcDirName = propOrElse("partest.srcdir", "files")
- def testRootDir = testRootName map (x => Directory(x))
-
- // def classPath = propOrElse("partest.classpath", "")
- def classPath = PathResolver.Environment.javaUserClassPath // XXX
-
- def javaCmd = propOrElse("partest.javacmd", "java")
- def javacCmd = propOrElse("partest.javac_cmd", "javac")
- def javaOpts = propOrElse("partest.java_opts", "")
- def scalacOpts = propOrElse("partest.scalac_opts", "")
-
- def testBuild = propOrNone("partest.build")
- def errorCount = propOrElse("partest.errors", "0").toInt
- def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse runtime.availableProcessors
- def waitTime = propOrNone("partest.timeout") map (Duration.apply) getOrElse Duration("4 hours")
-
- //def timeout = "1200000" // per-test timeout
-}
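All of these defaults are read from system properties; a sketch of overriding a few of them before partest consults this object (the values are just examples).

    sys.props("partest.srcdir")  = "pending"
    sys.props("partest.threads") = "4"
    sys.props("partest.timeout") = "30 minutes"   // parsed with scala.concurrent.duration.Duration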
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
deleted file mode 100644
index 8b88021dbf..0000000000
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ /dev/null
@@ -1,207 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import scala.util.Properties.setProp
-import scala.tools.ant.sabbus.CompilationPathProperty
-import java.lang.reflect.Method
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{ Reference, FileSet}
-import org.apache.tools.ant.types.Commandline.Argument
-import scala.tools.ant.ScalaTask
-
-/** An Ant task to execute the Scala test suite (NSC).
- *
- * This task can take the following parameters as attributes:
- * - `srcdir`,
- * - `classpath`,
- * - `classpathref`,
- * - `erroronfailed`,
- * - `javacmd`,
- * - `javaccmd`,
- * - `scalacopts`,
- * - `debug`,
- * - `junitreportdir`.
- *
- * It also takes the following parameters as nested elements:
- * - `compilationpath`.
- *
- * @author Philipp Haller
- */
-class PartestTask extends Task with CompilationPathProperty with ScalaTask {
- type Path = org.apache.tools.ant.types.Path
-
- private var kinds: List[String] = Nil
- private var classpath: Option[Path] = None
- private var debug = false
- private var errorOnFailed: Boolean = true
- private var jUnitReportDir: Option[File] = None
- private var javaccmd: Option[File] = None
- private var javacmd: Option[File] = Option(sys.props("java.home")) map (x => new File(x, "bin/java"))
- private var scalacArgs: Option[Seq[Argument]] = None
- private var srcDir: Option[String] = None
- private var colors: Int = 0
-
- def setSrcDir(input: String) {
- srcDir = Some(input)
- }
-
- def setColors(input: String) {
- try colors = input.toInt catch { case _: NumberFormatException => () }
- if (colors > 0)
- sys.props("partest.colors") = colors.toString
- }
-
- def setClasspath(input: Path) {
- if (classpath.isEmpty)
- classpath = Some(input)
- else
- classpath.get.append(input)
- }
-
- def createClasspath(): Path = {
- if (classpath.isEmpty) classpath = Some(new Path(getProject()))
- classpath.get.createPath()
- }
-
- def setClasspathref(input: Reference) {
- createClasspath().setRefid(input)
- }
- def setErrorOnFailed(input: Boolean) {
- errorOnFailed = input
- }
-
- def setJavaCmd(input: File) {
- javacmd = Some(input)
- }
-
- def setKinds(input: String) {
- kinds = words(input)
- }
-
- def setJavacCmd(input: File) {
- javaccmd = Some(input)
- }
-
- def setScalacOpts(input: String) {
- val s = input.split(' ').map { s => val a = new Argument; a.setValue(s); a }
- scalacArgs = Some(scalacArgs.getOrElse(Seq()) ++ s)
- }
-
- def createCompilerArg(): Argument = {
- val a = new Argument
- scalacArgs = Some(scalacArgs.getOrElse(Seq()) :+ a)
- a
- }
-
- def setDebug(input: Boolean) {
- debug = input
- }
-
- def setJUnitReportDir(input: File) {
- jUnitReportDir = Some(input)
- }
-
- override def execute() {
- if (debug || sys.props.contains("partest.debug")) {
- nest.NestUI.setDebug()
- }
-
- srcDir foreach (x => setProp("partest.srcdir", x))
-
- val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.")
- val cpfiles = classpath.list map { fs => new File(fs) } toList
- def findCp(name: String) = cpfiles find (f =>
- (f.getName == s"scala-$name.jar")
- || (f.absolutePathSegments endsWith Seq("classes", name))
- ) getOrElse sys.error(s"Provided classpath does not contain a Scala $name element.")
-
- val scalaLibrary = findCp("library")
- val scalaReflect = findCp("reflect")
- val scalaCompiler = findCp("compiler")
- val scalaPartest = findCp("partest")
- val scalaActors = findCp("actors")
-
- def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
- val parts = a.getParts
- if (parts eq null) Nil else parts.toSeq
- })
-
- val antRunner = new scala.tools.partest.nest.AntRunner
- val antFileManager = antRunner.fileManager
-
- // antFileManager.failed = runFailed
- antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
- antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
- antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
- antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
- antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
- antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
-
- javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
- javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
- scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
-
- def runSet(kind: String, files: Array[File]): (Int, Int, List[String]) = {
- if (files.isEmpty) (0, 0, List())
- else {
- log(s"Running ${files.length} tests in '$kind' at $now")
- // log(s"Tests: ${files.toList}")
- val results = antRunner.reflectiveRunTestsForFiles(files, kind)
- val (passed, failed) = results partition (_.isOk)
- val numPassed = passed.size
- val numFailed = failed.size
- def failedMessages = failed map (_.longStatus)
-
- log(s"Completed '$kind' at $now")
-
- // create JUnit Report xml files if directory was specified
- jUnitReportDir foreach { d =>
- d.mkdir
-
- val report = testReport(kind, results, numPassed, numFailed)
- scala.xml.XML.save(d.getAbsolutePath+"/"+kind+".xml", report)
- }
-
- (numPassed, numFailed, failedMessages)
- }
- }
-
- val _results = kinds map (k => runSet(k, TestKinds testsFor k map (_.jfile) toArray))
- val allSuccesses = _results map (_._1) sum
- val allFailures = _results map (_._2) sum
- val allFailedPaths = _results flatMap (_._3)
-
- def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0)
- "Test suite finished with %d case%s failing:\n".format(allFailures, s)+
- allFailedPaths.mkString("\n")
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
- }
-
- private def oneResult(res: TestState) =
- <testcase name={res.testIdent}>{
- if (res.isOk) scala.xml.NodeSeq.Empty
- else <failure message="Test failed"/>
- }</testcase>
-
- private def testReport(kind: String, results: Iterable[TestState], succs: Int, fails: Int) =
- <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- <properties/>
- {
- results map oneResult
- }
- </testsuite>
-}
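The task is normally configured from an Ant build file via the partest taskdef (see antlib.xml further down); as a sketch of the attribute mapping, the setters above correspond one-to-one to the XML attributes. The values below are illustrative.

    import scala.tools.partest.PartestTask

    val task = new PartestTask
    task.setKinds("pos neg run")    // kinds attribute
    task.setSrcDir("files")         // srcdir attribute
    task.setErrorOnFailed(true)     // erroronfailed attribute
    // execute() additionally requires the nested compilationpath element to be configured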
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
deleted file mode 100644
index 1f1c8a95ea..0000000000
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import java.security._
-import java.util._
-
-abstract class SecurityTest extends App {
- def throwIt(x: Any) = throw new AccessControlException("" + x)
- def propertyCheck(p: PropertyPermission): Unit = throwIt(p)
-
- def check(perm: Permission): Unit = perm match {
- case p: PropertyPermission => propertyCheck(p)
- case _ => ()
- }
-}
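A sketch of the dispatch in check(): property permissions are rejected, everything else falls through; both permissions below are illustrative.

    import scala.tools.partest.SecurityTest

    object Test extends SecurityTest {
      try check(new java.util.PropertyPermission("user.home", "read"))
      catch { case ace: java.security.AccessControlException => println("denied: " + ace.getMessage) }
      check(new java.net.SocketPermission("localhost", "resolve"))   // not a PropertyPermission: ignored
    }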
diff --git a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
deleted file mode 100644
index 7f3604c86c..0000000000
--- a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.reporters.StoreReporter
-import scala.collection.mutable
-
-trait StoreReporterDirectTest extends DirectTest {
- lazy val storeReporter: StoreReporter = new scala.tools.nsc.reporters.StoreReporter()
-
- /** Discards all but the first message issued at a given position. */
- def filteredInfos: Seq[storeReporter.Info] = storeReporter.infos.groupBy(_.pos).map(_._2.head).toList
-
- /** Hook into [[scala.tools.partest.DirectTest]] to install the custom reporter */
- override def reporter(settings: Settings) = storeReporter
-}
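A sketch of the intended use: compile a snippet and inspect the buffered diagnostics instead of printing them to the console; the snippet is hypothetical.

    import scala.tools.partest.StoreReporterDirectTest

    object Test extends StoreReporterDirectTest {
      def code = """class C { def f: Int = "not an Int" }"""
      def show(): Unit = {
        compile()
        // one Info per distinct position, courtesy of filteredInfos
        filteredInfos foreach (info => println(s"${info.severity}: ${info.msg}"))
      }
    }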
diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala
deleted file mode 100644
index b4e8afd0d2..0000000000
--- a/src/partest/scala/tools/partest/TestKinds.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.tools
-package partest
-
-import nest.PathSettings.srcDir
-
-object TestKinds {
- val standardKinds = ("pos neg run jvm res scalacheck scalap specialized instrumented presentation ant" split "\\s+").toList
-
- def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml")
- def denotesTestDir(p: Path) = kindOf(p) match {
- case "res" => false
- case _ => p.isDirectory && p.extension == ""
- }
- def denotesTestPath(p: Path) = denotesTestDir(p) || denotesTestFile(p)
-
- // TODO
- def isTestForPartest(p: Path) = (
- (p.name == "intentional-failure.scala")
- || (p.path contains "test-for-partest")
- )
-
- def kindOf(p: Path) = {
- p.toAbsolute.segments takeRight 2 head
-
- // (srcDir relativize p.toCanonical).segments match {
- // case (".." :: "scaladoc" :: xs) => xs.head
- // case xs => xs.head
- // }
- }
- def logOf(p: Path) = {
- p.parent / s"${p.stripExtension}-${kindOf(p)}.log"
- // p.parent / s"${p.stripExtension}.log"
- }
-
- // true if a test path matches the --grep expression.
- private def pathMatchesExpr(path: Path, expr: String) = {
- // Matches the expression if any source file contains the expr,
- // or if the checkfile contains it, or if the filename contains
- // it (the last is case-insensitive.)
- def matches(p: Path) = (
- (p.path.toLowerCase contains expr.toLowerCase)
- || (p.fileContents contains expr)
- )
- def candidates = {
- (path changeExtension "check") +: {
- if (path.isFile) List(path)
- else path.toDirectory.deepList() filter (_.isJavaOrScala) toList
- }
- }
-
- (candidates exists matches)
- }
-
- def groupedTests(paths: List[Path]): List[(String, List[Path])] =
- (paths.distinct groupBy kindOf).toList sortBy (standardKinds indexOf _._1)
-
- /** Includes tests for testing partest. */
- private def allTestsForKind(kind: String): List[Path] =
- (srcDir / kind toDirectory).list.toList filter denotesTestPath
-
- def testsForPartest: List[Path] = standardKinds flatMap allTestsForKind filter isTestForPartest
- def testsFor(kind: String): List[Path] = allTestsForKind(kind) filterNot isTestForPartest
- def grepFor(expr: String): List[Path] = standardTests filter (t => pathMatchesExpr(t, expr))
- def standardTests: List[Path] = standardKinds flatMap testsFor
- def failedTests: List[Path] = standardTests filter (p => logOf(p).isFile)
-}
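A worked example of the path-based helpers above; the path is hypothetical.

    import scala.tools.nsc.io.Path
    import scala.tools.partest.TestKinds

    val p = Path("test/files/pos/t1234.scala")
    TestKinds.kindOf(p)   // "pos" -- the second-to-last segment of the absolute path
    TestKinds.logOf(p)    // test/files/pos/t1234-pos.log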
diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala
deleted file mode 100644
index dbe8a222a5..0000000000
--- a/src/partest/scala/tools/partest/TestState.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.FatalError
-import scala.tools.nsc.util.stackTraceString
-
-sealed abstract class TestState {
- def testFile: File
- def what: String
- def reason: String
- def transcript: List[String]
-
- def isOk = false
- def isSkipped = false
- def testIdent = testFile.testIdent
- def transcriptString = transcript mkString EOL
-
- def identAndReason = testIdent + reasonString
- def status = s"$what - $identAndReason"
- def longStatus = status + transcriptString
- def reasonString = if (reason == "") "" else s" [$reason]"
-
- def shortStatus = if (isOk) "ok" else "!!"
-
- override def toString = status
-}
-
-object TestState {
- case class Uninitialized(testFile: File) extends TestState {
- def what = "uninitialized"
- def reason = what
- def transcript = Nil
- override def shortStatus = "??"
- }
- case class Pass(testFile: File) extends TestState {
- def what = "pass"
- override def isOk = true
- def transcript: List[String] = Nil
- def reason = ""
- }
- case class Updated(testFile: File) extends TestState {
- def what = "updated"
- override def isOk = true
- def transcript: List[String] = Nil
- def reason = "updated check file"
- override def shortStatus = "++"
- }
- case class Skip(testFile: File, reason: String) extends TestState {
- def what = "skip"
- override def isOk = true
- override def isSkipped = true
- def transcript: List[String] = Nil
- override def shortStatus = "--"
- }
- case class Fail(testFile: File, reason: String, transcript: List[String]) extends TestState {
- def what = "fail"
- }
- case class Crash(testFile: File, caught: Throwable, transcript: List[String]) extends TestState {
- def what = "crash"
- def reason = s"caught $caught_s - ${caught.getMessage}"
-
- private def caught_s = (caught.getClass.getName split '.').last
- private def stack_s = stackTraceString(caught)
- override def transcriptString = nljoin(super.transcriptString, caught_s)
- }
-}
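A small sketch of how a runner consumes these states; the file name is made up, and testIdent (an enrichment from the partest package object) is not exercised here.

    import scala.tools.partest.TestState

    val state: TestState = TestState.Fail(new java.io.File("t1234.scala"), "output differs", Nil)
    state.isOk           // false
    state.shortStatus    // "!!"
    state.reasonString   // " [output differs]"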
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
deleted file mode 100644
index 5c177ac962..0000000000
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-import scala.reflect.{ classTag, ClassTag }
-
-trait TestUtil {
- /** Given function and block of code, evaluates code block,
- * calls function with nanoseconds elapsed, and returns block result.
- */
- def timed[T](f: Long => Unit)(body: => T): T = {
- val start = System.nanoTime
- val result = body
- val end = System.nanoTime
-
- f(end - start)
- result
- }
- /** Times body and returns (nanos, result).
- */
- def alsoNanos[T](body: => T): (Long, T) = {
- var nanos = 0L
- val result = timed(nanos = _)(body)
-
- (nanos, result)
- }
- def nanos(body: => Unit): Long = alsoNanos(body)._1
-
- def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
- try {
- code
- assert(false, "did not throw " + classTag[T])
- } catch {
- case ex: Exception if classTag[T].runtimeClass isInstance ex =>
- }
-}
-
-// Used in tests.
-object TestUtil extends TestUtil {
-}
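A sketch using the helpers above; the measured expression is arbitrary.

    import scala.tools.partest.TestUtil._

    val (elapsed, total) = alsoNanos((1 to 1000000).sum)
    println(s"summed to $total in $elapsed ns")

    intercept[ArithmeticException] { 1 / 0; () }   // passes: the expected exception was thrown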
diff --git a/src/partest/scala/tools/partest/antlib.xml b/src/partest/scala/tools/partest/antlib.xml
deleted file mode 100644
index b3b98e853f..0000000000
--- a/src/partest/scala/tools/partest/antlib.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<antlib>
- <taskdef name="partest"
- classname="scala.tools.partest.PartestTask"/>
-</antlib>
diff --git a/src/partest/scala/tools/partest/instrumented/Profiler.java b/src/partest/scala/tools/partest/instrumented/Profiler.java
deleted file mode 100644
index e267e197e7..0000000000
--- a/src/partest/scala/tools/partest/instrumented/Profiler.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.instrumented;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A simple profiler class that counts method invocations. It is used in byte-code instrumentation by inserting a
- * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented method.
- *
- * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It is made public only
- * because it must be universally accessible for instrumentation needs. If you want to profile your test, use
- * {@link Instrumentation} instead.
- */
-public class Profiler {
-
- private static boolean isProfiling = false;
- private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
-
- static public class MethodCallTrace {
- final String className;
- final String methodName;
- final String methodDescriptor;
-
- public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
- this.className = className;
- this.methodName = methodName;
- this.methodDescriptor = methodDescriptor;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof MethodCallTrace)) {
- return false;
- } else {
- MethodCallTrace that = (MethodCallTrace) obj;
- return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
- }
- }
- @Override
- public int hashCode() {
- return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
- }
- }
-
- public static void startProfiling() {
- isProfiling = true;
- }
-
- public static void stopProfiling() {
- isProfiling = false;
- }
-
- public static boolean isProfiling() {
- return isProfiling;
- }
-
- public static void resetProfiling() {
- counts = new HashMap<MethodCallTrace, Integer>();
- }
-
- public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
- if (isProfiling) {
- MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
- Integer counter = counts.get(trace);
- if (counter == null) {
- counts.put(trace, 1);
- } else {
- counts.put(trace, counter+1);
- }
- }
- }
-
- public static Map<MethodCallTrace, Integer> getStatistics() {
- return new HashMap<MethodCallTrace, Integer>(counts);
- }
-
-}
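A hypothetical sketch of the raw call sequence; as the warning above says, real tests are expected to go through the Instrumentation helper rather than calling Profiler directly.

    import scala.tools.partest.instrumented.Profiler

    Profiler.startProfiling()
    // ... exercise code from instrumented classes here ...
    Profiler.stopProfiling()
    val counts = Profiler.getStatistics()   // snapshot: MethodCallTrace -> invocation count
    println(counts.size + " distinct methods were recorded")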
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
deleted file mode 100644
index 878c8613d5..0000000000
--- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.ClassFileTransformer;
-import java.security.ProtectionDomain;
-
-import scala.tools.asm.ClassReader;
-import scala.tools.asm.ClassWriter;
-
-public class ASMTransformer implements ClassFileTransformer {
-
- private boolean shouldTransform(String className) {
- return
- // do not instrument instrumentation logic (in order to avoid infinite recursion)
- !className.startsWith("scala/tools/partest/instrumented/") &&
- !className.startsWith("scala/tools/partest/javaagent/") &&
- // we instrument all classes from empty package
- (!className.contains("/") ||
- // we instrument all classes from scala package
- className.startsWith("scala/") ||
- // we instrument all classes from `instrumented` package
- className.startsWith("instrumented/"));
- }
-
- public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
- if (shouldTransform(className)) {
- ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
- @Override protected String getCommonSuperClass(final String type1, final String type2) {
- // Since we are not recomputing the stack frame map, this should never be called. We override
- // this method because the default implementation uses reflection and might try to load the class
- // that we are currently processing, which leads to weird results like swallowed exceptions and
- // classes not being transformed.
- throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
- ") while transforming " + className);
- }
- };
- ProfilerVisitor visitor = new ProfilerVisitor(writer);
- ClassReader reader = new ClassReader(classfileBuffer);
- reader.accept(visitor, 0);
- return writer.toByteArray();
- } else {
- return classfileBuffer;
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
deleted file mode 100644
index 3b18987040..0000000000
--- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.Instrumentation;
-import java.lang.instrument.UnmodifiableClassException;
-
-/**
- * Profiling agent that instruments byte-code to insert calls to
- * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
- * by using the ASM library for byte-code manipulation.
- */
-public class ProfilingAgent {
- public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
- // NOTE: we are adding a transformer that won't be applied to classes that are already loaded.
- // This should be ok because premain is executed before main, so the Scala library
- // and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
- // not depend on the Scala library. In case our assumptions are wrong, we can always insert a call to
- // inst.retransformClasses.
- inst.addTransformer(new ASMTransformer(), false);
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
deleted file mode 100644
index 1d3b79171b..0000000000
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package nest
-
-class AntRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): List[TestState] =
- runTestsForFiles(kindFiles.toList, kind)
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
deleted file mode 100644
index b436675d3a..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-
-
-package scala.tools.partest
-package nest
-
-import java.io.{ FilenameFilter, IOException }
-import java.net.URI
-import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
-import scala.tools.nsc.{ io, util }
-import PathResolver.{ Environment, Defaults }
-
-class ConsoleFileManager extends FileManager {
- var testBuild: Option[String] = PartestDefaults.testBuild
- def testBuildFile = testBuild map (testParent / _)
-
- var testClasses: Option[String] = None
-
- def this(buildPath: String, rawClasses: Boolean) = {
- this()
- if (rawClasses)
- testClasses = Some(buildPath)
- else
- testBuild = Some(buildPath)
- // re-run because initialization of default
- // constructor must be updated
- findLatest()
- }
-
- def this(buildPath: String) = {
- this(buildPath, false)
- }
-
- def this(buildPath: String, rawClasses: Boolean, moreOpts: String) = {
- this(buildPath, rawClasses)
- SCALAC_OPTS = SCALAC_OPTS ++ moreOpts.split(' ').toSeq.filter(_.length > 0)
- }
-
- lazy val srcDir = PathSettings.srcDir
- lazy val testRootDir = PathSettings.testRoot
- lazy val testRootPath = testRootDir.toAbsolute.path
- def testParent = testRootDir.parent
-
- var CLASSPATH = PartestDefaults.classPath
- var JAVACMD = PartestDefaults.javaCmd
- var JAVAC_CMD = PartestDefaults.javacCmd
-
-
- vlog("CLASSPATH: "+CLASSPATH)
-
- if (!srcDir.isDirectory) {
- NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
- sys.exit(1)
- }
-
- CLASSPATH = {
- val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.toCanonical.path)
-
- // add all jars in libs
- (CLASSPATH :: libs.toList) mkString pathSeparator
- }
-
- def findLatest() {
- vlog("test parent: "+testParent)
-
- def prefixFileWith(parent: File, relPath: String) = (SFile(parent) / relPath).toCanonical
- def prefixFile(relPath: String) = (testParent / relPath).toCanonical
-
- if (!testClasses.isEmpty) {
- testClassesDir = Path(testClasses.get).toCanonical.toDirectory
- vlog("Running with classes in "+testClassesDir)
-
- latestLibFile = testClassesDir / "library"
- latestActorsFile = testClassesDir / "library" / "actors"
- latestReflectFile = testClassesDir / "reflect"
- latestCompFile = testClassesDir / "compiler"
- latestPartestFile = testClassesDir / "partest"
- }
- else if (testBuild.isDefined) {
- val dir = Path(testBuild.get)
- vlog("Running on "+dir)
- latestLibFile = dir / "lib/scala-library.jar"
- latestActorsFile = dir / "lib/scala-actors.jar"
- latestReflectFile = dir / "lib/scala-reflect.jar"
- latestCompFile = dir / "lib/scala-compiler.jar"
- latestPartestFile = dir / "lib/scala-partest.jar"
- }
- else {
- def setupQuick() {
- vlog("Running build/quick")
- latestLibFile = prefixFile("build/quick/classes/library")
- latestActorsFile = prefixFile("build/quick/classes/library/actors")
- latestReflectFile = prefixFile("build/quick/classes/reflect")
- latestCompFile = prefixFile("build/quick/classes/compiler")
- latestPartestFile = prefixFile("build/quick/classes/partest")
- }
-
- def setupInst() {
- vlog("Running dist (installed)")
- val p = testParent.getParentFile
- latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
- latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
- latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
- latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
- latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
- }
-
- def setupDist() {
- vlog("Running dists/latest")
- latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
- latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
- latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
- latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
- }
-
- def setupPack() {
- vlog("Running build/pack")
- latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
- latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
- latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
- latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
- }
-
- def mostRecentOf(base: String, names: String*) =
- names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
-
- // detect most recent build
- val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "reflect/reflect.properties", "library/library.properties")
- val packTime = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
- val distTime = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
- val instTime = mostRecentOf("lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
-
- val pairs = Map(
- (quickTime, () => setupQuick()),
- (packTime, () => setupPack()),
- (distTime, () => setupDist()),
- (instTime, () => setupInst())
- )
-
- // run setup based on most recent time
- pairs(pairs.keys max)()
- }
-
- LATEST_LIB = latestLibFile.getAbsolutePath
- LATEST_REFLECT = latestReflectFile.getAbsolutePath
- LATEST_COMP = latestCompFile.getAbsolutePath
- LATEST_PARTEST = latestPartestFile.getAbsolutePath
- LATEST_ACTORS = latestActorsFile.getAbsolutePath
- }
-
- var LATEST_LIB: String = ""
- var LATEST_REFLECT: String = ""
- var LATEST_COMP: String = ""
- var LATEST_PARTEST: String = ""
- var LATEST_ACTORS: String = ""
-
- var latestLibFile: File = _
- var latestActorsFile: File = _
- var latestReflectFile: File = _
- var latestCompFile: File = _
- var latestPartestFile: File = _
- //def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
- //def latestScalapFile: File = new File(latestLibFile.getParentFile, "scalap.jar")
- var testClassesDir: Directory = _
- // initialize above fields
- findLatest()
-
- /*
- def getFiles(kind: String, cond: Path => Boolean): List[File] = {
- def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
-
- val dir = Directory(srcDir / kind)
-
- if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
- else NestUI.failure("Directory '%s' not found" format dir)
-
- val files = dir.list filterNot ignoreDir filter cond toList
-
- ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
- }
- */
- var latestFjbgFile: File = _
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
deleted file mode 100644
index 332131ca3a..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-package nest
-
-import utils.Properties._
-import scala.tools.nsc.Properties.{ versionMsg, setProp }
-import scala.collection.{ mutable, immutable }
-import PathSettings.srcDir
-import TestKinds._
-import scala.reflect.internal.util.Collections.distinctBy
-import scala.tools.cmd.{ CommandLine, CommandLineParser, Instance }
-
-class ConsoleRunner(argstr: String) extends {
- val parsed = ConsoleRunnerSpec.creator(CommandLineParser tokenize argstr)
-} with DirectRunner with ConsoleRunnerSpec with Instance {
- import NestUI._
- import NestUI.color._
-
- // So we can ctrl-C a test run and still hear all
- // the buffered failure info.
- scala.sys addShutdownHook issueSummaryReport()
-
- var fileManager: ConsoleFileManager = _
-
- private var totalTests = 0
- private val passedTests = mutable.ListBuffer[TestState]()
- private val failedTests = mutable.ListBuffer[TestState]()
-
- def comment(s: String) = echo(magenta("# " + s))
- def levyJudgment() = {
- if (totalTests == 0) echoMixed("No tests to run.")
- else if (elapsedMillis == 0) echoMixed("Test Run ABORTED")
- else if (isSuccess) echoPassed("Test Run PASSED")
- else echoFailed("Test Run FAILED")
- }
-
- def passFailString(passed: Int, failed: Int, skipped: Int): String = {
- val total = passed + failed + skipped
- val isSuccess = failed == 0
- def p0 = s"$passed/$total"
- def p = ( if (isSuccess) bold(green(p0)) else p0 ) + " passed"
- def f = if (failed == 0) "" else bold(red("" + failed)) + " failed"
- def s = if (skipped == 0) "" else bold(yellow("" + skipped)) + " skipped"
-
- oempty(p, f, s) mkString ", "
- }
-
- private var summarizing = false
- private var elapsedMillis = 0L
- private var expectedFailures = 0
- private def isSuccess = failedTests.size == expectedFailures
-
- def issueSummaryReport() {
- // Don't run twice
- if (!summarizing) {
- summarizing = true
-
- val passed0 = passedTests.toList
- val failed0 = failedTests.toList
- val passed = passed0.size
- val failed = failed0.size
- val skipped = totalTests - (passed + failed)
- val passFail = passFailString(passed, failed, skipped)
- val elapsed = if (elapsedMillis > 0) " (elapsed time: " + elapsedString(elapsedMillis) + ")" else ""
- val message = passFail + elapsed
-
- if (failed0.nonEmpty) {
- if (isPartestVerbose) {
- echo(bold(cyan("##### Transcripts from failed tests #####\n")))
- failed0 foreach { state =>
- comment("partest " + state.testFile)
- echo(state.transcriptString + "\n")
- }
- }
-
- def files_s = failed0.map(_.testFile).mkString(""" \""" + "\n ")
- echo("# Failed test paths (this command will update checkfiles)")
- echo("test/partest --update-check \\\n " + files_s + "\n")
- }
-
- echo(message)
- levyJudgment()
- }
- }
-
- def run(): Unit = {
- if (optDebug) NestUI.setDebug()
- if (optVerbose) NestUI.setVerbose()
- if (optTerse) NestUI.setTerse()
- if (optShowDiff) NestUI.setDiffOnFail()
-
- // Early return on no args, version, or invalid args
- if (optVersion) return echo(versionMsg)
- if ((argstr == "") || optHelp) return NestUI.usage()
-
- val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath
- if (invalid.nonEmpty) {
- if (isPartestVerbose)
- invalid foreach (p => echoWarning(s"Discarding invalid test path " + p))
- else if (!isPartestTerse)
- echoWarning(s"Discarding ${invalid.size} invalid test paths")
- }
-
- optSourcePath foreach (x => setProp("partest.srcdir", x))
- optTimeout foreach (x => setProp("partest.timeout", x))
-
- fileManager =
- if (optBuildPath.isDefined) new ConsoleFileManager(optBuildPath.get)
- else if (optClassPath.isDefined) new ConsoleFileManager(optClassPath.get, true)
- else if (optPack) new ConsoleFileManager("build/pack")
- else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
-
- fileManager.updateCheck = optUpdateCheck
- fileManager.failed = optFailed
-
- val partestTests = (
- if (optSelfTest) TestKinds.testsForPartest
- else Nil
- )
-
- val grepExpr = optGrep getOrElse ""
-
- // If --grep is given we suck in every file it matches.
- val greppedTests = if (grepExpr == "") Nil else {
- val paths = grepFor(grepExpr)
- if (paths.isEmpty)
- echoWarning(s"grep string '$grepExpr' matched no tests.\n")
-
- paths.sortBy(_.toString)
- }
-
- val isRerun = optFailed
- val rerunTests = if (isRerun) TestKinds.failedTests else Nil
- def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests
-
- val givenKinds = standardKinds filter parsed.isSet
- val kinds = (
- if (optAll) standardKinds
- else if (givenKinds.nonEmpty) givenKinds
- else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all
- else Nil
- )
- val kindsTests = kinds flatMap testsFor
- val dir =
- if (fileManager.testClasses.isDefined) fileManager.testClassesDir
- else fileManager.testBuildFile getOrElse {
- fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
- }
-
- def testContributors = {
- List(
- if (partestTests.isEmpty) "" else "partest self-tests",
- if (rerunTests.isEmpty) "" else "previously failed tests",
- if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories",
- if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'",
- if (individualTests.isEmpty) "" else "specified tests"
- ) filterNot (_ == "") mkString ", "
- }
-
- def banner = {
- val vmBin = javaHome + fileSeparator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- s"""|Scala compiler classes in: $dir
- |Scala version is: $versionMsg
- |Scalac options are: ${fileManager.SCALAC_OPTS mkString " "}
- |Java binaries in: $vmBin
- |Java runtime is: $vmName
- |Java options are: $vmOpts
- |Source directory is: $srcDir
- |Available processors: ${Runtime.getRuntime().availableProcessors()}
- |Java Classpath: ${sys.props("java.class.path")}
- """.stripMargin
- }
-
- chatty(banner)
-
- val allTests: List[Path] = distinctBy(miscTests ++ kindsTests)(_.toCanonical) sortBy (_.toString)
- val grouped = (allTests groupBy kindOf).toList sortBy (x => standardKinds indexOf x._1)
-
- totalTests = allTests.size
- expectedFailures = propOrNone("partest.errors") match {
- case Some(num) => num.toInt
- case _ => 0
- }
- val expectedFailureMessage = if (expectedFailures == 0) "" else s" (expecting $expectedFailures to fail)"
- echo(s"Selected $totalTests tests drawn from $testContributors$expectedFailureMessage\n")
-
- val (_, millis) = timed {
- for ((kind, paths) <- grouped) {
- val num = paths.size
- val ss = if (num == 1) "" else "s"
- comment(s"starting $num test$ss in $kind")
- val results = runTestsForFiles(paths map (_.jfile), kind)
- val (passed, failed) = results partition (_.isOk)
-
- passedTests ++= passed
- failedTests ++= failed
- if (failed.nonEmpty) {
- comment(passFailString(passed.size, failed.size, 0) + " in " + kind)
- }
- echo("")
- }
- }
- this.elapsedMillis = millis
- issueSummaryReport()
- System exit ( if (isSuccess) 0 else 1 )
- }
-
- run()
-}
-
-object ConsoleRunner {
- def main(args: Array[String]): Unit = {
- new ConsoleRunner(args mkString " ")
- }
-}
-
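A sketch of driving the runner programmatically; the option names come from ConsoleRunnerSpec (the next deleted file), and the particular flags chosen here are illustrative.

    // equivalent to: test/partest --run --show-diff
    scala.tools.partest.nest.ConsoleRunner.main(Array("--run", "--show-diff"))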
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala b/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
deleted file mode 100644
index f9143013e9..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package scala.tools.partest.nest
-
-import language.postfixOps
-
-import scala.tools.cmd.{ CommandLine, Interpolation, Meta, Reference, Spec }
-
-trait ConsoleRunnerSpec extends Spec with Meta.StdOpts with Interpolation {
- def referenceSpec = ConsoleRunnerSpec
- def programInfo = Spec.Info(
- "console-runner",
- "Usage: NestRunner [options] [test test ...]",
- "scala.tools.partest.nest.ConsoleRunner")
-
- heading("Test categories:")
- val optAll = "all" / "run all tests" --?
- val optPos = "pos" / "run compilation tests (success)" --?
- val optNeg = "neg" / "run compilation tests (failure)" --?
- val optRun = "run" / "run interpreter and backend tests" --?
- val optJvm = "jvm" / "run JVM backend tests" --?
- val optRes = "res" / "run resident compiler tests" --?
- val optAnt = "ant" / "run Ant tests" --?
- val optScalap = "scalap" / "run scalap tests" --?
- val optSpecialized = "specialized" / "run specialization tests" --?
- val optScalacheck = "scalacheck" / "run ScalaCheck tests" --?
- val optInstrumented = "instrumented" / "run instrumented tests" --?
- val optPresentation = "presentation" / "run presentation compiler tests" --?
-
- heading("Test runner options:")
- val optFailed = "failed" / "run only those tests that failed during the last run" --?
- val optTimeout = "timeout" / "aborts the test suite after the given amount of time" --|
- val optPack = "pack" / "pick compiler/reflect/library in build/pack, and run all tests" --?
- val optGrep = "grep" / "run all tests whose source file contains the expression given to grep" --|
- val optUpdateCheck = "update-check" / "instead of failing tests with output change, update checkfile (use with care!)" --?
- val optBuildPath = "buildpath" / "set (relative) path to build jars (ex.: --buildpath build/pack)" --|
- val optClassPath = "classpath" / "set (absolute) path to build classes" --|
- val optSourcePath = "srcpath" / "set (relative) path to test source files (ex.: --srcpath pending)" --|
-
- heading("Test output options:")
- val optShowDiff = "show-diff" / "show diffs for failed tests" --?
- val optVerbose = "verbose" / "show verbose progress information" --?
- val optTerse = "terse" / "show terse progress information" --?
- val optDebug = "debug" / "enable debugging output" --?
-
- heading("Other options:")
- val optVersion = "version" / "show Scala version and exit" --?
- val optSelfTest = "self-test" / "run tests for partest itself" --?
- val optHelp = "help" / "show this page and exit" --?
-
-}
-
-object ConsoleRunnerSpec extends ConsoleRunnerSpec with Reference {
- type ThisCommandLine = CommandLine
- def creator(args: List[String]): ThisCommandLine = new CommandLine(ConsoleRunnerSpec, args)
-}
diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
deleted file mode 100644
index 8e5ff2abc4..0000000000
--- a/src/partest/scala/tools/partest/nest/DirectCompiler.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-import scala.tools.nsc.util.{ FakePos, stackTraceString }
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.reflect.io.AbstractFile
-import scala.reflect.internal.util.Position
-import java.io.{ BufferedReader, PrintWriter, FileReader, Writer, FileWriter }
-
-class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
- shortname = true
- // override def error(pos: Position, msg: String): Unit
-}
-
-class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
- def this(cp: String) = this(cp, _ => ())
-
- nowarnings.value = false
- encoding.value = "UTF-8"
- classpath.value = cp
-}
-
-class PartestGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) {
- // override def abort(msg: String): Nothing
- // override def globalError(msg: String): Unit
- // override def supplementErrorMessage(msg: String): String
-}
-class DirectCompiler(val fileManager: FileManager) {
- def newGlobal(settings: Settings, reporter: Reporter): PartestGlobal =
- new PartestGlobal(settings, reporter)
-
- def newGlobal(settings: Settings, logWriter: FileWriter): Global =
- newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
-
- def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
- def newSettings(outdir: String): TestSettings = {
- val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
- val s = new TestSettings(cp)
- s.outdir.value = outdir
- s
- }
-
- def compile(runner: Runner, opts0: List[String], sources: List[File]): TestState = {
- import runner.{ sources => _, _ }
-
- val testSettings = new TestSettings(ClassPath.join(fileManager.LATEST_LIB, outDir.getPath))
- val logWriter = new FileWriter(logFile)
- val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile
- val opts = fileManager.updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir)
- val command = new CompilerCommand(opts, testSettings)
- val global = newGlobal(testSettings, logWriter)
- val reporter = global.reporter.asInstanceOf[ExtConsoleReporter]
- def errorCount = reporter.ERROR.count
-
- def defineSettings(s: Settings) = {
- s.outputDirs setSingleOutput outDir.getPath
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- prependToClasspaths(s, codelib)
- s.classpath append fileManager.CLASSPATH // adding this why?
-
- // add the instrumented library version to classpath
- if (kind == "specialized")
- prependToClasspaths(s, speclib)
-
- // check that option processing succeeded
- opts0.isEmpty || command.ok
- }
-
- if (!defineSettings(testSettings))
- if (opts0.isEmpty)
- reporter.error(null, s"bad settings: $testSettings")
- else
- reporter.error(null, opts0.mkString("bad options: ", space, ""))
-
- def ids = sources.map(_.testIdent) mkString space
- vlog(s"% scalac $ids")
-
- def execCompile() =
- if (command.shouldStopWithInfo) {
- logWriter append (command getInfoMessage global)
- runner genFail "compilation stopped with info"
- } else {
- new global.Run compile sources.map(_.getPath)
- if (!reporter.hasErrors) runner.genPass()
- else {
- reporter.printSummary()
- reporter.writer.close()
- runner.genFail(s"compilation failed with $errorCount errors")
- }
- }
-
- try { execCompile() }
- catch { case t: Throwable => reporter.error(null, t.getMessage) ; runner.genCrash(t) }
- finally { logWriter.close() }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
deleted file mode 100644
index 7bfa8c6e77..0000000000
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, FilenameFilter, IOException, StringWriter,
- FileInputStream, FileOutputStream, BufferedReader,
- FileReader, PrintWriter, FileWriter}
-import java.net.URI
-import scala.reflect.io.AbstractFile
-import scala.collection.mutable
-
-trait FileUtil {
- /**
- * Compares two files using difflib to produce a unified diff.
- *
- * @param f1 the first file to be compared
- * @param f2 the second file to be compared
- * @return the unified diff of the compared files or the empty string if they're equal
- */
- def compareFiles(f1: File, f2: File): String = {
- compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
- }
-
- /**
- * Compares two lists of lines using difflib to produce a unified diff.
- *
- * @param origLines the first seq of lines to be compared
- * @param newLines the second seq of lines to be compared
- * @param origName file name to be used in unified diff for `origLines`
- * @param newName file name to be used in unified diff for `newLines`
- * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal
- */
- def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
- import collection.JavaConverters._
-
- val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
- if (diff.getDeltas.isEmpty) ""
- else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
- }
-}
-object FileUtil extends FileUtil { }
-
-trait FileManager extends FileUtil {
-
- def testRootDir: Directory
- def testRootPath: String
-
- var JAVACMD: String
- var JAVAC_CMD: String
-
- var CLASSPATH: String
- var LATEST_LIB: String
- var LATEST_REFLECT: String
- var LATEST_COMP: String
- var LATEST_PARTEST: String
- var LATEST_ACTORS: String
-
- protected def relativeToLibrary(what: String): String = {
- def jarname = if (what startsWith "scala") s"$what.jar" else s"scala-$what.jar"
- if (LATEST_LIB endsWith ".jar")
- (SFile(LATEST_LIB).parent / jarname).toAbsolute.path
- else
- (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
- }
- def latestParserCBLib = relativeToLibrary("parser-combinators")
- def latestXmlLib = relativeToLibrary("xml")
- def latestScaladoc = relativeToLibrary("scaladoc")
- def latestInteractive = relativeToLibrary("interactive")
- def latestScalapFile = relativeToLibrary("scalap")
- def latestPaths = List(
- LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS,
- latestParserCBLib, latestXmlLib, latestScalapFile, latestScaladoc, latestInteractive
- )
- def latestFiles = latestPaths map (p => new java.io.File(p))
- def latestUrls = latestFiles map (_.toURI.toURL)
-
- var showDiff = false
- var updateCheck = false
- var showLog = false
- var failed = false
-
- var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
- var JAVA_OPTS = PartestDefaults.javaOpts
-
- /** Only when --debug is given. */
- lazy val testTimings = new mutable.HashMap[String, Long]
- def recordTestTiming(name: String, milliseconds: Long) =
- synchronized { testTimings(name) = milliseconds }
-
- def getLogFile(dir: File, fileBase: String, kind: String): File =
- new File(dir, fileBase + "-" + kind + ".log")
-
- def getLogFile(file: File, kind: String): File = {
- val dir = file.getParentFile
- val fileBase = basename(file.getName)
-
- getLogFile(dir, fileBase, kind)
- }
-
- def logFileExists(file: File, kind: String) =
- getLogFile(file, kind).canRead
-
- def overwriteFileWith(dest: File, file: File) =
- dest.isFile && copyFile(file, dest)
-
- def copyFile(from: File, dest: File): Boolean = {
- if (from.isDirectory) {
- assert(dest.isDirectory, "cannot copy directory to file")
- val subDir:Directory = Path(dest) / Directory(from.getName)
- subDir.createDirectory()
- from.listFiles.toList forall (copyFile(_, subDir))
- }
- else {
- val to = if (dest.isDirectory) new File(dest, from.getName) else dest
-
- try {
- SFile(to) writeAll SFile(from).slurp()
- true
- }
- catch { case _: IOException => false }
- }
- }
-
- def mapFile(file: File, replace: String => String) {
- val f = SFile(file)
-
- f.printlnAll(f.lines.toList map replace: _*)
- }
-
- /** Massage args to merge plugins and fix paths.
- * Plugin path can be relative to test root, or cwd is out.
- * While we're at it, mix in the baseline options, too.
- * That's how ant passes in the plugins dir.
- */
- def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): List[String] = {
- val dir = testRootDir
- // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual)
- def pathOrCwd(p: String) =
- if (p == ".") {
- val plugxml = "scalac-plugin.xml"
- val pout = if (out.isVirtual) Directory.makeTemp() else Path(out.path)
- val srcpath = Path(srcdir.path)
- val pd = (srcpath / plugxml).toFile
- if (pd.exists) pd copyTo (pout / plugxml)
- pout
- } else Path(p)
- def absolutize(path: String) = pathOrCwd(path) match {
- case x if x.isAbsolute => x.path
- case x => (dir / x).toAbsolute.path
- }
-
- val xprefix = "-Xplugin:"
- val (xplugs, others) = args partition (_ startsWith xprefix)
- val Xplugin = if (xplugs.isEmpty) Nil else List(xprefix +
- (xplugs map (_ stripPrefix xprefix) flatMap (_ split pathSeparator) map absolutize mkString pathSeparator)
- )
- SCALAC_OPTS.toList ::: others ::: Xplugin
- }
-}
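A sketch using the diff helper above; the inputs are illustrative.

    import scala.tools.partest.nest.FileUtil

    val diff = FileUtil.compareContents(Seq("a", "b", "c"), Seq("a", "B", "c"), "expected", "actual")
    if (diff.isEmpty) println("files match")
    else println(diff)   // unified diff with one line of context, as generated above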
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
deleted file mode 100644
index e398d2ead9..0000000000
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object NestRunner {
- def main(args: Array[String]) {
- new ReflectiveRunner main (args mkString " ")
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
deleted file mode 100644
index 5148115905..0000000000
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import java.io.PrintWriter
-
-class Colors(enabled: => Boolean) {
- import Console._
-
- val bold = colored(BOLD)
- val yellow = colored(YELLOW)
- val green = colored(GREEN)
- val blue = colored(BLUE)
- val red = colored(RED)
- val red_b = colored(RED_B)
- val green_b = colored(GREEN_B)
- val cyan = colored(CYAN)
- val magenta = colored(MAGENTA)
-
- private def colored(code: String): String => String =
- s => if (enabled) code + s + RESET else s
-}
-
-object NestUI {
- private val testNum = new java.util.concurrent.atomic.AtomicInteger(1)
- @volatile private var testNumberFmt = "%3d"
- // @volatile private var testNumber = 1
- private def testNumber = testNumberFmt format testNum.getAndIncrement()
- def resetTestNumber(max: Int = -1) {
- testNum set 1
- val width = if (max > 0) max.toString.length else 3
- testNumberFmt = s"%${width}d"
- }
-
- var colorEnabled = sys.props contains "partest.colors"
- val color = new Colors(colorEnabled)
- import color._
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- private var _outline = ""
- private var _success = ""
- private var _failure = ""
- private var _warning = ""
- private var _default = ""
-
- private var dotCount = 0
- private val DotWidth = 72
-
- def leftFlush() {
- if (dotCount != 0) {
- normal("\n")
- dotCount = 0
- }
- }
-
- def statusLine(state: TestState) = {
- import state._
- import TestState._
- val colorizer = state match {
- case _: Skip => yellow
- case _: Updated => cyan
- case s if s.isOk => green
- case _ => red
- }
- val word = bold(colorizer(state.shortStatus))
- f"$word $testNumber - $testIdent%-40s$reasonString"
- }
-
- def reportTest(state: TestState) = {
- if (isTerse && state.isOk) {
- if (dotCount >= DotWidth) {
- outline("\n.")
- dotCount = 1
- }
- else {
- outline(".")
- dotCount += 1
- }
- }
- else {
- echo(statusLine(state))
- if (!state.isOk && isDiffy) {
- val differ = bold(red("% ")) + "diff "
- state.transcript find (_ startsWith differ) foreach (echo(_))
- }
- }
- }
-
- def echo(message: String): Unit = synchronized {
- leftFlush()
- print(message + "\n")
- }
- def chatty(msg: String) = if (isVerbose) echo(msg)
-
- def echoSkipped(msg: String) = echo(yellow(msg))
- def echoPassed(msg: String) = echo(bold(green(msg)))
- def echoFailed(msg: String) = echo(bold(red(msg)))
- def echoMixed(msg: String) = echo(bold(yellow(msg)))
- def echoWarning(msg: String) = echo(bold(red(msg)))
-
- def initialize(number: Int) = number match {
- case MANY =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.BOLD + Console.GREEN
- _failure = Console.BOLD + Console.RED
- _warning = Console.BOLD + Console.YELLOW
- _default = Console.RESET
- case SOME =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.RESET
- _failure = Console.BOLD + Console.BLACK
- _warning = Console.BOLD + Console.BLACK
- _default = Console.RESET
- case _ =>
- }
-
- def outline(msg: String) = print(_outline + msg + _default)
- def outline(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_outline + msg + _default)
- }
-
- def success(msg: String) = print(_success + msg + _default)
- def success(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_success + msg + _default)
- }
-
- def failure(msg: String) = print(_failure + msg + _default)
- def failure(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_failure + msg + _default)
- }
-
- def warning(msg: String) = print(_warning + msg + _default)
-
- def normal(msg: String) = print(_default + msg)
- def normal(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_default + msg)
- }
-
- def usage() {
- println(ConsoleRunnerSpec.programInfo.usage)
- println(ConsoleRunnerSpec.helpMsg)
- sys.exit(1)
- }
-
- var _verbose = false
- var _debug = false
- var _terse = false
- var _diff = false
-
- def isVerbose = _verbose
- def isDebug = _debug
- def isTerse = _terse
- def isDiffy = _diff
-
- def setVerbose() {
- _verbose = true
- }
- def setDebug() {
- _debug = true
- }
- def setTerse() {
- _terse = true
- }
- def setDiffOnFail() {
- _diff = true
- }
- def verbose(msg: String) {
- if (isVerbose)
- System.err.println(msg)
- }
- def debug(msg: String) {
- if (isDebug)
- System.err.println(msg)
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
deleted file mode 100644
index 030c515947..0000000000
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io.{ Path, File, Directory }
-import Path._
-
-object PathSettings {
- import PartestDefaults.{ testRootDir, srcDirName }
-
- private def cwd = Directory.Current getOrElse sys.error("user.dir property not set")
- private def isPartestDir(d: Directory) = (d.name == "test") && (d / srcDirName isDirectory)
- private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
- private def findJar(files: Iterator[File], name: String): Option[File] =
- files filter (_ hasExtension "jar") find { _.name startsWith name }
- private def findJarOrFail(name: String, ds: Directory*): File = findJar(ds flatMap (_.files) iterator, name) getOrElse
- sys.error(s"'${name}.jar' not found in '${ds map (_.path) mkString ", "}'.")
-
- // Directory <root>/test
- lazy val testRoot: Directory = testRootDir getOrElse {
- val candidates: List[Directory] = (cwd :: cwd.parents) flatMap (d => List(d, Directory(d / "test")))
-
- candidates find isPartestDir getOrElse sys.error("Directory 'test' not found.")
- }
-
- // Directory <root>/test/files or .../scaladoc
- def srcDir = Directory(testRoot / srcDirName toCanonical)
-
- // Directory <root>/test/files/lib
- lazy val srcLibDir = Directory(srcDir / "lib")
-
- // Directory <root>/test/files/speclib
- lazy val srcSpecLibDir = Directory(srcDir / "speclib")
-
- lazy val srcSpecLib: File = findJar(srcSpecLibDir, "instrumented") getOrElse {
- sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
- }
-
- // Directory <root>/test/files/codelib
- lazy val srcCodeLibDir = Directory(srcDir / "codelib")
-
- lazy val srcCodeLib: File = (
- findJar(srcCodeLibDir, "code")
- orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
- getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
- )
-
- lazy val instrumentationAgentLib: File = {
- findJar(buildPackLibDir.files, "scala-partest-javaagent") getOrElse {
- sys.error("No partest-javaagent jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
- }
- }
-
- // Directory <root>/build
- lazy val buildDir: Directory = {
- val bases = testRoot :: testRoot.parents
- // In the classic "ant" build, the relevant subdirectory is called build,
- // but in the postmodern "sbt" build, it is called target. Look for both.
- val dirs = Path.onlyDirs(bases flatMap (x => List(x / "build", x / "target")))
-
- dirs.headOption getOrElse sys.error("Neither 'build' nor 'target' dir found under test root " + testRoot + ".")
- }
-
- // Directory <root>/build/pack/lib
- lazy val buildPackLibDir = Directory(buildDir / "pack" / "lib")
-
- lazy val scalaCheck: File =
- findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
- sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
- }
-
- lazy val testInterface: File = findJarOrFail("test-interface", buildPackLibDir, srcLibDir)
-
- lazy val diffUtils: File =
- findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
-
- /** The platform-specific support jar, `tools.jar`.
- */
- lazy val platformTools: Option[File] = PathResolver.SupplementalLocations.platformTools
-}
-
-class PathSettings() {
- // def classpathAsURLs: List[URL]
-}
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
deleted file mode 100644
index 3c77a03f1e..0000000000
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.Path
-import java.net.URLClassLoader
-
-/* This class is used to load an instance of DirectRunner using
- * a custom class loader.
- * The purpose is to "auto-detect" a good classpath for the
- * rest of the classes (Worker, CompileManager etc.), so that
- * the main NestRunner can be started merely by putting its
- * class on the classpath (ideally).
- */
-class ReflectiveRunner {
- // TODO: we might also use fileManager.CLASSPATH
- // so as to load the same classes as the `scala` command
- // that was used to start the runner.
- val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
-
- private def searchPath(option: String, as: List[String]): Option[String] = as match {
- case `option` :: r :: _ => Some(r)
- case _ :: rest => searchPath(option, rest)
- case Nil => None
- }
-
- def main(args: String) {
- val argList = (args.split("\\s")).toList
-
- if (isPartestDebug)
- showAllJVMInfo
-
- // find out which build to test
- val buildPath = searchPath("--buildpath", argList)
- val classPath = searchPath("--classpath", argList)
- val fileManager =
- if (!buildPath.isEmpty)
- new ConsoleFileManager(buildPath.get)
- else if (!classPath.isEmpty)
- new ConsoleFileManager(classPath.get, true)
- else if (argList contains "--pack")
- new ConsoleFileManager("build/pack")
- else // auto detection
- new ConsoleFileManager
-
- // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed
- // we hack into the classloader that will become parent classloader for scalac
- // this way we ensure that reflective macro lookup will pick correct Code.lift
- // it's also used to inject diffutils into the classpath when running partest from the test/partest script
- val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils, PathSettings.testInterface)
- val sepUrls = srcCodeLibAndDiff.map(_.toURI.toURL) ::: fileManager.latestUrls
- // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
- val sepLoader = new URLClassLoader(sepUrls.toArray, null)
-
- if (isPartestDebug)
- println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n "))
-
- // @partest maintainer: it seems to me that the commented lines below are incorrect
- // if classPath is not empty, then it has been provided by the --classpath option
- // which points to the root of Scala home (see ConsoleFileManager's testClasses and the true flag in the ctor for more information)
- // this doesn't mean that we had custom Java classpath set, so we don't have to override latestXXXFiles from the file manager
- //
- //val paths = classPath match {
- // case Some(cp) => Nil
- // case _ => files.toList map (_.path)
- //}
-
- setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*))
-
- // don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite
- //setProp("scala.home", latestLibFile.parent.parent.path)
- setProp("scala.home", "")
-
- if (isPartestDebug)
- for (prop <- List("java.class.path", "sun.boot.class.path", "java.ext.dirs"))
- println(prop + ": " + propOrEmpty(prop))
-
- try {
- val sepRunnerClass = sepLoader loadClass sepRunnerClassName
- val sepMainMethod = sepRunnerClass.getMethod("main", classOf[Array[String]])
- val cargs: Array[AnyRef] = Array(Array(args))
- sepMainMethod.invoke(null, cargs: _*)
- }
- catch {
- case cnfe: ClassNotFoundException =>
- cnfe.printStackTrace()
- NestUI.failure(sepRunnerClassName +" could not be loaded from:\n")
- sepUrls foreach (x => NestUI.failure(x + "\n"))
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala
deleted file mode 100644
index d7d87bdcf5..0000000000
--- a/src/partest/scala/tools/partest/nest/Runner.scala
+++ /dev/null
@@ -1,894 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-package scala.tools.partest
-package nest
-
-import java.io.{ Console => _, _ }
-import java.net.URL
-import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
-import java.util.concurrent.Executors
-import java.util.concurrent.TimeUnit.NANOSECONDS
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.duration.Duration
-import scala.io.Codec
-import scala.reflect.internal.FatalError
-import scala.sys.process.{ Process, ProcessLogger }
-import scala.tools.nsc.Properties.{ envOrElse, isWin, jdkHome, javaHome, propOrElse, propOrEmpty, setProp }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.io.{ AbstractFile, PlainFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ Exceptional, ScalaClassLoader, stackTraceString }
-import scala.tools.scalap.Main.decompileScala
-import scala.tools.scalap.scalax.rules.scalasig.ByteCode
-import scala.util.{ Try, Success, Failure }
-import ClassPath.{ join, split }
-import PartestDefaults.{ javaCmd, javacCmd }
-import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
-
-trait PartestRunSettings {
- def gitPath: Path
- def reportPath: Path
- def logPath: Path
-
- def testPaths: List[Path]
-
- def gitDiffOptions: List[String]
- def extraScalacOptions: List[String]
- def extraJavaOptions: List[String]
-}
-
-class TestTranscript {
- import NestUI.color._
- private val buf = ListBuffer[String]()
- private def pass(s: String) = bold(green("% ")) + s
- private def fail(s: String) = bold(red("% ")) + s
-
- def add(action: String): this.type = { buf += action ; this }
- def append(text: String) { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) }
-
- // Colorize prompts according to pass/fail
- def fail: List[String] = buf.toList match {
- case Nil => Nil
- case xs => (xs.init map pass) :+ fail(xs.last)
- }
-}
-
-/** Run a single test. Rubber meets road. */
-class Runner(val testFile: File, fileManager: FileManager, val testRunParams: TestRunParams) {
- import fileManager._
-
- // Override to true to have the outcome of this test displayed
- // whether it passes or not; in general only failures are reported,
- // except for a . per passing test to show progress.
- def isEnumeratedTest = false
-
- private var _lastState: TestState = null
- private var _transcript = new TestTranscript
-
- def lastState = if (_lastState == null) Uninitialized(testFile) else _lastState
- def setLastState(s: TestState) = _lastState = s
- def transcript: List[String] = _transcript.fail ++ logFile.fileLines
- def pushTranscript(msg: String) = _transcript add msg
-
- val parentFile = testFile.getParentFile
- val kind = parentFile.getName
- val fileBase = basename(testFile.getName)
- val logFile = new File(parentFile, s"$fileBase-$kind.log")
- val outFile = logFile changeExtension "obj"
- val checkFile = testFile changeExtension "check"
- val flagsFile = testFile changeExtension "flags"
- val testIdent = testFile.testIdent // e.g. pos/t1234
-
- lazy val outDir = { outFile.mkdirs() ; outFile }
-
- type RanOneTest = (Boolean, LogContext)
-
- def showCrashInfo(t: Throwable) {
- System.err.println("Crashed running test $testIdent: " + t)
- if (!isPartestTerse)
- System.err.println(stackTraceString(t))
- }
- protected def crashHandler: PartialFunction[Throwable, TestState] = {
- case t: InterruptedException =>
- genTimeout()
- case t: Throwable =>
- showCrashInfo(t)
- logFile.appendAll(stackTraceString(t))
- genCrash(t)
- }
-
- def genPass() = Pass(testFile)
- def genFail(reason: String) = Fail(testFile, reason, _transcript.fail)
- def genTimeout() = Fail(testFile, "timed out", _transcript.fail)
- def genCrash(caught: Throwable) = Crash(testFile, caught, _transcript.fail)
- def genUpdated() = Updated(testFile)
-
- def speclib = PathSettings.srcSpecLib.toString // specialization lib
- def codelib = PathSettings.srcCodeLib.toString // reify lib
-
- // Prepend to a classpath, but without incurring duplicate entries
- def prependTo(classpath: String, path: String): String = {
- val segments = ClassPath split classpath
-
- if (segments startsWith List(path)) classpath
- else ClassPath.join(path :: segments distinct: _*)
- }
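// A minimal standalone sketch of the dedup-prepend intended above; entry names are
// hypothetical and java.io.File.pathSeparator is used directly.
def prependUnlessPresent(classpath: String, path: String): String = {
  val sep = java.io.File.pathSeparator
  val segments = classpath.split(sep).toList
  if (segments startsWith List(path)) classpath   // already first: leave untouched
  else (path :: segments).distinct.mkString(sep)  // otherwise prepend and drop duplicates
}
// With ':' as the separator:
//   prependUnlessPresent("b.jar:c.jar", "a.jar") == "a.jar:b.jar:c.jar"
//   prependUnlessPresent("a.jar:b.jar", "a.jar") == "a.jar:b.jar"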
-
- def prependToJavaClasspath(path: String) {
- val jcp = sys.props.getOrElse("java.class.path", "")
- prependTo(jcp, path) match {
- case `jcp` =>
- case cp => sys.props("java.class.path") = cp
- }
- }
- def prependToClasspaths(s: Settings, path: String) {
- prependToJavaClasspath(path)
- val scp = s.classpath.value
- prependTo(scp, path) match {
- case `scp` =>
- case cp => s.classpath.value = cp
- }
- }
-
- private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
-
- def javac(files: List[File]): TestState = {
- // compile using command-line javac compiler
- val args = Seq(
- javacCmd,
- "-d",
- outDir.getAbsolutePath,
- "-classpath",
- join(outDir.toString, CLASSPATH)
- ) ++ files.map(_.getAbsolutePath)
-
- pushTranscript(args mkString " ")
- val captured = StreamCapture(runCommand(args, logFile))
- if (captured.result) genPass() else {
- logFile appendAll captured.stderr
- genFail("java compilation failed")
- }
- }
-
- def testPrompt = kind match {
- case "res" => "nsc> "
- case _ => "% "
- }
-
- /** Evaluate an action body and update the test state.
- * @param failFn optionally map a result to a test state.
- */
- def nextTestAction[T](body: => T)(failFn: PartialFunction[T, TestState]): T = {
- val result = body
- setLastState( if (failFn isDefinedAt result) failFn(result) else genPass() )
- result
- }
- def nextTestActionExpectTrue(reason: String, body: => Boolean): Boolean = (
- nextTestAction(body) { case false => genFail(reason) }
- )
- def nextTestActionFailing(reason: String): Boolean = nextTestActionExpectTrue(reason, false)
-
- private def assembleTestCommand(outDir: File, logFile: File): List[String] = {
- // check whether there is a ".javaopts" file
- val argsFile = testFile changeExtension "javaopts"
- val argString = file2String(argsFile)
- if (argString != "")
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
- val testFullPath = testFile.getAbsolutePath
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // partest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
- val propertyOptions = List(
- "-Dfile.encoding=UTF-8",
- "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
- "-Dpartest.output="+outDir.getAbsolutePath,
- "-Dpartest.lib="+LATEST_LIB,
- "-Dpartest.reflect="+LATEST_REFLECT,
- "-Dpartest.cwd="+outDir.getParent,
- "-Dpartest.test-path="+testFullPath,
- "-Dpartest.testname="+fileBase,
- "-Djavacmd="+javaCmd,
- "-Djavaccmd="+javacCmd,
- "-Duser.language=en",
- "-Duser.country=US"
- ) ++ extras
-
- val classpath = if (extraClasspath != "") join(extraClasspath, CLASSPATH) else CLASSPATH
-
- javaCmd +: (
- (JAVA_OPTS.split(' ') ++ extraJavaOptions.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "").toList ++ Seq(
- "-classpath",
- join(outDir.toString, classpath)
- ) ++ propertyOptions ++ Seq(
- "scala.tools.nsc.MainGenericRunner",
- "-usejavacp",
- "Test",
- "jvm"
- )
- )
- }
-
- /** Runs command redirecting standard out and
- * error out to output file.
- */
- private def runCommand(args: Seq[String], outFile: File): Boolean = {
- //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
- val pl = ProcessLogger(outFile)
- val nonzero = 17 // rounding down from 17.3
- def run: Int = {
- val p = Process(args) run pl
- try p.exitValue
- catch {
- case e: InterruptedException =>
- NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
- p.destroy
- nonzero
- case t: Throwable =>
- NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
- p.destroy
- throw t
- }
- finally pl.close()
- }
- (pl buffer run) == 0
- }
-
- private def execTest(outDir: File, logFile: File): Boolean = {
- val cmd = assembleTestCommand(outDir, logFile)
-
- pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName)
- nextTestAction(runCommand(cmd, logFile)) {
- case false =>
- _transcript append EOL + logFile.fileContents
- genFail("non-zero exit code")
- }
- }
-
- override def toString = s"""Test($testIdent, lastState = $lastState)"""
-
- // result is unused
- def newTestWriters() = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- // diff = ""
-
- ((swr, wr))
- }
-
- def fail(what: Any) = {
- NestUI.verbose("scalac: compilation of "+what+" failed\n")
- false
- }
-
- /** Filter the diff for conditional blocks.
- * The check file can contain lines of the form:
- * `#partest java7`
- * where the line contains a conventional flag name.
- * In the diff output, these lines have the form:
- * `> #partest java7`
- * Blocks which don't apply are filtered out,
- * and what remains is the desired diff.
- * Line edit commands such as `0a1,6` don't count
- * as diff, so return a nonempty diff only if
- * material diff output was seen.
- * Filtering the diff output (instead of every check
- * file) means that we only post-process a test that
- * might be failing, in the normal case.
- */
- def diffilter(d: String) = {
- import scala.util.Properties.{javaVersion, isAvian}
- val prefix = "#partest"
- val margin = "> "
- val leader = margin + prefix
- // use lines in block so labeled? Default to sorry, Charlie.
- def retainOn(f: String) = {
- val (invert, token) =
- if (f startsWith "!") (true, f drop 1) else (false, f)
- val cond = token match {
- case "java7" => javaVersion startsWith "1.7"
- case "java6" => javaVersion startsWith "1.6"
- case "avian" => isAvian
- case "true" => true
- case _ => false
- }
- if (invert) !cond else cond
- }
- if (d contains prefix) {
- val sb = new StringBuilder
- var retain = true // use the current line
- var material = false // saw a line of diff
- for (line <- d.lines)
- if (line startsWith leader) {
- val rest = (line stripPrefix leader).trim
- retain = retainOn(rest)
- } else if (retain) {
- if (line startsWith margin) material = true
- sb ++= line
- sb ++= EOL
- }
- if (material) sb.toString else ""
- } else d
- }
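// An illustrative check file using the conditional-block convention described above
// (contents are hypothetical):
//
//   #partest java6
//   warning: output expected on Java 6
//   #partest java7
//   warning: output expected on Java 7
//
// In the raw diff such markers carry the "> " margin, which is what the leader prefix recognizes:
//   val leader = "> " + "#partest"                          // "> #partest"
//   "> #partest java7" startsWith leader                    // true
//   ("> #partest java7" stripPrefix leader).trim == "java7" // flag handed to retainOn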
-
- def currentDiff = (
- if (checkFile.canRead) diffilter(compareFiles(logFile, checkFile))
- else compareContents(augmentString(file2String(logFile)).lines.toList, Nil)
- )
-
- val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead)
- val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options")
- // --color=always --word-diff
-
- def gitDiff(f1: File, f2: File): Option[String] = {
- try gitRunner map { git =>
- val cmd = s"$git diff $gitDiffOptions $f1 $f2"
- val diff = Process(cmd).lines_!.drop(4).map(_ + "\n").mkString
-
- "\n" + diff
- }
- catch { case t: Exception => None }
- }
-
- /** Normalize the log output by applying test-specific filters
- * and fixing filesystem-specific paths.
- *
- * Line filters are picked up from `filter: pattern` at the top of sources.
- * The filtered line is detected with a simple "contains" test,
- * and yes, "filter" means "filter out" in this context.
- *
- * File paths are detected using the absolute path of the test root.
- * A string that looks like a file path is normalized by replacing
- * the leading segments (the root) with "$ROOT" and by replacing
- * any Windows backslashes with the one true file separator char.
- */
- def normalizeLog() {
- // Apply judiciously; there are line comments in the "stub implementations" error output.
- val slashes = """[/\\]+""".r
- def squashSlashes(s: String) = slashes replaceAllIn (s, "/")
-
- // this string identifies a path and is also snipped from log output.
- // to preserve more of the path, could use fileManager.testRootPath
- val elided = parentFile.getAbsolutePath
-
- // something to mark the elision in the log file (disabled)
- val ellipsis = "" //".../" // using * looks like a comment
-
- // test file paths below the root must not contain spaces, since a space ends the path match below
- val pathFinder = raw"""(?i)\Q${elided}${File.separator}\E([\${File.separator}\w]*)""".r
- def canonicalize(s: String): String = (
- pathFinder replaceAllIn (s, m => ellipsis + squashSlashes(m group 1))
- )
-
- def masters = {
- val files = List(new File(parentFile, "filters"), new File(PathSettings.srcDir.path, "filters"))
- files filter (_.exists) flatMap (_.fileLines) map (_.trim) filter (s => !(s startsWith "#"))
- }
- val filters = toolArgs("filter", split = false) ++ masters
- val elisions = ListBuffer[String]()
- //def lineFilter(s: String): Boolean = !(filters exists (s contains _))
- def lineFilter(s: String): Boolean = (
- filters map (_.r) forall { r =>
- val res = (r findFirstIn s).isEmpty
- if (!res) elisions += s
- res
- }
- )
-
- logFile.mapInPlace(canonicalize)(lineFilter)
- if (isPartestVerbose && elisions.nonEmpty) {
- import NestUI.color._
- val emdash = bold(yellow("--"))
- pushTranscript(s"filtering ${logFile.getName}$EOL${elisions mkString (emdash, EOL + emdash, EOL)}")
- }
- }
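// A minimal standalone sketch of the path canonicalization above, assuming a hypothetical
// test directory; the real code derives `elided` from parentFile and appends File.separator.
object NormalizeLogSketch extends App {
  val elided = "/home/user/scala/test/files/run"
  val pathFinder = ("""(?i)\Q""" + elided + "/" + """\E([/\\\w]*)""").r
  def canonicalize(s: String): String =
    pathFinder.replaceAllIn(s, m => m.group(1).replaceAll("""[/\\]+""", "/"))
  // prints: error at t1234-run.obj/Test.scala
  println(canonicalize("error at /home/user/scala/test/files/run/t1234-run.obj/Test.scala"))
}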
-
- def diffIsOk: Boolean = {
- // always normalize the log first
- normalizeLog()
- val diff = currentDiff
- // if diff is not empty, is update needed?
- val updating: Option[Boolean] = (
- if (diff == "") None
- else Some(fileManager.updateCheck)
- )
- pushTranscript(s"diff $logFile $checkFile")
- nextTestAction(updating) {
- case Some(true) =>
- NestUI.verbose("Updating checkfile " + checkFile)
- checkFile writeAll file2String(logFile)
- genUpdated()
- case Some(false) =>
- // Get a word-highlighted diff from git if we can find it
- val bestDiff = if (updating.isEmpty) "" else {
- if (checkFile.canRead)
- gitDiff(logFile, checkFile) getOrElse {
- s"diff $logFile $checkFile\n$diff"
- }
- else diff
- }
- _transcript append bestDiff
- genFail("output differs")
- // TestState.fail("output differs", "output differs",
- // genFail("output differs")
- // TestState.Fail("output differs", bestDiff)
- case None => genPass() // redundant default case
- } getOrElse true
- }
-
- /** 1. Creates log file and output directory.
- * 2. Runs script function, providing log file and output directory as arguments.
- * 2b. or, just run the script without context and return a new context
- */
- def runInContext(body: => Boolean): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- val succeeded = body
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- /** Grouped files in group order, and lex order within each group. */
- def groupedFiles(sources: List[File]): List[List[File]] = (
- if (sources.tail.nonEmpty) {
- val grouped = sources groupBy (_.group)
- grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName))
- }
- else List(sources)
- )
-
- /** Source files for the given test file. */
- def sources(file: File): List[File] = (
- if (file.isDirectory)
- file.listFiles.toList filter (_.isJavaOrScala)
- else
- List(file)
- )
-
- def newCompiler = new DirectCompiler(fileManager)
-
- def attemptCompile(sources: List[File]): TestState = {
- val state = newCompiler.compile(this, flagsForCompilation(sources), sources)
- if (!state.isOk)
- _transcript append ("\n" + file2String(logFile))
-
- state
- }
-
- // snort or scarf all the contributing flags files
- def flagsForCompilation(sources: List[File]): List[String] = {
- def argsplitter(s: String) = words(s) filter (_.nonEmpty)
- val perTest = argsplitter(flagsFile.fileContents)
- val perGroup = if (testFile.isDirectory) {
- sources flatMap { f => SFile(Path(f) changeExtension "flags").safeSlurp map argsplitter getOrElse Nil }
- } else Nil
- perTest ++ perGroup
- }
-
- def toolArgs(tool: String, split: Boolean = true): List[String] = {
- def argsplitter(s: String) = if (split) words(s) filter (_.nonEmpty) else List(s)
- def argsFor(f: File): List[String] = {
- import scala.util.matching.Regex
- val p = new Regex(s"(?:.*\\s)?${tool}:(?:\\s*)(.*)?", "args")
- val max = 10
- val src = Path(f).toFile.chars(codec)
- val args = try {
- src.getLines take max collectFirst {
- case s if (p findFirstIn s).nonEmpty => for (m <- p findFirstMatchIn s) yield m group "args"
- }
- } finally src.close()
- args.flatten map argsplitter getOrElse Nil
- }
- sources(testFile) flatMap argsFor
- }
-
- abstract class CompileRound {
- def fs: List[File]
- def result: TestState
- def description: String
-
- def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " "
- def isOk = result.isOk
- def mkScalacString(): String = {
- val flags = file2String(flagsFile) match {
- case "" => ""
- case s => " " + s
- }
- s"""scalac $fsString"""
- }
- override def toString = description + ( if (result.isOk) "" else "\n" + result.status )
- }
- case class OnlyJava(fs: List[File]) extends CompileRound {
- def description = s"""javac $fsString"""
- lazy val result = { pushTranscript(description) ; javac(fs) }
- }
- case class OnlyScala(fs: List[File]) extends CompileRound {
- def description = mkScalacString()
- lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
- }
- case class ScalaAndJava(fs: List[File]) extends CompileRound {
- def description = mkScalacString()
- lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
- }
-
- def compilationRounds(file: File): List[CompileRound] = (
- (groupedFiles(sources(file)) map mixedCompileGroup).flatten
- )
- def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = {
- val (scalaFiles, javaFiles) = allFiles partition (_.isScala)
- val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty
- val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles))
- val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles))
- val round3 = if (!isMixed) None else Some(OnlyScala(scalaFiles))
-
- List(round1, round2, round3).flatten
- }
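// Illustrative walk-through of the rounds above for a mixed group { A.scala, B.java }:
//   ScalaAndJava(A.scala, B.java)  -- scalac sees both sources (Java only for signatures)
//   OnlyJava(B.java)               -- javac compiles the Java sources against scalac's output
//   OnlyScala(A.scala)             -- scalac recompiles against the compiled Java classes
// A Scala-only group yields just the first round; a Java-only group yields just OnlyJava.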
-
- def runNegTest() = runInContext {
- val rounds = compilationRounds(testFile)
-
- // failing means Does Not Compile
- val failing = rounds find (x => nextTestActionExpectTrue("compilation failed", x.isOk) == false)
-
- // which means passing if it checks and didn't crash the compiler
- // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
- def checked(r: CompileRound) = r.result match {
- case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => false
- case _ => diffIsOk
- }
-
- failing map (checked) getOrElse nextTestActionFailing("expected compilation failure")
- }
-
- def runTestCommon(andAlso: => Boolean): (Boolean, LogContext) = runInContext {
- compilationRounds(testFile).forall(x => nextTestActionExpectTrue("compilation failed", x.isOk)) && andAlso
- }
-
- // Apache Ant 1.6 or newer
- def ant(args: Seq[String], output: File): Boolean = {
- val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
- val antLibDir = Directory(antDir / "lib")
- val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
- val antOptions =
- if (NestUI._verbose) List("-verbose", "-noinput")
- else List("-noinput")
- val cmd = javaCmd +: (
- JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- antLauncherPath,
- "org.apache.tools.ant.launch.Launcher"
- ) ++ antOptions ++ args
- )
-
- runCommand(cmd, output)
- }
-
- def runAntTest(): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
-
- val succeeded = try {
- val binary = "-Dbinary="+(
- if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
- else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
- else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
- else "installed"
- )
- val args = Array(binary, "-logfile", logFile.getPath, "-file", testFile.getPath)
- NestUI.verbose("ant "+args.mkString(" "))
-
- pushTranscript(s"ant ${args.mkString(" ")}")
- nextTestActionExpectTrue("ant failed", ant(args, logFile)) && diffIsOk
- }
- catch { // *catch-all*
- case e: Exception =>
- NestUI.warning("caught "+e)
- false
- }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def extraClasspath = kind match {
- case "specialized" => PathSettings.srcSpecLib.toString
- case _ => ""
- }
- def extraJavaOptions = kind match {
- case "instrumented" => "-javaagent:"+PathSettings.instrumentationAgentLib
- case _ => ""
- }
-
- def runScalacheckTest() = runTestCommon {
- NestUI verbose f"compilation of $testFile succeeded%n"
-
- // this classloader is test specific: its parent contains library classes and others
- val loader = {
- import PathSettings.scalaCheck
- val locations = List(outDir, scalaCheck.jfile) map (_.getAbsoluteFile.toURI.toURL)
- ScalaClassLoader.fromURLs(locations, getClass.getClassLoader)
- }
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
- def runInFramework(): Boolean = {
- import org.scalatools.testing._
- val f: Framework = loader.instantiate[Framework]("org.scalacheck.ScalaCheckFramework")
- val logger = new Logger {
- def ansiCodesSupported = false //params.env.isSet("colors")
- def error(msg: String) = logWriter println msg
- def warn(msg: String) = logWriter println msg
- def info(msg: String) = logWriter println msg
- def debug(msg: String) = logWriter println msg
- def trace(t: Throwable) = t printStackTrace logWriter
- }
- var bad = 0
- val handler = new EventHandler {
- // testName, description, result, error
- // Result = Success, Failure, Error, Skipped
- def handle(event: Event): Unit = event.result match {
- case Result.Success =>
- //case Result.Skipped => // an exhausted test is skipped, therefore bad
- case _ => bad += 1
- }
- }
- val loggers = Array(logger)
- val r = f.testRunner(loader, loggers).asInstanceOf[Runner2] // why?
- val claas = "Test"
- val fingerprint = f.tests collectFirst { case x: SubclassFingerprint if x.isModule => x }
- val args = toolArgs("scalacheck")
- vlog(s"Run $testFile with args $args")
- // set the context class loader for scaladoc/scalacheck tests (FIX ME)
- ScalaClassLoader(testRunParams.scalaCheckParentClassLoader).asContext {
- r.run(claas, fingerprint.get, handler, args.toArray) // synchronous?
- }
- val ok = (bad == 0)
- if (!ok) _transcript append logFile.fileContents
- ok
- }
- try nextTestActionExpectTrue("ScalaCheck test failed", runInFramework()) finally logWriter.close()
- }
-
- def runResidentTest() = {
- // simulate resident compiler loop
- val prompt = "\nnsc> "
- val (swr, wr) = newTestWriters()
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = parentFile
- val resFile = new File(dir, fileBase + ".res")
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getAbsoluteFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argList = List(
- "-d", outDir.getAbsoluteFile.getPath,
- "-Xresident",
- "-sourcepath", sourcepath)
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut, true)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
- // create compiler
- val settings = new Settings(workerError)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings)
- object compiler extends Global(command.settings, reporter)
-
- def resCompile(line: String): Boolean = {
- // NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- // NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(workerError)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett)
- // "scalac " + command.files.mkString(" ")
- pushTranscript("scalac " + command.files.mkString(" "))
- nextTestActionExpectTrue(
- "compilation failed",
- command.ok && {
- (new compiler.Run) compile command.files
- !reporter.hasErrors
- }
- )
- }
- def loop(): Boolean = {
- logWriter.print(prompt)
- resReader.readLine() match {
- case null | "" => logWriter.close() ; true
- case line => resCompile(line) && loop()
- }
- }
- // res/t687.res depends on ignoring its compilation failure
- // and just looking at the diff, so I made them all do that
- // because this is long enough.
- if (!Output.withRedirected(logWriter)(try loop() finally resReader.close()))
- setLastState(genPass())
-
- (diffIsOk, LogContext(logFile, swr, wr))
- }
-
- def run(): TestState = {
- if (kind == "neg" || (kind endsWith "-neg")) runNegTest()
- else kind match {
- case "pos" => runTestCommon(true)
- case "ant" => runAntTest()
- case "scalacheck" => runScalacheckTest()
- case "res" => runResidentTest()
- case "scalap" => runScalapTest()
- case "script" => runScriptTest()
- case _ => runTestCommon(execTest(outDir, logFile) && diffIsOk)
- }
-
- lastState
- }
-
- def runScalapTest() = runTestCommon {
- val isPackageObject = testFile.getName startsWith "package"
- val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package")
- val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader)
- val byteCode = ByteCode forClass (loader loadClass className)
- val result = decompileScala(byteCode.bytes, isPackageObject)
-
- logFile writeAll result
- diffIsOk
- }
- def runScriptTest() = {
- import scala.sys.process._
- val (swr, wr) = newTestWriters()
-
- val args = file2String(testFile changeExtension "args")
- val cmdFile = if (isWin) testFile changeExtension "bat" else testFile
- val succeeded = (((cmdFile + " " + args) #> logFile !) == 0) && diffIsOk
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def cleanup() {
- if (lastState.isOk)
- logFile.delete()
- if (!isPartestDebug)
- Directory(outDir).deleteRecursively()
- }
-}
-
-case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
-
-/** Extended by Ant- and ConsoleRunner for running a set of tests. */
-trait DirectRunner {
- def fileManager: FileManager
-
- import PartestDefaults.{ numThreads, waitTime }
-
- setUncaughtHandler
-
- def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = {
-
- NestUI.resetTestNumber(kindFiles.size)
-
- // this special class loader is for the benefit of scaladoc tests, which need a class path
- import PathSettings.{ testInterface, scalaCheck }
- val allUrls = scalaCheck.toURL :: testInterface.toURL :: fileManager.latestUrls
- val parentClassLoader = ScalaClassLoader fromURLs allUrls
- // add scalacheck.jar to a special classloader, but use our loader as parent with test-interface
- //val parentClassLoader = ScalaClassLoader fromURLs (List(scalaCheck.toURL), getClass().getClassLoader)
- val pool = Executors newFixedThreadPool numThreads
- val manager = new RunnerManager(kind, fileManager, TestRunParams(parentClassLoader))
- val futures = kindFiles map (f => pool submit callable(manager runTest f))
-
- pool.shutdown()
- Try (pool.awaitTermination(waitTime) {
- throw TimeoutException(waitTime)
- }) match {
- case Success(_) => futures map (_.get)
- case Failure(e) =>
- e match {
- case TimeoutException(d) =>
- NestUI warning "Thread pool timeout elapsed before all tests were complete!"
- case ie: InterruptedException =>
- NestUI warning "Thread pool was interrupted"
- ie.printStackTrace()
- }
- pool.shutdownNow() // little point in continuing
- // try to get as many completions as possible, in case someone cares
- val results = for (f <- futures) yield {
- try {
- Some(f.get(0, NANOSECONDS))
- } catch {
- case _: Throwable => None
- }
- }
- results.flatten
- }
- }
-}
-
-case class TimeoutException(duration: Duration) extends RuntimeException
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
- def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
- require (file != null)
- new LogContext(file, Some((swr, wr)))
- }
- def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-object Output {
- object outRedirect extends Redirecter(out)
- object errRedirect extends Redirecter(err)
-
- System.setOut(outRedirect)
- System.setErr(errRedirect)
-
- import scala.util.DynamicVariable
- private def out = java.lang.System.out
- private def err = java.lang.System.err
- private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
- class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
- def write(b: Int) = withStream(_ write b)
-
- private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
- override def write(b: Array[Byte]) = withStream(_ write b)
- override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
- override def flush = withStream(_.flush)
- override def close = withStream(_.close)
- })
-
- // this supports thread-safe nested output redirects
- def withRedirected[T](newstream: PrintStream)(func: => T): T = {
- // note down old redirect destination
- // this may be None in which case outRedirect and errRedirect print to stdout and stderr
- val saved = redirVar.value
- // set new redirecter
- // this one will redirect both out and err to newstream
- redirVar.value = Some(newstream)
-
- try func
- finally {
- newstream.flush()
- redirVar.value = saved
- }
- }
-}
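// A minimal usage sketch, assuming a caller that wants a block's console output in a log file;
// output written through System.out/System.err while the body runs goes to the given stream.
def captureToFile[T](logFile: java.io.File)(body: => T): T = {
  val ps = new java.io.PrintStream(new java.io.FileOutputStream(logFile), true)
  try Output.withRedirected(ps)(body)
  finally ps.close()
}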
-
-/** Use a Runner to run a test. */
-class RunnerManager(kind: String, fileManager: FileManager, params: TestRunParams) {
- import fileManager._
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
-
- def runTest(testFile: File): TestState = {
- val runner = new Runner(testFile, fileManager, params)
-
- // when option "--failed" is provided execute test only if log
- // is present (which means it failed before)
- if (fileManager.failed && !runner.logFile.canRead)
- runner.genPass()
- else {
- val (state, elapsed) =
- try timed(runner.run())
- catch {
- case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
- }
- NestUI.reportTest(state)
- runner.cleanup()
- state
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
deleted file mode 100644
index 1cf3aa858f..0000000000
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-import scala.util.Properties.setProp
-import scala.collection.JavaConverters._
-
-object SBTRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): java.util.List[TestState] = {
- def failedOnlyIfRequired(files: List[File]): List[File] = {
- if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
- }
- runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind).asJava
- }
-
- case class CommandLineOptions(classpath: Option[String] = None,
- tests: Map[String, Array[File]] = Map(),
- scalacOptions: Seq[String] = Seq(),
- justFailedTests: Boolean = false)
-
- def mainReflect(args: Array[String]): java.util.List[TestState] = {
- setProp("partest.debug", "true")
-
- val Argument = new scala.util.matching.Regex("-(.*)")
- def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
- case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
- case Seq("-cp", cp, rest @ _*) => parseArgs(rest, data.copy(classpath=Some(cp)))
- case Seq("-scalacoption", opt, rest @ _*) => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
- case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
- case Seq() => data
- case x => sys.error("Unknown command line options: " + x)
- }
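// Illustrative only (paths are hypothetical): parsing
//   Seq("--failed", "-cp", "lib/scala-library.jar", "-pos", "t1.scala,t2.scala")
// yields CommandLineOptions(
//   classpath = Some("lib/scala-library.jar"),
//   tests = Map("pos" -> Array(new File("t1.scala"), new File("t2.scala"))),
//   scalacOptions = Seq(),
//   justFailedTests = true)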
- val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS = config.scalacOptions
- fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
-
- def findClasspath(jar: String, name: String): Option[String] = {
- val optJar = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+jar+".*\\.jar"))).headOption
- val optClassDir = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+name+File.separator+"classes"))).headOption
- optJar orElse optClassDir
- }
- // Find scala library jar file...
- fileManager.LATEST_LIB = findClasspath("scala-library", "scala-library") getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_REFLECT = findClasspath("scala-reflect", "scala-reflect") getOrElse sys.error("No scala-reflect found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
-
- // TODO - Do something useful here!!!
- fileManager.JAVAC_CMD = "javac"
- fileManager.failed = config.justFailedTests
- // TODO - Make this a flag?
- //fileManager.updateCheck = true
- // Now run and report...
- val runs = config.tests.filterNot(_._2.isEmpty)
- val result = runs.toList flatMap { case (kind, files) => reflectiveRunTestsForFiles(files, kind).asScala }
-
- result.asJava
- }
-
- def main(args: Array[String]): Unit = {
- val failures = mainReflect(args).asScala collect { case s if !s.isOk => s.longStatus }
- // Re-list all failures so we can go figure out what went wrong.
- failures foreach System.err.println
- if(!failures.isEmpty) sys.exit(1)
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala
deleted file mode 100644
index dc155b1787..0000000000
--- a/src/partest/scala/tools/partest/nest/StreamCapture.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-package scala.tools.partest
-package nest
-
-import java.io.{ Console => _, _ }
-
-object StreamCapture {
- case class Captured[T](stdout: String, stderr: String, result: T) {
- override def toString = s"""
- |result: $result
- |[stdout]
- |$stdout
- |[stderr]
- |$stderr""".stripMargin.trim
- }
-
- private def mkStream = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- val ostream = new PrintStream(new OutputStream { def write(b: Int): Unit = wr write b }, true) // autoFlush = true
-
- (ostream, () => { ostream.close() ; swr.toString })
- }
-
- def savingSystem[T](body: => T): T = {
- val savedOut = System.out
- val savedErr = System.err
- try body
- finally {
- System setErr savedErr
- System setOut savedOut
- }
- }
-
- def apply[T](body: => T): Captured[T] = {
- val (outstream, stdoutFn) = mkStream
- val (errstream, stderrFn) = mkStream
-
- val result = savingSystem {
- System setOut outstream
- System setErr errstream
- Console.withOut(outstream) {
- Console.withErr(errstream) {
- body
- }
- }
- }
- Captured(stdoutFn(), stderrFn(), result)
- }
-}
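// A minimal usage sketch: run a block while capturing whatever it prints.
object StreamCaptureSketch extends App {
  val captured = StreamCapture { println("hello"); 6 * 7 }
  assert(captured.result == 42)
  assert(captured.stdout contains "hello")
  assert(captured.stderr.isEmpty)
}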
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
deleted file mode 100644
index 5a1afeb77f..0000000000
--- a/src/partest/scala/tools/partest/package.scala
+++ /dev/null
@@ -1,241 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools
-
-import java.util.concurrent.{ Callable, ExecutorService }
-import scala.concurrent.duration.Duration
-import scala.sys.process.javaVmArguments
-import scala.tools.partest.nest.NestUI
-import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
-
-package object partest {
- type File = java.io.File
- type SFile = scala.reflect.io.File
- type Directory = scala.reflect.io.Directory
- type Path = scala.reflect.io.Path
- type PathResolver = scala.tools.util.PathResolver
- type ClassPath[T] = scala.tools.nsc.util.ClassPath[T]
- type StringWriter = java.io.StringWriter
-
- val SFile = scala.reflect.io.File
- val Directory = scala.reflect.io.Directory
- val Path = scala.reflect.io.Path
- val PathResolver = scala.tools.util.PathResolver
- val ClassPath = scala.tools.nsc.util.ClassPath
-
- val space = "\u0020"
- val EOL = scala.compat.Platform.EOL
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: String*): String = oempty(xs: _*) mkString space
- def nljoin(xs: String*): String = oempty(xs: _*) mkString EOL
-
- implicit val codec = scala.io.Codec.UTF8
-
- def setUncaughtHandler() = {
- Thread.setDefaultUncaughtExceptionHandler(
- new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, t: Throwable) {
- val t1 = Exceptional unwrap t
- System.err.println(s"Uncaught exception on thread $thread: $t1")
- t1.printStackTrace()
- }
- }
- )
- }
-
- /** Sources have a numerical group, specified by name_7 and so on. */
- private val GroupPattern = """.*_(\d+)""".r
-
- implicit class FileOps(val f: File) {
- private def sf = SFile(f)
-
- def testIdent = {
- f.toString split """[/\\]+""" takeRight 2 mkString "/" // e.g. pos/t1234
- }
-
- def mapInPlace(mapFn: String => String)(filterFn: String => Boolean = _ => true): Unit =
- writeAll(fileLines filter filterFn map (x => mapFn(x) + EOL): _*)
-
- def appendAll(strings: String*): Unit = sf.appendAll(strings: _*)
- def writeAll(strings: String*): Unit = sf.writeAll(strings: _*)
- def absolutePathSegments: List[String] = f.getAbsolutePath split """[/\\]+""" toList
-
- def isJava = f.isFile && (sf hasExtension "java")
- def isScala = f.isFile && (sf hasExtension "scala")
- def isJavaOrScala = isJava || isScala
-
- def extension = sf.extension
- def hasExtension(ext: String) = sf hasExtension ext
- def changeExtension(ext: String): File = (sf changeExtension ext).jfile
-
- /** The group number for this source file, or -1 for no group. */
- def group: Int =
- sf.stripExtension match {
- case GroupPattern(g) if g.toInt >= 0 => g.toInt
- case _ => -1
- }
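// Illustrative only: the group comes from a trailing _<digits> in the base name, e.g.
//   new File("Macros_1.scala").group == 1
//   new File("Client_2.scala").group == 2
//   new File("Test.scala").group == -1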
-
- def fileContents: String = try sf.slurp() catch { case _: java.io.FileNotFoundException => "" }
- def fileLines: List[String] = augmentString(fileContents).lines.toList
- }
-
- implicit class PathOps(p: Path) extends FileOps(p.jfile) { }
-
- implicit class Copier(val f: SFile) extends AnyVal {
- def copyTo(dest: Path): Unit = dest.toFile writeAll f.slurp(scala.io.Codec.UTF8)
- }
-
- implicit class LoaderOps(val loader: ClassLoader) extends AnyVal {
- import scala.util.control.Exception.catching
- /** Like ScalaClassLoader.create for the case where the result type is
- * available to the current class loader, implying that the current
- * loader is a parent of `loader`.
- */
- def instantiate[A >: Null](name: String): A = (
- catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
- (loader loadClass name).newInstance.asInstanceOf[A] orNull
- )
- }
-
- implicit class ExecutorOps(val executor: ExecutorService) {
- def awaitTermination[A](wait: Duration)(failing: => A = ()): Option[A] = (
- if (executor awaitTermination (wait.length, wait.unit)) None
- else Some(failing)
- )
- }
-
- implicit def temporaryPath2File(x: Path): File = x.jfile
- implicit def stringPathToJavaFile(path: String): File = new File(path)
-
- implicit lazy val postfixOps = scala.language.postfixOps
- implicit lazy val implicitConversions = scala.language.implicitConversions
-
- def fileSeparator = java.io.File.separator
- def pathSeparator = java.io.File.pathSeparator
-
- def pathToTestIdent(path: Path) = path.jfile.testIdent
-
- def canonicalizeSlashes(line: String) = line.replaceAll("""[/\\]+""", "/")
-
- def words(s: String): List[String] = (s.trim split "\\s+").toList
-
- def timed[T](body: => T): (T, Long) = {
- val t1 = System.currentTimeMillis
- val result = body
- val t2 = System.currentTimeMillis
-
- (result, t2 - t1)
- }
-
- def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
-
- def file2String(f: File): String = f.fileContents
-
- def basename(name: String): String = Path(name).stripExtension
-
- /** In order to allow for spaces in flags/options, this
- * parses .flags, .javaopts, .javacopts, etc. files as follows:
- * If it is exactly one line, it is split (naively) on spaces.
- * If it contains more than one line, each line is its own
- * token, spaces and all.
- */
- def readOptionsFile(file: File): List[String] = {
- file.fileLines match {
- case x :: Nil => words(x)
- case xs => xs map (_.trim)
- }
- }
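// Illustrative only: a single-line .flags file such as
//   -deprecation -Xfatal-warnings
// becomes List("-deprecation", "-Xfatal-warnings"), whereas a multi-line .javaopts file
// keeps each trimmed line as one token, so an option containing spaces survives intact.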
-
- def findProgram(name: String): Option[File] = {
- val pathDirs = sys.env("PATH") match {
- case null => List("/usr/local/bin", "/usr/bin", "/bin")
- case path => path split "[:;]" filterNot (_ == "") toList
- }
- pathDirs.iterator map (d => new File(d, name)) find (_.canExecute)
- }
-
- def now = (new java.util.Date).toString
- def elapsedString(millis: Long): String = {
- val elapsedSecs = millis/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
- }
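// Worked example: 3725000 ms is 3725 s, i.e. 62 min 5 s, so elapsedString(3725000L) == "01:02:05".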
-
- def vmArgString = javaVmArguments.mkString(
- "Java VM started with arguments: '",
- " ",
- "'"
- )
-
- def allPropertiesString = {
- import scala.collection.JavaConversions._
- System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
- }
-
- def showAllJVMInfo() {
- vlog(vmArgString)
- vlog(allPropertiesString)
- }
-
- import scala.language.experimental.macros
-
- /**
- * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
- * {{{
- * trace> "".isEmpty
- * res: Boolean = true
- *
- * }}}
- *
- * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
- * test code in a string.
- */
- def trace[A](a: A) = macro traceImpl[A]
-
- import scala.reflect.macros.Context
- def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
- import c.universe._
- import definitions._
-
- // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
- // because this impairs reflection refactorings
- //
- // val exprCode = c.literal(show(a.tree))
- // val exprType = c.literal(show(a.actualType))
- // reify {
- // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
- // a.splice
- // }
-
- c.Expr(Block(
- List(Apply(
- Select(Ident(PredefModule), newTermName("println")),
- List(Apply(
- Select(Apply(
- Select(Ident(ScalaPackage), newTermName("StringContext")),
- List(
- Literal(Constant("trace> ")),
- Literal(Constant("\\nres: ")),
- Literal(Constant(" = ")),
- Literal(Constant("\\n")))),
- newTermName("s")),
- List(
- Literal(Constant(show(a.tree))),
- Literal(Constant(show(a.actualType))),
- a.tree))))),
- a.tree))
- }
-
- def isPartestTerse = NestUI.isTerse
- def isPartestDebug = NestUI.isDebug
- def isPartestVerbose = NestUI.isVerbose
-
- def vlog(msg: => String) = if (isPartestVerbose) System.err.println(msg)
-}
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
deleted file mode 100644
index b9394b50c9..0000000000
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package utils
-
-/** Loads partest.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait {
- protected def propCategory = "partest"
- protected def pickJarBasedOn = classOf[nest.RunnerManager]
- override def isAvian = super.isAvian
-}
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 009d9dbfdb..5b6ff2325c 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -8,6 +8,7 @@ package reflect
package api
import scala.reflect.runtime.{universe => ru}
+import scala.annotation.compileTimeOnly
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
@@ -91,7 +92,7 @@ trait Exprs { self: Universe =>
* }}}
* because expr of type Expr[T] itself does not have a method foo.
*/
- // @compileTimeOnly("Cannot use splice outside reify")
+ @compileTimeOnly("splice must be enclosed within a reify {} block")
def splice: T
/**
@@ -108,7 +109,7 @@ trait Exprs { self: Universe =>
* object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
* }}}
*/
- // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
+ @compileTimeOnly("cannot use value except for signatures of macro implementations")
val value: T
override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
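
With the annotation now active, a stray `splice` or `value` outside of `reify` fails during type checking rather than at macro runtime. A minimal sketch against scala-reflect 2.11 (the object name and sample values are illustrative only):

import scala.reflect.runtime.universe._

object SpliceSketch extends App {
  val two: Expr[Int]  = reify(1 + 1)
  val four: Expr[Int] = reify(two.splice * 2)   // splice is only legal inside reify { ... }
  println(show(four.tree))                      // prints the composed tree

  // val bad = two.splice                       // now rejected at compile time with:
  //                                            // "splice must be enclosed within a reify {} block"
}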
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index d702555ba6..ec128e31a3 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -101,7 +101,7 @@ package api
* via `ModuleMirror.instance`). Entry point: `val mm = im.reflectMethod(<method symbol>)`.
* Example:
* {{{
- * scala> val methodX = typeOf[C].declaration(newTermName("x")).asMethod
+ * scala> val methodX = typeOf[C].declaration(TermName("x")).asMethod
* methodX: reflect.runtime.universe.MethodSymbol = method x
*
* scala> val mm = im.reflectMethod(methodX)
@@ -126,7 +126,7 @@ package api
* scala> val im = m.reflect(new C)
* im: reflect.runtime.universe.InstanceMirror = instance mirror for C@5f0c8ac1
*
- * scala> val fieldX = typeOf[C].declaration(newTermName("x")).asTerm.accessed.asTerm
+ * scala> val fieldX = typeOf[C].declaration(TermName("x")).asTerm.accessed.asTerm
* fieldX: reflect.runtime.universe.TermSymbol = value x
* scala> val fmX = im.reflectField(fieldX)
* fmX: reflect.runtime.universe.FieldMirror = field mirror for C.x (bound to C@5f0c8ac1)
@@ -136,7 +136,7 @@ package api
*
* scala> fmX.set(3) // NOTE: can set an underlying value of an immutable field!
*
- * scala> val fieldY = typeOf[C].declaration(newTermName("y")).asTerm.accessed.asTerm
+ * scala> val fieldY = typeOf[C].declaration(TermName("y")).asTerm.accessed.asTerm
* fieldY: reflect.runtime.universe.TermSymbol = variable y
*
* scala> val fmY = im.reflectField(fieldY)
@@ -255,7 +255,7 @@ trait Mirrors { self: Universe =>
* Note also that only accessor MethodMirrors, but not FieldMirrors will accurately reflect overriding behavior.
*
* To get a field symbol by the name of the field you would like to reflect,
- * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the field>)).asTerm.accessed`.
+ * use `<this mirror>.symbol.typeSignature.member(TermName(<name of the field>)).asTerm.accessed`.
* For further information about member lookup refer to `Symbol.typeSignature`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
@@ -275,7 +275,7 @@ trait Mirrors { self: Universe =>
* that can be used to invoke the method provided.
*
* To get a method symbol by the name of the method you would like to reflect,
- * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the method>)).asMethod`.
+ * use `<this mirror>.symbol.typeSignature.member(TermName(<name of the method>)).asMethod`.
* For further information about member lookup refer to `Symbol.typeSignature`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
@@ -299,7 +299,7 @@ trait Mirrors { self: Universe =>
* that can be used to get the instance of the object or inspect its companion class.
*
* To get a module symbol by the name of the object you would like to reflect,
- * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the object>)).asModule`.
+ * use `<this mirror>.symbol.typeSignature.member(TermName(<name of the object>)).asModule`.
* For further information about member lookup refer to `Symbol.typeSignature`.
*
* The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
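
Pulling the updated examples together, a self-contained runtime-reflection sketch (class `C` and its members are made up; the mirror calls are the ones documented above):

import scala.reflect.runtime.{ universe => ru }
import ru._

class C { val x = 2; var y = 3; def double(i: Int) = i * 2 }

object MirrorSketch extends App {
  val m  = runtimeMirror(getClass.getClassLoader)
  val im = m.reflect(new C)

  val methodDouble = typeOf[C].declaration(TermName("double")).asMethod
  println(im.reflectMethod(methodDouble)(21))                              // 42

  val fieldY = typeOf[C].declaration(TermName("y")).asTerm.accessed.asTerm
  val fmY    = im.reflectField(fieldY)
  fmY.set(4)
  println(fmY.get)                                                         // 4
}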
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index f74e0ce014..87d7f9fd8e 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -33,13 +33,13 @@ trait Names {
* Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
* @group Names
*/
- implicit def stringToTermName(s: String): TermName = newTermName(s)
+ implicit def stringToTermName(s: String): TermName = TermName(s)
/** An implicit conversion from String to TypeName.
* Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
* @group Names
*/
- implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
+ implicit def stringToTypeName(s: String): TypeName = TypeName(s)
/** The abstract type of names.
* @group Names
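
Both the factory and the implicit-conversion spellings remain available after this change; a tiny sketch (the names chosen are arbitrary):

import scala.reflect.runtime.universe._

object NameSketch extends App {
  val explicit: TermName    = TermName("map")   // the factory the conversions now delegate to
  val viaImplicit: TermName = "map"             // stringToTermName, as documented above
  val tpe: TypeName         = TypeName("List")
  println(explicit == viaImplicit)              // true
  println(tpe)                                  // List
}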
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index f7a6a68946..443f34ccae 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -572,8 +572,8 @@ trait Trees { self: Universe =>
* @group Extractors
*/
abstract class DefDefExtractor {
- def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
- def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+ def apply(mods: Modifiers, name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+ def unapply(defDef: DefDef): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)]
}
/** The API that all def defs support
@@ -584,7 +584,7 @@ trait Trees { self: Universe =>
def mods: Modifiers
/** @inheritdoc */
- def name: Name
+ def name: TermName
/** The type parameters of the method. */
def tparams: List[TypeDef]
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6b7aa2dddf..92f2a64ce9 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -227,10 +227,7 @@ trait Definitions extends api.StandardDefinitions {
scope
}
/** Is this symbol a member of Object or Any? */
- def isUniversalMember(sym: Symbol) = (
- (sym ne NoSymbol)
- && (ObjectClass isSubClass sym.owner)
- )
+ def isUniversalMember(sym: Symbol) = ObjectClass isSubClass sym.owner
/** Is this symbol unimportable? Unimportable symbols include:
* - constructors, because <init> is not a real name
@@ -253,6 +250,13 @@ trait Definitions extends api.StandardDefinitions {
|| tp =:= AnyRefTpe
)
+ def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info)
+ def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match {
+ case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe)
+ case MethodType(_, MethodType(p :: _, _)) if !p.isImplicit => true
+ case _ => false
+ }
+
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
if (parents.head.typeSymbol == AnyClass) tpe
@@ -384,6 +388,7 @@ trait Definitions extends api.StandardDefinitions {
def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
+ def traversableDropMethod = getMemberMethod(ScalaRunTimeModule, nme.drop)
// classes with special meanings
lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
@@ -423,6 +428,15 @@ trait Definitions extends api.StandardDefinitions {
def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
def isVarArgTypes(formals: Seq[Type]) = formals.nonEmpty && isRepeatedParamType(formals.last)
+ def firstParamType(tpe: Type): Type = tpe.paramTypes match {
+ case p :: _ => p
+ case _ => NoType
+ }
+ def isImplicitParamss(paramss: List[List[Symbol]]) = paramss match {
+ case (p :: _) :: _ => p.isImplicit
+ case _ => false
+ }
+
def hasRepeatedParam(tp: Type): Boolean = tp match {
case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe)
case PolyType(_, restpe) => hasRepeatedParam(restpe)
@@ -430,7 +444,12 @@ trait Definitions extends api.StandardDefinitions {
}
// wrapping and unwrapping
- def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def dropRepeated(tp: Type): Type = (
+ if (isJavaRepeatedParamType(tp)) elementExtract(JavaRepeatedParamClass, tp) orElse tp
+ else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp
+ else tp
+ )
def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse tp
def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp
def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp
@@ -659,21 +678,23 @@ trait Definitions extends api.StandardDefinitions {
def isExactProductType(tp: Type): Boolean = isProductNSymbol(tp.typeSymbol)
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
- def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
+ @deprecated("No longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
case Some(x) => tpe.baseType(x).typeArgs
case _ => Nil
}
- def dropNullaryMethod(tp: Type) = tp match {
- case NullaryMethodType(restpe) => restpe
- case _ => tp
- }
-
- def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ @deprecated("No longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
+ def getterMemberTypes(tpe: Type, getters: List[Symbol]): List[Type] =
+ getters map (m => dropNullaryMethod(tpe memberType m))
+
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
def abstractFunctionForFunctionType(tp: Type) = {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
@@ -696,6 +717,71 @@ trait Definitions extends api.StandardDefinitions {
def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
def seqType(arg: Type) = appliedType(SeqClass, arg)
+ // FYI the long clunky name is because it's really hard to put "get" into the
+ // name of a method without it sounding like the method "get"s something, whereas
+ // this method is about a type member which just happens to be named get.
+ def typeOfMemberNamedGet(tp: Type) = resultOfMatchingMethod(tp, nme.get)()
+ def typeOfMemberNamedHead(tp: Type) = resultOfMatchingMethod(tp, nme.head)()
+ def typeOfMemberNamedApply(tp: Type) = resultOfMatchingMethod(tp, nme.apply)(IntTpe)
+ def typeOfMemberNamedDrop(tp: Type) = resultOfMatchingMethod(tp, nme.drop)(IntTpe)
+ def typeOfMemberNamedGetOrSelf(tp: Type) = typeOfMemberNamedGet(tp) orElse tp
+ def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp))
+ def typesOfCaseAccessors(tp: Type) = getterMemberTypes(tp, tp.typeSymbol.caseFieldAccessors)
+
+ /** If this is a case class, the case field accessors (which may be an empty list).
+ * Otherwise, if there are any product selectors, that list.
+ * Otherwise, a list containing only the type itself.
+ */
+ def typesOfSelectorsOrSelf(tp: Type): List[Type] = (
+ if (tp.typeSymbol.isCase)
+ typesOfCaseAccessors(tp)
+ else typesOfSelectors(tp) match {
+ case Nil => tp :: Nil
+ case tps => tps
+ }
+ )
+
+ /** If the given type has one or more product selectors, the type of the last one.
+ * Otherwise, the type itself.
+ */
+ def typeOfLastSelectorOrSelf(tp: Type) = typesOfSelectorsOrSelf(tp).last
+
+ def elementTypeOfLastSelectorOrSelf(tp: Type) = {
+ val last = typeOfLastSelectorOrSelf(tp)
+ ( typeOfMemberNamedHead(last)
+ orElse typeOfMemberNamedApply(last)
+ orElse elementType(ArrayClass, last)
+ )
+ }
+
+ /** Returns the method symbols for members _1, _2, ..., _N
+ * which exist in the given type.
+ */
+ def productSelectors(tpe: Type): List[Symbol] = {
+ def loop(n: Int): List[Symbol] = tpe member TermName("_" + n) match {
+ case NoSymbol => Nil
+ case m if m.paramss.nonEmpty => Nil
+ case m => m :: loop(n + 1)
+ }
+ loop(1)
+ }
+
+ /** If `tp` has a term member `name`, the first parameter list of which
+ * matches `paramTypes`, and which either has no further parameter
+ * lists or only an implicit one, then the result type of the matching
+ * method. Otherwise, NoType.
+ */
+ def resultOfMatchingMethod(tp: Type, name: TermName)(paramTypes: Type*): Type = {
+ def matchesParams(member: Symbol) = member.paramss match {
+ case Nil => paramTypes.isEmpty
+ case ps :: rest => (rest.isEmpty || isImplicitParamss(rest)) && (ps corresponds paramTypes)(_.tpe =:= _)
+ }
+ tp member name filter matchesParams match {
+ case NoSymbol => NoType
+ case member => (tp memberType member).finalResultType
+ }
+ }
+
def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg)
/** Can we tell by inspecting the symbol that it will never
@@ -904,7 +990,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
- lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.annotation.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
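
The `productSelectors` and `resultOfMatchingMethod` helpers added above live in the compiler's internal symbol table; the same `_1`, `_2`, ... walk can be sketched roughly against the public reflection API (the demo object and tuple type below are arbitrary):

import scala.reflect.runtime.universe._

object SelectorSketch extends App {
  // Collect the parameterless _1, _2, ... members a type exposes, mirroring
  // the internal productSelectors above.
  def productSelectors(tpe: Type): List[Symbol] = {
    def loop(n: Int): List[Symbol] = tpe.member(TermName("_" + n)) match {
      case NoSymbol                                         => Nil
      case m if m.isMethod && m.asMethod.paramLists.isEmpty => m :: loop(n + 1)
      case _                                                => Nil
    }
    loop(1)
  }

  println(productSelectors(typeOf[(Int, String, Boolean)]).map(_.name))   // List(_1, _2, _3)
}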
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 46a95c7d26..d1c215713e 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -233,7 +233,7 @@ trait Kinds {
/**
* The data structure describing the kind of a given type.
- *
+ *
* Proper types are represented using ProperTypeKind.
*
* Type constructors are represented using TypeConKind.
@@ -251,7 +251,7 @@ trait Kinds {
* it uses prescribed letters for each level: A, F, X, Y, Z.
*/
def scalaNotation: String
-
+
/** Kind notation used in http://adriaanm.github.com/files/higher.pdf.
* Proper types are expressed as *.
* Type constructors are expressed * -> *(lo, hi) -(+)-> *.
@@ -261,13 +261,13 @@ trait Kinds {
/** Contains bounds either as part of itself or its arguments.
*/
def hasBounds: Boolean = !bounds.isEmptyBounds
-
+
private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState
}
object Kind {
private[internal] sealed trait ScalaNotation
private[internal] sealed case class Head(order: Int, n: Option[Int], alias: Option[String]) extends ScalaNotation {
- override def toString: String = {
+ override def toString: String = {
alias getOrElse {
typeAlias(order) + n.map(_.toString).getOrElse("")
}
@@ -285,7 +285,7 @@ trait Kinds {
}
private[internal] sealed case class Text(value: String) extends ScalaNotation {
override def toString: String = value
- }
+ }
private[internal] case class StringState(tokens: Seq[ScalaNotation]) {
override def toString: String = tokens.mkString
def append(value: String): StringState = StringState(tokens :+ Text(value))
@@ -310,7 +310,7 @@ trait Kinds {
ts map {
case Head(`o`, _, a) => Head(o, None, a)
case t => t
- }
+ }
else ts
})
}
@@ -332,7 +332,7 @@ trait Kinds {
val order = 0
private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = {
s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString))
- }
+ }
def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString)
def starNotation: String = "*" + bounds.starNotation(_.toString)
}
@@ -344,7 +344,7 @@ trait Kinds {
class TypeConKind(val bounds: TypeBounds, val args: Seq[TypeConKind.Argument]) extends Kind {
import Kind.StringState
- val order = (args map {_.kind.order} max) + 1
+ val order = (args map (_.kind.order)).max + 1
def description: String =
if (order == 1) "This is a type constructor: a 1st-order-kinded type."
else "This is a type constructor that takes type constructor(s): a higher-kinded type."
@@ -380,7 +380,7 @@ trait Kinds {
object TypeConKind {
def apply(args: Seq[TypeConKind.Argument]): TypeConKind = this(TypeBounds.empty, args)
def apply(bounds: TypeBounds, args: Seq[TypeConKind.Argument]): TypeConKind = new TypeConKind(bounds, args)
- def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some(tck.bounds, tck.args)
+ def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some((tck.bounds, tck.args))
case class Argument(variance: Variance, kind: Kind)(val sym: Symbol) {}
}
@@ -389,7 +389,7 @@ trait Kinds {
*/
object inferKind {
import TypeConKind.Argument
-
+
abstract class InferKind {
protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind
protected def infer(sym: Symbol, topLevel: Boolean): Kind = infer(sym.tpeHK, sym.owner, topLevel)
@@ -398,7 +398,7 @@ trait Kinds {
}
def apply(pre: Type): InferKind = new InferKind {
- protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
+ protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
val bounds = if (topLevel) TypeBounds.empty
else tpe.asSeenFrom(pre, owner).bounds
if(!tpe.isHigherKinded) ProperTypeKind(bounds)
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 64713b8d41..7a2287664a 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -626,6 +626,7 @@ trait StdNames {
val clone_ : NameType = "clone"
val collection: NameType = "collection"
val conforms: NameType = "conforms"
+ val compare: NameType = "compare"
val copy: NameType = "copy"
val create: NameType = "create"
val currentMirror: NameType = "currentMirror"
@@ -657,6 +658,7 @@ trait StdNames {
val get: NameType = "get"
val hashCode_ : NameType = "hashCode"
val hash_ : NameType = "hash"
+ val head : NameType = "head"
val immutable: NameType = "immutable"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
@@ -725,6 +727,7 @@ trait StdNames {
val toArray: NameType = "toArray"
val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
+ val toSeq: NameType = "toSeq"
val TopScope: NameType = "TopScope"
val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index c340670635..a6f9dfc164 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -60,6 +60,7 @@ abstract class SymbolTable extends macros.Universe
def shouldLogAtThisPhase = false
def isPastTyper = false
+ protected def isDeveloper: Boolean = settings.debug
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
@@ -69,8 +70,12 @@ abstract class SymbolTable extends macros.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug) log(msg)
- def devWarning(msg: => String): Unit = if (settings.debug) Console.err.println(msg)
+ def devWarning(msg: => String): Unit = if (isDeveloper) Console.err.println(msg)
def throwableAsString(t: Throwable): String = "" + t
+ def throwableAsString(t: Throwable, maxFrames: Int): String = t.getStackTrace take maxFrames mkString "\n at "
+
+ @inline final def devWarningDumpStack(msg: => String, maxFrames: Int): Unit =
+ devWarning(msg + "\n" + throwableAsString(new Throwable, maxFrames))
/** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t))
@@ -111,6 +116,13 @@ abstract class SymbolTable extends macros.Universe
result
}
+ @inline
+ final private[scala] def debuglogResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
+ if (cond(result))
+ debuglog(msg + ": " + result)
+
+ result
+ }
// For too long have we suffered in order to sort NAMES.
// I'm pretty sure there's a reasonable default for that.
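
The new `debuglogResultIf` is just conditional logging that hands its argument back unchanged; a standalone sketch of the pattern (the `debugEnabled` flag stands in for `settings.debug`):

object LogResultSketch extends App {
  var debugEnabled = true                                    // stand-in for settings.debug
  def debuglog(msg: => String): Unit = if (debugEnabled) Console.err.println(msg)

  def debuglogResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
    if (cond(result)) debuglog(msg + ": " + result)
    result                                                   // always returned unchanged
  }

  val n = debuglogResultIf[Int]("unexpectedly large", _ > 100)(7 * 20)
  println(n)                                                 // 140, after the log line
}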
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index e41038cafc..a8efa938c8 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -153,7 +153,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def asNameType(n: Name): NameType
- private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ // The null check is for NoSymbol, which can't pass a reference to itself to the constructor and also
+ // can't call owner_= due to an assertion it contains.
+ private[this] var _rawowner = if (initOwner eq null) this else initOwner
private[this] var _rawflags: Long = _
def rawowner = _rawowner
@@ -610,7 +613,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
- final def isOverridableMember = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
+ final def isOverridableMember = !(isClass || isEffectivelyFinal) && safeOwner.isClass
/** Does this symbol denote a wrapper created by the repl? */
final def isInterpreterWrapper = (
@@ -999,13 +1002,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
+ /** In general when seeking the owner of a symbol, one should call `owner`.
+ * The other possibilities include:
+ * - call `safeOwner` if it is expected that the target may be NoSymbol
+ * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol
+ *
+ * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev.
+ * `assertOwner` aborts compilation immediately if called on NoSymbol.
+ */
def owner: Symbol = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
rawowner
}
-
- // Like owner, but NoSymbol.owner == NoSymbol instead of throwing an exception.
- final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+ final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+ final def assertOwner: Symbol = if (this eq NoSymbol) abort("no-symbol does not have an owner") else owner
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
@@ -1781,10 +1791,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
result
}
- @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
-
- final def toOption: Option[Symbol] = if (exists) Some(this) else None
-
// ------ cloning --------------------------------------------------------------------
/** A clone of this symbol. */
@@ -2179,8 +2185,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* the recursive knot.
*/
private def canMatchInheritedSymbols = (
- (this ne NoSymbol)
- && owner.isClass
+ owner.isClass
&& !this.isClass
&& !this.isConstructor
)
@@ -2352,6 +2357,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
@inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
+ @inline final def fold[T](none: => T)(f: Symbol => T): T = if (this ne NoSymbol) f(this) else none
+ @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
// ------ toString -------------------------------------------------------------------
@@ -3340,7 +3350,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
override def associatedFile = NoAbstractFile
- override def ownerChain: List[Symbol] = List()
+ override def owner: Symbol = {
+ devWarningDumpStack("NoSymbol.owner", 15)
+ this
+ }
+ override def ownerChain: List[Symbol] = Nil
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
override def reset(completer: Type): this.type = this
@@ -3350,9 +3364,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
-
- override def owner: Symbol =
- abort("no-symbol does not have an owner")
}
protected def makeNoSymbol: NoSymbol = new NoSymbol
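
`fold`, `map`, `toOption` and friends treat `NoSymbol` as an Option-like sentinel; since `Symbol` here is compiler-internal, the shape of the pattern can only be sketched with a stand-in type (everything below is hypothetical):

object SentinelSketch extends App {
  sealed trait Sym { def name: String }
  case class SomeSym(name: String) extends Sym
  case object NoSym extends Sym { def name = "<none>" }

  implicit class SymOps(val sym: Sym) extends AnyVal {
    def orElse(alt: => Sym): Sym            = if (sym ne NoSym) sym else alt
    def fold[T](none: => T)(f: Sym => T): T = if (sym ne NoSym) f(sym) else none
    def map(f: Sym => Sym): Sym             = if (sym eq NoSym) sym else f(sym)
    def toOption: Option[Sym]               = if (sym ne NoSym) Some(sym) else None
  }

  println(NoSym.orElse(SomeSym("fallback")))       // SomeSym(fallback)
  println(SomeSym("x").fold("missing")(_.name))    // x
}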
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 5c92512193..34fe0afb1a 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -488,7 +488,7 @@ abstract class TreeInfo {
}
object WildcardStarArg {
- def unapply(tree: Typed): Option[Tree] = tree match {
+ def unapply(tree: Tree): Option[Tree] = tree match {
case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => Some(expr)
case _ => None
}
@@ -628,11 +628,12 @@ abstract class TreeInfo {
* case Extractor(a @ (b, c)) => 2
* }}}
*/
- def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ def effectivePatternArity(args: List[Tree]): Int = flattenedPatternArgs(args).length
+
+ def flattenedPatternArgs(args: List[Tree]): List[Tree] = args map unbind match {
case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
case xs => xs
- }).length
-
+ }
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
// tailcalls, cps,... use this flag combination to detect translated matches
@@ -772,6 +773,17 @@ abstract class TreeInfo {
unapply(dissectApplied(tree))
}
+ /** Locates the synthetic Apply node corresponding to an extractor's call to
+ * unapply (unwrapping nested Applies) and returns the fun part of that Apply.
+ */
+ object Unapplied {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Apply(fun, Ident(nme.SELECTOR_DUMMY) :: Nil) => Some(fun)
+ case Apply(fun, _) => unapply(fun)
+ case _ => None
+ }
+ }
+
/** Is this file the body of a compilation unit which should not
* have Predef imported? This is the case iff the first import in the
* unit explicitly refers to Predef.
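
`Unapplied` matches the synthetic unapply call produced by the pattern matcher, so it cannot be exercised outside the compiler, but the recursive-extractor shape it uses can. A rough analogue over public trees (the quasiquote and the `FunOf` name are only for the demo):

import scala.reflect.runtime.universe._

object UnwrapSketch extends App {
  // Peel nested Apply nodes until the applied function itself is reached.
  object FunOf {
    def unapply(tree: Tree): Option[Tree] = tree match {
      case Apply(fun, _) => unapply(fun) orElse Some(fun)
      case _             => None
    }
  }

  q"f(1)(2)(3)" match {
    case FunOf(fun) => println(show(fun))   // f
    case _          => println("not an application")
  }
}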
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index ceb3b383d7..fab1f45358 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -185,8 +185,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
def replace(from: Tree, to: Tree): Tree =
new TreeReplacer(from, to, positionAware = false) transform this
- def hasSymbolWhich(f: Symbol => Boolean) =
- (symbol ne null) && (symbol ne NoSymbol) && f(symbol)
+ def hasExistingSymbol = (symbol ne null) && (symbol ne NoSymbol)
+ def hasSymbolWhich(f: Symbol => Boolean) = hasExistingSymbol && f(symbol)
def isErroneous = (tpe ne null) && tpe.isErroneous
def isTyped = (tpe ne null) && !tpe.isErroneous
@@ -309,10 +309,18 @@ trait Trees extends api.Trees { self: SymbolTable =>
def rhs: Tree
}
+ object ValOrDefDef {
+ def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
+ case ValDef(mods, name, tpt, rhs) => Some((mods, name, tpt, rhs))
+ case DefDef(mods, name, _, _, tpt, rhs) => Some((mods, name, tpt, rhs))
+ case _ => None
+ }
+ }
+
case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi
object ValDef extends ValDefExtractor
- case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
+ case class DefDef(mods: Modifiers, name: TermName, tparams: List[TypeDef],
vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef with DefDefApi
object DefDef extends DefDefExtractor
@@ -1017,14 +1025,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
trait CannotHaveAttrs extends Tree {
override def canHaveAttrs = false
- private def unsupported(what: String, args: Any*) =
- throw new UnsupportedOperationException(s"$what($args) inapplicable for "+self.toString)
+ private def requireLegal(value: Any, allowed: Any, what: String) =
+ require(value == allowed, s"can't set $what for $self to value other than $allowed")
super.setPos(NoPosition)
- override def setPos(pos: Position) = unsupported("setPos", pos)
+ override def setPos(pos: Position) = { requireLegal(pos, NoPosition, "pos"); this }
+ override def pos_=(pos: Position) = setPos(pos)
super.setType(NoType)
- override def tpe_=(t: Type) = if (t != NoType) unsupported("tpe_=", t)
+ override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this }
+ override def tpe_=(t: Type) = setType(t)
}
case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
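
The new `ValOrDefDef` extractor simply lets vals and defs be handled with one pattern; the same idea expressed against the public Trees API (the extractor name and sample trees are illustrative):

import scala.reflect.runtime.universe._

object NamedDefSketch extends App {
  object NamedDefinition {
    def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
      case ValDef(mods, name, tpt, rhs)       => Some((mods, name, tpt, rhs))
      case DefDef(mods, name, _, _, tpt, rhs) => Some((mods, name, tpt, rhs))
      case _                                  => None
    }
  }

  List[Tree](q"val x = 1", q"def f(i: Int) = i + 1") foreach {
    case NamedDefinition(_, name, _, rhs) => println(s"$name = ${show(rhs)}")
    case other                            => println("not a val or def: " + show(other))
  }
}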
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 9c1342e68e..fd64d98ca2 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -36,7 +36,7 @@ trait TypeDebugging {
case ObjectClass => true
case _ => sym.hasPackageFlag
}
- def skipType(tpe: Type): Boolean = skipSym(tpe.typeSymbolDirect)
+ def skipType(tpe: Type): Boolean = (tpe eq null) || skipSym(tpe.typeSymbolDirect)
def skip(t: Tree): Boolean = t match {
case EmptyTree => true
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 11527d88ca..b4ae384594 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -80,7 +80,8 @@ trait Types
with tpe.CommonOwners
with tpe.GlbLubs
with tpe.TypeMaps
- with tpe.TypeConstraints { self: SymbolTable =>
+ with tpe.TypeConstraints
+ with util.Collections { self: SymbolTable =>
import definitions._
import TypesStats._
@@ -703,7 +704,7 @@ trait Types
case OverloadedType(_, alts) =>
OverloadedType(this, alts)
case tp =>
- tp.asSeenFrom(this, sym.owner)
+ if (sym eq NoSymbol) NoType else tp.asSeenFrom(this, sym.owner)
}
/** Substitute types `to` for occurrences of references to
@@ -3474,7 +3475,7 @@ trait Types
def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
if ((parents eq original.parents) && (decls eq original.decls)) original
else {
- val owner = if (original.typeSymbol == NoSymbol) NoSymbol else original.typeSymbol.owner
+ val owner = original.typeSymbol.owner
val result = refinedType(parents, owner)
val syms1 = decls.toList
for (sym <- syms1)
@@ -4014,9 +4015,12 @@ trait Types
def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
+ /** This appears to be equivalent to tp.isInstanceOf[SingletonType],
+ * except it excludes ConstantTypes.
+ */
def isSingleType(tp: Type) = tp match {
case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
- case _ => false
+ case _ => false
}
def isConstantType(tp: Type) = tp match {
@@ -4317,18 +4321,6 @@ trait Types
}
}
- /** like map2, but returns list `xs` itself - instead of a copy - if function
- * `f` maps all elements to themselves.
- */
- def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
- if (xs.isEmpty || ys.isEmpty) xs
- else {
- val x1 = f(xs.head, ys.head)
- val xs1 = map2Conserve(xs.tail, ys.tail)(f)
- if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
- else x1 :: xs1
- }
-
/** Do type arguments `targs` conform to formal parameters `tparams`?
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
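
The comment added to `isSingleType` can be checked against the public API: stable-path types match `SingleType` and friends, while primitives and folded constants do not. The helper below copies the internal predicate; the sample types are arbitrary:

import scala.reflect.runtime.universe._

object SingleTypeSketch extends App {
  def isSingleType(tp: Type): Boolean = tp match {
    case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
    case _                                                => false
  }

  println(isSingleType(typeOf[Predef.type]))   // true: a stable singleton type
  println(isSingleType(typeOf[Int]))           // false: a plain TypeRef
}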
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
deleted file mode 100644
index 2c9f909629..0000000000
--- a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package scala
-package reflect
-package internal
-package annotations
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * Later on, this annotation should be removed and implemented with domain-specific macros.
- * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
- * then it should itself be declared as a macro.
- *
- * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
- * report an error if `outer` is not detected. In principle, we could use this approach right now,
- * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
- * track of the stack of the trees being typechecked.
- *
- * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
- * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
- * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
- * domain-specific logic.
- *
- * @param message the error message to print during compilation if a reference remains
- * after type checking
- * @since 2.10.1
- */
-@getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala
new file mode 100644
index 0000000000..ef299a600c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/package.scala
@@ -0,0 +1,6 @@
+package scala.reflect.internal
+
+package object annotations {
+ @deprecated("Use scala.annotation.compileTimeOnly instead", "2.11.0")
+ type compileTimeOnly = scala.annotation.compileTimeOnly
+} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index bebc419c7c..0d9bbfa5e0 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -395,7 +395,7 @@ private[internal] trait TypeMaps {
s"Widened lone occurrence of $tp1 inside existential to $word bound"
}
if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
- logResult(msg)(repl)
+ debuglogResult(msg)(repl)
else
tp1
case _ =>
@@ -524,7 +524,7 @@ private[internal] trait TypeMaps {
private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
val TypeRef(_, lhsSym, lhsArgs) = lhs
val TypeRef(_, rhsSym, rhsArgs) = rhs
- require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+ require(lhsSym.owner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
// Find the type parameter position; we'll use the corresponding argument.
// Why are we checking by name rather than by equality? Because for
@@ -539,7 +539,7 @@ private[internal] trait TypeMaps {
else {
// It's easy to get here when working on hardcore type machinery (not to
// mention when not doing so, see above) so let's provide a standout error.
- def own_s(s: Symbol) = s.nameString + " in " + s.safeOwner.nameString
+ def own_s(s: Symbol) = s.nameString + " in " + s.owner.nameString
def explain =
sm"""| sought ${own_s(lhsSym)}
| classSym ${own_s(rhsSym)}
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 580ada8254..90ffe9d9e7 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -60,7 +60,7 @@ trait Erasure {
*/
protected def unboundedGenericArrayLevel(tp: Type): Int = tp match {
case GenericArray(level, core) if !(core <:< AnyRefTpe) => level
- case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")(ps map unboundedGenericArrayLevel max)
+ case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")((ps map unboundedGenericArrayLevel).max)
case _ => 0
}
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index e127d577e1..59af819dad 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -53,6 +53,42 @@ trait Collections {
}
lb.toList
}
+
+ /** like map2, but returns list `xs` itself - instead of a copy - if function
+ * `f` maps all elements to themselves.
+ */
+ final def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] = {
+ // Note to developers: there exists a duplication between this function and `List#mapConserve`.
+ // If any successful optimization attempts or other changes are made, please rehash them there too.
+ @tailrec
+ def loop(mapped: ListBuffer[A], unchanged: List[A], pending0: List[A], pending1: List[B]): List[A] = {
+ if (pending0.isEmpty || pending1.isEmpty) {
+ if (mapped eq null) unchanged
+ else mapped.prependToList(unchanged)
+ } else {
+ val head00 = pending0.head
+ val head01 = pending1.head
+ val head1 = f(head00, head01)
+
+ if ((head1 eq head00.asInstanceOf[AnyRef])) {
+ loop(mapped, unchanged, pending0.tail, pending1.tail)
+ } else {
+ val b = if (mapped eq null) new ListBuffer[A] else mapped
+ var xc = unchanged
+ while ((xc ne pending0) && (xc ne pending1)) {
+ b += xc.head
+ xc = xc.tail
+ }
+ b += head1
+ val tail0 = pending0.tail
+ val tail1 = pending1.tail
+ loop(b, tail0, tail0, tail1)
+ }
+ }
+ }
+ loop(null, xs, xs, ys)
+ }
+
final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = {
if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil
else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
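
The contract of `map2Conserve` is easiest to see in use: when `f` changes nothing, the original list comes back `eq`-identical. A standalone sketch using the simple recursive form that the tail-recursive version above replaces:

object ConserveSketch extends App {
  def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
    if (xs.isEmpty || ys.isEmpty) xs
    else {
      val x1  = f(xs.head, ys.head)
      val xs1 = map2Conserve(xs.tail, ys.tail)(f)
      if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs else x1 :: xs1
    }

  val xs = List("a", "b", "c")
  println(map2Conserve(xs, List(1, 2, 3))((s, _) => s) eq xs)   // true: no copy is made
  println(map2Conserve(xs, List(1, 2, 3))((s, i) => s * i))     // List(a, bb, ccc)
}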
diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index a7fd787dfc..63ea6e2c49 100644
--- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -44,7 +44,7 @@ trait ScalaClassLoader extends JClassLoader {
/** Create an instance of a class with this classloader */
def create(path: String): AnyRef =
- tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
+ tryToInitializeClass[AnyRef](path).map(_.newInstance()).orNull
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
@@ -116,7 +116,7 @@ object ScalaClassLoader {
/** True if supplied class exists in supplied path */
def classExists(urls: Seq[URL], name: String): Boolean =
- fromURLs(urls) tryToLoadClass name isDefined
+ (fromURLs(urls) tryToLoadClass name).isDefined
/** Finding what jar a clazz or instance came from */
def originOfClass(x: Class[_]): Option[URL] =
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index 97cc19952c..f61c1f3c50 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -92,7 +92,7 @@ trait TraceSymbolActivity {
while (ph != NoPhase && ph.name != "erasure") {
ph = ph.prev
}
- ph
+ if (ph eq NoPhase) phase else ph
}
private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body)
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
index c7a35d4637..4074d974d2 100644
--- a/src/reflect/scala/reflect/internal/util/TriState.scala
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -3,6 +3,8 @@ package reflect
package internal
package util
+import scala.language.implicitConversions
+
import TriState._
/** A simple true/false/unknown value, for those days when
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index 9b792a3f43..a8bc79d832 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -4,7 +4,7 @@ package reflect.internal.util
import java.lang.ref.{WeakReference, ReferenceQueue}
import scala.annotation.tailrec
import scala.collection.generic.Clearable
-import scala.collection.mutable.{Set => mSet}
+import scala.collection.mutable.{Set => MSet}
/**
* A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
@@ -16,8 +16,8 @@ import scala.collection.mutable.{Set => mSet}
* This set implementation is not in general thread safe without external concurrency control. However it behaves
* properly when GC concurrently collects elements in this set.
*/
-final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
-
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with MSet[A] {
+
import WeakHashSet._
def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
@@ -47,7 +47,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
candidate *= 2
}
candidate
- }
+ }
/**
* the underlying table of entries which is an array of Entry linked lists
@@ -65,7 +65,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
* find the bucket associated with an elements's hash code
*/
private[this] def bucketFor(hash: Int): Int = {
- // spread the bits around to try to avoid accidental collisions using the
+ // spread the bits around to try to avoid accidental collisions using the
// same algorithm as java.util.HashMap
var h = hash
h ^= h >>> 20 ^ h >>> 12
@@ -98,7 +98,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
@tailrec
- def queueLoop {
+ def queueLoop(): Unit = {
val stale = poll()
if (stale != null) {
val bucket = bucketFor(stale.hash)
@@ -109,11 +109,11 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
linkedListLoop(null, table(bucket))
- queueLoop
+ queueLoop()
}
}
-
- queueLoop
+
+ queueLoop()
}
/**
@@ -123,7 +123,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
val oldTable = table
table = new Array[Entry[A]](oldTable.size * 2)
threshhold = computeThreshHold
-
+
@tailrec
def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
@tailrec
@@ -225,7 +225,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
def +=(elem: A) = this + elem
// from scala.reflect.internal.Set
- override def addEntry(x: A) { this += x }
+ override def addEntry(x: A) { this += x }
// remove an element from this set and return this set
override def -(elem: A): this.type = elem match {
@@ -274,6 +274,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
override def foreach[U](f: A => U): Unit = iterator foreach f
+ // toList is written with `()` because building the iterator runs `removeStaleEntries()` as a side effect
override def toList(): List[A] = iterator.toList
// Iterator over all the elements in this set in no particular order
@@ -292,7 +293,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
*/
private[this] var entry: Entry[A] = null
- /**
+ /**
* the element that will be the result of the next call to next()
*/
private[this] var lookaheadelement: A = null.asInstanceOf[A]
@@ -339,7 +340,7 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
* the entries must be stable. If any are garbage collected during validation
* then an assertion may inappropriately fire.
*/
- def fullyValidate {
+ def fullyValidate: Unit = {
var computedCount = 0
var bucket = 0
while (bucket < table.size) {
@@ -407,10 +408,10 @@ final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: D
e = e.tail
}
count
- }
+ }
}
- private[util] def diagnostics = new Diagnostics
+ private[util] def diagnostics = new Diagnostics
}
/**
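
The cleanup loop above (`queueLoop()` inside `removeStaleEntries()`) is the heart of the class: dead weak references surface on a `ReferenceQueue` and are dropped before each operation. A deliberately minimal, hypothetical sketch of that idea, not of `WeakHashSet` itself:

import java.lang.ref.{ WeakReference, ReferenceQueue }
import scala.collection.mutable

object WeakSetSketch extends App {
  final class TinyWeakSet[A <: AnyRef] {
    private val queue   = new ReferenceQueue[A]
    private val entries = mutable.Set.empty[WeakReference[A]]

    // Drain the queue and forget entries whose referents were collected.
    private def removeStaleEntries(): Unit = {
      var stale = queue.poll()
      while (stale != null) {
        entries -= stale.asInstanceOf[WeakReference[A]]
        stale = queue.poll()
      }
    }

    def +=(elem: A): this.type = { removeStaleEntries(); entries += new WeakReference(elem, queue); this }
    def size: Int              = { removeStaleEntries(); entries.size }
  }

  val set = new TinyWeakSet[String]
  set += new String("only weakly reachable")
  println(set.size)   // 1 now; may become 0 once a later GC collects the element
}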
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
index 434b7c1b9c..b0c816f4ad 100644
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -37,8 +37,7 @@ trait Context extends Aliases
with Typers
with Parsers
with Evals
- with ExprUtils
- with Synthetics {
+ with ExprUtils {
/** The compile-time universe. */
val universe: Universe
diff --git a/src/reflect/scala/reflect/macros/Synthetics.scala b/src/reflect/scala/reflect/macros/Synthetics.scala
deleted file mode 100644
index 5e422ee89f..0000000000
--- a/src/reflect/scala/reflect/macros/Synthetics.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-package scala
-package reflect
-package macros
-
-/**
- * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
- *
- * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
- * exposes functions to introduce synthetic definitions.
- *
- * @define TOPLEVEL_TREE Top-level tree is a tree that represents a non-inner class or object in one of the currently compiled source files.
- * Note that top-level isn't equivalent to [[scala.reflect.api.Symbols#SymbolApi.isStatic]],
- * because static also embraces definitions nested in static objects
- *
- * @define INTRODUCE_TOP_LEVEL Allowed definitions include classes (represented by `ClassDef` trees), traits (represented
- * by `ClassDef` trees having the `TRAIT` flag set in `mods`) and objects (represented by `ModuleDef` trees).
- *
- * The definitions are put into the package with a prototype provided in `packagePrototype`.
- * Supported prototypes are (see [[PackageSpec]] for more details):
- * * Strings and names representing a fully-qualified name of the package
- * * Trees that can work as package ids
- * * Package or package class symbols
- *
- * Typical value for a package prototype is a fully-qualified name in a string.
- * For example, to generate a class available at `foo.bar.Test`, call this method as follows:
- *
- * introduceTopLevel("foo.bar", ClassDef(<mods>, TypeName("Test"), <tparams>, <template>))
- *
- * It is possible to add definitions to the empty package by using `nme.EMPTY_PACKAGE_NAME.toString`, but
- * that's not recommended, since such definitions cannot be seen from outside the empty package.
- *
- * Only the multi-parameter overload of this method can be used to introduce companions.
- * If companions are introduced by two different calls, then they will be put into different virtual files, and `scalac`
- * will show an error about companions being defined in different files. By the way, this also means that there's currently no way
- * to define a companion for an existing class or module
- */
-trait Synthetics {
- self: Context =>
-
- import universe._
-
- /** Looks up a top-level definition tree with a given fully-qualified name
- * (term name for modules, type name for classes). $TOPLEVEL_TREE.
- * If such a tree does not exist, returns `EmptyTree`.
- */
- def topLevelDef(name: Name): Tree
-
- /** Returns a reference to a top-level definition tree with a given fully-qualified name
- * (term name for modules, type name for classes). $TOPLEVEL_TREE.
- * If such a tree does not exist, returns `EmptyTree`.
- */
- def topLevelRef(name: Name): Tree
-
- /** Adds a top-level definition to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
- *
- * Returns a fully-qualified reference to the introduced definition.
- */
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: ImplDef): RefTree
-
- /** Adds a list of top-level definitions to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
- *
- * Returns a list of fully-qualified references to the introduced definitions.
- */
- def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: ImplDef*): List[RefTree]
-
- /** A factory which can create a package def from a prototype and a list of declarations.
- */
- trait PackageSpec[T] { def mkPackageDef(prototype: T, stats: List[Tree]): PackageDef }
-
- /** Hosts supported package specs.
- */
- object PackageSpec {
- /** Package def can be created from a fully-qualified name and a list of definitions.
- * The name is converted into an Ident or a chain of Selects.
- */
- implicit val stringIsPackageSpec = new PackageSpec[String] {
- def mkPackageDef(prototype: String, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
-
- /** Package def can be created from a fully-qualified term name and a list of definitions.
- * The name is converted into an Ident or a chain of Selects.
- */
- implicit val termNameIsPackageSpec = new PackageSpec[TermName] {
- def mkPackageDef(prototype: TermName, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
-
- /** Package def can be created from a package id tree and a list of definitions.
- * If the tree is not a valid package id, i.e. is not a term-name ident or a chain of term-name selects,
- * then the produced PackageDef will fail compilation at some point in the future.
- */
- implicit val refTreeIsPackageSpec = new PackageSpec[RefTree] {
- def mkPackageDef(prototype: RefTree, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
-
- /** Package def can be created from a package/package class symbol and a list of definitions.
- * If the provided symbol is not a package symbol or a package class symbol, package construction will throw an exception.
- */
- implicit val SymbolIsPackageSpec = new PackageSpec[Symbol] {
- def mkPackageDef(prototype: Symbol, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
- }
- }
-
- protected def mkPackageDef(name: String, stats: List[Tree]): PackageDef
- protected def mkPackageDef(name: TermName, stats: List[Tree]): PackageDef
- protected def mkPackageDef(tree: RefTree, stats: List[Tree]): PackageDef
- protected def mkPackageDef(sym: Symbol, stats: List[Tree]): PackageDef
-}
diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
index 84f159be00..6a364ff0be 100644
--- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
@@ -2,7 +2,7 @@ package scala
package reflect
package runtime
-import internal.{SomePhase, NoPhase, Phase, TreeGen}
+import internal.{SomePhase, NoPhase, Phase}
/** A helper trait to initialize things that need to be set before JavaMirrors and other
* reflect specific traits are initialized */
diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 9353215e1e..6406dacc24 100644
--- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -16,32 +16,6 @@ trait ExprTyper {
import syntaxAnalyzer.UnitParser
import naming.freshInternalVarName
- object codeParser {
- val global: repl.global.type = repl.global
- def applyRule[T](code: String, rule: UnitParser => T): T = {
- reporter.reset()
- val scanner = newUnitParser(code)
- val result = rule(scanner)
-
- if (!reporter.hasErrors)
- scanner.accept(EOF)
-
- result
- }
- def stmts(code: String) = applyRule(code, _.templateStats())
- }
-
- /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
- var isIncomplete = false
- reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
- val trees = codeParser.stmts(line)
- if (reporter.hasErrors) Some(Nil)
- else if (isIncomplete) None
- else Some(trees)
- }
- }
-
def symbolOfLine(code: String): Symbol = {
def asExpr(): Symbol = {
val name = freshInternalVarName()
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 3a71930383..9596a360a9 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -7,30 +7,25 @@ package scala
package tools.nsc
package interpreter
-import Predef.{ println => _, _ }
-import util.stringFromWriter
-import scala.reflect.internal.util._
-import java.net.URL
-import scala.sys.BooleanProp
-import scala.tools.nsc.io.AbstractFile
-import reporters._
+import PartialFunction.cond
+
+import scala.language.implicitConversions
+
+import scala.collection.mutable
+
+import scala.concurrent.{ Future, ExecutionContext }
+
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ BeanProperty, ClassTag, classTag }
+import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
+
import scala.tools.util.PathResolver
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import ScalaClassLoader.URLClassLoader
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, stackTracePrefixString }
import scala.tools.nsc.util.Exceptional.unwrap
-import scala.collection.{ mutable, immutable }
-import scala.reflect.BeanProperty
-import scala.util.Properties.versionString
+
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
-import java.io.{ StringWriter, Reader }
-import java.util.Arrays
-import IMain._
-import java.util.concurrent.Future
-import scala.reflect.runtime.{ universe => ru }
-import scala.reflect.{ ClassTag, classTag }
-import StdReplTags._
-import scala.language.implicitConversions
/** An interpreter for Scala code.
*
@@ -92,7 +87,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private var _classLoader: util.AbstractFileClassLoader = null // active classloader
private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
- def compilerClasspath: Seq[URL] = (
+ def compilerClasspath: Seq[java.net.URL] = (
if (isInitializeComplete) global.classPath.asURLs
else new PathResolver(settings).result.asURLs // the compiler's classpath
)
@@ -142,10 +137,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def initialize(postInitSignal: => Unit) {
synchronized {
if (_isInitialized == null) {
- _isInitialized = io.spawn {
- try _initialize()
- finally postInitSignal
- }
+ _isInitialized =
+ Future(try _initialize() finally postInitSignal)(ExecutionContext.global)
}
}
}
@@ -241,7 +234,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
lazy val isettings = new ISettings(this)
/** Instantiate a compiler. Overridable. */
- protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
+ protected def newCompiler(settings: Settings, reporter: reporters.Reporter): ReplGlobal = {
settings.outputDirs setSingleOutput replOutput.dir
settings.exposeEmptyPackage.value = true
new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
@@ -306,19 +299,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
def translatePath(path: String) = {
val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
- sym match {
- case NoSymbol => None
- case _ => Some(flatPath(sym))
- }
- }
- def translateEnclosingClass(n: String) = {
- def enclosingClass(s: Symbol): Symbol =
- if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner)
- enclosingClass(symbolOfTerm(n)) match {
- case NoSymbol => None
- case c => Some(flatPath(c))
- }
+ sym.toOption map flatPath
}
+ def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath
private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
/** Overridden here to try translating a simple name to the generated
@@ -334,7 +317,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private def makeClassLoader(): util.AbstractFileClassLoader =
new TranslatingClassLoader(parentClassLoader match {
case null => ScalaClassLoader fromURLs compilerClasspath
- case p => new URLClassLoader(compilerClasspath, p)
+ case p => new ScalaClassLoader.URLClassLoader(compilerClasspath, p)
})
// Set the current Java "context" class loader to this interpreter's class loader
@@ -446,9 +429,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
val content = indentCode(line)
val trees = parse(content) match {
- case None => return Left(IR.Incomplete)
- case Some(Nil) => return Left(IR.Error) // parse error or empty input
- case Some(trees) => trees
+ case parse.Incomplete => return Left(IR.Incomplete)
+ case parse.Error => return Left(IR.Error)
+ case parse.Success(trees) => trees
}
repltrace(
trees map (t => {
@@ -466,7 +449,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
// If the last tree is a bare expression, pinpoint where it begins using the
// AST node position and snap the line off there. Rewrite the code embodied
// by the last tree as a ValDef instead, so we can access the value.
- trees.last match {
+ val last = trees.lastOption.getOrElse(EmptyTree)
+ last match {
case _:Assign => // we don't want to include assignments
case _:TermTree | _:Ident | _:Select => // ... but do want other unnamed terms.
val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
@@ -478,7 +462,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
if (trees.size == 1) "val " + varName + " =\n" + content
else {
// The position of the last tree
- val lastpos0 = earliestPosition(trees.last)
+ val lastpos0 = earliestPosition(last)
// Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
// with increasingly hard to decipher positions as we move on to "() => 5",
// (x: Int) => x + 1, and more. So I abandon attempts to finesse and just
@@ -554,7 +538,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
var code = ""
var bound = false
- @throws(classOf[ScriptException])
+ @throws[ScriptException]
def compile(script: String): CompiledScript = {
if (!bound) {
quietBind("engine" -> this.asInstanceOf[ScriptEngine])
@@ -582,9 +566,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- @throws(classOf[ScriptException])
- def compile(reader: Reader): CompiledScript = {
- val writer = new StringWriter()
+ @throws[ScriptException]
+ def compile(reader: java.io.Reader): CompiledScript = {
+ val writer = new java.io.StringWriter()
var c = reader.read()
while(c != -1) {
writer.write(c)
@@ -604,7 +588,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
* escape. We could have wrapped runtime exceptions just like other
* exceptions in ScriptException, this is a choice.
*/
- @throws(classOf[ScriptException])
+ @throws[ScriptException]
def eval(context: ScriptContext): Object = {
val result = req.lineRep.evalEither match {
case Left(e: RuntimeException) => throw e
@@ -736,10 +720,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
throw t
val unwrapped = unwrap(t)
+
+ // Example input: $line3.$read$$iw$$iw$
+ val classNameRegex = (naming.lineRegex + ".*").r
+ def isWrapperInit(x: StackTraceElement) = cond(x.getClassName) {
+ case classNameRegex() if x.getMethodName == nme.CONSTRUCTOR.decoded => true
+ }
+ val stackTrace = util.stackTracePrefixString(unwrapped)(!isWrapperInit(_))
+
withLastExceptionLock[String]({
- directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
- util.stackTraceString(unwrapped)
- }, util.stackTraceString(unwrapped))
+ directBind[Throwable]("lastException", unwrapped)(StdReplTags.tagOfThrowable, classTag[Throwable])
+ stackTrace
+ }, stackTrace)
}
// TODO: split it out into a package object and a regular
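The intent of the new binding code, in isolation: keep stack frames only up to the first frame that is a REPL wrapper constructor. A hedged sketch using plain JDK APIs (the repl's stackTracePrefixString helper itself is not reproduced here):

    import PartialFunction.cond
    import scala.util.matching.Regex

    // wrapperClass is assumed to be a group-free regex like the one naming.lineRegex
    // yields, e.g. matching "$line3.$read$$iw$$iw$...".
    def stackTraceUpToWrapper(t: Throwable, wrapperClass: Regex): String = {
      def isWrapperInit(e: StackTraceElement) = cond(e.getClassName) {
        case wrapperClass() => e.getMethodName == "<init>"
      }
      val frames = t.getStackTrace.takeWhile(e => !isWrapperInit(e))
      (t.toString +: frames.map("\tat " + _)).mkString("\n")
    }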
@@ -838,6 +830,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def imports = importedSymbols
def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
+ val printResults = IMain.this.printResults
+
val lineRep = new ReadEvalPrint()
private var _originalLine: String = null
@@ -871,7 +865,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def fullPath(vname: String) = s"${lineRep.readPath}$accessPath.`$vname`"
/** generate the source code for the object that computes this request */
- private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
+ private object ObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
def path = originalPath("$intp")
def envLines = {
if (!isReplPower) Nil // power mode only for now
@@ -894,7 +888,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
}
- private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
+ private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] {
/** We only want to generate this code when the result
* is a value which can be referred to as-is.
*/
@@ -993,11 +987,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
}
}
- @throws(classOf[ScriptException])
+ @throws[ScriptException]
def eval(script: String, context: ScriptContext): Object = compile(script).eval(context)
- @throws(classOf[ScriptException])
- def eval(reader: Reader, context: ScriptContext): Object = compile(reader).eval(context)
+ @throws[ScriptException]
+ def eval(reader: java.io.Reader, context: ScriptContext): Object = compile(reader).eval(context)
override def finalize = close
@@ -1096,7 +1090,24 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
val repl: IMain.this.type = imain
} with ExprTyper { }
- def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
+ /** Parse a line and return the parsing result (error, incomplete, or success with a list of trees). */
+ object parse {
+ abstract sealed class Result
+ case object Error extends Result
+ case object Incomplete extends Result
+ case class Success(trees: List[Tree]) extends Result
+
+ def apply(line: String): Result = debugging(s"""parse("$line")""") {
+ var isIncomplete = false
+ reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
+ reporter.reset()
+ val trees = newUnitParser(line).parseStats()
+ if (reporter.hasErrors) Error
+ else if (isIncomplete) Incomplete
+ else Success(trees)
+ }
+ }
+ }
def symbolOfLine(code: String): Symbol =
exprTyper.symbolOfLine(code)
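Callers now pattern match on the sealed parse.Result instead of decoding Option[List[Tree]]. A usage sketch, assuming an initialized IMain instance named intp:

    intp.parse("val x = ") match {
      case intp.parse.Incomplete     => println("statement unfinished, keep reading")
      case intp.parse.Error          => println("syntax error")
      case intp.parse.Success(trees) => println(s"parsed ${trees.size} tree(s)")
    }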
@@ -1155,10 +1166,12 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
*/
def isShow = code.lines exists (_.trim endsWith "// show")
if (isReplDebug || isShow) {
- beSilentDuring(parse(code)) foreach { ts =>
- ts foreach { t =>
- withoutUnwrapping(echo(asCompactString(t)))
- }
+ beSilentDuring(parse(code)) match {
+ case parse.Success(ts) =>
+ ts foreach { t =>
+ withoutUnwrapping(echo(asCompactString(t)))
+ }
+ case _ =>
}
}
}
@@ -1172,6 +1185,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
/** Utility methods for the Interpreter. */
object IMain {
+ import java.util.Arrays.{ asList => asJavaList }
+
class Factory extends ScriptEngineFactory {
@BeanProperty
val engineName = "Scala Interpreter"
@@ -1180,21 +1195,21 @@ object IMain {
val engineVersion = "1.0"
@BeanProperty
- val extensions: JList[String] = Arrays.asList("scala")
+ val extensions: JList[String] = asJavaList("scala")
@BeanProperty
val languageName = "Scala"
@BeanProperty
- val languageVersion = versionString
+ val languageVersion = scala.util.Properties.versionString
def getMethodCallSyntax(obj: String, m: String, args: String*): String = null
@BeanProperty
- val mimeTypes: JList[String] = Arrays.asList("application/x-scala")
+ val mimeTypes: JList[String] = asJavaList("application/x-scala")
@BeanProperty
- val names: JList[String] = Arrays.asList("scala")
+ val names: JList[String] = asJavaList("scala")
def getOutputStatement(toDisplay: String): String = null
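For context only (not part of the patch): the Factory is what makes the interpreter reachable through the standard javax.script machinery, so once it is registered as a service the engine can be obtained like any other:

    import javax.script.ScriptEngineManager

    val engine = new ScriptEngineManager().getEngineByName("scala")
    if (engine != null) println(engine.eval("2 + 2"))   // prints 4 when the engine is found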
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index 8b8b668c9f..61db8d1748 100644
--- a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -190,10 +190,10 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
// literal Ints, Strings, etc.
object literals extends CompletionAware {
- def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
+ def simpleParse(code: String): Option[Tree] = newUnitParser(code).parseStats().lastOption
def completions(verbosity: Int) = Nil
- override def follow(id: String) = simpleParse(id) match {
+ override def follow(id: String) = simpleParse(id).flatMap {
case x: Literal => Some(new LiteralCompletion(x))
case _ => None
}
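Switching from .last to parseStats().lastOption removes the exception on empty input. The same null-safe shape in miniature, where Tree and Literal stand for the compiler's AST types:

    // Never throws: yields None for empty input or a non-literal last statement.
    def lastLiteral(trees: List[Tree]): Option[Literal] =
      trees.lastOption collect { case lit: Literal => lit }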
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index c6f0cca481..c1faf30385 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -71,7 +71,7 @@ trait MemberHandlers {
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
- override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
+ override def definedSymbols = if (symbol.exists) symbol :: Nil else Nil
}
/** Class to handle one member among all the members included
@@ -107,7 +107,7 @@ trait MemberHandlers {
override def resultExtractionCode(req: Request): String = {
val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
- if (!mods.isPublic || isInternal) ""
+ if (!mods.isPublic || isInternal || !req.printResults) ""
else {
// if this is a lazy val we avoid evaluating it here
val resultString =
@@ -151,11 +151,11 @@ trait MemberHandlers {
"""val %s = %s""".format(name, lhs)
/** Print out lhs instead of the generated varName */
- override def resultExtractionCode(req: Request) = {
+ override def resultExtractionCode(req: Request) = if (req.printResults) {
val lhsType = string2code(req lookupTypeOf name)
val res = string2code(req fullPath name)
""" + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
- }
+ } else ""
}
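Both handlers now consult req.printResults before emitting any extraction code, so suppressed output costs nothing at code-generation time. The guard reduced to a sketch (names hypothetical):

    // Emit rendering code only when results should be printed; otherwise contribute "".
    def gatedExtraction(printResults: Boolean)(render: => String): String =
      if (printResults) render else ""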
class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 7f577b3a8b..cf38a2ae3a 100644
--- a/src/repl/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -40,7 +40,7 @@ trait Naming {
// $line3.$read$$iw$$iw$Bippy@4a6a00ca
private def noMeta(s: String) = "\\Q" + s + "\\E"
- private lazy val lineRegex = {
+ lazy val lineRegex = {
val sn = sessionNames
val members = List(sn.read, sn.eval, sn.print) map noMeta mkString ("(?:", "|", ")")
debugging("lineRegex")(noMeta(sn.line) + """\d+[./]""" + members + """[$.]""")
diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index da6d271a68..f69a5b487d 100644
--- a/src/repl/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -316,7 +316,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
def unit(code: String) = newCompilationUnit(code)
- def trees(code: String) = parse(code) getOrElse Nil
+ def trees(code: String) = parse(code) match { case parse.Success(trees) => trees; case _ => Nil }
override def toString = s"""
|** Power mode status **
diff --git a/src/repl/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala
index e400906a58..a4e1e25cbb 100644
--- a/src/repl/scala/tools/nsc/interpreter/Results.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Results.scala
@@ -19,4 +19,4 @@ object Results {
/** The input was incomplete. The caller should request more input.
*/
case object Incomplete extends Result
-}
+}
\ No newline at end of file
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
index f82c38f5e7..5dc9b65436 100644
--- a/src/repl/scala/tools/nsc/interpreter/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -145,8 +145,8 @@ package object interpreter extends ReplConfig with ReplStrings {
case sym: TypeSymbol => Some(sym)
case _ => None
}
- (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
- val (kind, tpe) = exitingTyper {
+ (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
+ val (kind, tpe) = exitingTyper {
val tpe = sym.tpeHK
(intp.global.inferKind(NoPrefix)(tpe, sym.owner), tpe)
}
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
deleted file mode 100644
index db4163c8af..0000000000
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ /dev/null
@@ -1,447 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-
-sealed abstract class Arbitrary[T] {
- val arbitrary: Gen[T]
-}
-
-/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
- * <p>
- * ScalaCheck
- * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
- * out of functions with the `Prop.property` method, and when
- * the `Arbitrary.arbitrary` method is used. For example, the
- * following code requires that there exists an implicit
- * `Arbitrary[MyClass]` instance:
- * </p>
- *
- * {{{
- * val myProp = Prop.forAll { myClass: MyClass =>
- * ...
- * }
- *
- * val myGen = Arbitrary.arbitrary[MyClass]
- * }}}
- *
- * <p>
- * The required implicit definition could look like this:
- * </p>
- *
- * {{{
- * implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- * }}}
- *
- * <p>
- * The factory method `Arbitrary(...)` takes a generator of type
- * `Gen[T]` and returns an instance of `Arbitrary[T]`.
- * </p>
- *
- * <p>
- * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
- * instances for common types, for convenient use in your properties and
- * generators.
- * </p>
- */
-object Arbitrary {
-
- import Gen.{value, choose, sized, listOf, listOf1,
- frequency, oneOf, containerOf, resize}
- import util.StdRand
- import scala.collection.{immutable, mutable}
- import java.util.Date
-
- /** Creates an Arbitrary instance */
- def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
- lazy val arbitrary = g
- }
-
- /** Returns an arbitrary generator for the type T. */
- def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
-
- /**** Arbitrary instances for each AnyVal ****/
-
- /** Arbitrary AnyVal */
- implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
- arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
- arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
- arbitrary[Double]
- ))
-
- /** Arbitrary instance of Boolean */
- implicit lazy val arbBool: Arbitrary[Boolean] =
- Arbitrary(oneOf(true, false))
-
- /** Arbitrary instance of Int */
- implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
- Gen.chooseNum(Int.MinValue, Int.MaxValue)
- )
-
- /** Arbitrary instance of Long */
- implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
- Gen.chooseNum(Long.MinValue, Long.MaxValue)
- )
-
- /** Arbitrary instance of Float */
- implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
- Gen.chooseNum(
- Float.MinValue, Float.MaxValue
- // I find that including these by default is a little TOO testy.
- // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Double */
- implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
- Gen.chooseNum(
- Double.MinValue / 2, Double.MaxValue / 2
- // As above. Perhaps behind some option?
- // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Char */
- implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
- Gen.frequency(
- (0xD800-Char.MinValue, Gen.choose(Char.MinValue,0xD800-1)),
- (Char.MaxValue-0xDFFF, Gen.choose(0xDFFF+1,Char.MaxValue))
- )
- )
-
- /** Arbitrary instance of Byte */
- implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
- Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
- )
-
- /** Arbitrary instance of Short */
- implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
- Gen.chooseNum(Short.MinValue, Short.MaxValue)
- )
-
- /** Absolutely, totally, 100% arbitrarily chosen Unit. */
- implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(value(()))
-
- /**** Arbitrary instances of other common types ****/
-
- /** Arbitrary instance of String */
- implicit lazy val arbString: Arbitrary[String] =
- Arbitrary(arbitrary[List[Char]] map (_.mkString))
-
- /** Arbitrary instance of Date */
- implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
- l <- arbitrary[Long]
- d = new Date
- } yield new Date(d.getTime + l))
-
- /** Arbitrary instance of Throwable */
- implicit lazy val arbThrowable: Arbitrary[Throwable] =
- Arbitrary(value(new Exception))
-
- /** Arbitrary BigInt */
- implicit lazy val arbBigInt: Arbitrary[BigInt] = {
- def chooseBigInt: Gen[BigInt] = sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
- def chooseReallyBigInt = chooseBigInt.combine(choose(32, 128))((x, y) => Some(x.get << y.get))
-
- Arbitrary(
- frequency(
- (5, chooseBigInt),
- (10, chooseReallyBigInt),
- (1, BigInt(0)),
- (1, BigInt(1)),
- (1, BigInt(-1)),
- (1, BigInt(Int.MaxValue) + 1),
- (1, BigInt(Int.MinValue) - 1),
- (1, BigInt(Long.MaxValue)),
- (1, BigInt(Long.MinValue)),
- (1, BigInt(Long.MaxValue) + 1),
- (1, BigInt(Long.MinValue) - 1)
- )
- )
- }
-
- /** Arbitrary BigDecimal */
- implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
- import java.math.MathContext._
- val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
- val bdGen = for {
- x <- arbBigInt.arbitrary
- mc <- mcGen
- limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
- scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
- } yield {
- try {
- BigDecimal(x, scale, mc)
- } catch {
- case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
- }
- }
- Arbitrary(bdGen)
- }
-
- /** Arbitrary java.lang.Number */
- implicit lazy val arbNumber: Arbitrary[Number] = {
- val gen = Gen.oneOf(
- arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
- arbitrary[Float], arbitrary[Double]
- )
- Arbitrary(gen map (_.asInstanceOf[Number]))
- // XXX TODO - restore BigInt and BigDecimal
- // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
- }
-
- /** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] = {
- import Prop._
- val undecidedOrPassed = forAll { b: Boolean =>
- b ==> true
- }
- Arbitrary(frequency(
- (4, falsified),
- (4, passed),
- (3, proved),
- (3, undecidedOrPassed),
- (2, undecided),
- (1, exception(null))
- ))
- }
-
- /** Arbitrary instance of test params
- * @deprecated (in 1.10.0) Use `arbTestParameters` instead.
- */
- @deprecated("Use 'arbTestParameters' instead", "1.10.0")
- implicit lazy val arbTestParams: Arbitrary[Test.Params] =
- Arbitrary(for {
- minSuccTests <- choose(10,200)
- maxDiscTests <- choose(100,500)
- mnSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- mxSize <- choose(mnSize, mnSize + sizeDiff)
- ws <- choose(1,4)
- } yield Test.Params(
- minSuccessfulTests = minSuccTests,
- maxDiscardedTests = maxDiscTests,
- minSize = mnSize,
- maxSize = mxSize,
- workers = ws
- ))
-
- /** Arbitrary instance of test parameters */
- implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
- Arbitrary(for {
- _minSuccTests <- choose(10,200)
- _maxDiscardRatio <- choose(0.2f,10f)
- _minSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- _maxSize <- choose(_minSize, _minSize + sizeDiff)
- _workers <- choose(1,4)
- } yield new Test.Parameters.Default {
- override val minSuccessfulTests = _minSuccTests
- override val maxDiscardRatio = _maxDiscardRatio
- override val minSize = _minSize
- override val maxSize = _maxSize
- override val workers = _workers
- })
-
- /** Arbitrary instance of gen params */
- implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
- Arbitrary(for {
- size <- arbitrary[Int] suchThat (_ >= 0)
- } yield Gen.Params(size, StdRand))
-
- /** Arbitrary instance of prop params */
- implicit lazy val arbPropParams: Arbitrary[Prop.Params] =
- Arbitrary(for {
- genPrms <- arbitrary[Gen.Params]
- } yield Prop.Params(genPrms, FreqMap.empty[immutable.Set[Any]]))
-
-
- // Higher-order types //
-
- /** Arbitrary instance of Gen */
- implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
- Arbitrary(frequency(
- (5, arbitrary[T] map (value(_))),
- (1, Gen.fail)
- ))
-
- /** Arbitrary instance of option type */
- implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
- Arbitrary(sized(n => if(n == 0) value(None) else resize(n - 1, arbitrary[T]).map(Some(_))))
-
- implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
- Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
-
- /** Arbitrary instance of immutable map */
- implicit def arbImmutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
- ): Arbitrary[immutable.Map[T,U]] = Arbitrary(
- for(seq <- arbitrary[Stream[(T,U)]]) yield immutable.Map(seq: _*)
- )
-
- /** Arbitrary instance of mutable map */
- implicit def arbMutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
- ): Arbitrary[mutable.Map[T,U]] = Arbitrary(
- for(seq <- arbitrary[Stream[(T,U)]]) yield mutable.Map(seq: _*)
- )
-
- /** Arbitrary instance of any buildable container (such as lists, arrays,
- * streams, etc). The maximum size of the container depends on the size
- * generation parameter. */
- implicit def arbContainer[C[_],T](implicit a: Arbitrary[T], b: Buildable[T,C]
- ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
-
- /** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
- ): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
-
-
- // Functions //
-
- /** Arbitrary instance of Function1 */
- implicit def arbFunction1[T1,R](implicit a: Arbitrary[R]
- ): Arbitrary[T1 => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1) => r
- )
-
- /** Arbitrary instance of Function2 */
- implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r
- )
-
- /** Arbitrary instance of Function3 */
- implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r
- )
-
- /** Arbitrary instance of Function4 */
- implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r
- )
-
- /** Arbitrary instance of Function5 */
- implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r
- )
-
-
- // Tuples //
-
- /** Arbitrary instance of 2-tuple */
- implicit def arbTuple2[T1,T2](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Arbitrary[(T1,T2)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- } yield (t1,t2))
-
- /** Arbitrary instance of 3-tuple */
- implicit def arbTuple3[T1,T2,T3](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Arbitrary[(T1,T2,T3)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- } yield (t1,t2,t3))
-
- /** Arbitrary instance of 4-tuple */
- implicit def arbTuple4[T1,T2,T3,T4](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Arbitrary[(T1,T2,T3,T4)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- } yield (t1,t2,t3,t4))
-
- /** Arbitrary instance of 5-tuple */
- implicit def arbTuple5[T1,T2,T3,T4,T5](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Arbitrary[(T1,T2,T3,T4,T5)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- } yield (t1,t2,t3,t4,t5))
-
- /** Arbitrary instance of 6-tuple */
- implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- } yield (t1,t2,t3,t4,t5,t6))
-
- /** Arbitrary instance of 7-tuple */
- implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- } yield (t1,t2,t3,t4,t5,t6,t7))
-
- /** Arbitrary instance of 8-tuple */
- implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8))
-
- /** Arbitrary instance of 9-tuple */
- implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- t9 <- arbitrary[T9]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9))
-
-}
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
deleted file mode 100644
index 4961c78a26..0000000000
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-case class Arg[+T](
- label: String,
- arg: T,
- shrinks: Int,
- origArg: T
-)(implicit prettyPrinter: T => Pretty) {
- lazy val prettyArg: Pretty = prettyPrinter(arg)
- lazy val prettyOrigArg: Pretty = prettyPrinter(origArg)
-}
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
deleted file mode 100644
index 604b68cb36..0000000000
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Gen._
-import Prop._
-import Shrink._
-
-/** See User Guide for usage examples */
-trait Commands extends Prop {
-
- /** The abstract state data type. This type must be immutable.
- * The state type that encodes the abstract state. The abstract state
- * should model all the features we need from the real state, the system
- * under test. We should leave out all details that aren't needed for
- * specifying our pre- and postconditions. The state type must be called
- * State and be immutable. */
- type State <: AnyRef
-
- class Binding(private val key: State) {
- def get: Any = bindings.find(_._1 eq key) match {
- case None => sys.error("No value bound")
- case Some(x) => x._2
- }
- }
-
- /** Abstract commands are defined as subtypes of the traits Command or SetCommand.
- * Each command must have a run method and a method that returns the new abstract
- * state, as it should look after the command has been run.
- * A command can also define a precondition that states how the current
- * abstract state must look if the command should be allowed to run.
- * Finally, we can also define a postcondition which verifies that the
- * system under test is in a correct state after the command execution. */
- trait Command {
-
- /** Used internally. */
- protected[Commands] def run_(s: State) = run(s)
-
- def run(s: State): Any
- def nextState(s: State): State
-
- /** Returns all preconditions merged into a single function */
- def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
-
- /** A precondition is a function that
- * takes the current abstract state as parameter and returns a boolean
- * that says if the precondition is fulfilled or not. You can add several
- * conditions to the precondition list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
-
- /** Returns all postconditions merged into a single function */
- def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
-
- /** A postcondition is a function that
- * takes three parameters, s0, s1 and r. s0 is the abstract state before
- * the command was run, s1 is the abstract state after the command was
- * run, and r is the result from the command's run
- * method. The postcondition function should return a Boolean (or
- * a Prop instance) that says if the condition holds or not. You can add several
- * conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
- }
-
- /** A command that binds its result for later use */
- trait SetCommand extends Command {
- /** Used internally. */
- protected[Commands] final override def run_(s: State) = {
- val r = run(s)
- bindings += ((s,r))
- r
- }
-
- final def nextState(s: State) = nextState(s, new Binding(s))
- def nextState(s: State, b: Binding): State
- }
-
- private case class Cmds(cs: List[Command], ss: List[State]) {
- override def toString = cs.map(_.toString).mkString(", ")
- }
-
- private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
-
- private def initState() = {
- bindings.clear()
- initialState()
- }
-
- private def genCmds: Gen[Cmds] = {
- def sizedCmds(s: State)(sz: Int): Gen[Cmds] =
- if(sz <= 0) value(Cmds(Nil, Nil)) else for {
- c <- genCommand(s) suchThat (_.preCondition(s))
- Cmds(cs,ss) <- sizedCmds(c.nextState(s))(sz-1)
- } yield Cmds(c::cs, s::ss)
-
- for {
- s0 <- wrap(value(initialState()))
- cmds <- sized(sizedCmds(s0))
- } yield cmds
- }
-
- private def validCmds(s: State, cs: List[Command]): Option[Cmds] =
- cs match {
- case Nil => Some(Cmds(Nil, s::Nil))
- case c::_ if !c.preCondition(s) => None
- case c::cmds => for {
- Cmds(_, ss) <- validCmds(c.nextState(s), cmds)
- } yield Cmds(cs, s::ss)
- }
-
- private def runCommands(cmds: Cmds): Prop = cmds match {
- case Cmds(Nil, _) => proved
- case Cmds(c::cs, s::ss) =>
- c.postCondition(s,c.nextState(s),c.run_(s)) && runCommands(Cmds(cs,ss))
- case _ => sys.error("Should not be here")
- }
-
- private def commandsProp: Prop = {
- def shrinkCmds(cmds: Cmds) = cmds match { case Cmds(cs,_) =>
- shrink(cs)(shrinkContainer).flatMap(cs => validCmds(initialState(), cs).toList)
- }
-
- forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
- }
-
- def apply(p: Prop.Params) = commandsProp(p)
-
- /** initialState should reset the system under test to a well defined
- * initial state, and return the abstract version of that state. */
- def initialState(): State
-
- /** The command generator. Given an abstract state, the generator
- * should return a command that is allowed to run in that state. Note that
- * it is still necessary to define preconditions on the commands if there
- * are any. The generator is just giving a hint of which commands that are
- * suitable for a given state, the preconditions will still be checked before
- * a command runs. Sometimes you may want to adjust the distribution of
- * your command generator according to the state, or do other calculations
- * based on the state. */
- def genCommand(s: State): Gen[Command]
-
-}
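The trait above describes command-based, stateful testing only in prose; for reference, a minimal specification against this removed API might have looked roughly like the following (Counted, Inc and the counter variable are invented for illustration, and this will not compile against current ScalaCheck):

    object CounterSpec extends Commands {
      case class Counted(n: Int)          // abstract state; State must be <: AnyRef
      type State = Counted

      private var sut = 0                 // the "system under test"
      def initialState(): State = { sut = 0; Counted(0) }

      case object Inc extends Command {
        def run(s: State) = { sut += 1; sut }
        def nextState(s: State) = Counted(s.n + 1)
        postConditions += ((_, s1, result) => result == s1.n)
      }

      def genCommand(s: State): Gen[Command] = Gen.value(Inc)
    }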
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
deleted file mode 100644
index d565322d99..0000000000
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Pretty._
-import util.FreqMap
-
-class ConsoleReporter(val verbosity: Int) extends Test.TestCallback {
-
- private val prettyPrms = Params(verbosity)
-
- override def onTestResult(name: String, res: Test.Result) = {
- if(verbosity > 0) {
- if(name == "") {
- val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- } else {
- val s = (if(res.passed) "+ " else "! ") + name + ": " +
- pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- }
- }
- }
-
-}
-
-object ConsoleReporter {
-
- /** Factory method, creates a ConsoleReporter with
- * the given verbosity */
- def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
-
- def testStatsEx(msg: String, res: Test.Result) = {
- lazy val m = if(msg.length == 0) "" else msg + ": "
- res.status match {
- case Test.Proved(_) => {}
- case Test.Passed => {}
- case f @ Test.Failed(_, _) => sys.error(m + f)
- case Test.Exhausted => {}
- case f @ Test.GenException(_) => sys.error(m + f)
- case f @ Test.PropException(_, _, _) => sys.error(m + f)
- }
- }
-
-}
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
deleted file mode 100644
index aec67159f1..0000000000
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ /dev/null
@@ -1,542 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import scala.collection.mutable.ListBuffer
-import util.Buildable
-import Prop._
-import Arbitrary._
-
-trait Choose[T] {
- def choose(min: T, max: T): Gen[T]
-}
-
-object Choose {
- import Gen.{fail, parameterized, value}
-
- implicit val chooseLong: Choose[Long] = new Choose[Long] {
- def choose(low: Long, high: Long) =
- if (low > high) fail
- else parameterized(prms => value(prms.choose(low,high)))
- }
-
- implicit val chooseDouble: Choose[Double] = new Choose[Double] {
- def choose(low: Double, high: Double) =
- if (low > high || (high-low > Double.MaxValue)) fail
- else parameterized(prms => value(prms.choose(low,high)))
- }
-
- implicit val chooseInt: Choose[Int] = new Choose[Int] {
- def choose(low: Int, high: Int) =
- chooseLong.choose(low, high).map(_.toInt)
- }
-
- implicit val chooseByte: Choose[Byte] = new Choose[Byte] {
- def choose(low: Byte, high: Byte) =
- chooseLong.choose(low, high).map(_.toByte)
- }
-
- implicit val chooseShort: Choose[Short] = new Choose[Short] {
- def choose(low: Short, high: Short) =
- chooseLong.choose(low, high).map(_.toShort)
- }
-
- implicit val chooseChar: Choose[Char] = new Choose[Char] {
- def choose(low: Char, high: Char) =
- chooseLong.choose(low, high).map(_.toChar)
- }
-
- implicit val chooseFloat: Choose[Float] = new Choose[Float] {
- def choose(low: Float, high: Float) =
- chooseDouble.choose(low, high).map(_.toFloat)
- }
-}
-
-case class FiniteGenRes[+T](
- r: T
-)
-
-sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
-
-
-/** Class that represents a generator. */
-sealed trait Gen[+T] {
-
- import Gen.choose
-
- var label = "" // TODO: Ugly mutable field
-
- /** Put a label on the generator to make test reports clearer */
- def label(l: String): Gen[T] = {
- label = l
- this
- }
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
- def apply(prms: Gen.Params): Option[T]
-
- def map[U](f: T => U): Gen[U] = Gen(prms => this(prms).map(f)).label(label)
-
- def map2[U, V](g: Gen[U])(f: (T, U) => V) =
- combine(g)((t, u) => t.flatMap(t => u.flatMap(u => Some(f(t, u)))))
-
- def map3[U, V, W](gu: Gen[U], gv: Gen[V])(f: (T, U, V) => W) =
- combine3(gu, gv)((t, u, v) => t.flatMap(t => u.flatMap(u => v.flatMap(v => Some(f(t, u, v))))))
-
- def map4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])(f: (T, U, V, W) => X) =
- combine4(gu, gv, gw)((t, u, v, w) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => Some(f(t, u, v, w)))))))
-
- def map5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])(f: (T, U, V, W, X) => Y) =
- combine5(gu, gv, gw, gx)((t, u, v, w, x) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => Some(f(t, u, v, w, x))))))))
-
- def map6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])(f: (T, U, V, W, X, Y) => Z) =
- combine6(gu, gv, gw, gx, gy)((t, u, v, w, x, y) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => y.flatMap(y => Some(f(t, u, v, w, x, y)))))))))
-
- def flatMap[U](f: T => Gen[U]): Gen[U] = Gen(prms => for {
- t <- this(prms)
- u <- f(t)(prms)
- } yield u)
-
- def filter(p: T => Boolean): Gen[T] = Gen(prms => for {
- t <- this(prms)
- u <- if (p(t)) Some(t) else None
- } yield u).label(label)
-
- def withFilter(p: T => Boolean) = new GenWithFilter[T](this, p)
-
- final class GenWithFilter[+A](self: Gen[A], p: A => Boolean) {
- def map[B](f: A => B): Gen[B] = self filter p map f
- def flatMap[B](f: A => Gen[B]): Gen[B] = self filter p flatMap f
- def withFilter(q: A => Boolean): GenWithFilter[A] = new GenWithFilter[A](self, x => p(x) && q(x))
- }
-
- def suchThat(p: T => Boolean): Gen[T] = filter(p)
-
- def combine[U,V](g: Gen[U])(f: (Option[T],Option[U]) => Option[V]): Gen[V] =
- Gen(prms => f(this(prms), g(prms)))
-
- def combine3[U, V, W](gu: Gen[U], gv: Gen[V])
- (f: (Option[T], Option[U], Option[V]) => Option[W]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms)))
-
- def combine4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])
- (f: (Option[T], Option[U], Option[V], Option[W]) => Option[X]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms)))
-
- def combine5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])
- (f: (Option[T], Option[U], Option[V], Option[W], Option[X]) => Option[Y]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms)))
-
- def combine6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])
- (f: (Option[T], Option[U], Option[V], Option[W], Option[X], Option[Y]) => Option[Z]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms), gy(prms)))
-
- def ap[U](g: Gen[T => U]) = flatMap(t => g.flatMap(u => Gen(p => Some(u(t)))))
-
- override def toString =
- if(label.length == 0) "Gen()" else "Gen(\"" + label + "\")"
-
- /** Returns a new property that holds if and only if both this
- * and the given generator generates the same result, or both
- * generators generate no result. */
- def ==[U](g: Gen[U]) = Prop(prms =>
- (this(prms.genPrms), g(prms.genPrms)) match {
- case (None,None) => proved(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => proved(prms)
- case _ => falsified(prms)
- }
- )
-
- def !=[U](g: Gen[U]) = forAll(this)(r => forAll(g)(_ != r))
-
- def !==[U](g: Gen[U]) = Prop(prms =>
- (this(prms.genPrms), g(prms.genPrms)) match {
- case (None,None) => falsified(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => falsified(prms)
- case _ => proved(prms)
- }
- )
-
- private var freq = 1
- def |[U >: T](g: Gen[U]): Gen[U] = {
- val h = Gen.frequency((freq, this), (1, g))
- h.freq = freq+1
- h
- }
-
- /** Generates a sample value by using default parameters */
- def sample: Option[T] = apply(Gen.Params())
-
-}
-
-
-/** Contains combinators for building generators. */
-object Gen {
-
- import Arbitrary._
- import Shrink._
-
- /** Record that encapsulates all parameters required for data generation */
- case class Params(
- size: Int = 100,
- rng: java.util.Random = util.StdRand
- ) {
- def resize(newSize: Int) = this.copy(size = newSize)
-
- /** @throws IllegalArgumentException if l is greater than h, or if
- * the range between l and h doesn't fit in a Long. */
- def choose(l: Long, h: Long): Long = {
- if (h < l) throw new IllegalArgumentException("Invalid range")
- val d = h - l + 1
- if (d <= 0) {
- var n = rng.nextLong
- while (n < l || n > h) {
- n = rng.nextLong
- }
- n
- } else {
- l + math.abs(rng.nextLong % d)
- }
- }
-
- /** @throws IllegalArgumentException if l is greater than h, or if
- * the range between l and h doesn't fit in a Double. */
- def choose(l: Double, h: Double) = {
- val d = h-l
- if (d < 0 || d > Double.MaxValue)
- throw new IllegalArgumentException("Invalid range")
- else if (d == 0) l
- else rng.nextDouble * (h-l) + l
- }
- }
-
- /* Generator factory method */
- def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
- def apply(p: Gen.Params) = g(p)
- }
-
- /* Convenience method for using the `frequency` method like this:
- * {{{
- * frequency((1, "foo"), (3, "bar"))
- * }}}
- */
- implicit def freqTuple[T](t: (Int, T)): (Int, Gen[T]) = (t._1, value(t._2))
-
-
- //// Various Generator Combinators ////
-
- /** Sequences generators. If any of the given generators fails, the
- * resulting generator will also fail. */
- def sequence[C[_],T](gs: Iterable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C[T]] = Gen(prms => {
- val builder = b.builder
- var none = false
- val xs = gs.iterator
- while(xs.hasNext && !none) xs.next.apply(prms) match {
- case None => none = true
- case Some(x) => builder += x
- }
- if(none) None else Some(builder.result())
- })
-
- /** Wraps a generator lazily. The given parameter is only evaluated once,
- * and not until the wrapper generator is evaluated. */
- def lzy[T](g: => Gen[T]) = new Gen[T] {
- lazy val h = g
- def apply(prms: Params) = h(prms)
- }
-
- /** Wraps a generator for later evaluation. The given parameter is
- * evaluated each time the wrapper generator is evaluated. */
- def wrap[T](g: => Gen[T]) = Gen(p => g(p))
-
- /** A generator that always generates the given value */
- implicit def value[T](x: T) = Gen(p => Some(x))
-
- /** A generator that never generates a value */
- def fail[T]: Gen[T] = Gen(p => None)
-
- /** A generator that generates a random value in the given (inclusive)
- * range. If the range is invalid, the generator will not generate any value.
- */
- def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] = {
- c.choose(min, max)
- }
-
- /** Creates a generator that can access its generation parameters */
- def parameterized[T](f: Params => Gen[T]): Gen[T] = Gen(prms => f(prms)(prms))
-
- /** Creates a generator that can access its generation size */
- def sized[T](f: Int => Gen[T]) = parameterized(prms => f(prms.size))
-
- /** Creates a resized version of a generator */
- def resize[T](s: Int, g: Gen[T]) = Gen(prms => g(prms.resize(s)))
-
- /** Chooses one of the given generators with a weighted random distribution */
- def frequency[T](gs: (Int,Gen[T])*): Gen[T] = {
- lazy val tot = (gs.map(_._1) :\ 0) (_+_)
-
- def pick(n: Int, l: List[(Int,Gen[T])]): Gen[T] = l match {
- case Nil => fail
- case (k,g)::gs => if(n <= k) g else pick(n-k, gs)
- }
-
- for {
- n <- choose(1,tot)
- x <- pick(n,gs.toList)
- } yield x
- }
-
- /** Picks a random value from a list */
- def oneOf[T](xs: Seq[T]): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0, xs.size-1)
- } yield xs(i)
-
- /** Picks a random generator from a list */
- def oneOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
- i <- choose(0, gs.length+1)
- x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
- } yield x
-
-
- //// List Generators ////
-
- /** Generates a container of any type for which there exists an implicit
- * [[org.scalacheck.util.Buildable]] instance. The elements in the container will
- * be generated by the given generator. The size of the generated container
- * is given by `n`. */
- def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit b: Buildable[T,C]
- ): Gen[C[T]] = sequence[C,T](new Iterable[Gen[T]] {
- def iterator = new Iterator[Gen[T]] {
- var i = 0
- def hasNext = i < n
- def next = { i += 1; g }
- }
- })
-
- /** Generates a container of any type for which there exists an implicit
- * [[org.scalacheck.util.Buildable]] instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def containerOf[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
- sized(size => for(n <- choose(0,size); c <- containerOfN[C,T](n,g)) yield c)
-
- /** Generates a non-empty container of any type for which there exists an
- * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def containerOf1[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
- sized(size => for(n <- choose(1,size); c <- containerOfN[C,T](n,g)) yield c)
-
- /** Generates a list of random length. The maximum length depends on the
- * size parameter. This method is equal to calling
- * `containerOf[List,T](g)`. */
- def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
-
- /** Generates a non-empty list of random length. The maximum length depends
- * on the size parameter. This method is equal to calling
- * `containerOf1[List,T](g)`. */
- def listOf1[T](g: => Gen[T]) = containerOf1[List,T](g)
-
- /** Generates a list of the given length. This method is equal to calling
- * `containerOfN[List,T](n,g)`. */
- def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
- /** A generator that picks a random number of elements from a list */
- def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
-
- /** A generator that picks a random number of elements from a list */
- def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
- n <- choose(0, gs.length+2)
- x <- pick(n, g1, g2, gs: _*)
- } yield x
-
- /** A generator that picks a given number of elements from a list, randomly */
- def pick[T](n: Int, l: Iterable[T]): Gen[Seq[T]] =
- if(n > l.size || n < 0) fail
- else Gen(prms => {
- val buf = new ListBuffer[T]
- buf ++= l
- while(buf.length > n) {
- val g = choose(0, buf.length-1)
- buf.remove(g(prms).get)
- }
- Some(buf)
- })
-
- /** A generator that picks a given number of elements from a list, randomly */
- def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gs: Gen[T]*): Gen[Seq[T]] = for {
- is <- pick(n, 0 until (gs.size+2))
- allGs = gs ++ (g1::g2::Nil)
- xs <- sequence[List,T](is.toList.map(allGs(_)))
- } yield xs
-
-
- //// Character Generators ////
-
- /* Generates a numerical character */
- def numChar: Gen[Char] = choose(48,57) map (_.toChar)
-
- /* Generates an upper-case alpha character */
- def alphaUpperChar: Gen[Char] = choose(65,90) map (_.toChar)
-
- /* Generates a lower-case alpha character */
- def alphaLowerChar: Gen[Char] = choose(97,122) map (_.toChar)
-
- /* Generates an alpha character */
- def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar))
-
- /* Generates an alphanumerical character */
- def alphaNumChar = frequency((1,numChar), (9,alphaChar))
-
- //// String Generators ////
-
- /* Generates a string that starts with a lower-case alpha character,
- * and only contains alphanumerical characters */
- def identifier: Gen[String] = for {
- c <- alphaLowerChar
- cs <- listOf(alphaNumChar)
- } yield (c::cs).mkString
-
- /* Generates a string of alpha characters */
- def alphaStr: Gen[String] = for(cs <- listOf(Gen.alphaChar)) yield cs.mkString
-
- /* Generates a string of digits */
- def numStr: Gen[String] = for(cs <- listOf(Gen.numChar)) yield cs.mkString
-
- //// Number Generators ////
-
- /** Generates positive numbers of uniform distribution, with an
- * upper bound of the generation size parameter. */
- def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(one, fromInt(max)))
- }
-
- /** Generates negative numbers of uniform distribution, with a
- * lower bound of the negated generation size parameter. */
- def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(-fromInt(max), -one))
- }
-
- /** Generates numbers within the given inclusive range, with
- * extra weight on zero, +/- unity, both extremities, and any special
- * numbers provided. The special numbers must lie within the given range,
- * otherwise they won't be included. */
- def chooseNum[T](minT: T, maxT: T, specials: T*)(
- implicit num: Numeric[T], c: Choose[T]
- ): Gen[T] = {
- import num._
- val basics = List(minT, maxT, zero, one, -one)
- val basicsAndSpecials = for {
- t <- specials ++ basics if t >= minT && t <= maxT
- } yield (1, value(t))
- val allGens = basicsAndSpecials ++ List(
- (basicsAndSpecials.length, c.choose(minT, maxT))
- )
- frequency(allGens: _*)
- }
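An illustrative call against the removed signature (the values are arbitrary): the bounds, zero, +/-1 and any listed specials all receive extra weight:

    val weightedSmallInt: Gen[Int] = Gen.chooseNum(-100, 100, 42)   // 42 is an extra special value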
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] =
- arbitrary[T] map f
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,R](
- f: (T1,T2,T3,T4,T5,T6) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,R](
- f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9))
- }
-
-}
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
deleted file mode 100644
index 3e8f6de5f6..0000000000
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import math.round
-
-
-sealed trait Pretty {
- def apply(prms: Pretty.Params): String
-
- def map(f: String => String) = Pretty(prms => f(Pretty.this(prms)))
-
- def flatMap(f: String => Pretty) = Pretty(prms => f(Pretty.this(prms))(prms))
-}
-
-object Pretty {
-
- case class Params(verbosity: Int)
-
- val defaultParams = Params(0)
-
- def apply(f: Params => String) = new Pretty { def apply(p: Params) = f(p) }
-
- def pretty[T <% Pretty](t: T, prms: Params): String = t(prms)
-
- def pretty[T <% Pretty](t: T): String = t(defaultParams)
-
- implicit def strBreak(s1: String) = new {
- def /(s2: String) = if(s2 == "") s1 else s1+"\n"+s2
- }
-
- def pad(s: String, c: Char, length: Int) =
- if(s.length >= length) s
- else s + List.fill(length-s.length)(c).mkString
-
- def break(s: String, lead: String, length: Int): String =
- if(s.length <= length) s
- else s.substring(0, length) / break(lead+s.substring(length), lead, length)
-
- def format(s: String, lead: String, trail: String, width: Int) =
- s.lines.map(l => break(lead+l+trail, " ", width)).mkString("\n")
-
- implicit def prettyAny(t: Any) = Pretty { p => t.toString }
-
- implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
-
- implicit def prettyList(l: List[Any]) = Pretty { p =>
- l.map("\""+_+"\"").mkString("List(", ", ", ")")
- }
-
- implicit def prettyThrowable(e: Throwable) = Pretty { prms =>
- val strs = e.getStackTrace.map { st =>
- import st._
- getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")"
- }
-
- val strs2 =
- if(prms.verbosity <= 0) Array[String]()
- else if(prms.verbosity <= 1) strs.take(5)
- else strs
-
- e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n")
- }
-
- implicit def prettyArgs(args: List[Arg[Any]]): Pretty = Pretty { prms =>
- if(args.isEmpty) "" else {
- for((a,i) <- args.zipWithIndex) yield {
- val l = if(a.label == "") "ARG_"+i else a.label
- val s =
- if(a.shrinks == 0 || prms.verbosity <= 1) ""
- else " (orig arg: "+a.prettyOrigArg(prms)+")"
-
- "> "+l+": "+a.prettyArg(prms)+""+s
- }
- }.mkString("\n")
- }
-
- implicit def prettyFreqMap(fm: Prop.FM) = Pretty { prms =>
- if(fm.total == 0) ""
- else {
- "> Collected test data: " / {
- for {
- (xs,r) <- fm.getRatios
- ys = xs - ()
- if !ys.isEmpty
- } yield round(r*100)+"% " + ys.mkString(", ")
- }.mkString("\n")
- }
- }
-
- implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
- if(ls.isEmpty) ""
- else "> Labels of failing property: " / ls.mkString("\n")
- val s = res.status match {
- case Test.Proved(args) => "OK, proved property."/pretty(args,prms)
- case Test.Passed => "OK, passed "+res.succeeded+" tests."
- case Test.Failed(args, l) =>
- "Falsified after "+res.succeeded+" passed tests."/labels(l)/pretty(args,prms)
- case Test.Exhausted =>
- "Gave up after only "+res.succeeded+" passed tests. " +
- res.discarded+" tests were discarded."
- case Test.PropException(args,e,l) =>
- "Exception raised on property evaluation."/labels(l)/pretty(args,prms)/
- "> Exception: "+pretty(e,prms)
- case Test.GenException(e) =>
- "Exception raised on argument generation."/
- "> Exception: "+pretty(e,prms)
- }
- val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
- s/t/pretty(res.freqMap,prms)
- }
-
- def prettyTime(millis: Long): String = {
- val min = millis/(60*1000)
- val sec = (millis-(60*1000*min)) / 1000d
- if(min <= 0) "%.3f sec ".format(sec)
- else "%d min %.3f sec ".format(min, sec)
- }
-}
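A short sketch of how the implicit Pretty instances above are consumed through the `pretty` helpers; the result being rendered comes from a trivial, illustrative check:

{{{
import org.scalacheck.{Prop, Test}
import org.scalacheck.Pretty.{pretty, Params}

object PrettyExample {
  // Rendered via the implicit prettyTestRes instance; verbosity 0 keeps it terse.
  val res: Test.Result = Test.check(Test.Parameters.default, Prop.proved)
  val report: String = pretty(res, Params(0))

  // Throwables go through prettyThrowable; verbosity > 1 keeps the full trace.
  val trace: String = pretty(new RuntimeException("boom"), Params(2))
}
}}}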
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
deleted file mode 100644
index 38e00f260f..0000000000
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ /dev/null
@@ -1,818 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-import scala.collection._
-import scala.annotation.tailrec
-
-/** A property is a function from test parameters to a property result */
-trait Prop {
-
- import Prop.{Result,Params,Proof,True,False,Exception,Undecided,provedToTrue}
- import Test.cmdLineParser.{Success, NoSuccess}
- import Result.merge
-
- def apply(prms: Params): Result
-
- def map(f: Result => Result): Prop = Prop(prms => f(this(prms)))
-
- def flatMap(f: Result => Prop): Prop = Prop(prms => f(this(prms))(prms))
-
- def combine(p: Prop)(f: (Result, Result) => Result) =
- for(r1 <- this; r2 <- p) yield f(r1,r2)
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead.
- * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
- */
- @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
- def check(prms: Test.Params): Unit = Test.check(
- prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- def check(prms: Test.Parameters): Unit = Test.check(
- prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks this property and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- def mainRunner(args: Array[String]): Int = {
- Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- if (Test.check(params, this).passed) 0
- else 1
- case e: NoSuccess =>
- println("Incorrect options:"+"\n"+e+"\n")
- Test.cmdLineParser.printHelp
- -1
- }
- }
-
- /** Whether main should call System.exit with an exit code.
- * Defaults to true; override to change.
- */
- def mainCallsExit = true
-
- /** Convenience method that makes it possible to use this property
- * as an application that checks itself on execution */
- def main(args: Array[String]): Unit = {
- val code = mainRunner(args)
- if (mainCallsExit)
- System exit code
- }
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate false. */
- def &&(p: Prop) = combine(p)(_ && _)
-
- /** Returns a new property that holds if either this
- * or the given property (or both) hold. */
- def ||(p: Prop) = combine(p)(_ || _)
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate the same result
- * as the other property. */
- def ++(p: Prop): Prop = combine(p)(_ ++ _)
-
- /** Combines two properties through implication */
- def ==>(p: => Prop): Prop = flatMap { r1 =>
- if(r1.proved) p map { r2 => merge(r1,r2,r2.status) }
- else if(r1.success) p map { r2 => provedToTrue(merge(r1,r2,r2.status)) }
- else Prop(r1.copy(status = Undecided))
- }
-
- /** Returns a new property that holds if and only if both this
- * and the given property generate a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail. */
- def ==(p: Prop) = this.flatMap { r1 =>
- p.map { r2 =>
- Result.merge(r1, r2, if(r1.status == r2.status) True else False)
- }
- }
-
- override def toString = "Prop"
-
- /** Put a label on the property to make test reports clearer */
- def label(l: String) = map(_.label(l))
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
-}
-
-object Prop {
-
- import Gen.{value, fail, frequency, oneOf}
- import Arbitrary._
- import Shrink._
-
-
- // Types
-
- type Args = List[Arg[Any]]
- type FM = FreqMap[immutable.Set[Any]]
-
- /** Property parameters */
- case class Params(val genPrms: Gen.Params, val freqMap: FM)
-
- object Result {
- def apply(st: Status) = new Result(
- st,
- Nil,
- immutable.Set.empty[Any],
- immutable.Set.empty[String]
- )
-
- def merge(x: Result, y: Result, status: Status) = new Result(
- status,
- x.args ++ y.args,
- (x.collected.asInstanceOf[Set[AnyRef]] ++ y.collected).asInstanceOf[immutable.Set[Any]],
- x.labels ++ y.labels
- )
- }
-
- /** The result of evaluating a property */
- case class Result(
- status: Status,
- args: Args,
- collected: immutable.Set[Any],
- labels: immutable.Set[String]
- ) {
- def success = status match {
- case True => true
- case Proof => true
- case _ => false
- }
-
- def failure = status match {
- case False => true
- case Exception(_) => true
- case _ => false
- }
-
- def proved = status == Proof
-
- def addArg(a: Arg[Any]) = copy(args = a::args)
-
- def collect(x: Any) = copy(collected = collected+x)
-
- def label(l: String) = copy(labels = labels+l)
-
- import Result.merge
-
- def &&(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => this
- case (_,False) => r
-
- case (Undecided,_) => this
- case (_,Undecided) => r
-
- case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, r.status)
-
- case (True,True) => merge(this, r, True)
- }
-
- def ||(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,False) => merge(this, r, False)
- case (False,_) => r
- case (_,False) => this
-
- case (Proof,_) => this
- case (_,Proof) => r
-
- case (True,_) => this
- case (_,True) => r
-
- case (Undecided,Undecided) => merge(this, r, Undecided)
- }
-
- def ++(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (_, Undecided) => this
- case (Undecided, _) => r
-
- case (_, Proof) => this
- case (Proof, _) => r
-
- case (_, True) => this
- case (True, _) => r
-
- case (False, _) => this
- case (_, False) => r
- }
-
- def ==>(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => merge(this, r, Undecided)
-
- case (Undecided,_) => this
-
- case (Proof,_) => merge(this, r, r.status)
- case (True,_) => merge(this, r, r.status)
- }
-
- }
-
- sealed trait Status
-
- /** The property was proved */
- case object Proof extends Status
-
- /** The property was true */
- case object True extends Status
-
- /** The property was false */
- case object False extends Status
-
- /** The property could not be falsified or proved */
- case object Undecided extends Status
-
- /** Evaluating the property raised an exception */
- sealed case class Exception(e: Throwable) extends Status {
- override def equals(o: Any) = o match {
- case Exception(_) => true
- case _ => false
- }
- }
-
- def apply(f: Params => Result): Prop = new Prop {
- def apply(prms: Params) = f(prms)
- }
-
- def apply(r: Result): Prop = Prop(prms => r)
-
- def apply(b: Boolean): Prop = if(b) proved else falsified
-
-
- // Implicits
-
- /** A collection of property operators on [[Any]] values.
- * Import [[Prop.AnyOperators]] to make the operators available. */
- class ExtendedAny[T <% Pretty](x: => T) {
- /** See [[Prop.imply]] */
- def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
- /** See [[Prop.iff]] */
- def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
- @deprecated("Use 'Prop.throws' instead", "1.10.1")
- def throws[U <: Throwable](c: Class[U]): Prop = Prop.throws(c)(x)
- /** See [[Prop.?=]] */
- def ?=(y: T) = Prop.?=(x, y)
- /** See [[Prop.=?]] */
- def =?(y: T) = Prop.=?(x, y)
- }
-
- /** A collection of property operators on [[Boolean]] values.
- * Import [[Prop.BooleanOperators]] to make the operators available. */
- class ExtendedBoolean(b: => Boolean) {
- /** See [[Prop.==>]] */
- def ==>(p: => Prop) = Prop(b) ==> p
- }
-
- /** Implicit method that makes a number of property operators on values of
- * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
- * documentation on the operators. */
- @deprecated("Use 'Prop.AnyOperators' instead", "1.10.1")
- implicit def extendedAny[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on values of
- * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
- * documentation on the operators. */
- implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on boolean
- * values available in the current scope. See [[Prop.ExtendedBoolean]] for
- * documentation on the operators. */
- implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b)
-
- /** Implicit conversion of Boolean values to Prop values. */
- implicit def propBoolean(b: Boolean): Prop = Prop(b)
-
-
- // Private support functions
-
- private def provedToTrue(r: Result) = r.status match {
- case Proof => new Result(True, r.args, r.collected, r.labels)
- case _ => r
- }
-
-
- // Property combinators
-
- /** A property that never is proved or falsified */
- lazy val undecided = Prop(Result(Undecided))
-
- /** A property that always is false */
- lazy val falsified = Prop(Result(False))
-
- /** A property that always is proved */
- lazy val proved = Prop(Result(Proof))
-
- /** A property that always is passed */
- lazy val passed = Prop(Result(True))
-
- /** A property that denotes an exception */
- def exception(e: Throwable): Prop = Prop(Result(Exception(e)))
-
- /** A property that denotes an exception */
- lazy val exception: Prop = exception(null)
-
- /** Create a property that compares two values. If the values aren't equal,
- * the property will fail and report that the first value doesn't match the
- * expected (second) value. */
- def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
- if(x == y) proved else falsified :| {
- val exp = Pretty.pretty[T](y, Pretty.Params(0))
- val act = Pretty.pretty[T](x, Pretty.Params(0))
- "Expected "+exp+" but got "+act
- }
-
- /** Create a property that compares two values. If the values aren't equal,
- * the property will fail and report that the second value doesn't match the
- * expected (first) value. */
- def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
-
- /** A property that depends on the generator size */
- def sizedProp(f: Int => Prop): Prop = Prop { prms =>
- // provedToTrue since if the property is proved for
- // one size, it shouldn't be regarded as proved for
- // all sizes.
- provedToTrue(f(prms.genPrms.size)(prms))
- }
-
- /** Implication with several conditions */
- def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
- secure(if(f.isDefinedAt(x)) f(x) else undecided)
-
- /** Property holds only if the given partial function is defined at
- * `x`, and returns a property that holds */
- def iff[T](x: T, f: PartialFunction[T,Prop]): Prop =
- secure(if(f.isDefinedAt(x)) f(x) else falsified)
-
- /** Combines properties into one, which is true if and only if all the
- * properties are true */
- def all(ps: Prop*) = if(ps.isEmpty) proved else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ && _)
- )
-
- /** Combines properties into one, which is true if at least one of the
- * properties is true */
- def atLeastOne(ps: Prop*) = if(ps.isEmpty) falsified else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ || _)
- )
-
- /** A property that holds if at least one of the given generators
- * fails generating a value */
- def someFailing[T](gs: Seq[Gen[T]]) = atLeastOne(gs.map(_ == fail):_*)
-
- /** A property that holds iff none of the given generators
- * fails generating a value */
- def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
-
- /** A property that holds if the given statement throws an exception
- * of the specified type
- * @deprecated (in 1.10.1) Use `throws(...): Boolean` instead.
- */
- @deprecated("Use 'throws(...): Boolean' instead", "1.10.1")
- def throws[T <: Throwable](x: => Any, c: Class[T]): Prop = throws(c)(x)
-
- /** Returns true if the given statement throws an exception
- * of the specified type */
- def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean =
- try { x; false } catch { case e if c.isInstance(e) => true }
-
- /** Collect data for presentation in test report */
- def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
- val prop = f(t)
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def collect[T](t: T)(prop: Prop) = Prop { prms =>
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(())(prop)
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any, ifFalse: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(ifFalse)(prop)
-
- /** Wraps and protects a property */
- def secure[P <% Prop](p: => P): Prop =
- try { p: Prop } catch { case e: Throwable => exception(e) }
-
- /** Existential quantifier, using the implicitly given Arbitrary instance as the generator. */
- def exists[A,P](f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty,
- aa: Arbitrary[A]
- ): Prop = exists(aa.arbitrary)(f)
-
- /** Existential quantifier for an explicit generator. */
- def exists[A,P](g: Gen[A])(f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty
- ): Prop = Prop { prms =>
- g(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- val r = p(prms).addArg(Arg(g.label,x,0,x))
- r.status match {
- case True => new Result(Proof, r.args, r.collected, r.labels)
- case False => new Result(Undecided, r.args, r.collected, r.labels)
- case _ => r
- }
- }
- }
-
- /** Universal quantifier for an explicit generator. Does not shrink failed
- * test cases. */
- def forAllNoShrink[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- pv: P => Prop,
- pp1: T1 => Pretty
- ): Prop = Prop { prms =>
- g1(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- provedToTrue(p(prms)).addArg(Arg(g1.label,x,0,x))
- }
- }
-
- /** Universal quantifier for two explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty,
- pp8: T8 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the given shrink function */
- def forAllShrink[T <% Pretty, P <% Prop](g: Gen[T],
- shrink: T => Stream[T])(f: T => P
- ): Prop = Prop { prms =>
-
- /** Returns the first failed result in Left or success in Right */
- def getFirstFailure(xs: Stream[T]): Either[(T,Result),(T,Result)] = {
- assert(!xs.isEmpty, "Stream cannot be empty")
- val results = xs.map { x =>
- val p = secure(f(x))
- (x, provedToTrue(p(prms)))
- }
- results.dropWhile(!_._2.failure).headOption match {
- case None => Right(results.head)
- case Some(xr) => Left(xr)
- }
- }
-
- def shrinker(x: T, r: Result, shrinks: Int, orig: T): Result = {
- val xs = shrink(x)
- val res = r.addArg(Arg(g.label,x,shrinks,orig))
- if(xs.isEmpty) res else getFirstFailure(xs) match {
- case Right(_) => res
- case Left((x2,r2)) => shrinker(x2, r2, shrinks+1, orig)
- }
- }
-
- g(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) => getFirstFailure(Stream.cons(x, Stream.empty)) match {
- case Right((x,r)) => r.addArg(Arg(g.label,x,0,x))
- case Left((x,r)) => shrinker(x,r,0,x)
- }
- }
-
- }
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- p: P => Prop,
- s1: Shrink[T1],
- pp1: T1 => Pretty
- ): Prop = forAllShrink(g1, shrink[T1])(f)
-
- /** Universal quantifier for two explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty,
- s8: Shrink[T8], pp8: T8 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,P] (
- f: A1 => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty
- ): Prop = forAllShrink(arbitrary[A1],shrink[A1])(f andThen p)
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,P] (
- f: (A1,A2) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,P] (
- f: (A1,A2,A3) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,P] (
- f: (A1,A2,A3,A4) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,P] (
- f: (A1,A2,A3,A4,A5) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,P] (
- f: (A1,A2,A3,A4,A5,A6) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,P] (
- f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,A8,P] (
- f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty,
- a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
-
- /** Ensures that the given property expression completes within the given number of milliseconds. */
- def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
- @tailrec private def attempt(prms: Params, endTime: Long): Result = {
- val result = wrappedProp.apply(prms)
- if (System.currentTimeMillis > endTime) {
- (if (result.failure) result else Result(False)).label("Timeout")
- } else {
- if (result.success) result
- else attempt(prms, endTime)
- }
- }
- def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
- }
-}
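A brief sketch of the combinators defined above (universal quantification, implication with `==>`, labelling with `:|`, conjunction with `&&`); the list properties themselves are only illustrations:

{{{
import org.scalacheck.Prop
import org.scalacheck.Prop.{forAll, BooleanOperators}

object PropExample {
  val revTwice: Prop =
    forAll { (xs: List[Int]) => xs.reverse.reverse == xs } :| "reverse twice"

  // ==> turns the guard into an implication: when the precondition fails the
  // case is Undecided rather than counted as a passed test.
  val headCons: Prop = forAll { (xs: List[Int]) =>
    xs.nonEmpty ==> ((xs.head :: xs.tail) == xs)
  }

  val combined: Prop = revTwice && headCons
}
}}}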
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
deleted file mode 100644
index d4836d7420..0000000000
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-/** Represents a collection of properties, with convenient methods
- * for checking all properties at once. This class is itself a property, which
- * holds if and only if all of the contained properties hold.
- * <p>Properties are added in the following way:</p>
- *
- * {{{
- * object MyProps extends Properties("MyProps") {
- * property("myProp1") = forAll { (n:Int, m:Int) =>
- * n+m == m+n
- * }
- *
- * property("myProp2") = ((0/1) throws classOf[ArithmeticException])
- * }
- * }}}
- */
-class Properties(val name: String) extends Prop {
-
- import Test.cmdLineParser.{Success, NoSuccess}
-
- private val props = new scala.collection.mutable.ListBuffer[(String,Prop)]
-
- /** Returns one property which holds if and only if all of the
- * properties in this property collection hold */
- private def oneProperty: Prop = Prop.all((properties map (_._2)):_*)
-
- /** Returns all properties of this collection in a list of name/property
- * pairs. */
- def properties: Seq[(String,Prop)] = props
-
- def apply(p: Prop.Params) = oneProperty(p)
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- override def check(prms: Test.Parameters): Unit = Test.checkProperties(
- prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead.
- * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
- */
- @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
- override def check(prms: Test.Params): Unit = Test.checkProperties(
- prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- override def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- override def mainRunner(args: Array[String]): Int = {
- Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- val res = Test.checkProperties(params, this)
- val failed = res.filter(!_._2.passed).size
- failed
- case e: NoSuccess =>
- println("Incorrect options:"+"\n"+e+"\n")
- Test.cmdLineParser.printHelp
- -1
- }
- }
-
- /** Adds all properties from another property collection to this one. */
- def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
-
- /** Used for specifying properties. Usage:
- * {{{
- * property("myProp") = ...
- * }}}
- */
- class PropertySpecifier() {
- def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
- }
-
- lazy val property = new PropertySpecifier()
-}
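A small sketch of grouping properties and aggregating collections with `include`; the specification names are illustrative, and each object can be run through the inherited `main`:

{{{
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll

object StringSpec extends Properties("String") {
  property("concat length") = forAll { (a: String, b: String) =>
    (a + b).length == a.length + b.length
  }
}

object AllSpecs extends Properties("All") {
  include(StringSpec) // re-registers the String.* properties under this collection
}
}}}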
diff --git a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
deleted file mode 100644
index 7764101844..0000000000
--- a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-// vim: set ts=2 sw=2 et:
-
-package org.scalacheck
-
-import org.scalatools.testing._
-
-class ScalaCheckFramework extends Framework {
-
- private case object PropFingerprint extends TestFingerprint {
- val superClassName = "org.scalacheck.Prop"
- val isModule = false
- }
-
- private case object PropsFingerprint extends TestFingerprint {
- val superClassName = "org.scalacheck.Properties"
- val isModule = true
- }
-
- val name = "ScalaCheck"
-
- val tests = Array[Fingerprint](PropsFingerprint, PropFingerprint)
-
- def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 {
-
- private def asEvent(nr: (String, Test.Result)) = nr match {
- case (n: String, r: Test.Result) => new Event {
- val testName = n
- val description = n
- val result = r.status match {
- case Test.Passed => Result.Success
- case _:Test.Proved => Result.Success
- case _:Test.Failed => Result.Failure
- case Test.Exhausted => Result.Skipped
- case _:Test.PropException | _:Test.GenException => Result.Error
- }
- val error = r.status match {
- case Test.PropException(_, e, _) => e
- case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0)))
- case _ => null
- }
- }
- }
-
- def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) {
-
- val testCallback = new Test.TestCallback {
- override def onPropEval(n: String, w: Int, s: Int, d: Int) = {}
-
- override def onTestResult(n: String, r: Test.Result) = {
- for (l <- loggers) {
- import Pretty._
- l.info(
- (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(0))
- )
- }
- handler.handle(asEvent((n,r)))
- }
- }
-
- import Test.cmdLineParser.{Success, NoSuccess}
- val prms = Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- params.copy(_testCallback = testCallback, _customClassLoader = Some(loader))
- // TODO: Maybe handle this a bit better than throwing exception?
- case e: NoSuccess => throw new Exception(e.toString)
- }
-
- fingerprint match {
- case fp: SubclassFingerprint =>
- if(fp.isModule) {
- val obj = Class.forName(testClassName + "$", true, loader)
- val ps = obj.getField("MODULE$").get(null).asInstanceOf[Properties]
- Test.checkProperties(prms, ps)
- } else {
- val p = Class.forName(testClassName, true, loader).newInstance.asInstanceOf[Prop]
- handler.handle(asEvent((testClassName, Test.check(prms, p))))
- }
- }
- }
-
- }
-
-}
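For reference, a sketch of the two shapes of test class the fingerprints above are meant to match; both definitions are illustrative:

{{{
import org.scalacheck.{Prop, Properties}
import org.scalacheck.Prop.forAll

// Module fingerprint: a singleton object whose superclass is
// org.scalacheck.Properties, loaded via its MODULE$ field above.
object IntSpec extends Properties("Int") {
  property("plus commutes") = forAll { (a: Int, b: Int) => a + b == b + a }
}

// Prop fingerprint: a plain class extending org.scalacheck.Prop,
// instantiated reflectively with newInstance above.
class AlwaysProved extends Prop {
  def apply(prms: Prop.Params) = Prop.proved(prms)
}
}}}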
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
deleted file mode 100644
index 4895171a35..0000000000
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.Buildable
-import scala.collection.{ JavaConversions => jcl }
-
-sealed abstract class Shrink[T] {
- def shrink(x: T): Stream[T]
-}
-
-object Shrink {
-
- import Stream.{cons, empty}
- import scala.collection._
- import java.util.ArrayList
-
- /** Interleaves two streams */
- private def interleave[T](xs: Stream[T], ys: Stream[T]): Stream[T] =
- if(xs.isEmpty) ys
- else if(ys.isEmpty) xs
- else Stream(xs.head, ys.head) append interleave(xs.tail, ys.tail)
-
- /** Shrink instance factory */
- def apply[T](s: T => Stream[T]): Shrink[T] = new Shrink[T] {
- override def shrink(x: T) = s(x)
- }
-
- /** Shrink a value */
- def shrink[T](x: T)(implicit s: Shrink[T]): Stream[T] = s.shrink(x)
-
- /** Default shrink instance */
- implicit def shrinkAny[T]: Shrink[T] = Shrink(x => empty)
-
- /** Shrink instance of container */
- implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T],
- b: Buildable[T,C]
- ): Shrink[C[T]] = Shrink { xs: C[T] =>
-
- def removeChunks(n: Int, xs: Stream[T]): Stream[Stream[T]] =
- if(xs.isEmpty) empty
- else if(xs.tail.isEmpty) cons(empty, empty)
- else {
- val n1 = n / 2
- val n2 = n - n1
- lazy val xs1 = xs.take(n1)
- lazy val xs2 = xs.drop(n1)
- lazy val xs3 =
- for(ys1 <- removeChunks(n1,xs1) if !ys1.isEmpty) yield ys1 append xs2
- lazy val xs4 =
- for(ys2 <- removeChunks(n2,xs2) if !ys2.isEmpty) yield xs1 append ys2
-
- cons(xs1, cons(xs2, interleave(xs3,xs4)))
- }
-
- def shrinkOne(zs: Stream[T]): Stream[Stream[T]] =
- if(zs.isEmpty) empty
- else {
- val x = zs.head
- val xs = zs.tail
- (for(y <- shrink(x)) yield cons(y,xs)) append
- (for(ys <- shrinkOne(xs)) yield cons(x,ys))
- }
-
- val ys = v(xs)
- val zs = ys.toStream
- removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable)
-
- }
-
- /** Shrink instance of integer */
- implicit lazy val shrinkInt: Shrink[Int] = Shrink { n =>
-
- def halfs(n: Int): Stream[Int] =
- if(n == 0) empty else cons(n, halfs(n/2))
-
- if(n == 0) empty else {
- val ns = halfs(n/2).map(n - _)
- cons(0, interleave(ns, ns.map(-1 * _)))
- }
- }
-
- /** Shrink instance of String */
- implicit lazy val shrinkString: Shrink[String] = Shrink { s =>
- shrinkContainer[List,Char].shrink(s.toList).map(_.mkString)
- }
-
- /** Shrink instance of Option */
- implicit def shrinkOption[T](implicit s: Shrink[T]): Shrink[Option[T]] =
- Shrink {
- case None => empty
- case Some(x) => cons(None, for(y <- shrink(x)) yield Some(y))
- }
-
- /** Shrink instance of 2-tuple */
- implicit def shrinkTuple2[T1,T2](implicit
- s1: Shrink[T1], s2: Shrink[T2]
- ): Shrink[(T1,T2)] =
- Shrink { case (t1,t2) =>
- (for(x1 <- shrink(t1)) yield (x1, t2)) append
- (for(x2 <- shrink(t2)) yield (t1, x2))
- }
-
- /** Shrink instance of 3-tuple */
- implicit def shrinkTuple3[T1,T2,T3](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3]
- ): Shrink[(T1,T2,T3)] =
- Shrink { case (t1,t2,t3) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3))
- }
-
- /** Shrink instance of 4-tuple */
- implicit def shrinkTuple4[T1,T2,T3,T4](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4]
- ): Shrink[(T1,T2,T3,T4)] =
- Shrink { case (t1,t2,t3,t4) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4))
- }
-
- /** Shrink instance of 5-tuple */
- implicit def shrinkTuple5[T1,T2,T3,T4,T5](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5]
- ): Shrink[(T1,T2,T3,T4,T5)] =
- Shrink { case (t1,t2,t3,t4,t5) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5))
- }
-
- /** Shrink instance of 6-tuple */
- implicit def shrinkTuple6[T1,T2,T3,T4,T5,T6](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6]
- ): Shrink[(T1,T2,T3,T4,T5,T6)] =
- Shrink { case (t1,t2,t3,t4,t5,t6) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6))
- }
-
- /** Shrink instance of 7-tuple */
- implicit def shrinkTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7))
- }
-
- /** Shrink instance of 8-tuple */
- implicit def shrinkTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8)) append
- (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8))
- }
-
- /** Shrink instance of 9-tuple */
- implicit def shrinkTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8],
- s9: Shrink[T9]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8, t9)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8, t9)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8, t9)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8, t9)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8, t9)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8, t9)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8, t9)) append
- (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8, t9)) append
- (for(x9 <- shrink(t9)) yield (t1, t2, t3, t4, t5, t6, t7, t8, x9))
- }
-
-}
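A brief sketch of adding a custom shrinker on top of the instances above; `Age` is a hypothetical wrapper type:

{{{
import org.scalacheck.Shrink
import org.scalacheck.Shrink.shrink

object ShrinkExample {
  case class Age(value: Int) // illustrative only

  // Delegate to shrinkInt and rewrap, so failing Age values shrink the same
  // way plain Ints do (halving towards zero, alternating sign).
  implicit val shrinkAge: Shrink[Age] =
    Shrink { a => shrink(a.value).map(v => Age(v)) }

  // E.g. shrinking Age(100) should begin roughly: 0, 50, -50, 75, -75, ...
  val firstFew: List[Age] = shrink(Age(100)).take(5).toList
}
}}}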
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
deleted file mode 100644
index 6e9b6b88fd..0000000000
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ /dev/null
@@ -1,392 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-object Test {
-
- import util.FreqMap
- import scala.collection.immutable
- import Prop.FM
- import util.CmdLineParser
-
- /** Test parameters used by the `Test.check` method.
- */
- trait Parameters {
- /** The minimum number of tests that must succeed for ScalaCheck to
- * consider a property passed. */
- def minSuccessfulTests: Int
-
- /** The starting size given as parameter to the generators. */
- def minSize: Int
-
- /** The maximum size given as parameter to the generators. */
- def maxSize: Int
-
- /** The random number generator used. */
- def rng: java.util.Random
-
- /** The number of tests run in parallel. */
- def workers: Int
-
- /** A callback that ScalaCheck calls each time a test is executed. */
- def testCallback: TestCallback
-
- /** The maximum ratio between discarded and passed tests allowed before
- * ScalaCheck gives up and marks the property as exhausted. At least
- * `minSuccessfulTests` will always be run, though. */
- def maxDiscardRatio: Float
-
- /** A custom class loader that should be used during test execution. */
- def customClassLoader: Option[ClassLoader]
-
- // private since we can't guarantee binary compatibility for this one
- private[scalacheck] def copy(
- _minSuccessfulTests: Int = Parameters.this.minSuccessfulTests,
- _minSize: Int = Parameters.this.minSize,
- _maxSize: Int = Parameters.this.maxSize,
- _rng: java.util.Random = Parameters.this.rng,
- _workers: Int = Parameters.this.workers,
- _testCallback: TestCallback = Parameters.this.testCallback,
- _maxDiscardRatio: Float = Parameters.this.maxDiscardRatio,
- _customClassLoader: Option[ClassLoader] = Parameters.this.customClassLoader
- ): Parameters = new Parameters {
- val minSuccessfulTests: Int = _minSuccessfulTests
- val minSize: Int = _minSize
- val maxSize: Int = _maxSize
- val rng: java.util.Random = _rng
- val workers: Int = _workers
- val testCallback: TestCallback = _testCallback
- val maxDiscardRatio: Float = _maxDiscardRatio
- val customClassLoader: Option[ClassLoader] = _customClassLoader
- }
- }
-
- /** Test parameters used by the `Test.check` method.
- *
- * To override default values, extend the
- * [[org.scalacheck.Test.Parameters.Default]] trait:
- *
- * {{{
- * val myParams = new Parameters.Default {
- * override val minSuccessfulTests = 600
- * override val maxDiscardRatio = 8
- * }
- * }}}
- */
- object Parameters {
- /** Default test parameters trait. This can be overridden if you need to
- * tweak the parameters. */
- trait Default extends Parameters {
- val minSuccessfulTests: Int = 100
- val minSize: Int = 0
- val maxSize: Int = Gen.Params().size
- val rng: java.util.Random = Gen.Params().rng
- val workers: Int = 1
- val testCallback: TestCallback = new TestCallback {}
- val maxDiscardRatio: Float = 5
- val customClassLoader: Option[ClassLoader] = None
- }
-
- /** Default test parameters instance. */
- val default: Parameters = new Default {}
- }
-
- /** Test parameters
- * @deprecated (in 1.10.0) Use [[org.scalacheck.Test.Parameters]] instead.
- */
- @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
- case class Params(
- minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = -1,
- minSize: Int = 0,
- maxSize: Int = Gen.Params().size,
- rng: java.util.Random = Gen.Params().rng,
- workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
- )
-
- @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
- private def paramsToParameters(params: Params) = new Parameters {
- val minSuccessfulTests = params.minSuccessfulTests
- val minSize = params.minSize
- val maxSize = params.maxSize
- val rng = params.rng
- val workers = params.workers
- val testCallback = params.testCallback
-
- // maxDiscardedTests is deprecated, but if someone
- // uses it let it override maxDiscardRatio
- val maxDiscardRatio =
- if(params.maxDiscardedTests < 0) Parameters.default.maxDiscardRatio
- else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
-
- val customClassLoader = Parameters.default.customClassLoader
- }
-
- /** Test statistics */
- case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
- def passed = status match {
- case Passed => true
- case Proved(_) => true
- case _ => false
- }
- }
-
- /** Test status */
- sealed trait Status
-
- /** ScalaCheck found enough cases for which the property holds, so the
- * property is considered correct. (It is not proved correct, though). */
- case object Passed extends Status
-
- /** ScalaCheck managed to prove the property correct */
- sealed case class Proved(args: Prop.Args) extends Status
-
- /** The property was proved wrong with the given concrete arguments. */
- sealed case class Failed(args: Prop.Args, labels: Set[String]) extends Status
-
- /** The property test was exhausted: it wasn't possible to generate enough
- * concrete arguments satisfying the preconditions to get enough passing
- * property evaluations. */
- case object Exhausted extends Status
-
- /** An exception was raised when trying to evaluate the property with the
- * given concrete arguments. */
- sealed case class PropException(args: Prop.Args, e: Throwable,
- labels: Set[String]) extends Status
-
- /** An exception was raised when trying to generate concrete arguments
- * for evaluating the property. */
- sealed case class GenException(e: Throwable) extends Status
-
- trait TestCallback { self =>
- /** Called each time a property is evaluated */
- def onPropEval(name: String, threadIdx: Int, succeeded: Int,
- discarded: Int): Unit = ()
-
- /** Called whenever a property has finished testing */
- def onTestResult(name: String, result: Result): Unit = ()
-
- def chain(testCallback: TestCallback) = new TestCallback {
- override def onPropEval(name: String, threadIdx: Int,
- succeeded: Int, discarded: Int
- ): Unit = {
- self.onPropEval(name,threadIdx,succeeded,discarded)
- testCallback.onPropEval(name,threadIdx,succeeded,discarded)
- }
-
- override def onTestResult(name: String, result: Result): Unit = {
- self.onTestResult(name,result)
- testCallback.onTestResult(name,result)
- }
- }
- }
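A sketch of a user-defined callback chained with the console reporter via `chain`; `ConsoleReporter(1)` is the reporter already used elsewhere in this diff, and the property is illustrative:

{{{
import org.scalacheck.{ConsoleReporter, Test}
import org.scalacheck.Prop.forAll

object CallbackExample {
  // Count finished properties while still printing the usual report.
  val counting = new Test.TestCallback {
    @volatile var finished = 0
    override def onTestResult(name: String, result: Test.Result): Unit =
      finished += 1
  }

  val params = new Test.Parameters.Default {
    override val testCallback = counting chain ConsoleReporter(1)
  }

  val result = Test.check(params, forAll { (n: Int) => n + 0 == n })
}
}}}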
-
- private def assertParams(prms: Parameters) = {
- import prms._
- if(
- minSuccessfulTests <= 0 ||
- maxDiscardRatio <= 0 ||
- minSize < 0 ||
- maxSize < minSize ||
- workers <= 0
- ) throw new IllegalArgumentException("Invalid test parameters")
- }
-
- private def secure[T](x: => T): Either[T,Throwable] =
- try { Left(x) } catch { case e: Throwable => Right(e) }
-
- private[scalacheck] lazy val cmdLineParser = new CmdLineParser {
- object OptMinSuccess extends IntOpt {
- val default = Parameters.default.minSuccessfulTests
- val names = Set("minSuccessfulTests", "s")
- val help = "Number of tests that must succeed in order to pass a property"
- }
- object OptMaxDiscarded extends IntOpt {
- val default = -1
- val names = Set("maxDiscardedTests", "d")
- val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property. NOTE: this option is deprecated, please use " +
- "the option maxDiscardRatio (-r) instead."
- }
- object OptMaxDiscardRatio extends FloatOpt {
- val default = Parameters.default.maxDiscardRatio
- val names = Set("maxDiscardRatio", "r")
- val help =
- "The maximum ratio between discarded and succeeded tests " +
- "allowed before ScalaCheck stops testing a property. At " +
- "least minSuccessfulTests will always be tested, though."
- }
- object OptMinSize extends IntOpt {
- val default = Parameters.default.minSize
- val names = Set("minSize", "n")
- val help = "Minimum data generation size"
- }
- object OptMaxSize extends IntOpt {
- val default = Parameters.default.maxSize
- val names = Set("maxSize", "x")
- val help = "Maximum data generation size"
- }
- object OptWorkers extends IntOpt {
- val default = Parameters.default.workers
- val names = Set("workers", "w")
- val help = "Number of threads to execute in parallel for testing"
- }
- object OptVerbosity extends IntOpt {
- val default = 1
- val names = Set("verbosity", "v")
- val help = "Verbosity level"
- }
-
- val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMaxDiscardRatio, OptMinSize,
- OptMaxSize, OptWorkers, OptVerbosity
- )
-
- def parseParams(args: Array[String]) = parseArgs(args) {
- optMap => Parameters.default.copy(
- _minSuccessfulTests = optMap(OptMinSuccess),
- _maxDiscardRatio =
- if (optMap(OptMaxDiscarded) < 0) optMap(OptMaxDiscardRatio)
- else optMap(OptMaxDiscarded).toFloat / optMap(OptMinSuccess),
- _minSize = optMap(OptMinSize),
- _maxSize = optMap(OptMaxSize),
- _workers = optMap(OptWorkers),
- _testCallback = ConsoleReporter(optMap(OptVerbosity))
- )
- }
- }
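These options back the command line accepted by `mainRunner` on Prop and Properties; a sketch, assuming ScalaCheck's usual single-dash option syntax and an illustrative spec:

{{{
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll

object SquareSpec extends Properties("Square") {
  property("non-negative") = forAll { (n: Int) => n.toLong * n >= 0L }
}

object RunFromArgs {
  // Roughly equivalent to: scala SquareSpec -minSuccessfulTests 500 -workers 4 -verbosity 2
  val exitCode: Int = SquareSpec.mainRunner(
    Array("-minSuccessfulTests", "500", "-workers", "4", "-verbosity", "2"))
}
}}}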
-
- /** Tests a property with the given testing parameters, and returns
- * the test results.
- * @deprecated (in 1.10.0) Use
- * `check(Parameters, Properties)` instead.
- */
- @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
- def check(params: Params, p: Prop): Result = {
- check(paramsToParameters(params), p)
- }
-
- /** Tests a property with the given testing parameters, and returns
- * the test results. */
- def check(params: Parameters, p: Prop): Result = {
- import params._
-
- assertParams(params)
- if(workers > 1) {
- assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- }
-
- val iterations = math.ceil(minSuccessfulTests / (workers: Double))
- val sizeStep = (maxSize-minSize) / (iterations*workers)
- var stop = false
-
- def worker(workerIdx: Int) =
- if (workers < 2) () => workerFun(workerIdx)
- else actors.Futures.future {
- params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
- workerFun(workerIdx)
- }
-
- def workerFun(workerIdx: Int) = {
- var n = 0 // passed tests
- var d = 0 // discarded tests
- var res: Result = null
- var fm = FreqMap.empty[immutable.Set[Any]]
- while(!stop && res == null && n < iterations) {
- val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
- val propPrms = Prop.Params(Gen.Params(size.round.toInt, params.rng), fm)
- secure(p(propPrms)) match {
- case Right(e) => res =
- Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
- case Left(propRes) =>
- fm =
- if(propRes.collected.isEmpty) fm
- else fm + propRes.collected
- propRes.status match {
- case Prop.Undecided =>
- d += 1
- testCallback.onPropEval("", workerIdx, n, d)
- // The below condition is kind of hacky. We have to have
- // some margin, otherwise workers might stop testing too
- // early because they have been exhausted, but the overall
- // test has not.
- if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
- res = Result(Exhausted, n, d, fm)
- case Prop.True =>
- n += 1
- testCallback.onPropEval("", workerIdx, n, d)
- case Prop.Proof =>
- n += 1
- res = Result(Proved(propRes.args), n, d, fm)
- stop = true
- case Prop.False =>
- res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
- stop = true
- case Prop.Exception(e) =>
- res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
- stop = true
- }
- }
- }
- if (res == null) {
- if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
- else Result(Exhausted, n, d, fm)
- } else res
- }
-
- def mergeResults(r1: () => Result, r2: () => Result) = {
- val Result(st1, s1, d1, fm1, _) = r1()
- val Result(st2, s2, d2, fm2, _) = r2()
- if (st1 != Passed && st1 != Exhausted)
- () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
- else if (st2 != Passed && st2 != Exhausted)
- () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
- else {
- if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
- () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
- else
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
- }
- }
-
- val start = System.currentTimeMillis
- val results = for(i <- 0 until workers) yield worker(i)
- val r = results.reduceLeft(mergeResults)()
- stop = true
- results foreach (_.apply())
- val timedRes = r.copy(time = System.currentTimeMillis-start)
- params.testCallback.onTestResult("", timedRes)
- timedRes
- }
-
- /** Check a set of properties.
- * @deprecated (in 1.10.0) Use
- * `checkProperties(Parameters, Properties)` instead.
- */
- @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
- def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
- checkProperties(paramsToParameters(prms), ps)
-
- /** Check a set of properties. */
- def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] =
- ps.properties.map { case (name,p) =>
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) =
- prms.testCallback.onPropEval(name,t,s,d)
- override def onTestResult(n: String, r: Result) =
- prms.testCallback.onTestResult(name,r)
- }
- val res = check(prms copy (_testCallback = testCallback), p)
- (name,res)
- }
-
-}
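
The deleted Test.check above splits the requested minSuccessfulTests evenly across workers (math.ceil(minSuccessfulTests / workers)) and sums the per-worker pass/discard tallies when merging results. A minimal, self-contained sketch of that arithmetic; the names below are illustrative, not ScalaCheck's API:

object WorkSplitSketch {
  final case class Tally(passed: Int, discarded: Int)

  // each worker runs an equal slice of the requested successful tests
  def iterationsPerWorker(minSuccessfulTests: Int, workers: Int): Int =
    math.ceil(minSuccessfulTests / workers.toDouble).toInt

  // per-worker tallies are combined by addition, as mergeResults does above
  def merge(a: Tally, b: Tally): Tally =
    Tally(a.passed + b.passed, a.discarded + b.discarded)

  def main(args: Array[String]): Unit = {
    val perWorker = iterationsPerWorker(minSuccessfulTests = 100, workers = 4)
    val tallies = Seq.fill(4)(Tally(perWorker, 0))
    println(tallies.reduceLeft(merge)) // Tally(100,0): 4 workers x 25 iterations each
  }
}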
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
deleted file mode 100644
index 140c541a95..0000000000
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.collection._
-
-trait Buildable[T,C[_]] {
- def builder: mutable.Builder[T,C[T]]
- def fromIterable(it: Traversable[T]): C[T] = {
- val b = builder
- b ++= it
- b.result()
- }
-}
-
-object Buildable {
-
- implicit def buildableList[T] = new Buildable[T,List] {
- def builder = new mutable.ListBuffer[T]
- }
-
- implicit def buildableStream[T] = new Buildable[T,Stream] {
- def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
- }
-
- implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
- new Buildable[T,Array] {
- def builder = mutable.ArrayBuilder.make[T]
- }
-
- implicit def buildableMutableSet[T] = new Buildable[T,mutable.Set] {
- def builder = new mutable.SetBuilder(mutable.Set.empty[T])
- }
-
- implicit def buildableImmutableSet[T] = new Buildable[T,immutable.Set] {
- def builder = new mutable.SetBuilder(immutable.Set.empty[T])
- }
-
- implicit def buildableSet[T] = new Buildable[T,Set] {
- def builder = new mutable.SetBuilder(Set.empty[T])
- }
-
- import java.util.ArrayList
- implicit def buildableArrayList[T] = new Buildable[T,ArrayList] {
- def builder = new mutable.Builder[T,ArrayList[T]] {
- val al = new ArrayList[T]
- def +=(x: T) = {
- al.add(x)
- this
- }
- def clear() = al.clear()
- def result() = al
- }
- }
-
-}
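
The deleted Buildable is a small typeclass: each instance exposes a mutable Builder, and fromIterable drains a collection through it to produce a C[T]. A condensed, self-contained sketch of the pattern; names are illustrative, and Iterable stands in for the since-deprecated Traversable:

import scala.collection.mutable

trait MiniBuildable[T, C[_]] {
  def builder: mutable.Builder[T, C[T]]
  def fromIterable(it: Iterable[T]): C[T] = {
    val b = builder
    b ++= it
    b.result()
  }
}

object MiniBuildable {
  implicit def list[T]: MiniBuildable[T, List] = new MiniBuildable[T, List] {
    def builder = List.newBuilder[T]
  }
}

object BuildableDemo extends App {
  val xs = implicitly[MiniBuildable[Int, List]].fromIterable(Seq(1, 2, 3))
  println(xs) // List(1, 2, 3)
}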
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
deleted file mode 100644
index eb3a91fe59..0000000000
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.Reader
-import scala.util.parsing.input.Position
-import scala.collection.Set
-import org.scalacheck.Test
-
-trait CmdLineParser extends Parsers {
-
- type Elem = String
-
- trait Opt[+T] {
- val default: T
- val names: Set[String]
- val help: String
- }
- trait Flag extends Opt[Unit]
- trait IntOpt extends Opt[Int]
- trait FloatOpt extends Opt[Float]
- trait StrOpt extends Opt[String]
-
- class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
- def apply(flag: Flag): Boolean = opts.contains(flag)
- def apply[T](opt: Opt[T]): T = opts.get(opt) match {
- case None => opt.default
- case Some(v) => v.asInstanceOf[T]
- }
- def update[T](opt: Opt[T], optVal: T) = opts.update(opt, optVal)
- }
-
- val opts: Set[Opt[_]]
-
- private class ArgsReader(args: Array[String], i: Int) extends Reader[String] {
- val pos = new Position {
- val column = (args take i).foldLeft(1)(_ + _.length + 1)
- val line = 1
- val lineContents = args.mkString(" ")
- }
- val atEnd = i >= args.length
- def first = if(atEnd) null else args(i)
- def rest = if(atEnd) this else new ArgsReader(args, i+1)
- }
-
- private def getOpt(s: String) = {
- if(s == null || s.length == 0 || s.charAt(0) != '-') None
- else opts.find(_.names.contains(s.drop(1)))
- }
-
- private val opt: Parser[Opt[Any]] = accept("option name", {
- case s if getOpt(s).isDefined => getOpt(s).get
- })
-
- private val strVal: Parser[String] = accept("string", {
- case s if s != null => s
- })
-
- private val intVal: Parser[Int] = accept("integer", {
- case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
- })
-
- private val floatVal: Parser[Float] = accept("float", {
- case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
- => s.toFloat
- })
-
- private case class OptVal[T](o: Opt[T], v: T)
-
- private val optVal: Parser[OptVal[Any]] = opt into {
- case o: Flag => success(OptVal(o, ()))
- case o: IntOpt => intVal ^^ (v => OptVal(o, v))
- case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
- case o: StrOpt => strVal ^^ (v => OptVal(o, v))
- }
-
- val options: Parser[OptMap] = rep(optVal) ^^ { xs =>
- val map = new OptMap
- xs.foreach { case OptVal(o,v) => map(o) = v }
- map
- }
-
- def printHelp = {
- println("Available options:")
- opts.foreach { opt =>
- println(" " + opt.names.map("-"+_).mkString(", ") + ": " + opt.help)
- }
- }
-
- def parseArgs[T](args: Array[String])(f: OptMap => T) =
- phrase(options map f)(new ArgsReader(args,0))
-}
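
The deleted CmdLineParser builds option parsing from ordinary parser combinators by wrapping the argument array in a Reader[String], so each argument is one input element. A stripped-down, self-contained sketch of that idiom, assuming the scala.util.parsing.combinator module is on the classpath (it shipped with the standard library at the time); the option name below is illustrative:

import scala.util.parsing.combinator.Parsers
import scala.util.parsing.input.{Position, Reader}

object ArgsParserSketch extends Parsers {
  type Elem = String // each input "element" is one command-line argument

  // wraps the argument array so combinators can consume one token at a time
  private class ArgsReader(args: Array[String], i: Int) extends Reader[String] {
    val pos = new Position {
      val line = 1
      val column = i + 1
      val lineContents = args.mkString(" ")
    }
    val atEnd = i >= args.length
    def first = if (atEnd) null else args(i)
    def rest = if (atEnd) this else new ArgsReader(args, i + 1)
  }

  private val workersFlag: Parser[String] = accept("option -w", { case "-w" => "-w" })
  private val intVal: Parser[Int] = accept("integer", {
    case s if s != null && s.nonEmpty && s.forall(_.isDigit) => s.toInt
  })

  // parseWorkers(Array("-w", "4")) == Some(4)
  def parseWorkers(args: Array[String]): Option[Int] =
    phrase(workersFlag ~> intVal)(new ArgsReader(args, 0)) match {
      case Success(n, _) => Some(n)
      case _ => None
    }
}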
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
deleted file mode 100644
index d0686aec72..0000000000
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-trait FreqMap[T] {
- protected val underlying: scala.collection.immutable.Map[T,Int]
- val total: Int
-
- def +(t: T) = new FreqMap[T] {
- private val n = FreqMap.this.underlying.get(t) match {
- case None => 1
- case Some(n) => n+1
- }
- val underlying = FreqMap.this.underlying + (t -> n)
- val total = FreqMap.this.total + 1
- }
-
- def -(t: T) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying.get(t) match {
- case None => FreqMap.this.underlying
- case Some(n) => FreqMap.this.underlying + (t -> (n-1))
- }
- val total = FreqMap.this.total + 1
- }
-
- def ++(fm: FreqMap[T]) = new FreqMap[T] {
- private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet
- private val mappings = keys.toStream.map { x =>
- (x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0))
- }
- val underlying = scala.collection.immutable.Map(mappings: _*)
- val total = FreqMap.this.total + fm.total
- }
-
- def --(fm: FreqMap[T]) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying transform {
- case (x,n) => n - fm.getCount(x).getOrElse(0)
- }
- lazy val total = (0 /: underlying.valuesIterator) (_ + _)
- }
-
- def getCount(t: T) = underlying.get(t)
-
- def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2)
-
- def getRatio(t: T) = for(c <- getCount(t)) yield (c: Float)/total
-
- def getRatios = for((t,c) <- getCounts) yield (t, (c: Float)/total)
-
- override def toString = underlying.toString
-}
-
-object FreqMap {
- def empty[T] = new FreqMap[T] {
- val underlying = scala.collection.immutable.Map.empty[T,Int]
- val total = 0
- }
-}
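
The deleted FreqMap is an immutable frequency counter: a Map[T, Int] plus a running total, with ratios derived from the two. A condensed, self-contained sketch of the same idea (illustrative only):

final case class MiniFreqMap[T](counts: Map[T, Int] = Map.empty[T, Int], total: Int = 0) {
  def +(t: T): MiniFreqMap[T] =
    MiniFreqMap(counts.updated(t, counts.getOrElse(t, 0) + 1), total + 1)
  def ratio(t: T): Option[Double] =
    counts.get(t).map(_.toDouble / total)
}

object MiniFreqMapDemo extends App {
  val fm = List("a", "b", "a").foldLeft(MiniFreqMap[String]())(_ + _)
  println(fm.ratio("a")) // Some(0.666...)
}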
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
deleted file mode 100644
index 7c1dc8dcc4..0000000000
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-object StdRand extends java.util.Random
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
index d407b93a4b..fe5ed47d43 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -7,12 +7,10 @@ package scala.tools.nsc
package doc
import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
-import scala.reflect.internal.Chars._
-import symtab._
import typechecker.Analyzer
+import scala.reflect.internal.Chars._
import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo, LinkToExternal }
-import scala.language.postfixOps
trait ScaladocAnalyzer extends Analyzer {
val global : Global // generally, a ScaladocGlobal
@@ -168,7 +166,7 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax
}
override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
- override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption orNull
+ override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption.orNull
override def toString(link: LinkTo): String = "No link"
override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
override def warnNoLink: Boolean = false
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
index 723f8b1dc8..e654678c6d 100644
--- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package doc
import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
-import scala.reflect.internal.Chars._
import symtab._
import reporters.Reporter
import typechecker.Analyzer
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index c4e3c115be..8f217e087c 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -305,10 +305,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else None
}
+ private def templateAndType(ancestor: Symbol): (TemplateImpl, TypeEntity) = (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
lazy val (linearizationTemplates, linearizationTypes) =
- reprSymbol.ancestors map { ancestor =>
- (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
- } unzip
+ (reprSymbol.ancestors map templateAndType).unzip
/* Subclass cache */
private lazy val subClassesCache = (
@@ -321,7 +320,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def directSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- /* Implcitly convertible class cache */
+ /* Implicitly convertible class cache */
private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = {
if (implicitlyConvertibleClassesCache == null)
@@ -841,7 +840,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def value = tree
}
}
- case None =>
+ case None =>
argTrees map { tree =>
new ValueArgument {
def parameter = None
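
The ModelFactory change above pulls the pair-building closure into the named templateAndType helper so the result can be unzipped with plain dot notation instead of a postfix unzip. A minimal illustration of the same shape; the helper below is a hypothetical stand-in:

object UnzipSketch extends App {
  def templateAndType(n: Int): (String, Int) = (s"T$n", n * n) // hypothetical stand-in
  val (templates, types) = (List(1, 2, 3) map templateAndType).unzip
  println(templates) // List(T1, T2, T3)
  println(types)     // List(1, 4, 9)
}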
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index 9f139cb5ea..123516bb2d 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -5,7 +5,6 @@
**
*/
-
package scala.tools.scalap
import scala.collection.mutable
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index 9c72bdbf1e..59f083ee76 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -5,13 +5,9 @@
**
*/
-
-package scala
-package tools.scalap
-
+package scala.tools.scalap
class ByteArrayReader(content: Array[Byte]) {
- import java.io._
/** the buffer containing the file
*/
@@ -105,9 +101,6 @@ class ByteArrayReader(content: Array[Byte]) {
def getDouble(bp: Int): Double = java.lang.Double.longBitsToDouble(getLong(bp))
/** skip next 'n' bytes
- */
- def skip(n: Int) {
- bp += n
- }
-
+ */
+ def skip(n: Int): Unit = bp += n
}
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index f62df285f9..d9d264bbbf 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -5,10 +5,8 @@
**
*/
-
package scala.tools.scalap
-
class Classfile(in: ByteArrayReader) {
import Classfiles._
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index 9295dd7aff..982a83cfa0 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -5,10 +5,8 @@
**
*/
-
package scala.tools.scalap
-
object Classfiles {
final val JAVA_MAGIC = 0xCAFEBABE
final val JAVA_MAJOR_VERSION = 45
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index 168050096d..21c4399d5c 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -6,13 +6,9 @@
*/
-package scala
-package tools.scalap
+package scala.tools.scalap
-import java.io._
-
-
-class CodeWriter(writer: Writer) {
+class CodeWriter(writer: java.io.Writer) {
private val nl = scala.compat.Platform.EOL
private var step = " "
diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala
index 76ce3f4173..69325c1ec8 100644
--- a/src/scalap/scala/tools/scalap/Decode.scala
+++ b/src/scalap/scala/tools/scalap/Decode.scala
@@ -5,17 +5,14 @@
**
*/
-// $Id$
-
package scala.tools.scalap
-import scala.tools.scalap.scalax.rules.scalasig._
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.util.ScalaClassLoader.appLoader
+import scala.tools.scalap.scalasig._
+
+import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.internal.pickling.ByteCodecs
import ClassFileParser.{ ConstValueIndex, Annotation }
-import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
/** Temporary decoder. This would be better off in the scala.tools.nsc
* but right now the compiler won't acknowledge scala.tools.scalap
@@ -31,7 +28,7 @@ object Decode {
/** Return the classfile bytes representing the scala sig classfile attribute.
* This has been obsoleted by the switch to annotations.
*/
- def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, appLoader)
+ def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, ScalaClassLoader.appLoader)
def scalaSigBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
val bytes = classLoader.classBytes(name)
val reader = new ByteArrayReader(bytes)
@@ -39,17 +36,16 @@ object Decode {
cf.scalaSigAttribute map (_.data)
}
- /** Return the bytes representing the annotation
- */
- def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, appLoader)
+ /** Return the bytes representing the annotation. */
+ def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, ScalaClassLoader.appLoader)
def scalaSigAnnotationBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
val bytes = classLoader.classBytes(name)
val byteCode = ByteCode(bytes)
val classFile = ClassFileParser.parse(byteCode)
import classFile._
- classFile annotation SCALA_SIG_ANNOTATION map { case Annotation(_, els) =>
- val bytesElem = els find (x => constant(x.elementNameIndex) == BYTES_VALUE) getOrElse null
+ classFile annotation Main.SCALA_SIG_ANNOTATION map { case Annotation(_, els) =>
+ val bytesElem = els find (x => constant(x.elementNameIndex) == Main.BYTES_VALUE) getOrElse null
val _bytes = bytesElem.elementValue match { case ConstValueIndex(x) => constantWrapped(x) }
val bytes = _bytes.asInstanceOf[StringBytesPair].bytes
val length = ByteCodecs.decode(bytes)
@@ -58,8 +54,7 @@ object Decode {
}
}
- /** private[scala] so nobody gets the idea this is a supported interface.
- */
+ /** private[scala] so nobody gets the idea this is a supported interface. */
private[scala] def caseParamNames(path: String): Option[List[String]] = {
val (outer, inner) = (path indexOf '$') match {
case -1 => (path, "")
@@ -67,7 +62,7 @@ object Decode {
}
for {
- clazz <- appLoader.tryToLoadClass[AnyRef](outer)
+ clazz <- ScalaClassLoader.appLoader.tryToLoadClass[AnyRef](outer)
ssig <- ScalaSigParser.parse(clazz)
}
yield {
@@ -85,11 +80,10 @@ object Decode {
}
}
- /** Returns a map of Alias -> Type for the given package.
- */
+ /** Returns a map of Alias -> Type for the given package. */
private[scala] def typeAliases(pkg: String) = {
for {
- clazz <- appLoader.tryToLoadClass[AnyRef](pkg + ".package")
+ clazz <- ScalaClassLoader.appLoader.tryToLoadClass[AnyRef](pkg + ".package")
ssig <- ScalaSigParser.parse(clazz)
}
yield {
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index 772cf6eacd..1ba89e4702 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -5,13 +5,11 @@
**
*/
-
package scala.tools.scalap
-import java.io._
import scala.reflect.NameTransformer
-class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer) {
+class JavaWriter(classfile: Classfile, writer: java.io.Writer) extends CodeWriter(writer) {
val cf = classfile
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 5da4227e53..44d7ef6a41 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -5,16 +5,16 @@
**
*/
-package scala
-package tools.scalap
+package scala.tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
-import scala.reflect.NameTransformer
-import scalax.rules.scalasig._
+
import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
-import scala.tools.util.PathResolver
-import ClassPath.DefaultJavaContext
-import scala.tools.nsc.io.{ PlainFile, AbstractFile }
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.io.AbstractFile
+
+import scala.tools.scalap.scalasig._
+
/**The main object used to execute scalap on the command-line.
*
@@ -104,7 +104,7 @@ class Main {
// we have to encode every fragment of a name separately, otherwise the NameTransformer
// will encode using unicode escaping dot separators as well
// we can afford allocations because this is not a performance critical code
- classname.split('.').map(NameTransformer.encode).mkString(".")
+ classname.split('.').map(scala.reflect.NameTransformer.encode).mkString(".")
}
val cls = path.findClass(encName)
if (cls.isDefined && cls.get.binary.isDefined) {
@@ -185,7 +185,7 @@ object Main extends Main {
val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
val path = cparg match {
case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
+ case _ => scala.tools.util.PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
}
// print the classpath if output is verbose
if (verbose)
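
The comment retained in Main above explains why each dot-separated fragment is encoded separately: NameTransformer escapes any character that is not a valid Java identifier part, which includes the '.' separators themselves. A quick self-contained check (NameTransformer ships with the Scala library):

object EncodeFragments extends App {
  import scala.reflect.NameTransformer
  val name = "foo.::.bar"
  println(NameTransformer.encode(name)) // the '.' separators are escaped as $uXXXX sequences too
  println(name.split('.').map(NameTransformer.encode).mkString(".")) // foo.$colon$colon.bar
}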
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index 8b4ffb3efd..324330466f 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -6,18 +6,15 @@
*/
-package scala
-package tools.scalap
+package scala.tools.scalap
-import java.io._
-import java.util._
/** a parser class for parsing meta type information in classfiles
* generated by pico.
*/
class MetaParser(meta: String) {
- val scanner = new StringTokenizer(meta, "()[], \t<;", true)
+ val scanner = new java.util.StringTokenizer(meta, "()[], \t<;", true)
var token: String = _
val res = new StringBuffer
diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala
index 8f9a9d8606..432dd495e9 100644
--- a/src/scalap/scala/tools/scalap/Properties.scala
+++ b/src/scalap/scala/tools/scalap/Properties.scala
@@ -9,8 +9,7 @@
package scala.tools.scalap
/** Loads decoder.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait
-{
+object Properties extends scala.util.PropertiesTrait {
protected def propCategory = "decoder"
protected def pickJarBasedOn = classOf[Classfile]
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/rules/Memoisable.scala
index b4ce8cab23..418141bee7 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
+++ b/src/scalap/scala/tools/scalap/rules/Memoisable.scala
@@ -10,26 +10,24 @@
//
// -----------------------------------------------------------------------------
-package scala.tools.scalap
-package scalax
-package rules
+package scala.tools.scalap.rules
import scala.collection.mutable
trait MemoisableRules extends Rules {
- def memo[In <: Memoisable, Out, A, X](key : AnyRef)(toRule : => In => Result[Out, A, X]) = {
+ def memo[In <: Memoisable, Out, A, X](key: AnyRef)(toRule: => In => Result[Out, A, X]) = {
lazy val rule = toRule
from[In] { in => in.memo(key, rule(in)) }
}
- override def ruleWithName[In, Out, A, X](name : String, f : In => rules.Result[Out, A, X]) = super.ruleWithName(name, (in : In) => in match {
- case s : Memoisable => s.memo(name, f(in))
+ override def ruleWithName[In, Out, A, X](name: String, f: In => Result[Out, A, X]) = super.ruleWithName(name, (in: In) => in match {
+ case s: Memoisable => s.memo(name, f(in))
case _ => f(in)
})
}
trait Memoisable {
- def memo[A](key : AnyRef, a : => A) : A
+ def memo[A](key: AnyRef, a: => A): A
}
@@ -40,22 +38,19 @@ object DefaultMemoisable {
trait DefaultMemoisable extends Memoisable {
protected val map = new mutable.HashMap[AnyRef, Any]
- def memo[A](key : AnyRef, a : => A) = {
+ def memo[A](key: AnyRef, a: => A) = {
map.getOrElseUpdate(key, compute(key, a)).asInstanceOf[A]
}
- protected def compute[A](key : AnyRef, a : => A): Any = a match {
- case success : Success[_, _] => onSuccess(key, success); success
+ protected def compute[A](key: AnyRef, a: => A): Any = a match {
+ case success: Success[_, _] => onSuccess(key, success); success
case other =>
if(DefaultMemoisable.debug) println(key + " -> " + other)
other
}
- protected def onSuccess[S, T](key : AnyRef, result : Success[S, T]) {
+ protected def onSuccess[S, T](key: AnyRef, result: Success[S, T]) {
val Success(out, t) = result
if(DefaultMemoisable.debug) println(key + " -> " + t + " (" + out + ")")
}
}
-
-
-
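
DefaultMemoisable above caches rule results per key via getOrElseUpdate on a mutable HashMap, so the rule body runs at most once per key. The core of that trick, stripped of the rules machinery (illustrative only):

import scala.collection.mutable

trait MiniMemo {
  private val cache = mutable.HashMap.empty[AnyRef, Any]
  // compute is by-name, so it only runs on a cache miss
  def memo[A](key: AnyRef, compute: => A): A =
    cache.getOrElseUpdate(key, compute).asInstanceOf[A]
}

object MemoDemo extends App with MiniMemo {
  var evaluations = 0
  def slow(): Int = { evaluations += 1; 42 }
  memo("answer", slow())
  memo("answer", slow())
  println(evaluations) // 1: the second call hit the cache
}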
diff --git a/src/scalap/scala/tools/scalap/rules/Result.scala b/src/scalap/scala/tools/scalap/rules/Result.scala
new file mode 100644
index 0000000000..ae05416d7a
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/rules/Result.scala
@@ -0,0 +1,69 @@
+// -----------------------------------------------------------------------------
+//
+// Scalax - The Scala Community Library
+// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
+//
+// The primary distribution site is http://scalax.scalaforge.org/
+//
+// This software is released under the terms of the Revised BSD License.
+// There is NO WARRANTY. See the file LICENSE for the full text.
+//
+// -----------------------------------------------------------------------------
+
+package scala.tools.scalap.rules;
+
+/** Represents the combined value of two rules applied in sequence.
+ *
+ * @see the Scala parser combinator
+ */
+case class ~[+A, +B](_1: A, _2: B) {
+ override def toString = "(" + _1 + " ~ " + _2 + ")"
+}
+
+
+sealed abstract class Result[+Out, +A, +X] {
+ def out: Out
+ def value: A
+ def error: X
+
+ implicit def toOption: Option[A]
+
+ def map[B](f: A => B): Result[Out, B, X]
+ def mapOut[Out2](f: Out => Out2): Result[Out2, A, X]
+ def map[Out2, B](f: (Out, A) => (Out2, B)): Result[Out2, B, X]
+ def flatMap[Out2, B](f: (Out, A) => Result[Out2, B, Nothing]): Result[Out2, B, X]
+ def orElse[Out2 >: Out, B >: A](other: => Result[Out2, B, Nothing]): Result[Out2, B, X]
+}
+
+case class Success[+Out, +A](out: Out, value: A) extends Result[Out, A, Nothing] {
+ def error = throw new ScalaSigParserError("No error")
+
+ def toOption = Some(value)
+
+ def map[B](f: A => B): Result[Out, B, Nothing] = Success(out, f(value))
+ def mapOut[Out2](f: Out => Out2): Result[Out2, A, Nothing] = Success(f(out), value)
+ def map[Out2, B](f: (Out, A) => (Out2, B)): Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
+ def flatMap[Out2, B](f: (Out, A) => Result[Out2, B, Nothing]): Result[Out2, B, Nothing]= f(out, value)
+ def orElse[Out2 >: Out, B >: A](other: => Result[Out2, B, Nothing]): Result[Out2, B, Nothing] = this
+}
+
+sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
+ def out = throw new ScalaSigParserError("No output")
+ def value = throw new ScalaSigParserError("No value")
+
+ def toOption = None
+
+ def map[B](f: Nothing => B) = this
+ def mapOut[Out2](f: Nothing => Out2) = this
+ def map[Out2, B](f: (Nothing, Nothing) => (Out2, B)) = this
+ def flatMap[Out2, B](f: (Nothing, Nothing) => Result[Out2, B, Nothing]) = this
+ def orElse[Out2, B](other: => Result[Out2, B, Nothing]) = other
+}
+
+case object Failure extends NoSuccess[Nothing] {
+ def error = throw new ScalaSigParserError("No error")
+}
+
+case class ScalaSigParserError(msg: String) extends RuntimeException(msg)
+
+case class Error[+X](error: X) extends NoSuccess[X]
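
The new Result type is a three-way outcome: Success carries a value plus the remaining input, Failure lets an alternative rule be tried, and Error aborts parsing. A compact, self-contained model of that distinction (not the scalap classes themselves):

sealed trait MiniResult[+A]
case class MiniSuccess[A](value: A) extends MiniResult[A]
case object MiniFailure extends MiniResult[Nothing]            // try the next alternative
case class MiniError(msg: String) extends MiniResult[Nothing]  // stop immediately

object MiniResultDemo extends App {
  def orElse[A](r: MiniResult[A], alt: => MiniResult[A]): MiniResult[A] = r match {
    case MiniFailure => alt   // only plain failures fall through
    case other       => other // successes and errors are final
  }
  println(orElse(MiniFailure, MiniSuccess(1)))       // MiniSuccess(1)
  println(orElse(MiniError("boom"), MiniSuccess(1))) // MiniError(boom)
}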
diff --git a/src/scalap/scala/tools/scalap/rules/Rule.scala b/src/scalap/scala/tools/scalap/rules/Rule.scala
new file mode 100644
index 0000000000..0a00111f7a
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/rules/Rule.scala
@@ -0,0 +1,172 @@
+// -----------------------------------------------------------------------------
+//
+// Scalax - The Scala Community Library
+// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
+//
+// The primary distribution site is http://scalax.scalaforge.org/
+//
+// This software is released under the terms of the Revised BSD License.
+// There is NO WARRANTY. See the file LICENSE for the full text.
+//
+// -----------------------------------------------------------------------------
+
+package scala.tools.scalap.rules
+
+/** A Rule is a function from some input to a Result. The result may be:
+ * <ul>
+ * <li>Success, with a value of some type and an output that may serve as the input to subsequent rules.</li>
+ * <li>Failure. A failure may result in some alternative rule being applied.</li>
+ * <li>Error. No further rules should be attempted.</li>
+ * </ul>
+ *
+ * @author Andrew Foggin
+ *
+ * Inspired by the Scala parser combinator.
+ */
+trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
+ val factory: Rules
+ import factory._
+
+ def as(name: String) = ruleWithName(name, this)
+
+ def flatMap[Out2, B, X2 >: X](fa2ruleb: A => Out => Result[Out2, B, X2]) = mapResult {
+ case Success(out, a) => fa2ruleb(a)(out)
+ case Failure => Failure
+ case err @ Error(_) => err
+ }
+
+ def map[B](fa2b: A => B) = flatMap { a => out => Success(out, fa2b(a)) }
+
+ def filter(f: A => Boolean) = flatMap { a => out => if(f(a)) Success(out, a) else Failure }
+
+ def mapResult[Out2, B, Y](f: Result[Out, A, X] => Result[Out2, B, Y]) = rule {
+ in: In => f(apply(in))
+ }
+
+ def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]): Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
+ val factory = Rule.this.factory
+ lazy val choices = Rule.this :: other :: Nil
+ }
+
+ def orError[In2 <: In] = this orElse error[Any]
+
+ def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]) = orElse(other)
+
+ def ^^[B](fa2b: A => B) = map(fa2b)
+
+ def ^^?[B](pf: PartialFunction[A, B]) = filter (pf.isDefinedAt(_)) ^^ pf
+
+ def ??(pf: PartialFunction[A, Any]) = filter (pf.isDefinedAt(_))
+
+ def -^[B](b: B) = map { any => b }
+
+ /** Maps an Error */
+ def !^[Y](fx2y: X => Y) = mapResult {
+ case s @ Success(_, _) => s
+ case Failure => Failure
+ case Error(x) => Error(fx2y(x))
+ }
+
+ def >>[Out2, B, X2 >: X](fa2ruleb: A => Out => Result[Out2, B, X2]) = flatMap(fa2ruleb)
+
+ def >->[Out2, B, X2 >: X](fa2resultb: A => Result[Out2, B, X2]) = flatMap { a => any => fa2resultb(a) }
+
+ def >>?[Out2, B, X2 >: X](pf: PartialFunction[A, Rule[Out, Out2, B, X2]]) = filter(pf isDefinedAt _) flatMap pf
+
+ def >>&[B, X2 >: X](fa2ruleb: A => Out => Result[Any, B, X2]) = flatMap { a => out => fa2ruleb(a)(out) mapOut { any => out } }
+
+ def ~[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield new ~(a, b)
+
+ def ~-[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield a
+
+ def -~[Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield b
+
+ def ~++[Out2, B >: A, X2 >: X](next: => Rule[Out, Out2, Seq[B], X2]) = for (a <- this; b <- next) yield a :: b.toList
+
+ /** Apply the result of this rule to the function returned by the next rule */
+ def ~>[Out2, B, X2 >: X](next: => Rule[Out, Out2, A => B, X2]) = for (a <- this; fa2b <- next) yield fa2b(a)
+
+ /** Apply the result of this rule to the function returned by the previous rule */
+ def <~:[InPrev, B, X2 >: X](prev: => Rule[InPrev, In, A => B, X2]) = for (fa2b <- prev; a <- this) yield fa2b(a)
+
+ def ~![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield new ~(a, b)
+
+ def ~-![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield a
+
+ def -~![Out2, B, X2 >: X](next: => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next.orError) yield b
+
+ def -[In2 <: In](exclude: => Rule[In2, Any, Any, Any]) = !exclude -~ this
+
+ /** ^~^(f) is equivalent to ^^ { case b1 ~ b2 => f(b1, b2) }
+ */
+ def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f: (B1, B2) => C) = map { a =>
+ (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
+ }
+
+ /** ^~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
+ */
+ def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
+ }
+
+ /** ^~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
+ */
+ def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f: (B1, B2, B3, B4) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
+ }
+
+ /** ^~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
+ */
+ def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f: (B1, B2, B3, B4, B5) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
+ }
+
+ /** ^~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
+ */
+ def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f: (B1, B2, B3, B4, B5, B6) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
+ }
+
+  /** ^~~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~ b7 => f(b1, b2, b3, b4, b5, b6, b7) }
+ */
+ def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f: (B1, B2, B3, B4, B5, B6, B7) => C) = map { a =>
+ (a: B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) }
+ }
+
+ /** >~>(f) is equivalent to >> { case b1 ~ b2 => f(b1, b2) }
+ */
+ def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f: (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a =>
+ (a: B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
+ }
+
+ /** ^-^(f) is equivalent to ^^ { b2 => b1 => f(b1, b2) }
+ */
+ def ^-^ [B1, B2 >: A, C](f: (B1, B2) => C) = map { b2: B2 => b1: B1 => f(b1, b2) }
+
+ /** ^~>~^(f) is equivalent to ^^ { case b2 ~ b3 => b1 => f(b1, b2, b3) }
+ */
+ def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f: (B1, B2, B3) => C) = map { a =>
+ (a: B2 ~ B3) match { case b2 ~ b3 => b1: B1 => f(b1, b2, b3) }
+ }
+}
+
+
+trait Choice[-In, +Out, +A, +X] extends Rule[In, Out, A, X] {
+ def choices: List[Rule[In, Out, A, X]]
+
+ def apply(in: In) = {
+ def oneOf(list: List[Rule[In, Out, A, X]]): Result[Out, A, X] = list match {
+ case Nil => Failure
+ case first :: rest => first(in) match {
+ case Failure => oneOf(rest)
+ case result => result
+ }
+ }
+ oneOf(choices)
+ }
+
+ override def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other: => Rule[In2, Out2, A2, X2]): Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
+ val factory = Choice.this.factory
+ lazy val choices = Choice.this.choices ::: other :: Nil
+ }
+}
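
A Rule above is literally a function from input to Result, and everything else (~, |, ^^, and the rest) is layered on as combinators. A toy stand-in over a token list showing the shape of sequencing and alternation (illustrative only; the real combinators also thread the Out and X type parameters):

object MiniRuleDemo extends App {
  type In = List[String]
  type Rule[A] = In => Option[(In, A)] // Some = success plus the remaining input

  def token(t: String): Rule[String] =
    in => if (in.headOption.contains(t)) Some((in.tail, t)) else None

  def seq[A, B](ra: Rule[A], rb: Rule[B]): Rule[(A, B)] = // like ~
    in => ra(in).flatMap { case (rest, a) =>
      rb(rest).map { case (rest2, b) => (rest2, (a, b)) }
    }

  def alt[A](ra: Rule[A], rb: => Rule[A]): Rule[A] = // like |
    in => ra(in).orElse(rb(in))

  val ab = seq(token("a"), token("b"))
  println(ab(List("a", "b", "c")))                // Some((List(c),(a,b)))
  println(alt(token("x"), token("a"))(List("a"))) // Some((List(),a))
}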
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/rules/Rules.scala
index 70926208b3..bdcc81c22d 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/rules/Rules.scala
@@ -11,11 +11,10 @@
// -----------------------------------------------------------------------------
package scala.tools.scalap
-package scalax
package rules
trait Name {
- def name : String
+ def name: String
override def toString = name
}
@@ -26,13 +25,18 @@ trait Name {
* Inspired by the Scala parser combinator.
*/
trait Rules {
- implicit def rule[In, Out, A, X](f : In => Result[Out, A, X]) : Rule[In, Out, A, X] = new DefaultRule(f)
- implicit def inRule[In, Out, A, X](rule : Rule[In, Out, A, X]) : InRule[In, Out, A, X] = new InRule(rule)
- implicit def seqRule[In, A, X](rule : Rule[In, In, A, X]) : SeqRule[In, A, X] = new SeqRule(rule)
+ import scala.language.implicitConversions
+ implicit def rule[In, Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X] = new DefaultRule(f)
+ implicit def inRule[In, Out, A, X](rule: Rule[In, Out, A, X]): InRule[In, Out, A, X] = new InRule(rule)
+ implicit def seqRule[In, A, X](rule: Rule[In, In, A, X]): SeqRule[In, A, X] = new SeqRule(rule)
- def from[In] = new {
- def apply[Out, A, X](f : In => Result[Out, A, X]) = rule(f)
+ trait FromRule[In] {
+ def apply[Out, A, X](f: In => Result[Out, A, X]): Rule[In, Out, A, X]
+ }
+
+ def from[In] = new FromRule[In] {
+ def apply[Out, A, X](f: In => Result[Out, A, X]) = rule(f)
}
def state[s] = new StateRules {
@@ -40,30 +44,30 @@ trait Rules {
val factory = Rules.this
}
- def success[Out, A](out : Out, a : A) = rule { in : Any => Success(out, a) }
+ def success[Out, A](out: Out, a: A) = rule { in: Any => Success(out, a) }
- def failure = rule { in : Any => Failure }
+ def failure = rule { in: Any => Failure }
- def error[In] = rule { in : In => Error(in) }
- def error[X](err : X) = rule { in : Any => Error(err) }
+ def error[In] = rule { in: In => Error(in) }
+ def error[X](err: X) = rule { in: Any => Error(err) }
- def oneOf[In, Out, A, X](rules : Rule[In, Out, A, X] *) : Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
+ def oneOf[In, Out, A, X](rules: Rule[In, Out, A, X] *): Rule[In, Out, A, X] = new Choice[In, Out, A, X] {
val factory = Rules.this
val choices = rules.toList
}
- def ruleWithName[In, Out, A, X](_name : String, f : In => Result[Out, A, X]) : Rule[In, Out, A, X] with Name =
+ def ruleWithName[In, Out, A, X](_name: String, f: In => Result[Out, A, X]): Rule[In, Out, A, X] with Name =
new DefaultRule(f) with Name {
val name = _name
}
- class DefaultRule[In, Out, A, X](f : In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
+ class DefaultRule[In, Out, A, X](f: In => Result[Out, A, X]) extends Rule[In, Out, A, X] {
val factory = Rules.this
- def apply(in : In) = f(in)
+ def apply(in: In) = f(in)
}
/** Converts a rule into a function that throws an Exception on failure. */
- def expect[In, Out, A, Any](rule : Rule[In, Out, A, Any]) : In => A = (in) => rule(in) match {
+ def expect[In, Out, A, Any](rule: Rule[In, Out, A, Any]): In => A = (in) => rule(in) match {
case Success(_, a) => a
case Failure => throw new ScalaSigParserError("Unexpected failure")
case Error(x) => throw new ScalaSigParserError("Unexpected error: " + x)
@@ -82,30 +86,30 @@ trait StateRules {
type S
type Rule[+A, +X] = rules.Rule[S, S, A, X]
- val factory : Rules
+ val factory: Rules
import factory._
- def apply[A, X](f : S => Result[S, A, X]) = rule(f)
+ def apply[A, X](f: S => Result[S, A, X]) = rule(f)
- def unit[A](a : => A) = apply { s => Success(s, a) }
- def read[A](f : S => A) = apply { s => Success(s, f(s)) }
+ def unit[A](a: => A) = apply { s => Success(s, a) }
+ def read[A](f: S => A) = apply { s => Success(s, f(s)) }
def get = apply { s => Success(s, s) }
- def set(s : => S) = apply { oldS => Success(s, oldS) }
+ def set(s: => S) = apply { oldS => Success(s, oldS) }
- def update(f : S => S) = apply { s => Success(s, f(s)) }
+ def update(f: S => S) = apply { s => Success(s, f(s)) }
def nil = unit(Nil)
def none = unit(None)
/** Create a rule that identities if f(in) is true. */
- def cond(f : S => Boolean) = get filter f
+ def cond(f: S => Boolean) = get filter f
/** Create a rule that succeeds if all of the given rules succeed.
@param rules the rules to apply in sequence.
*/
- def allOf[A, X](rules : Seq[Rule[A, X]]) = {
- def rep(in : S, rules : List[Rule[A, X]], results : List[A]) : Result[S, List[A], X] = {
+ def allOf[A, X](rules: Seq[Rule[A, X]]) = {
+ def rep(in: S, rules: List[Rule[A, X]], results: List[A]): Result[S, List[A], X] = {
rules match {
case Nil => Success(in, results.reverse)
case rule::tl => rule(in) match {
@@ -115,19 +119,19 @@ trait StateRules {
}
}
}
- in : S => rep(in, rules.toList, Nil)
+ in: S => rep(in, rules.toList, Nil)
}
/** Create a rule that succeeds with a list of all the provided rules that succeed.
@param rules the rules to apply in sequence.
*/
- def anyOf[A, X](rules : Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) }
+ def anyOf[A, X](rules: Seq[Rule[A, X]]) = allOf(rules.map(_ ?)) ^^ { opts => opts.flatMap(x => x) }
/** Repeatedly apply a rule from initial value until finished condition is met. */
- def repeatUntil[T, X](rule : Rule[T => T, X])(finished : T => Boolean)(initial : T) = apply {
+ def repeatUntil[T, X](rule: Rule[T => T, X])(finished: T => Boolean)(initial: T) = apply {
// more compact using HoF but written this way so it's tail-recursive
- def rep(in : S, t : T) : Result[S, T, X] = {
+ def rep(in: S, t: T): Result[S, T, X] = {
if (finished(t)) Success(in, t)
else rule(in) match {
case Success(out, f) => rep(out, f(t)) // SI-5189 f.asInstanceOf[T => T]
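
The Rules change above replaces def from[In] = new { ... }, an anonymous structural type whose members are called reflectively, with the named FromRule trait, which keeps the same call shape without reflection. A small self-contained illustration of that refactoring (names here are hypothetical):

object StructuralToTrait extends App {
  import scala.language.reflectiveCalls

  // before: an anonymous structural type; calls to apply go through reflection
  def fromStructural = new { def apply(f: Int => String): Int => String = f }

  // after: a named trait gives the same call shape without reflective access
  trait From[In] { def apply(f: In => String): In => String }
  def fromTrait[In]: From[In] = new From[In] { def apply(f: In => String) = f }

  println(fromStructural.apply((i: Int) => i.toString)(7)) // 7
  println(fromTrait[Int].apply((i: Int) => i.toString)(7)) // 7
}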
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/rules/SeqRule.scala
index 51a789e041..e96a38b6be 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/rules/SeqRule.scala
@@ -10,81 +10,79 @@
//
// -----------------------------------------------------------------------------
-package scala.tools.scalap
-package scalax
-package rules
+package scala.tools.scalap.rules
/**
* A workaround for the difficulties of dealing with
* a contravariant 'In' parameter type...
*/
-class InRule[In, +Out, +A, +X](rule : Rule[In, Out, A, X]) {
+class InRule[In, +Out, +A, +X](rule: Rule[In, Out, A, X]) {
- def mapRule[Out2, B, Y](f : Result[Out, A, X] => In => Result[Out2, B, Y]) : Rule[In, Out2, B, Y] = rule.factory.rule {
- in : In => f(rule(in))(in)
+ def mapRule[Out2, B, Y](f: Result[Out, A, X] => In => Result[Out2, B, Y]): Rule[In, Out2, B, Y] = rule.factory.rule {
+ in: In => f(rule(in))(in)
}
/** Creates a rule that succeeds only if the original rule would fail on the given context. */
def unary_! : Rule[In, In, Unit, Nothing] = mapRule {
- case Success(_, _) => in : In => Failure
- case _ => in : In => Success(in, ())
+ case Success(_, _) => in: In => Failure
+ case _ => in: In => Success(in, ())
}
/** Creates a rule that succeeds if the original rule succeeds, but returns the original input. */
def & : Rule[In, In, A, X] = mapRule {
- case Success(_, a) => in : In => Success(in, a)
- case Failure => in : In => Failure
- case Error(x) => in : In => Error(x)
+ case Success(_, a) => in: In => Success(in, a)
+ case Failure => in: In => Failure
+ case Error(x) => in: In => Error(x)
}
}
-class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
+class SeqRule[S, +A, +X](rule: Rule[S, S, A, X]) {
import rule.factory._
def ? = rule mapRule {
- case Success(out, a) => in : S => Success(out, Some(a))
- case Failure => in : S => Success(in, None)
- case Error(x) => in : S => Error(x)
+ case Success(out, a) => in: S => Success(out, Some(a))
+ case Failure => in: S => Success(in, None)
+ case Error(x) => in: S => Error(x)
}
/** Creates a rule that always succeeds with a Boolean value.
* Value is 'true' if this rule succeeds, 'false' otherwise */
- def -? = ? map { _ isDefined }
+ def -? = ? map { _.isDefined }
def * = from[S] {
// tail-recursive function with reverse list accumulator
- def rep(in : S, acc : List[A]) : Result[S, List[A], X] = rule(in) match {
+ def rep(in: S, acc: List[A]): Result[S, List[A], X] = rule(in) match {
case Success(out, a) => rep(out, a :: acc)
case Failure => Success(in, acc.reverse)
- case err : Error[_] => err
+ case err: Error[_] => err
}
in => rep(in, Nil)
}
def + = rule ~++ *
- def ~>?[B >: A, X2 >: X](f : => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
+ def ~>?[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
- def ~>*[B >: A, X2 >: X](f : => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f*) yield fs.foldLeft[B](a) { (b, f) => f(b) }
+ def ~>*[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) = for (a <- rule; fs <- f*) yield fs.foldLeft[B](a) { (b, f) => f(b) }
- def ~*~[B >: A, X2 >: X](join : => Rule[S, S, (B, B) => B, X2]) = {
- this ~>* (for (f <- join; a <- rule) yield f(_ : B, a))
+ def ~*~[B >: A, X2 >: X](join: => Rule[S, S, (B, B) => B, X2]) = {
+ this ~>* (for (f <- join; a <- rule) yield f(_: B, a))
}
/** Repeats this rule one or more times with a separator (which is discarded) */
- def +/[X2 >: X](sep : => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
+ def +/[X2 >: X](sep: => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
/** Repeats this rule zero or more times with a separator (which is discarded) */
- def */[X2 >: X](sep : => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
+ def */[X2 >: X](sep: => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
- def *~-[Out, X2 >: X](end : => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
- def +~-[Out, X2 >: X](end : => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
+ def *~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
+ def +~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
/** Repeats this rule num times */
- def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
+ def times(num: Int): Rule[S, S, Seq[A], X] = from[S] {
val result = new scala.collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
- def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
+ def rep(i: Int, in: S): Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
else rule(in) match {
case Success(out, a) => {
@@ -92,10 +90,9 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
rep(i + 1, out)
}
case Failure => Failure
- case err : Error[_] => err
+ case err: Error[_] => err
}
}
in => rep(0, in)
}
}
-
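
SeqRule's * above applies the underlying rule repeatedly, accumulating results in reverse so the loop stays tail-recursive and reversing once at the end. The same shape on a toy Option-based rule (illustrative only):

object ZeroOrMoreSketch extends App {
  type Rule[A] = List[Char] => Option[(List[Char], A)]

  def digit: Rule[Char] =
    in => in.headOption.filter(_.isDigit).map(c => (in.tail, c))

  def zeroOrMore[A](rule: Rule[A]): Rule[List[A]] = in => {
    @annotation.tailrec
    def loop(rest: List[Char], acc: List[A]): (List[Char], List[A]) =
      rule(rest) match {
        case Some((next, a)) => loop(next, a :: acc)
        case None            => (rest, acc.reverse)
      }
    Some(loop(in, Nil))
  }

  println(zeroOrMore(digit)("12a".toList)) // Some((List(a),List(1, 2)))
}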
diff --git a/src/scalap/scala/tools/scalap/rules/package.scala b/src/scalap/scala/tools/scalap/rules/package.scala
new file mode 100644
index 0000000000..dcd5f7ac00
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/rules/package.scala
@@ -0,0 +1,6 @@
+package scala.tools.scalap
+
+package object rules {
+ // make some language features in this package compile without warning
+ implicit def postfixOps = scala.language.postfixOps
+}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalasig/ClassFileParser.scala
index 1a4b3456b8..9bd8402ccc 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/ClassFileParser.scala
@@ -1,18 +1,11 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+package scala.tools.scalap.scalasig
-
-import java.io.IOException
-
-import scala._
-import scala.Predef._
+import scala.tools.scalap.rules.{ Success, Failure, ~, RulesWithState }
object ByteCode {
- def apply(bytes : Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
+ def apply(bytes: Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
- def forClass(clazz : Class[_]) = {
+ def forClass(clazz: Class[_]) = {
val name = clazz.getName
val subPath = name.substring(name.lastIndexOf('.') + 1) + ".class"
val in = clazz.getResourceAsStream(subPath)
@@ -22,7 +15,7 @@ object ByteCode {
val bytes = new Array[Byte](rest)
while (rest > 0) {
val res = in.read(bytes, bytes.length - rest, rest)
- if (res == -1) throw new IOException("read error")
+ if (res == -1) throw new java.io.IOException("read error")
rest -= res
}
ByteCode(bytes)
@@ -33,19 +26,18 @@ object ByteCode {
}
}
-/** Represents a chunk of raw bytecode. Used as input for the parsers
- */
-class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
+/** Represents a chunk of raw bytecode. Used as input for the parsers. */
+class ByteCode(val bytes: Array[Byte], val pos: Int, val length: Int) {
assert(pos >= 0 && length >= 0 && pos + length <= bytes.length)
def nextByte = if (length == 0) Failure else Success(drop(1), bytes(pos))
- def next(n : Int) = if (length >= n) Success(drop(n), take(n)) else Failure
+ def next(n: Int) = if (length >= n) Success(drop(n), take(n)) else Failure
- def take(n : Int) = new ByteCode(bytes, pos, n)
- def drop(n : Int) = new ByteCode(bytes, pos + n, length - n)
+ def take(n: Int) = new ByteCode(bytes, pos, n)
+ def drop(n: Int) = new ByteCode(bytes, pos + n, length - n)
- def fold[X](x : X)(f : (X, Byte) => X) : X = {
+ def fold[X](x: X)(f: (X, Byte) => X): X = {
var result = x
var i = pos
while (i < pos + length) {
@@ -72,7 +64,7 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
StringBytesPair(str, chunk)
}
- def byte(i : Int) = bytes(pos) & 0xFF
+ def byte(i: Int) = bytes(pos) & 0xFF
}
/**
@@ -86,22 +78,22 @@ trait ByteCodeReader extends RulesWithState {
type S = ByteCode
type Parser[A] = Rule[A, String]
- val byte = apply(_ nextByte)
+ val byte = apply(_.nextByte)
val u1 = byte ^^ (_ & 0xFF)
- val u2 = bytes(2) ^^ (_ toInt)
- val u4 = bytes(4) ^^ (_ toInt) // should map to Long??
+ val u2 = bytes(2) ^^ (_.toInt)
+ val u4 = bytes(4) ^^ (_.toInt) // should map to Long??
- def bytes(n : Int) = apply(_ next n)
+ def bytes(n: Int) = apply(_ next n)
}
object ClassFileParser extends ByteCodeReader {
- def parse(byteCode : ByteCode) = expect(classFile)(byteCode)
+ def parse(byteCode: ByteCode) = expect(classFile)(byteCode)
def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode)
val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file")
val version = u2 ~ u2 ^^ { case minor ~ major => (major, minor) }
- val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_ isFull)
+ val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_.isFull)
// NOTE currently most constants just evaluate to a string description
// TODO evaluate to useful values
@@ -169,19 +161,19 @@ object ClassFileParser extends ByteCodeReader {
val classFile = header ~ fields ~ methods ~ attributes ~- !u1 ^~~~^ ClassFile
// TODO create a useful object, not just a string
- def memberRef(description : String) = u2 ~ u2 ^^ add1 {
+ def memberRef(description: String) = u2 ~ u2 ^^ add1 {
case classRef ~ nameAndTypeRef => pool => description + ": " + pool(classRef) + ", " + pool(nameAndTypeRef)
}
- def add1[T](f : T => ConstantPool => Any)(raw : T)(pool : ConstantPool) = pool add f(raw)
- def add2[T](f : T => ConstantPool => Any)(raw : T)(pool : ConstantPool) = pool add f(raw) add { pool => "<empty>" }
+ def add1[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw)
+ def add2[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw) add { pool => "<empty>" }
}
case class ClassFile(
- header : ClassFileHeader,
- fields : Seq[Field],
- methods : Seq[Method],
- attributes : Seq[Attribute]) {
+ header: ClassFileHeader,
+ fields: Seq[Field],
+ methods: Seq[Method],
+ attributes: Seq[Attribute]) {
def majorVersion = header.major
def minorVersion = header.minor
@@ -190,14 +182,14 @@ case class ClassFile(
def superClass = constant(header.superClassIndex)
def interfaces = header.interfaces.map(constant)
- def constant(index : Int) = header.constants(index) match {
+ def constant(index: Int) = header.constants(index) match {
case StringBytesPair(str, _) => str
case z => z
}
def constantWrapped(index: Int) = header.constants(index)
- def attribute(name : String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
+ def attribute(name: String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
val RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations"
def annotations = (attributes.find(attr => constant(attr.nameIndex) == RUNTIME_VISIBLE_ANNOTATIONS)
@@ -206,23 +198,23 @@ case class ClassFile(
def annotation(name: String) = annotations.flatMap(seq => seq.find(annot => constant(annot.typeIndex) == name))
}
-case class Attribute(nameIndex : Int, byteCode : ByteCode)
-case class Field(flags : Int, nameIndex : Int, descriptorIndex : Int, attributes : Seq[Attribute])
-case class Method(flags : Int, nameIndex : Int, descriptorIndex : Int, attributes : Seq[Attribute])
+case class Attribute(nameIndex: Int, byteCode: ByteCode)
+case class Field(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
+case class Method(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
case class ClassFileHeader(
- minor : Int,
- major : Int,
- constants : ConstantPool,
- flags : Int,
- classIndex : Int,
- superClassIndex : Int,
- interfaces : Seq[Int]) {
-
- def constant(index : Int) = constants(index)
+ minor: Int,
+ major: Int,
+ constants: ConstantPool,
+ flags: Int,
+ classIndex: Int,
+ superClassIndex: Int,
+ interfaces: Seq[Int]) {
+
+ def constant(index: Int) = constants(index)
}
-case class ConstantPool(len : Int) {
+case class ConstantPool(len: Int) {
val size = len - 1
private val buffer = new scala.collection.mutable.ArrayBuffer[ConstantPool => Any]
@@ -230,7 +222,7 @@ case class ConstantPool(len : Int) {
def isFull = buffer.length >= size
- def apply(index : Int) = {
+ def apply(index: Int) = {
// Note constant pool indices are 1-based
val i = index - 1
values(i) getOrElse {
@@ -241,9 +233,8 @@ case class ConstantPool(len : Int) {
}
}
- def add(f : ConstantPool => Any) = {
+ def add(f: ConstantPool => Any) = {
buffer += f
this
}
}
-
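
The class-file readers above (u1, u2, u4) consume big-endian unsigned quantities from the raw bytes, and the magic-number rule checks u4 against 0xCAFEBABE. A self-contained sketch of that byte arithmetic under the standard big-endian class-file layout (not the scalap rule machinery):

object BigEndianSketch extends App {
  def u1(bytes: Array[Byte], at: Int): Int = bytes(at) & 0xFF
  def u2(bytes: Array[Byte], at: Int): Int = (u1(bytes, at) << 8) | u1(bytes, at + 1)
  def u4(bytes: Array[Byte], at: Int): Long =
    (u2(bytes, at).toLong << 16) | u2(bytes, at + 2)

  val magic = Array(0xCA, 0xFE, 0xBA, 0xBE).map(_.toByte)
  println(u4(magic, 0) == 0xCAFEBABEL) // true
}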
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala b/src/scalap/scala/tools/scalap/scalasig/Flags.scala
index 218639e4a2..b9925150d2 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/Flags.scala
@@ -1,10 +1,7 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+package scala.tools.scalap.scalasig
trait Flags {
- def hasFlag(flag : Long) : Boolean
+ def hasFlag(flag: Long): Boolean
def isImplicit = hasFlag(0x00000001)
def isFinal = hasFlag(0x00000002)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalasig/ScalaSig.scala
index fd70e0de35..311e4acd6f 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/ScalaSig.scala
@@ -5,24 +5,25 @@
**
*/
+package scala.tools.scalap.scalasig
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+import scala.language.implicitConversions
-import ClassFileParser.{ ConstValueIndex, Annotation }
import scala.reflect.internal.pickling.ByteCodecs
+import scala.tools.scalap.Main
+import scala.tools.scalap.rules._
+
+import ClassFileParser.{ ConstValueIndex, Annotation }
+
object ScalaSigParser {
- import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
def scalaSigFromAnnotation(classFile: ClassFile): Option[ScalaSig] = {
import classFile._
- classFile.annotation(SCALA_SIG_ANNOTATION) map {
+ classFile.annotation(Main.SCALA_SIG_ANNOTATION) map {
case Annotation(_, elements) =>
- val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == BYTES_VALUE).get
+ val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == Main.BYTES_VALUE).get
val bytes = ((bytesElem.elementValue match {case ConstValueIndex(index) => constantWrapped(index)})
.asInstanceOf[StringBytesPair].bytes)
val length = ByteCodecs.decode(bytes)
@@ -31,8 +32,8 @@ object ScalaSigParser {
}
}
- def scalaSigFromAttribute(classFile: ClassFile) : Option[ScalaSig] =
- classFile.attribute(SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse)
+ def scalaSigFromAttribute(classFile: ClassFile): Option[ScalaSig] =
+ classFile.attribute(Main.SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse)
def parse(classFile: ClassFile): Option[ScalaSig] = {
val scalaSig = scalaSigFromAttribute(classFile)
@@ -45,7 +46,7 @@ object ScalaSigParser {
}
}
- def parse(clazz : Class[_]): Option[ScalaSig] = {
+ def parse(clazz: Class[_]): Option[ScalaSig] = {
val byteCode = ByteCode.forClass(clazz)
val classFile = ClassFileParser.parse(byteCode)
@@ -54,10 +55,10 @@ object ScalaSigParser {
}
object ScalaSigAttributeParsers extends ByteCodeReader {
- def parse(byteCode : ByteCode) = expect(scalaSig)(byteCode)
+ def parse(byteCode: ByteCode) = expect(scalaSig)(byteCode)
val nat = apply {
- def natN(in : ByteCode, x : Int) : Result[ByteCode, Int, Nothing] = in.nextByte match {
+ def natN(in: ByteCode, x: Int): Result[ByteCode, Int, Nothing] = in.nextByte match {
case Success(out, b) => {
val y = (x << 7) + (b & 0x7f)
if ((b & 0x80) == 0) Success(out, y) else natN(out, y)
@@ -76,33 +77,33 @@ object ScalaSigAttributeParsers extends ByteCodeReader {
val longValue = read(_ toLong)
}
-case class ScalaSig(majorVersion : Int, minorVersion : Int, table : Seq[Int ~ ByteCode]) extends DefaultMemoisable {
+case class ScalaSig(majorVersion: Int, minorVersion: Int, table: Seq[Int ~ ByteCode]) extends DefaultMemoisable {
- case class Entry(index : Int, entryType : Int, byteCode : ByteCode) extends DefaultMemoisable {
+ case class Entry(index: Int, entryType: Int, byteCode: ByteCode) extends DefaultMemoisable {
def scalaSig = ScalaSig.this
- def setByteCode(byteCode : ByteCode) = Entry(index, entryType, byteCode)
+ def setByteCode(byteCode: ByteCode) = Entry(index, entryType, byteCode)
}
- def hasEntry(index : Int) = table isDefinedAt index
+ def hasEntry(index: Int) = table isDefinedAt index
- def getEntry(index : Int) = {
+ def getEntry(index: Int) = {
val entryType ~ byteCode = table(index)
Entry(index, entryType, byteCode)
}
- def parseEntry(index : Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index))
+ def parseEntry(index: Int) = applyRule(ScalaSigParsers.parseEntry(ScalaSigEntryParsers.entry)(index))
- implicit def applyRule[A](parser : ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this)
+ implicit def applyRule[A](parser: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(parser)(this)
override def toString = "ScalaSig version " + majorVersion + "." + minorVersion + {
for (i <- 0 until table.size) yield i + ":\t" + parseEntry(i) // + "\n\t" + getEntry(i)
}.mkString("\n", "\n", "")
- lazy val symbols : Seq[Symbol] = ScalaSigParsers.symbols
+ lazy val symbols: Seq[Symbol] = ScalaSigParsers.symbols
- lazy val topLevelClasses : List[ClassSymbol] = ScalaSigParsers.topLevelClasses
- lazy val topLevelObjects : List[ObjectSymbol] = ScalaSigParsers.topLevelObjects
+ lazy val topLevelClasses: List[ClassSymbol] = ScalaSigParsers.topLevelClasses
+ lazy val topLevelObjects: List[ObjectSymbol] = ScalaSigParsers.topLevelObjects
}
object ScalaSigParsers extends RulesWithState with MemoisableRules {
@@ -112,14 +113,14 @@ object ScalaSigParsers extends RulesWithState with MemoisableRules {
val symTab = read(_.table)
val size = symTab ^^ (_.size)
- def entry(index : Int) = memo(("entry", index)) {
+ def entry(index: Int) = memo(("entry", index)) {
cond(_ hasEntry index) -~ read(_ getEntry index) >-> { entry => Success(entry, entry.entryType) }
}
- def parseEntry[A](parser : ScalaSigEntryParsers.EntryParser[A])(index : Int) : Parser[A] =
+ def parseEntry[A](parser: ScalaSigEntryParsers.EntryParser[A])(index: Int): Parser[A] =
entry(index) -~ parser >> { a => entry => Success(entry.scalaSig, a) }
- def allEntries[A](f : ScalaSigEntryParsers.EntryParser[A]) = size >> { n => anyOf((0 until n) map parseEntry(f)) }
+ def allEntries[A](f: ScalaSigEntryParsers.EntryParser[A]) = size >> { n => anyOf((0 until n) map parseEntry(f)) }
lazy val entries = allEntries(ScalaSigEntryParsers.entry) as "entries"
lazy val symbols = allEntries(ScalaSigEntryParsers.symbol) as "symbols"
@@ -136,20 +137,20 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
type S = ScalaSig#Entry
type EntryParser[A] = Rule[A, String]
- implicit def byteCodeEntryParser[A](rule : ScalaSigAttributeParsers.Parser[A]) : EntryParser[A] = apply { entry =>
+ implicit def byteCodeEntryParser[A](rule: ScalaSigAttributeParsers.Parser[A]): EntryParser[A] = apply { entry =>
rule(entry.byteCode) mapOut (entry setByteCode _)
}
- def toEntry[A](index : Int) = apply { sigEntry => ScalaSigParsers.entry(index)(sigEntry.scalaSig) }
+ def toEntry[A](index: Int) = apply { sigEntry => ScalaSigParsers.entry(index)(sigEntry.scalaSig) }
- def parseEntry[A](parser : EntryParser[A])(index : Int) = (toEntry(index) -~ parser)
+ def parseEntry[A](parser: EntryParser[A])(index: Int) = (toEntry(index) -~ parser)
- implicit def entryType(code : Int) = key filter (_ == code)
+ implicit def entryType(code: Int) = key filter (_ == code)
val index = read(_.index)
val key = read(_.entryType)
- lazy val entry : EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get
+ lazy val entry: EntryParser[Any] = symbol | typeEntry | literal | name | attributeInfo | annotInfo | children | get
val ref = byteCodeEntryParser(nat)
@@ -158,7 +159,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val name = termName | typeName as "name"
- def refTo[A](rule : EntryParser[A]) : EntryParser[A] = ref >>& parseEntry(rule)
+ def refTo[A](rule: EntryParser[A]): EntryParser[A] = ref >>& parseEntry(rule)
lazy val nameRef = refTo(name)
lazy val symbolRef = refTo(symbol)
@@ -169,7 +170,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
def symHeader(key: Int): EntryParser[Any] = (key -~ none | (key + 64) -~ nat)
- def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo
+ def symbolEntry(key: Int) = symHeader(key) -~ symbolInfo
val noSymbol = 3 -^ NoSymbol
val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
@@ -180,7 +181,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val extRef = 9 -~ nameRef ~ (symbolRef?) ~ get ^~~^ ExternalSymbol as "extRef"
val extModClassRef = 10 -~ nameRef ~ (symbolRef?) ~ get ^~~^ ExternalSymbol as "extModClassRef"
- lazy val symbol : EntryParser[Symbol] = oneOf(
+ lazy val symbol: EntryParser[Symbol] = oneOf(
noSymbol,
typeSymbol,
aliasSymbol,
@@ -195,7 +196,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val typeLevel = nat
val typeIndex = nat
- lazy val typeEntry : EntryParser[Type] = oneOf(
+ lazy val typeEntry: EntryParser[Type] = oneOf(
11 -^ NoType,
12 -^ NoPrefixType,
13 -~ symbolRef ^^ ThisType,
@@ -236,17 +237,17 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
lazy val topLevelClass = classSymbol filter isTopLevelClass
lazy val topLevelObject = objectSymbol filter isTopLevel
- def isTopLevel(symbol : Symbol) = symbol.parent match {
- case Some(ext : ExternalSymbol) => true
+ def isTopLevel(symbol: Symbol) = symbol.parent match {
+ case Some(ext: ExternalSymbol) => true
case _ => false
}
- def isTopLevelClass (symbol : Symbol) = !symbol.isModule && isTopLevel(symbol)
+ def isTopLevelClass (symbol: Symbol) = !symbol.isModule && isTopLevel(symbol)
}
- case class AttributeInfo(symbol : Symbol, typeRef : Type, value : Option[Any], values : Seq[String ~ Any]) // sym_Ref info_Ref {constant_Ref} {nameRef constantRef}
- case class Children(symbolRefs : Seq[Int]) //sym_Ref {sym_Ref}
+case class AttributeInfo(symbol: Symbol, typeRef: Type, value: Option[Any], values: Seq[String ~ Any]) // sym_Ref info_Ref {constant_Ref} {nameRef constantRef}
+case class Children(symbolRefs: Seq[Int]) //sym_Ref {sym_Ref}
- case class AnnotInfo(refs : Seq[Int]) // attarg_Ref {constant_Ref attarg_Ref}
+case class AnnotInfo(refs: Seq[Int]) // attarg_Ref {constant_Ref attarg_Ref}
/***************************************************
* | 49 TREE len_Nat 1 EMPTYtree
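
The `nat` rule changed above decodes the pickle format's variable-length naturals: seven payload bits per byte, with the high bit set on every byte except the last. A standalone decoder over an Array[Byte], independent of the rules machinery and written only as an illustration of that encoding:

    object NatDemo {
      // Decode one "Nat" starting at `from`; returns (value, index after the last byte read).
      def readNat(bytes: Array[Byte], from: Int): (Int, Int) = {
        @annotation.tailrec
        def loop(i: Int, acc: Int): (Int, Int) = {
          val b    = bytes(i) & 0xff
          val acc2 = (acc << 7) + (b & 0x7f)        // accumulate the low 7 bits
          if ((b & 0x80) == 0) (acc2, i + 1)        // continuation bit clear: done
          else loop(i + 1, acc2)
        }
        loop(from, 0)
      }

      def main(args: Array[String]): Unit = {
        // 0x82 0x05 encodes (2 << 7) + 5 = 261
        println(readNat(Array(0x82.toByte, 0x05.toByte), 0)) // (261,2)
      }
    }
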
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalasig/ScalaSigPrinter.scala
index e5a4ff649e..5929e0f59f 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/ScalaSigPrinter.scala
@@ -5,17 +5,14 @@
**
*/
+package scala.tools.scalap.scalasig
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+import scala.language.implicitConversions
import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
-import scala.tools.scalap.scalax.util.StringUtil
-import scala.reflect.NameTransformer
-import java.lang.String
+
+import scala.tools.scalap.rules.~
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
import stream._
@@ -136,7 +133,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
print(" {")
//Print class selftype
c.selfType match {
- case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
+ case Some(t: Type) => print("\n"); print(" this: " + toString(t) + " =>")
case None =>
}
print("\n")
@@ -186,22 +183,12 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
printWithIndent(level, "}\n")
}
- def genParamNames(t: {def paramTypes: Seq[Type]}): List[String] = t.paramTypes.toList.map(x => {
- var str = toString(x)
- val j = str.indexOf("[")
- if (j > 0) str = str.substring(0, j)
- str = StringUtil.trimStart(str, "=> ")
- val i = str.lastIndexOf(".")
- val res = if (i > 0) str.substring(i + 1) else str
- if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
- })
-
def printMethodType(t: Type, printResult: Boolean)(cont: => Unit): Unit = {
- def _pmt(mt: Type {def resultType: Type; def paramSymbols: Seq[Symbol]}) = {
+ def _pmt(mt: MethodType) = {
val paramEntries = mt.paramSymbols.map({
- case ms: MethodSymbol => ms.name + " : " + toString(ms.infoType)(TypeFlags(true))
+ case ms: MethodSymbol => ms.name + ": " + toString(ms.infoType)(TypeFlags(true))
case _ => "^___^"
})
val implicitWord = mt.paramSymbols.headOption match {
@@ -216,21 +203,21 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
mt.resultType match {
case mt: MethodType => printMethodType(mt, printResult)({})
case x => if (printResult) {
- print(" : ");
+ print(": ");
printType(x)
}
}
}
t match {
- case NullaryMethodType(resType) => if (printResult) { print(" : "); printType(resType) }
+ case NullaryMethodType(resType) => if (printResult) { print(": "); printType(resType) }
case mt@MethodType(resType, paramSymbols) => _pmt(mt)
case pt@PolyType(mt, typeParams) => {
print(typeParamString(typeParams))
printMethodType(mt, printResult)({})
}
//todo consider another method types
- case x => print(" : "); printType(x)
+ case x => print(": "); printType(x)
}
// Print rest of the symbol output
@@ -356,8 +343,8 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
}
case "scala.<byname>" => "=> " + toString(typeArgs.head)
case _ => {
- val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
- StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
+ val path = cutSubstring(symbol.path)(".package") //remove package object reference
+ trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
}
})
case TypeBoundsType(lower, upper) => {
@@ -402,7 +389,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
def typeArgString(typeArgs: Seq[Type]): String =
if (typeArgs.isEmpty) ""
- else typeArgs.map(toString).map(StringUtil.trimStart(_, "=> ")).mkString("[", ", ", "]")
+ else typeArgs.map(toString).map(trimStart(_, "=> ")).mkString("[", ", ", "]")
def typeParamString(params: Seq[Symbol]): String =
if (params.isEmpty) ""
@@ -423,7 +410,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
if (i > 0) name.substring(i + 2) else name
}
- def processName(name: String) = {
+ private def processName(name: String) = {
val stripped = stripPrivatePrefix(name)
val m = pattern.matcher(stripped)
var temp = stripped
@@ -433,7 +420,15 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
temp = temp.replaceAll(re, _syms(re))
}
val result = temp.replaceAll(placeholderPattern, "_")
- NameTransformer.decode(result)
+ scala.reflect.NameTransformer.decode(result)
}
+ private def trimStart(s: String, prefix: String) =
+ if (s != null && s.startsWith(prefix)) s.substring(prefix.length) else s
+
+ private def decapitalize(s: String) =
+ java.beans.Introspector.decapitalize(s)
+
+ private def cutSubstring(dom: String)(s: String) =
+ if (dom != null && s != null) dom.replace(s, "") else dom
}
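
The three private helpers added above replace the deleted StringUtil object. A small illustrative check of their behaviour on the kinds of strings the printer feeds them (not part of scalap, just a demonstration of the same definitions):

    object StringHelpersDemo {
      def trimStart(s: String, prefix: String) =
        if (s != null && s.startsWith(prefix)) s.substring(prefix.length) else s

      def cutSubstring(dom: String)(s: String) =
        if (dom != null && s != null) dom.replace(s, "") else dom

      def main(args: Array[String]): Unit = {
        println(trimStart("=> Int", "=> "))                               // Int
        println(cutSubstring("scala.collection.package.Seq")(".package")) // scala.collection.Seq
        println(java.beans.Introspector.decapitalize("Foo"))              // foo
      }
    }
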
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala b/src/scalap/scala/tools/scalap/scalasig/SourceFileAttributeParser.scala
index fc5a75c046..88d3d3b8b0 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala
+++ b/src/scalap/scala/tools/scalap/scalasig/SourceFileAttributeParser.scala
@@ -1,7 +1,4 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
+package scala.tools.scalap.scalasig
/**
* @author ilyas
@@ -16,13 +13,12 @@ object SourceFileAttributeParser extends ByteCodeReader {
/**
*
* SourceFile_attribute {
- u2 attribute_name_index;
- u4 attribute_length;
- u2 sourcefile_index;
+ u2 attribute_name_index;
+ u4 attribute_length;
+ u2 sourcefile_index;
}
*
* Contains only file index in ConstantPool, first two fields are already treated
* by {@link scalax.rules.scalasig.ClassFile.attribute#attribute}
*/
case class SourceFileInfo(sourceFileIndex: Int)
-
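
The comment in this file gives the class-file layout of SourceFile_attribute: a u2 name index, a u4 length, and a u2 constant-pool index. As the comment notes, scalap consumes the first two fields before the attribute parser runs; the sketch below reads all three with java.io.DataInputStream purely to illustrate the record shape, and is not the ByteCodeReader scalap uses:

    import java.io.{ByteArrayInputStream, DataInputStream}

    object SourceFileAttrDemo {
      final case class SourceFileInfo(sourceFileIndex: Int)

      // u2 attribute_name_index, u4 attribute_length, u2 sourcefile_index, big-endian.
      def read(in: DataInputStream): (Int, Long, SourceFileInfo) = {
        val nameIndex = in.readUnsignedShort()
        val length    = in.readInt() & 0xffffffffL
        val info      = SourceFileInfo(in.readUnsignedShort())
        (nameIndex, length, info)
      }

      def main(args: Array[String]): Unit = {
        val bytes = Array[Byte](0, 7, 0, 0, 0, 2, 0, 12) // name #7, length 2, sourcefile #12
        println(read(new DataInputStream(new ByteArrayInputStream(bytes)))) // (7,2,SourceFileInfo(12))
      }
    }
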
diff --git a/src/scalap/scala/tools/scalap/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalasig/Symbol.scala
new file mode 100644
index 0000000000..0656938150
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/scalasig/Symbol.scala
@@ -0,0 +1,70 @@
+package scala.tools.scalap.scalasig
+
+import ScalaSigEntryParsers._
+
+trait Symbol extends Flags {
+ def name: String
+ def parent: Option[Symbol]
+ def children: Seq[Symbol]
+
+ def path: String = parent.map(_.path + ".").getOrElse("") + name
+}
+
+case object NoSymbol extends Symbol {
+ def name = "<no symbol>"
+ def parent = None
+ def hasFlag(flag: Long) = false
+ def children = Nil
+}
+
+abstract class ScalaSigSymbol extends Symbol {
+ def applyRule[A](rule: EntryParser[A]): A = expect(rule)(entry)
+ def applyScalaSigRule[A](rule: ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(rule)(entry.scalaSig)
+
+ def entry: ScalaSig#Entry
+ def index = entry.index
+
+ lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this))
+ lazy val attributes: Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this)
+}
+
+case class ExternalSymbol(name: String, parent: Option[Symbol], entry: ScalaSig#Entry) extends ScalaSigSymbol {
+ override def toString = path
+ def hasFlag(flag: Long) = false
+}
+
+case class SymbolInfo(name: String, owner: Symbol, flags: Int, privateWithin: Option[AnyRef], info: Int, entry: ScalaSig#Entry) {
+ def symbolString(any: AnyRef) = any match {
+ case sym: SymbolInfoSymbol => sym.index.toString
+ case other => other.toString
+ }
+
+ override def toString = name + ", owner=" + symbolString(owner) + ", flags=" + flags.toHexString + ", info=" + info + (privateWithin match {
+ case Some(any) => ", privateWithin=" + symbolString(any)
+ case None => " "
+ })
+}
+
+abstract class SymbolInfoSymbol extends ScalaSigSymbol {
+ def symbolInfo: SymbolInfo
+
+ def entry = symbolInfo.entry
+ def name = symbolInfo.name
+ def parent = Some(symbolInfo.owner)
+ def hasFlag(flag: Long) = (symbolInfo.flags & flag) != 0L
+
+ lazy val infoType = applyRule(parseEntry(typeEntry)(symbolInfo.info))
+}
+
+case class TypeSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol{
+ override def path = name
+}
+
+case class AliasSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol{
+ override def path = name
+}
+case class ClassSymbol(symbolInfo: SymbolInfo, thisTypeRef: Option[Int]) extends SymbolInfoSymbol {
+ lazy val selfType = thisTypeRef.map{(x: Int) => applyRule(parseEntry(typeEntry)(x))}
+}
+case class ObjectSymbol(symbolInfo: SymbolInfo) extends SymbolInfoSymbol
+case class MethodSymbol(symbolInfo: SymbolInfo, aliasRef: Option[Int]) extends SymbolInfoSymbol
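
`path` in the new Symbol trait is built by following `parent` links and joining names with dots. A toy tree shows the recursion; the names are made up for the example and carry no pickle data:

    object SymbolPathDemo {
      // Minimal stand-in for the trait's name/parent/path trio.
      final case class Sym(name: String, parent: Option[Sym]) {
        def path: String = parent.map(_.path + ".").getOrElse("") + name
      }

      def main(args: Array[String]): Unit = {
        val root  = Sym("scala", None)
        val pkg   = Sym("collection", Some(root))
        val clazz = Sym("Seq", Some(pkg))
        println(clazz.path) // scala.collection.Seq
      }
    }
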
diff --git a/src/scalap/scala/tools/scalap/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalasig/Type.scala
new file mode 100644
index 0000000000..97dc28d223
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/scalasig/Type.scala
@@ -0,0 +1,22 @@
+package scala.tools.scalap.scalasig
+
+abstract class Type
+
+case object NoType extends Type
+case object NoPrefixType extends Type
+
+case class ThisType(symbol: Symbol) extends Type
+case class SingleType(typeRef: Type, symbol: Symbol) extends Type
+case class ConstantType(constant: Any) extends Type
+case class TypeRefType(prefix: Type, symbol: Symbol, typeArgs: Seq[Type]) extends Type
+case class TypeBoundsType(lower: Type, upper: Type) extends Type
+case class RefinedType(classSym: Symbol, typeRefs: List[Type]) extends Type
+case class ClassInfoType(symbol: Symbol, typeRefs: Seq[Type]) extends Type
+case class ClassInfoTypeWithCons(symbol: Symbol, typeRefs: Seq[Type], cons: String) extends Type
+case class MethodType(resultType: Type, paramSymbols: Seq[Symbol]) extends Type
+case class NullaryMethodType(resultType: Type) extends Type
+case class PolyType(typeRef: Type, symbols: Seq[TypeSymbol]) extends Type
+case class PolyTypeWithCons(typeRef: Type, symbols: Seq[TypeSymbol], cons: String) extends Type
+case class AnnotatedType(typeRef: Type, attribTreeRefs: List[Int]) extends Type
+case class AnnotatedWithSelfType(typeRef: Type, symbol: Symbol, attribTreeRefs: List[Int]) extends Type
+case class ExistentialType(typeRef: Type, symbols: Seq[Symbol]) extends Type
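
The Type hierarchy above is a plain ADT, so consumers such as ScalaSigPrinter render it with pattern matches. The sketch below uses a much smaller stand-in ADT of its own (hypothetical `Ty`, `Ref`, `Bounds`) just to show that rendering style; it is not the printer's real `toString`:

    object TypeRenderDemo {
      sealed trait Ty
      case object NoTy extends Ty
      final case class Ref(name: String, args: List[Ty]) extends Ty
      final case class Bounds(lower: Ty, upper: Ty) extends Ty

      def render(t: Ty): String = t match {
        case NoTy            => ""
        case Ref(name, Nil)  => name
        case Ref(name, args) => name + args.map(render).mkString("[", ", ", "]")
        case Bounds(lo, hi)  => " >: " + render(lo) + " <: " + render(hi)
      }

      def main(args: Array[String]): Unit =
        println(render(Ref("Map", List(Ref("String", Nil), Ref("Int", Nil))))) // Map[String, Int]
    }
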
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
deleted file mode 100644
index 17ad4bd053..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-// Scalax - The Scala Community Library
-// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-// The primary distribution site is http://scalax.scalaforge.org/
-//
-// This software is released under the terms of the Revised BSD License.
-// There is NO WARRANTY. See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules;
-
-/** Represents the combined value of two rules applied in sequence.
- *
- * @see the Scala parser combinator
- */
-case class ~[+A, +B](_1 : A, _2 : B) {
- override def toString = "(" + _1 + " ~ " + _2 + ")"
-}
-
-
-sealed abstract class Result[+Out, +A, +X] {
- def out : Out
- def value : A
- def error : X
-
- implicit def toOption : Option[A]
-
- def map[B](f : A => B) : Result[Out, B, X]
- def mapOut[Out2](f : Out => Out2) : Result[Out2, A, X]
- def map[Out2, B](f : (Out, A) => (Out2, B)) : Result[Out2, B, X]
- def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, X]
- def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, X]
-}
-
-case class Success[+Out, +A](out : Out, value : A) extends Result[Out, A, Nothing] {
- def error = throw new ScalaSigParserError("No error")
-
- def toOption = Some(value)
-
- def map[B](f : A => B) : Result[Out, B, Nothing] = Success(out, f(value))
- def mapOut[Out2](f : Out => Out2) : Result[Out2, A, Nothing] = Success(f(out), value)
- def map[Out2, B](f : (Out, A) => (Out2, B)) : Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
- def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing]= f(out, value)
- def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing] = this
-}
-
-sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
- def out = throw new ScalaSigParserError("No output")
- def value = throw new ScalaSigParserError("No value")
-
- def toOption = None
-
- def map[B](f : Nothing => B) = this
- def mapOut[Out2](f : Nothing => Out2) = this
- def map[Out2, B](f : (Nothing, Nothing) => (Out2, B)) = this
- def flatMap[Out2, B](f : (Nothing, Nothing) => Result[Out2, B, Nothing]) = this
- def orElse[Out2, B](other : => Result[Out2, B, Nothing]) = other
-}
-
-case object Failure extends NoSuccess[Nothing] {
- def error = throw new ScalaSigParserError("No error")
-}
-
-case class ScalaSigParserError(msg: String) extends RuntimeException(msg)
-
-case class Error[+X](error : X) extends NoSuccess[X] {
-}
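
The deleted Result.scala modelled three outcomes: Success carries the remaining input plus a value, Failure lets an alternative rule be tried, and Error aborts parsing. A compact re-sketch of that shape with simplified type parameters; only `orElse` is reproduced, and only for Failure, mirroring the deleted combinator:

    object ResultDemo {
      sealed trait Result[+Out, +A]
      final case class Success[+Out, +A](out: Out, value: A) extends Result[Out, A]
      case object Failure extends Result[Nothing, Nothing]
      final case class Error(msg: String) extends Result[Nothing, Nothing]

      // Try the alternative only on Failure; Success and Error pass through unchanged.
      def orElse[Out, A](first: Result[Out, A], second: => Result[Out, A]): Result[Out, A] =
        first match {
          case Failure => second
          case other   => other
        }

      def main(args: Array[String]): Unit = {
        println(orElse(Failure, Success("rest", 1)))        // Success(rest,1)
        println(orElse(Success("rest", 2), Success("", 3))) // Success(rest,2)
      }
    }
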
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
deleted file mode 100644
index 489a05ecd0..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-// -----------------------------------------------------------------------------
-//
-// Scalax - The Scala Community Library
-// Copyright (c) 2005-8 The Scalax Project. All rights reserved.
-//
-// The primary distribution site is http://scalax.scalaforge.org/
-//
-// This software is released under the terms of the Revised BSD License.
-// There is NO WARRANTY. See the file LICENSE for the full text.
-//
-// -----------------------------------------------------------------------------
-
-package scala.tools.scalap
-package scalax
-package rules
-
-/** A Rule is a function from some input to a Result. The result may be:
- * <ul>
- * <li>Success, with a value of some type and an output that may serve as the input to subsequent rules.</li>
- * <li>Failure. A failure may result in some alternative rule being applied.</li>
- * <li>Error. No further rules should be attempted.</li>
- * </ul>
- *
- * @author Andrew Foggin
- *
- * Inspired by the Scala parser combinator.
- */
-trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
- val factory : Rules
- import factory._
-
- def as(name : String) = ruleWithName(name, this)
-
- def flatMap[Out2, B, X2 >: X](fa2ruleb : A => Out => Result[Out2, B, X2]) = mapResult {
- case Success(out, a) => fa2ruleb(a)(out)
- case Failure => Failure
- case err @ Error(_) => err
- }
-
- def map[B](fa2b : A => B) = flatMap { a => out => Success(out, fa2b(a)) }
-
- def filter(f : A => Boolean) = flatMap { a => out => if(f(a)) Success(out, a) else Failure }
-
- def mapResult[Out2, B, Y](f : Result[Out, A, X] => Result[Out2, B, Y]) = rule {
- in : In => f(apply(in))
- }
-
- def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) : Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
- val factory = Rule.this.factory
- lazy val choices = Rule.this :: other :: Nil
- }
-
- def orError[In2 <: In] = this orElse error[Any]
-
- def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) = orElse(other)
-
- def ^^[B](fa2b : A => B) = map(fa2b)
-
- def ^^?[B](pf : PartialFunction[A, B]) = filter (pf.isDefinedAt(_)) ^^ pf
-
- def ??(pf : PartialFunction[A, Any]) = filter (pf.isDefinedAt(_))
-
- def -^[B](b : B) = map { any => b }
-
- /** Maps an Error */
- def !^[Y](fx2y : X => Y) = mapResult {
- case s @ Success(_, _) => s
- case Failure => Failure
- case Error(x) => Error(fx2y(x))
- }
-
- def >>[Out2, B, X2 >: X](fa2ruleb : A => Out => Result[Out2, B, X2]) = flatMap(fa2ruleb)
-
- def >->[Out2, B, X2 >: X](fa2resultb : A => Result[Out2, B, X2]) = flatMap { a => any => fa2resultb(a) }
-
- def >>?[Out2, B, X2 >: X](pf : PartialFunction[A, Rule[Out, Out2, B, X2]]) = filter(pf isDefinedAt _) flatMap pf
-
- def >>&[B, X2 >: X](fa2ruleb : A => Out => Result[Any, B, X2]) = flatMap { a => out => fa2ruleb(a)(out) mapOut { any => out } }
-
- def ~[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield new ~(a, b)
-
- def ~-[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield a
-
- def -~[Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next) yield b
-
- def ~++[Out2, B >: A, X2 >: X](next : => Rule[Out, Out2, Seq[B], X2]) = for (a <- this; b <- next) yield a :: b.toList
-
- /** Apply the result of this rule to the function returned by the next rule */
- def ~>[Out2, B, X2 >: X](next : => Rule[Out, Out2, A => B, X2]) = for (a <- this; fa2b <- next) yield fa2b(a)
-
- /** Apply the result of this rule to the function returned by the previous rule */
- def <~:[InPrev, B, X2 >: X](prev : => Rule[InPrev, In, A => B, X2]) = for (fa2b <- prev; a <- this) yield fa2b(a)
-
- def ~![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield new ~(a, b)
-
- def ~-![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield a
-
- def -~![Out2, B, X2 >: X](next : => Rule[Out, Out2, B, X2]) = for (a <- this; b <- next orError) yield b
-
- def -[In2 <: In](exclude : => Rule[In2, Any, Any, Any]) = !exclude -~ this
-
- /** ^~^(f) is equivalent to ^^ { case b1 ~ b2 => f(b1, b2) }
- */
- def ^~^[B1, B2, B >: A <% B1 ~ B2, C](f : (B1, B2) => C) = map { a =>
- (a : B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
- }
-
- /** ^~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
- */
- def ^~~^[B1, B2, B3, B >: A <% B1 ~ B2 ~ B3, C](f : (B1, B2, B3) => C) = map { a =>
- (a : B1 ~ B2 ~ B3) match { case b1 ~ b2 ~ b3 => f(b1, b2, b3) }
- }
-
- /** ^~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
- */
- def ^~~~^[B1, B2, B3, B4, B >: A <% B1 ~ B2 ~ B3 ~ B4, C](f : (B1, B2, B3, B4) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4) match { case b1 ~ b2 ~ b3 ~ b4 => f(b1, b2, b3, b4) }
- }
-
- /** ^~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
- */
- def ^~~~~^[B1, B2, B3, B4, B5, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5, C](f : (B1, B2, B3, B4, B5) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4 ~ B5) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 => f(b1, b2, b3, b4, b5) }
- }
-
- /** ^~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
- */
- def ^~~~~~^[B1, B2, B3, B4, B5, B6, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6, C](f : (B1, B2, B3, B4, B5, B6) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
- }
-
- /** ^~~~~~~^(f) is equivalent to ^^ { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 => f(b1, b2, b3, b4, b5, b6) }
- */
- def ^~~~~~~^[B1, B2, B3, B4, B5, B6, B7, B >: A <% B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7, C](f : (B1, B2, B3, B4, B5, B6, B7) => C) = map { a =>
- (a : B1 ~ B2 ~ B3 ~ B4 ~ B5 ~ B6 ~ B7) match { case b1 ~ b2 ~ b3 ~ b4 ~ b5 ~ b6 ~b7 => f(b1, b2, b3, b4, b5, b6, b7) }
- }
-
- /** >~>(f) is equivalent to >> { case b1 ~ b2 => f(b1, b2) }
- */
- def >~>[Out2, B1, B2, B >: A <% B1 ~ B2, C, X2 >: X](f : (B1, B2) => Out => Result[Out2, C, X2]) = flatMap { a =>
- (a : B1 ~ B2) match { case b1 ~ b2 => f(b1, b2) }
- }
-
- /** ^-^(f) is equivalent to ^^ { b2 => b1 => f(b1, b2) }
- */
- def ^-^ [B1, B2 >: A, C](f : (B1, B2) => C) = map { b2 : B2 => b1 : B1 => f(b1, b2) }
-
- /** ^~>~^(f) is equivalent to ^^ { case b2 ~ b3 => b1 => f(b1, b2, b3) }
- */
- def ^~>~^ [B1, B2, B3, B >: A <% B2 ~ B3, C](f : (B1, B2, B3) => C) = map { a =>
- (a : B2 ~ B3) match { case b2 ~ b3 => b1 : B1 => f(b1, b2, b3) }
- }
-}
-
-
-trait Choice[-In, +Out, +A, +X] extends Rule[In, Out, A, X] {
- def choices : List[Rule[In, Out, A, X]]
-
- def apply(in : In) = {
- def oneOf(list : List[Rule[In, Out, A, X]]) : Result[Out, A, X] = list match {
- case Nil => Failure
- case first :: rest => first(in) match {
- case Failure => oneOf(rest)
- case result => result
- }
- }
- oneOf(choices)
- }
-
- override def orElse[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) : Rule[In2, Out2, A2, X2] = new Choice[In2, Out2, A2, X2] {
- val factory = Choice.this.factory
- lazy val choices = Choice.this.choices ::: other :: Nil
- }
-}
-
-
-
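
The deleted Rule trait treats a parser as a function from input to Result, and `orElse`/`|` builds a Choice whose alternatives are tried in order until one does not fail. A minimal function-based sketch of that first-match-wins choice, stripped of the original's four type parameters and written only to illustrate the idea:

    object ChoiceDemo {
      sealed trait Res[+A]
      final case class Ok[+A](rest: String, value: A) extends Res[A]
      case object Fail extends Res[Nothing]

      type Rule[A] = String => Res[A]

      // Apply each alternative to the same input; keep the first non-failing result.
      def oneOf[A](rules: List[Rule[A]]): Rule[A] = in =>
        rules.iterator.map(r => r(in)).collectFirst { case ok @ Ok(_, _) => ok }.getOrElse(Fail)

      def literal(s: String): Rule[String] = in =>
        if (in.startsWith(s)) Ok(in.drop(s.length), s) else Fail

      def main(args: Array[String]): Unit = {
        val keyword = oneOf(List(literal("class"), literal("object"), literal("trait")))
        println(keyword("object Foo")) // Ok( Foo,object)
        println(keyword("val x"))      // Fail
      }
    }
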
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
deleted file mode 100644
index dee1cf84ac..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
-
-import ScalaSigEntryParsers._
-
-trait Symbol extends Flags {
- def name : String
- def parent : Option[Symbol]
- def children : Seq[Symbol]
-
- def path : String = parent.map(_.path + ".").getOrElse("") + name
-}
-
-case object NoSymbol extends Symbol {
- def name = "<no symbol>"
- def parent = None
- def hasFlag(flag : Long) = false
- def children = Nil
-}
-
-abstract class ScalaSigSymbol extends Symbol {
- def applyRule[A](rule : EntryParser[A]) : A = expect(rule)(entry)
- def applyScalaSigRule[A](rule : ScalaSigParsers.Parser[A]) = ScalaSigParsers.expect(rule)(entry.scalaSig)
-
- def entry : ScalaSig#Entry
- def index = entry.index
-
- lazy val children : Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this))
- lazy val attributes : Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this)
-}
-
-case class ExternalSymbol(name : String, parent : Option[Symbol], entry : ScalaSig#Entry) extends ScalaSigSymbol {
- override def toString = path
- def hasFlag(flag : Long) = false
-}
-
-case class SymbolInfo(name : String, owner : Symbol, flags : Int, privateWithin : Option[AnyRef], info : Int, entry : ScalaSig#Entry) {
- def symbolString(any : AnyRef) = any match {
- case sym : SymbolInfoSymbol => sym.index.toString
- case other => other.toString
- }
-
- override def toString = name + ", owner=" + symbolString(owner) + ", flags=" + flags.toHexString + ", info=" + info + (privateWithin match {
- case Some(any) => ", privateWithin=" + symbolString(any)
- case None => " "
- })
-}
-
-abstract class SymbolInfoSymbol extends ScalaSigSymbol {
- def symbolInfo : SymbolInfo
-
- def entry = symbolInfo.entry
- def name = symbolInfo.name
- def parent = Some(symbolInfo.owner)
- def hasFlag(flag : Long) = (symbolInfo.flags & flag) != 0L
-
- lazy val infoType = applyRule(parseEntry(typeEntry)(symbolInfo.info))
-}
-
-case class TypeSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol{
- override def path = name
-}
-
-case class AliasSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol{
- override def path = name
-}
-case class ClassSymbol(symbolInfo : SymbolInfo, thisTypeRef : Option[Int]) extends SymbolInfoSymbol {
- lazy val selfType = thisTypeRef.map{(x: Int) => applyRule(parseEntry(typeEntry)(x))}
-}
-case class ObjectSymbol(symbolInfo : SymbolInfo) extends SymbolInfoSymbol
-case class MethodSymbol(symbolInfo : SymbolInfo, aliasRef : Option[Int]) extends SymbolInfoSymbol
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
deleted file mode 100644
index 0444e701f2..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package scala.tools.scalap
-package scalax
-package rules
-package scalasig
-
-abstract class Type
-
-case object NoType extends Type
-case object NoPrefixType extends Type
-
-case class ThisType(symbol : Symbol) extends Type
-case class SingleType(typeRef : Type, symbol : Symbol) extends Type
-case class ConstantType(constant : Any) extends Type
-case class TypeRefType(prefix : Type, symbol : Symbol, typeArgs : Seq[Type]) extends Type
-case class TypeBoundsType(lower : Type, upper : Type) extends Type
-case class RefinedType(classSym : Symbol, typeRefs : List[Type]) extends Type
-case class ClassInfoType(symbol : Symbol, typeRefs : Seq[Type]) extends Type
-case class ClassInfoTypeWithCons(symbol : Symbol, typeRefs : Seq[Type], cons: String) extends Type
-case class MethodType(resultType : Type, paramSymbols : Seq[Symbol]) extends Type
-case class NullaryMethodType(resultType : Type) extends Type
-case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type
-case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type
-case class AnnotatedType(typeRef : Type, attribTreeRefs : List[Int]) extends Type
-case class AnnotatedWithSelfType(typeRef : Type, symbol : Symbol, attribTreeRefs : List[Int]) extends Type
-case class ExistentialType(typeRef : Type, symbols : Seq[Symbol]) extends Type
diff --git a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala b/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala
deleted file mode 100644
index 6077eded0f..0000000000
--- a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.tools.scalap
-package scalax
-package util
-
-import java.beans.Introspector
-
-/**
- * @author ilyas
- */
-
-object StringUtil {
-
- def trimStart(s: String, prefix: String) = if (s != null && s.startsWith(prefix)) s.substring(prefix.length) else s
-
- def decapitalize(s: String) = Introspector.decapitalize(s)
-
- def cutSubstring(dom: String)(s: String) = if (dom != null && s != null) dom.replace(s, "") else dom
-
-}
diff --git a/src/xml/scala/xml/Elem.scala b/src/xml/scala/xml/Elem.scala
index 484cf98744..e9b87e516c 100755
--- a/src/xml/scala/xml/Elem.scala
+++ b/src/xml/scala/xml/Elem.scala
@@ -37,6 +37,7 @@ object Elem {
}
import scala.sys.process._
+ import scala.language.implicitConversions
/** Implicitly convert a [[scala.xml.Elem]] into a
* [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text
* elements of the element, trimming spaces, and then converting the result