-rwxr-xr-x  build.xml | 221
-rw-r--r--  dbuild-meta.json | 61
-rw-r--r--  src/build/dbuild-meta-json-gen.scala | 11
-rw-r--r--  src/build/maven/maven-deploy.xml | 1
-rw-r--r--  src/build/maven/scala-partest-pom.xml | 62
-rw-r--r--  src/build/pack.xml | 5
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Validators.scala | 6
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/CompilationUnits.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 49
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala | 82
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 674
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala | 86
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 128
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala | 475
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 518
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 74
-rw-r--r--  src/compiler/scala/tools/nsc/util/package.scala | 9
-rw-r--r--  src/eclipse/partest/.classpath | 6
-rw-r--r--  src/eclipse/partest/.project | 10
-rw-r--r--  src/intellij/partest.iml.SAMPLE | 10
-rw-r--r--  src/library/scala/annotation/compileTimeOnly.scala | 22
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 6
-rw-r--r--  src/partest-extras/scala/tools/partest/ASMConverters.scala (renamed from src/partest/scala/tools/partest/ASMConverters.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/AsmNode.scala (renamed from src/partest/scala/tools/partest/AsmNode.scala) | 7
-rw-r--r--  src/partest-extras/scala/tools/partest/BytecodeTest.scala (renamed from src/partest/scala/tools/partest/BytecodeTest.scala) | 17
-rw-r--r--  src/partest-extras/scala/tools/partest/JavapTest.scala (renamed from src/partest/scala/tools/partest/JavapTest.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/ReplTest.scala (renamed from src/partest/scala/tools/partest/ReplTest.scala) | 5
-rw-r--r--  src/partest-extras/scala/tools/partest/SigTest.scala (renamed from src/partest/scala/tools/partest/SigTest.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/Util.scala | 52
-rw-r--r--  src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala (renamed from src/partest/scala/tools/partest/instrumented/Instrumentation.scala) | 0
-rw-r--r--  src/partest-extras/scala/tools/partest/instrumented/Profiler.java | 82
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java | 49
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF (renamed from src/partest/scala/tools/partest/javaagent/MANIFEST.MF) | 0
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java (renamed from src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java) | 4
-rw-r--r--  src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java | 25
-rw-r--r--  src/partest/README | 31
-rw-r--r--  src/partest/scala/tools/partest/CompilerTest.scala | 60
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 128
-rw-r--r--  src/partest/scala/tools/partest/IcodeTest.scala | 43
-rw-r--r--  src/partest/scala/tools/partest/MemoryTest.scala | 38
-rw-r--r--  src/partest/scala/tools/partest/PartestDefaults.scala | 28
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 207
-rw-r--r--  src/partest/scala/tools/partest/SecurityTest.scala | 19
-rw-r--r--  src/partest/scala/tools/partest/StoreReporterDirectTest.scala | 15
-rw-r--r--  src/partest/scala/tools/partest/TestKinds.scala | 66
-rw-r--r--  src/partest/scala/tools/partest/TestState.scala | 64
-rw-r--r--  src/partest/scala/tools/partest/TestUtil.scala | 38
-rw-r--r--  src/partest/scala/tools/partest/antlib.xml | 4
-rw-r--r--  src/partest/scala/tools/partest/instrumented/Profiler.java | 82
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ASMTransformer.java | 49
-rw-r--r--  src/partest/scala/tools/partest/javaagent/ProfilingAgent.java | 25
-rw-r--r--  src/partest/scala/tools/partest/nest/AntRunner.scala | 30
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleFileManager.scala | 189
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 219
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala | 54
-rw-r--r--  src/partest/scala/tools/partest/nest/DirectCompiler.scala | 105
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 165
-rw-r--r--  src/partest/scala/tools/partest/nest/NestRunner.scala | 15
-rw-r--r--  src/partest/scala/tools/partest/nest/NestUI.scala | 181
-rw-r--r--  src/partest/scala/tools/partest/nest/PathSettings.scala | 88
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 99
-rw-r--r--  src/partest/scala/tools/partest/nest/Runner.scala | 883
-rw-r--r--  src/partest/scala/tools/partest/nest/SBTRunner.scala | 85
-rw-r--r--  src/partest/scala/tools/partest/nest/StreamCapture.scala | 53
-rw-r--r--  src/partest/scala/tools/partest/package.scala | 241
-rw-r--r--  src/partest/scala/tools/partest/utils/Properties.scala | 18
-rw-r--r--  src/reflect/scala/reflect/api/Exprs.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 112
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 3
-rw-r--r--  src/reflect/scala/reflect/internal/SymbolTable.scala | 14
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 41
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 12
-rw-r--r--  src/reflect/scala/reflect/internal/TypeDebugging.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 9
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala | 32
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/package.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/IMain.scala | 30
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Naming.scala | 2
-rw-r--r--  src/scalacheck/org/scalacheck/Arbitrary.scala | 447
-rw-r--r--  src/scalacheck/org/scalacheck/Arg.scala | 20
-rw-r--r--  src/scalacheck/org/scalacheck/Commands.scala | 148
-rw-r--r--  src/scalacheck/org/scalacheck/ConsoleReporter.scala | 52
-rw-r--r--  src/scalacheck/org/scalacheck/Gen.scala | 542
-rw-r--r--  src/scalacheck/org/scalacheck/Pretty.scala | 127
-rw-r--r--  src/scalacheck/org/scalacheck/Prop.scala | 818
-rw-r--r--  src/scalacheck/org/scalacheck/Properties.scala | 96
-rw-r--r--  src/scalacheck/org/scalacheck/ScalaCheckFramework.scala | 92
-rw-r--r--  src/scalacheck/org/scalacheck/Shrink.scala | 208
-rw-r--r--  src/scalacheck/org/scalacheck/Test.scala | 392
-rw-r--r--  src/scalacheck/org/scalacheck/util/Buildable.scala | 63
-rw-r--r--  src/scalacheck/org/scalacheck/util/CmdLineParser.scala | 101
-rw-r--r--  src/scalacheck/org/scalacheck/util/FreqMap.scala | 65
-rw-r--r--  src/scalacheck/org/scalacheck/util/StdRand.scala | 12
-rw-r--r--  starr.number | 1
-rwxr-xr-x  test/build-partest.xml | 8
-rw-r--r--  test/files/ant/README | 42
-rw-r--r--  test/files/ant/fsc001-build.check | 14
-rw-r--r--  test/files/ant/fsc001-build.xml | 26
-rw-r--r--  test/files/ant/fsc001.scala | 7
-rw-r--r--  test/files/ant/fsc002-build.check | 14
-rw-r--r--  test/files/ant/fsc002-build.xml | 28
-rw-r--r--  test/files/ant/fsc002.scala | 6
-rw-r--r--  test/files/ant/fsc003-build.check | 14
-rw-r--r--  test/files/ant/fsc003-build.xml | 25
-rw-r--r--  test/files/ant/fsc003.scala | 7
-rw-r--r--  test/files/ant/imported.xml | 150
-rw-r--r--  test/files/ant/scalac001-build.check | 14
-rw-r--r--  test/files/ant/scalac001-build.xml | 26
-rw-r--r--  test/files/ant/scalac001.scala | 6
-rw-r--r--  test/files/ant/scalac002-build.check | 14
-rw-r--r--  test/files/ant/scalac002-build.xml | 28
-rw-r--r--  test/files/ant/scalac002.scala | 7
-rw-r--r--  test/files/ant/scalac003-build.check | 14
-rw-r--r--  test/files/ant/scalac003-build.xml | 25
-rw-r--r--  test/files/ant/scalac003.scala | 7
-rw-r--r--  test/files/ant/scalac004-build.check | 24
-rw-r--r--  test/files/ant/scalac004-build.xml | 26
-rw-r--r--  test/files/ant/scalac004.scala | 11
-rw-r--r--  test/files/ant/scaladoc-build.check | 15
-rw-r--r--  test/files/ant/scaladoc-build.xml | 26
-rw-r--r--  test/files/ant/scaladoc.scala | 7
-rw-r--r--  test/files/jvm/opt_value_class.check | 2
-rw-r--r--  test/files/jvm/opt_value_class/Value_1.scala | 28
-rw-r--r--  test/files/jvm/opt_value_class/test.scala | 16
-rw-r--r--  test/files/lib/scalacheck.jar.desired.sha1 | 1
-rw-r--r--  test/files/neg/compile-time-only-a.check | 49
-rw-r--r--  test/files/neg/compile-time-only-a.scala | 57
-rw-r--r--  test/files/neg/compile-time-only-b.check | 7
-rw-r--r--  test/files/neg/compile-time-only-b.scala | 15
-rw-r--r--  test/files/neg/javac-error.check | 10
-rw-r--r--  test/files/neg/t4425.check | 13
-rw-r--r--  test/files/neg/t4425.scala | 10
-rw-r--r--  test/files/neg/t4425b.check | 61
-rw-r--r--  test/files/neg/t4425b.scala | 38
-rw-r--r--  test/files/neg/t5903a.check | 7
-rw-r--r--  test/files/neg/t5903a/Macros_1.scala | 28
-rw-r--r--  test/files/neg/t5903a/Test_2.scala | 6
-rw-r--r--  test/files/neg/t5903b.check | 9
-rw-r--r--  test/files/neg/t5903b/Macros_1.scala | 23
-rw-r--r--  test/files/neg/t5903b/Test_2.scala | 6
-rw-r--r--  test/files/neg/t5903c.check | 7
-rw-r--r--  test/files/neg/t5903c/Macros_1.scala | 26
-rw-r--r--  test/files/neg/t5903c/Test_2.scala | 6
-rw-r--r--  test/files/neg/t5903d.check | 7
-rw-r--r--  test/files/neg/t5903d/Macros_1.scala | 23
-rw-r--r--  test/files/neg/t5903d/Test_2.scala | 6
-rw-r--r--  test/files/neg/t5903e.check | 4
-rw-r--r--  test/files/neg/t5903e/Macros_1.scala | 25
-rw-r--r--  test/files/neg/t5903e/Test_2.scala | 6
-rw-r--r--  test/files/neg/t6289.check | 10
-rw-r--r--  test/files/neg/t6289.flags (renamed from test/files/neg/javac-error.flags) | 0
-rw-r--r--  test/files/neg/t6289/J.java (renamed from test/files/neg/javac-error/J.java) | 0
-rw-r--r--  test/files/neg/t6289/SUT_5.scala (renamed from test/files/neg/javac-error/SUT_5.scala) | 0
-rw-r--r--  test/files/neg/t6675.check | 2
-rw-r--r--  test/files/neg/t7214neg.check | 7
-rw-r--r--  test/files/neg/t7214neg.scala | 57
-rw-r--r--  test/files/neg/t7721.check | 21
-rw-r--r--  test/files/neg/t7721.flags | 1
-rw-r--r--  test/files/neg/t7721.scala | 140
-rw-r--r--  test/files/neg/t7756a.check | 7
-rw-r--r--  test/files/neg/t7756a.scala | 11
-rw-r--r--  test/files/neg/t7756b.check | 6
-rw-r--r--  test/files/neg/t7756b.flags | 1
-rw-r--r--  test/files/neg/t7756b.scala | 5
-rw-r--r--  test/files/neg/t7757a.check | 4
-rw-r--r--  test/files/neg/t7757a.scala | 1
-rw-r--r--  test/files/neg/t7757b.check | 4
-rw-r--r--  test/files/neg/t7757b.scala | 2
-rw-r--r--  test/files/neg/t997.check | 7
-rw-r--r--  test/files/pos/annotated-treecopy/Impls_Macros_1.scala | 2
-rw-r--r--  test/files/pos/extractor-types.scala | 30
-rw-r--r--  test/files/pos/optmatch.scala | 33
-rw-r--r--  test/files/pos/overloaded-unapply.scala | 8
-rw-r--r--  test/files/pos/patmat-extract-tparam.scala | 13
-rw-r--r--  test/files/pos/t6797.scala | 4
-rw-r--r--  test/files/run/matchonseq.scala | 10
-rw-r--r--  test/files/run/name-based-patmat.check | 10
-rw-r--r--  test/files/run/name-based-patmat.scala | 75
-rw-r--r--  test/files/run/patmat-behavior-2.check | 24
-rw-r--r--  test/files/run/patmat-behavior-2.scala | 50
-rw-r--r--  test/files/run/patmat-behavior.check | 90
-rw-r--r--  test/files/run/patmat-behavior.scala | 95
-rw-r--r--  test/files/run/patmat-bind-typed.check | 1
-rw-r--r--  test/files/run/patmat-bind-typed.scala | 8
-rw-r--r--  test/files/run/repl-trim-stack-trace.scala | 33
-rw-r--r--  test/files/run/string-extractor.check | 9
-rw-r--r--  test/files/run/string-extractor.scala | 60
-rw-r--r--  test/files/run/t5903a.check | 1
-rw-r--r--  test/files/run/t5903a.flags | 1
-rw-r--r--  test/files/run/t5903a/Macros_1.scala | 28
-rw-r--r--  test/files/run/t5903a/Test_2.scala | 6
-rw-r--r--  test/files/run/t5903b.check | 1
-rw-r--r--  test/files/run/t5903b.flags | 1
-rw-r--r--  test/files/run/t5903b/Macros_1.scala | 25
-rw-r--r--  test/files/run/t5903b/Test_2.scala | 6
-rw-r--r--  test/files/run/t5903c.check | 1
-rw-r--r--  test/files/run/t5903c.flags | 1
-rw-r--r--  test/files/run/t5903c/Macros_1.scala | 23
-rw-r--r--  test/files/run/t5903c/Test_2.scala | 6
-rw-r--r--  test/files/run/t5903d.check | 1
-rw-r--r--  test/files/run/t5903d.flags | 1
-rw-r--r--  test/files/run/t5903d/Macros_1.scala | 25
-rw-r--r--  test/files/run/t5903d/Test_2.scala | 6
-rw-r--r--  test/files/run/t6331b.scala | 2
-rw-r--r--  test/files/run/t7214.scala | 2
-rw-r--r--  test/files/run/value-class-extractor-2.check | 8
-rw-r--r--  test/files/run/value-class-extractor-2.scala | 108
-rw-r--r--  test/files/run/value-class-extractor-seq.check | 3
-rw-r--r--  test/files/run/value-class-extractor-seq.scala | 59
-rw-r--r--  test/files/run/value-class-extractor.check | 9
-rw-r--r--  test/files/run/value-class-extractor.scala | 91
-rwxr-xr-x  test/partest | 57
-rw-r--r--  test/pending/neg/t6680a.scala | 13
-rw-r--r--  test/pending/neg/t6680b.check | 6
-rw-r--r--  test/pending/neg/t6680b.scala | 10
-rw-r--r--  test/pending/neg/t6680c.scala | 17
-rw-r--r--  versions.properties | 7
247 files changed, 3720 insertions, 8636 deletions
diff --git a/build.xml b/build.xml
index cdcdcee58c..f428734ee0 100755
--- a/build.xml
+++ b/build.xml
@@ -1,6 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="sabbus" default="build" xmlns:artifact="urn:maven-artifact-ant">
+<project name="sabbus" default="build"
+ xmlns:artifact="urn:maven-artifact-ant"
+ xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
<include file="test/build-partest.xml" as="partest"/>
<description>
@@ -56,6 +58,8 @@ TODO:
<target name="build-opt" description="Optimized version of build."> <optimized name="build"/></target>
<target name="test-opt" description="Optimized version of test."> <optimized name="test"/></target>
+ <target name="test-core-opt" description="Optimized version of test.core."> <optimized name="test.core"/></target>
+ <target name="test-stab-opt" description="Optimized version of test.stability."> <optimized name="test.stability"/></target>
<target name="dist-opt" description="Optimized version of dist."> <optimized name="dist"/></target>
<target name="partialdist-opt" description="Optimized version of partialdist."> <optimized name="partialdist"/></target>
<target name="fastdist-opt" description="Optimized version of fastdist."> <optimized name="fastdist"/></target>
@@ -116,15 +120,14 @@ TODO:
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
- <!-- read starr.version -->
- <property file="${basedir}/starr.number"/>
+ <!-- read versions.properties -->
+ <property file="${basedir}/versions.properties"/>
<!-- Sets location of pre-compiled libraries -->
<property name="library.starr.jar" value="${lib.dir}/scala-library.jar"/>
<property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
<property name="compiler.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
<property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
- <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
<!-- Sets location of build folders -->
<property name="build.dir" value="${basedir}/build"/>
@@ -151,7 +154,6 @@ TODO:
<property name="dists.dir" value="${basedir}/dists"/>
<property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
- <property name="partest.version.number" value="0.9.3"/>
<property name="jline.version" value="2.11"/>
@@ -248,11 +250,14 @@ TODO:
</artifact:dependencies>
- <artifact:dependencies pathId="partest.extras.classpath" filesetId="partest.extras.fileset" versionsId="partest.extras.versions">
- <dependency groupId="com.googlecode.java-diff-utils" artifactId="diffutils" version="1.3.0"/>
- <dependency groupId="org.scala-tools.testing" artifactId="test-interface" version="0.5" />
+ <artifact:dependencies pathId="partest.classpath" filesetId="partest.fileset" versionsId="partest.versions">
+ <!-- to facilitate building and publishing partest locally -->
+ <localRepository path="${user.home}/.m2/repository"/>
+ <!-- so we don't have to wait for artifacts to synch to maven central: -->
+ <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-partest_${scala.binary.version}" version="${partest.version.number}" />
</artifact:dependencies>
- <copy-deps fileset.prefix="partest.extras" out="partest"/>
+ <copy-deps fileset.prefix="partest" out="partest"/>
<artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.deps.fileset" versionsId="repl.deps.versions">
<dependency groupId="jline" artifactId="jline" version="${jline.version}"/>
@@ -263,7 +268,7 @@ TODO:
<typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
<!-- Download STARR via maven if `starr.use.released` is set,
- and `starr.version` is specified (see the starr.number properties file).
+ and `starr.version` is specified (see the versions.properties properties file).
Want to slow down STARR changes, using only released versions. -->
<if><isset property="starr.use.released"/><then>
<echo message="Using Scala ${starr.version} for STARR."/>
@@ -480,6 +485,8 @@ TODO:
<if><isset property="locker.skip"/><then>
<echo message="Using STARR to build the quick stage (skipping locker)."/>
<path id="locker.compiler.path" refid="starr.compiler.path"/>
+ <!-- this is cheating, but should be close enough: -->
+ <path id="locker.compiler.build.path" refid="starr.compiler.path"/>
<property name="locker.locked" value="locker skipped"/></then>
<else>
<path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
@@ -493,7 +500,7 @@ TODO:
There must be a variable of the shape @{stage}.@{project}.build.path
for all @{stage} in locker, quick, strap
and all @{project} in library, reflect, compiler
- when stage is quick, @{project} also includes: actors, parser-combinators, xml, repl, swing, plugins, scalacheck, interactive, scaladoc, partest, scalap
+ when stage is quick, @{project} also includes: actors, parser-combinators, xml, repl, swing, plugins, scalacheck, interactive, scaladoc, scalap
-->
<!-- LOCKER -->
@@ -513,11 +520,14 @@ TODO:
<pathelement location="${build-locker.dir}/classes/reflect"/>
</path>
+ <if><not><isset property="locker.skip"/></not><then>
<path id="locker.compiler.build.path">
<path refid="locker.reflect.build.path"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
<path refid="asm.classpath"/>
</path>
+ </then></if>
+ <!-- else, locker.compiler.build.path is set above -->
<!-- QUICK -->
<path id="quick.library.build.path">
@@ -564,37 +574,36 @@ TODO:
</path>
<path id="quick.plugins.build.path">
- <path refid="quick.compiler.build.path"/>
+ <!-- plugins are run by locker compiler during quick stage,
+ so must compile against the same classes the locker was compiled to
+ -->
+ <path refid="locker.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
</path>
- <path id="quick.scalacheck.build.path">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/actors"/>
- <pathelement location="${build-quick.dir}/classes/parser-combinators"/>
- <pathelement location="${build-quick.dir}/classes/scalacheck"/>
- <path refid="partest.extras.classpath"/>
- </path>
-
<path id="quick.scalap.build.path">
<path refid="quick.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
</path>
- <path id="quick.partest.build.path">
- <path refid="quick.xml.build.path"/>
- <path refid="quick.scalap.build.path"/>
- <path refid="partest.extras.classpath"/>
+ <path id="quick.partest-extras.build.path">
+ <path refid="asm.classpath"/>
+ <path refid="partest.classpath"/>
<pathelement location="${build-quick.dir}/classes/repl"/>
- <pathelement location="${scalacheck.jar}"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
+ <!-- for the java dependency: Profiler.java -->
+ <pathelement location="${build-quick.dir}/classes/partest-extras"/>
+ </path>
+
+ <path id="quick.partest-javaagent.build.path">
+ <path refid="partest.classpath"/>
+ <path refid="asm.classpath"/>
</path>
<path id="quick.scaladoc.build.path">
<path refid="quick.xml.build.path"/>
<path refid="quick.compiler.build.path"/>
<path refid="quick.parser-combinators.build.path"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
+ <path refid="partest.classpath"/>
<pathelement location="${build-quick.dir}/classes/scaladoc"/>
</path>
@@ -658,21 +667,11 @@ TODO:
<path id="pack.swing.files"> <fileset dir="${build-quick.dir}/classes/swing"/> </path>
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
<path id="pack.plugins.files"> <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
- <path id="pack.scalacheck.files"> <fileset dir="${build-quick.dir}/classes/scalacheck"/> </path>
<path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
<fileset file="${src.dir}/scalap/decoder.properties"/> </path>
- <path id="pack.partest.files">
- <fileset dir="${build-quick.dir}/classes/partest">
- <exclude name="scala/tools/partest/javaagent/**"/>
- </fileset>
- </path>
-
- <path id="pack.partest-javaagent.files">
- <fileset dir="${build-quick.dir}/classes/partest">
- <include name="scala/tools/partest/javaagent/**"/>
- </fileset>
- </path>
+ <path id="pack.partest-extras.files"> <fileset dir="${build-quick.dir}/classes/partest-extras"/> </path>
+ <path id="pack.partest-javaagent.files"> <fileset dir="${build-quick.dir}/classes/partest-javaagent"/> </path>
<!-- STRAP -->
<path id="strap.library.build.path">
@@ -699,11 +698,10 @@ TODO:
<pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+ <!-- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/> -->
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${ant.jar}"/>
- <path refid="partest.extras.classpath"/>
<path refid="aux.libs"/>
</path>
@@ -720,14 +718,52 @@ TODO:
<pathelement location="${build.dir}/manmaker/classes"/>
</path>
- <path id="partest.classpath">
- <path refid="pack.compiler.path"/>
+ <!--
+ This is the classpath used to run partest, which is what it uses to run the compiler and find other required jars.
+ "What's on the compiler's compilation path when compiling partest tests," you ask?
+ Why, the compiler we're testing, of course, and partest with all its dependencies.
+ -->
+ <path id="partest.compilation.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+
+ <!-- to test a quick build without packing, replace the above pathelements with: (may need a little tweaking)
+ <path refid="quick.bin.tool.path">
+ <path refid="quick.interactive.build.path">
+ -->
+
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
- <path refid="partest.extras.classpath"/>
+ <!-- TODO: move scalap out of repo -->
+
+ <!--
+ include partest and its run-time dependencies,
+ but filter out the compiler we just built, as that's what we want to test!
+ TODO: mark partest's compiler dependencies as provided when publishing to maven,
+ so that we don't have to filter them out here...
+ -->
+ <restrict>
+ <path refid="partest.classpath"/>
+ <rsel:not><rsel:or>
+ <rsel:name name="scala-library*.jar"/>
+ <rsel:name name="scala-reflect*.jar"/>
+ <rsel:name name="scala-compiler*.jar"/>
+ <rsel:name name="scala-actors*.jar"/>
+ <rsel:name name="scala-scalap*.jar"/>
+ <!-- <rsel:name name="scala-parser-combinators*.jar"/>
+ <rsel:name name="scala-xml*.jar"/> -->
+ </rsel:or></rsel:not>
+ </restrict>
+
+ <!-- partest classes specific to the core compiler build -->
+ <pathelement location="${build-pack.dir}/lib/scala-partest-extras.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-partest-javaagent.jar"/>
+
+ <!-- sneaky extras used in tests -->
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
</path>
<!-- obsolete? -->
@@ -767,10 +803,6 @@ TODO:
<path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
<!-- TODO: consolidate *.includes -->
- <patternset id="partest.includes">
- <include name="**/*.xml"/>
- </patternset>
-
<patternset id="lib.includes">
<include name="**/*.tmpl"/>
<include name="**/*.xml"/>
@@ -1002,7 +1034,7 @@ TODO:
<attribute name="args" default=""/> <!-- additional args -->
<attribute name="includes" default="comp.includes"/>
<attribute name="java-excludes" default=""/>
- <attribute name="version" default=""/> <!-- non-empty for partest and scaladoc: use @{version}.version.number in property file-->
+ <attribute name="version" default=""/> <!-- non-empty for scaladoc: use @{version}.version.number in property file-->
<sequential>
<staged-uptodate stage="@{stage}" project="@{project}">
@@ -1209,16 +1241,10 @@ TODO:
<target name="quick.repl" depends="quick.comp">
<staged-build with="locker" stage="quick" project="repl"/> </target>
- <target name="quick.scalacheck" depends="quick.actors, quick.parser-combinators, quick.lib">
- <staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/> </target>
-
<target name="quick.scalap" depends="quick.repl">
<staged-build with="locker" stage="quick" project="scalap"/> </target>
- <target name="quick.partest" depends="quick.scalap, quick.xml, quick.repl, asm.done">
- <staged-build with="locker" stage="quick" project="partest" version="partest"/> </target>
-
- <target name="quick.scaladoc" depends="quick.comp, quick.partest, quick.parser-combinators">
+ <target name="quick.scaladoc" depends="quick.comp, quick.parser-combinators">
<staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
<target name="quick.interactive" depends="quick.comp, quick.scaladoc">
@@ -1248,13 +1274,7 @@ TODO:
<!-- might split off library part into its own ant target -->
<mkdir dir="${build-quick.dir}/classes/continuations-library"/>
- <!-- TODO: must build with quick to avoid
- [quick.plugins] error: java.lang.NoClassDefFoundError: scala/tools/nsc/transform/patmat/PatternMatching
- [quick.plugins] at scala.tools.selectivecps.SelectiveCPSTransform.newTransformer(SelectiveCPSTransform.scala:29)
-
- WHY OH WHY!? scala/tools/nsc/transform/patmat/PatternMatching should be on locker.compiler.path
- -->
- <staged-scalac with="quick" stage="quick" project="plugins"
+ <staged-scalac with="locker" stage="quick" project="plugins"
srcdir="continuations/library" destproject="continuations-library"
args="-Xplugin-require:continuations -P:continuations:enable -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins"/>
@@ -1263,7 +1283,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalacheck, quick.scalap, quick.interactive, quick.xml, quick.parser-combinators, quick.swing, quick.plugins, quick.partest, quick.scaladoc">
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalap, quick.interactive, quick.xml, quick.parser-combinators, quick.swing, quick.plugins, quick.scaladoc">
<staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
@@ -1309,31 +1329,17 @@ TODO:
</target>
<target name="pack.plugins" depends="quick.plugins"> <staged-pack project="plugins" targetdir="misc/scala-devel/plugins" targetjar="continuations.jar"/> </target>
- <target name="pack.scalacheck" depends="quick.scalacheck"> <staged-pack project="scalacheck" targetjar="scalacheck.jar"/> </target>
-
- <target name="pack.partest" depends="quick.partest">
- <staged-pack project="partest"/>
- <!-- TODO the manifest should influence actuality of this target -->
- <staged-pack project="partest-javaagent" manifest="${src.dir}/partest/scala/tools/partest/javaagent/MANIFEST.MF"/>
- </target>
<target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
- <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.xml, pack.swing, pack.parser-combinators">
+ <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.plugins, pack.reflect, pack.scalap, pack.xml, pack.swing, pack.parser-combinators">
<staged-bin stage="pack"/>
</target>
<!-- depend on quick.done so quick.bin is run when pack.done is -->
<target name="pack.done" depends="quick.done, pack.bin">
<!-- copy dependencies to build/pack/lib, it only takes a second so don't bother with uptodate checks -->
- <copy todir="${build-pack.dir}/lib">
- <resources refid="partest.extras.fileset"/>
- <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
- from="${partest.extras.versions}" to="flatten"/>
- </copy>
-
<taskdef resource="scala/tools/ant/antlib.xml" classpathref="docs.compiler.path"/>
- <taskdef resource="scala/tools/partest/antlib.xml" classpathref="partest.classpath"/>
</target>
@@ -1567,30 +1573,50 @@ TODO:
</target>
<!-- See test/build-partest.xml for the macro(s) being used here. -->
+ <target name="partest.task" depends="init">
+ <!-- note the classpathref! this is the classpath used to run partest,
+ so it must have the new compiler.... -->
+ <taskdef
+ classpathref="partest.compilation.path"
+ resource="scala/tools/partest/antlib.xml"/>
+
+ <!-- compile compiler-specific parts of partest -->
+ <staged-build with="starr" stage="quick" project="partest-extras" />
+ <staged-build with="starr" stage="quick" project="partest-javaagent" />
+ <staged-pack project="partest-extras"/>
+ <staged-pack project="partest-javaagent" manifest="${src.dir}/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF"/>
+ </target>
+
+ <target name="test.suite.init" depends="pack.done, partest.task">
+ <!-- read by test/partest to determine classpath used to run partest -->
+ <propertyfile file = "build/pack/partest.properties">
+ <entry key = "partest.classpath" value="${toString:partest.compilation.path}"/>
+ </propertyfile>
+ </target>
- <target name="test.suite" depends="pack.done">
- <testSuite/>
+ <target name="test.suite" depends="test.suite.init">
+ <testSuite kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
</target>
- <target name="test.suite.color" depends="pack.done">
- <testSuite colors="8"/>
+ <target name="test.suite.color" depends="test.suite.init">
+ <testSuite colors="8" kinds="pos neg run jvm res scalap scalacheck specialized instrumented"/>
</target>
- <target name="test.run" depends="pack.done">
+ <target name="test.run" depends="test.suite.init">
<testSuite kinds="run jvm"/>
</target>
- <target name="test.continuations.suite" depends="pack.done">
+ <target name="test.continuations.suite" depends="test.suite.init">
<testSuite kinds="continuations-neg continuations-run"
scalacOpts="${scalac.args.optimise} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
/>
</target>
- <target name="test.scaladoc" depends="pack.done">
+ <target name="test.scaladoc" depends="test.suite.init">
<testSuite kinds="run scalacheck" srcdir="scaladoc"/>
</target>
- <target name="test.interactive" depends="pack.done">
+ <target name="test.interactive" depends="test.suite.init">
<testSuite kinds="presentation"/>
</target>
@@ -1771,12 +1797,6 @@ TODO:
</staged-docs>
</target>
- <target name="docs.partest" depends="docs.start">
- <staged-docs project="partest" title="Scala Parallel Testing Framework">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
<target name="docs.continuations-plugin" depends="docs.start">
<staged-docs project="continuations-plugin" dir="continuations/plugin" title="Delimited Continuations Compiler Plugin">
<include name="**/*.scala"/>
@@ -1820,7 +1840,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.xml, docs.parser-combinators, docs.scalap, docs.partest, docs.continuations-plugin"/>
+ <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.xml, docs.parser-combinators, docs.scalap, docs.continuations-plugin"/>
<!-- ===========================================================================
DISTRIBUTION
@@ -1848,19 +1868,11 @@ TODO:
<mkdir dir="${dist.dir}/lib"/>
<copy toDir="${dist.dir}/lib">
<fileset dir="${build-pack.dir}/lib">
- <include name="scalacheck.jar"/>
- <include name="scala-partest.jar"/>
<include name="scalap.jar"/>
</fileset>
</copy>
<copy todir="${dist.dir}/lib">
- <resources refid="partest.extras.fileset"/>
- <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
- from="${partest.extras.versions}" to="flatten"/>
- </copy>
-
- <copy todir="${dist.dir}/lib">
<resources refid="repl.deps.fileset"/>
<mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
from="${repl.deps.versions}" to="flatten"/>
@@ -1935,7 +1947,6 @@ TODO:
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-parser-combinators-src.jar" basedir="${src.dir}/parser-combinators"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar" basedir="${src.dir}/partest"/>
</target>
<target name="dist.partial" depends="dist.base">
diff --git a/dbuild-meta.json b/dbuild-meta.json
index 705eeeb6b6..3987afa395 100644
--- a/dbuild-meta.json
+++ b/dbuild-meta.json
@@ -126,67 +126,6 @@
"artifacts": [
{
"extension": "jar",
- "name": "scalacheck",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-actors",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-parser-combinators",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scalacheck",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
- "name": "scala-partest",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-compiler",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scalap",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-xml",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scalacheck",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scala-partest",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
"name": "scaladoc",
"organization": "org.scala-lang"
}
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
index 42214dd191..73eee8ac3a 100644
--- a/src/build/dbuild-meta-json-gen.scala
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -1,6 +1,6 @@
// use this script to generate dbuild-meta.json
// make sure the version is specified correctly,
-// update the dependency structura and
+// update the dependency structure and
// check out distributed-build and run `sbt console`:
// TODO: also generate build.xml and eclipse config from a similar data-structure
@@ -40,15 +40,6 @@ val meta =
Seq(ProjectRef("scala-parser-combinators", "org.scala-lang")),
Seq(ProjectRef("scala-library", "org.scala-lang"))),
- Project("scalacheck", "org.scala-lang",
- Seq(ProjectRef("scalacheck", "org.scala-lang")),
- Seq(ProjectRef("scala-library", "org.scala-lang"), ProjectRef("scala-actors", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
-
- Project("scala-partest", "org.scala-lang",
- Seq(ProjectRef("scala-partest", "org.scala-lang")),
- Seq(ProjectRef("scala-compiler", "org.scala-lang"), // TODO: refine to scala-repl
- ProjectRef("scalap", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scalacheck", "org.scala-lang"))),
-
Project("scaladoc", "org.scala-lang",
Seq(ProjectRef("scaladoc", "org.scala-lang")),
Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 84a12066f5..f52a7888ce 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -110,7 +110,6 @@
<deploy-one name="scala-library" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-xml" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-parser-combinators" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-partest" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-reflect" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-swing" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scalap" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
deleted file mode 100644
index ac05f242d5..0000000000
--- a/src/build/maven/scala-partest-pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-partest</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Parallel Test Framework</name>
- <description>testing framework for the Scala compiler.</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 6b6579ce12..fa030300ac 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -158,7 +158,6 @@ MAIN DISTRIBUTION PACKAGING
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
<mvn-copy-lib mvn.artifact.name="scala-actors"/>
- <mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -210,10 +209,6 @@ MAIN DISTRIBUTION PACKAGING
basedir="${build-docs.dir}/scalap">
<include name="**/*"/>
</jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
- basedir="${build-docs.dir}/partest">
- <include name="**/*"/>
- </jar>
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
index fafd79d1d7..af17fd87c0 100644
--- a/src/compiler/scala/reflect/macros/compiler/Validators.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -81,7 +81,11 @@ trait Validators {
// Technically this can be just an alias to MethodType, but promoting it to a first-class entity
// provides better encapsulation and convenient syntax for pattern matching.
- private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type)
+ private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type) {
+ private def tparams_s = if (tparams.isEmpty) "" else tparams.map(_.defString).mkString("[", ", ", "]")
+ private def paramss_s = paramss map (ps => ps.map(s => s"${s.name}: ${s.tpe_*}").mkString("(", ", ", ")")) mkString ""
+ override def toString = "MacroImplSig(" + tparams_s + paramss_s + ret + ")"
+ }
/** An actual macro implementation signature extracted from a macro implementation method.
*
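Aside on the MacroImplSig change above: the new toString just pretty-prints the signature by joining type parameters as "[...]" and each parameter list as "(...)" via mkString. A standalone sketch of the same formatting idea, with a plain case class and string pairs standing in for the compiler's Symbols (the names here are illustrative only, not the real API):

    object MacroImplSigSketch extends App {
      // Stand-in for the compiler's MacroImplSig: names and types are plain strings.
      case class Sig(tparams: List[String], paramss: List[List[(String, String)]], ret: String) {
        private def tparams_s = if (tparams.isEmpty) "" else tparams.mkString("[", ", ", "]")
        private def paramss_s =
          paramss.map(ps => ps.map { case (n, t) => s"$n: $t" }.mkString("(", ", ", ")")).mkString
        override def toString = s"Sig($tparams_s$paramss_s$ret)"
      }

      val sig = Sig(List("T"), List(List("c" -> "Context"), List("x" -> "c.Expr[T]")), "c.Expr[T]")
      println(sig) // Sig([T](c: Context)(x: c.Expr[T])c.Expr[T])
    }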
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index b6f27f71ce..7610df67dc 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -8,6 +8,7 @@ trait Reshape {
import global._
import definitions._
+ import treeInfo.Unapplied
/**
* Rolls back certain changes that were introduced during typechecking of the reifee.
@@ -65,22 +66,9 @@ trait Reshape {
case block @ Block(stats, expr) =>
val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
Block(stats1, expr).copyAttrs(block)
- case unapply @ UnApply(fun, args) =>
- def extractExtractor(tree: Tree): Tree = {
- val Apply(fun, args) = tree
- args match {
- case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
- val Select(extractor, flavor) = fun
- assert(flavor == nme.unapply || flavor == nme.unapplySeq)
- extractor
- case _ =>
- extractExtractor(fun)
- }
- }
-
+ case unapply @ UnApply(Unapplied(Select(fun, nme.unapply | nme.unapplySeq)), args) =>
if (reifyDebug) println("unapplying unapply: " + tree)
- val fun1 = extractExtractor(fun)
- Apply(fun1, args).copyAttrs(unapply)
+ Apply(fun, args).copyAttrs(unapply)
case _ =>
tree
}
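Aside on the Unapplied rewrite above: treeInfo.Unapplied peels off the Apply wrappers that typechecking places around the <unapply-selector> dummy, so the pattern can match the underlying Select(extractor, unapply/unapplySeq) directly instead of recursing by hand. A minimal sketch of the same idea over a simplified tree model (the types and the SelectorDummy marker below are stand-ins, not the compiler's actual Trees API):

    object UnappliedSketch extends App {
      sealed trait Tree
      case class Ident(name: String)                extends Tree
      case class Select(qual: Tree, name: String)   extends Tree
      case class Apply(fun: Tree, args: List[Tree]) extends Tree

      val SelectorDummy = "<unapply-selector>" // stand-in for nme.SELECTOR_DUMMY

      // Peel Apply wrappers until we reach the application to the selector dummy,
      // returning the function being applied -- the spirit of treeInfo.Unapplied.
      object Unapplied {
        def unapply(tree: Tree): Option[Tree] = tree match {
          case Apply(fun, List(Ident(SelectorDummy))) => Some(fun)
          case Apply(fun, _)                          => unapply(fun)
          case _                                      => None
        }
      }

      // A typechecked `case Some(x) =>` pattern looks roughly like this:
      val typedPattern: Tree =
        Apply(Select(Ident("Some"), "unapply"), List(Ident(SelectorDummy)))

      typedPattern match {
        case Unapplied(Select(extractor, "unapply" | "unapplySeq")) =>
          println(extractor) // Ident(Some)
        case _ =>
          println("no extractor found")
      }
    }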
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index efe436f004..f7437e4e6c 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -91,7 +91,7 @@ trait CompilationUnits { self: Global =>
debuglog(s"removing synthetic $sym from $self")
map -= sym
}
- def get(sym: Symbol): Option[Tree] = logResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
+ def get(sym: Symbol): Option[Tree] = debuglogResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
map get sym
}
def keys: Iterable[Symbol] = map.keys
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index a6c69091c5..3f2d759a6d 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -220,12 +220,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
- override def inform(msg: String) = reporter.echo(msg)
- override def globalError(msg: String) = reporter.error(NoPosition, msg)
- override def warning(msg: String) =
- if (settings.fatalWarnings) globalError(msg)
- else reporter.warning(NoPosition, msg)
+ def error(msg: String) = globalError(msg)
+
+ override def inform(msg: String) = inform(NoPosition, msg)
+ override def globalError(msg: String) = globalError(NoPosition, msg)
+ override def warning(msg: String) = warning(NoPosition, msg)
+
+ def globalError(pos: Position, msg: String) = reporter.error(pos, msg)
+ def warning(pos: Position, msg: String) = if (settings.fatalWarnings) globalError(pos, msg) else reporter.warning(pos, msg)
+ def inform(pos: Position, msg: String) = reporter.echo(pos, msg)
// Getting in front of Predef's asserts to supplement with more info.
// This has the happy side effect of masking the one argument forms
@@ -258,17 +261,22 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.debug)
body
}
+
+ override protected def isDeveloper = settings.developer || super.isDeveloper
+
/** This is for WARNINGS which should reach the ears of scala developers
* whenever they occur, but are not useful for normal users. They should
* be precise, explanatory, and infrequent. Please don't use this as a
* logging mechanism. !!! is prefixed to all messages issued via this route
* to make them visually distinct.
*/
- @inline final override def devWarning(msg: => String) {
- if (settings.developer || settings.debug)
- warning("!!! " + msg)
+ @inline final override def devWarning(msg: => String): Unit = devWarning(NoPosition, msg)
+ @inline final def devWarning(pos: Position, msg: => String) {
+ def pos_s = if (pos eq NoPosition) "" else s" [@ $pos]"
+ if (isDeveloper)
+ warning(pos, "!!! " + msg)
else
- log("!!! " + msg) // such warnings always at least logged
+ log(s"!!!$pos_s $msg") // such warnings always at least logged
}
def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
@@ -1107,7 +1115,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
"symbol owners" -> ownerChainString(sym),
"call site" -> (site.fullLocationString + " in " + site.enclosingPackage)
)
- ("\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
+ ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n"
}
catch { case _: Exception | _: TypeError => errorMessage }
}
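Aside on the reporting changes above: the unpositioned inform/globalError/warning now simply forward to new position-taking overloads with NoPosition, and devWarning is gated on isDeveloper so it surfaces as a warning for developers and as a log line otherwise. A compact sketch of that delegation shape (the Position and settings stand-ins below are made up, not the compiler's API):

    object ReportingSketch extends App {
      type Position = Option[Int]   // stand-in for the compiler's Position
      val NoPosition: Position = None
      val developer     = true      // stand-in for settings.developer
      val fatalWarnings = false     // stand-in for settings.fatalWarnings

      def isDeveloper = developer
      def log(msg: => String): Unit = println(s"[log] $msg")

      // Positioned primitives (the reporter calls in the real code).
      def inform(pos: Position, msg: String): Unit      = println(s"[echo $pos] $msg")
      def globalError(pos: Position, msg: String): Unit = println(s"[error $pos] $msg")
      def warning(pos: Position, msg: String): Unit =
        if (fatalWarnings) globalError(pos, msg) else println(s"[warn $pos] $msg")

      // Unpositioned entry points simply delegate with NoPosition.
      def inform(msg: String): Unit      = inform(NoPosition, msg)
      def globalError(msg: String): Unit = globalError(NoPosition, msg)
      def warning(msg: String): Unit     = warning(NoPosition, msg)

      // Developer-only warnings: a real warning for developers, otherwise just logged.
      def devWarning(pos: Position, msg: => String): Unit = {
        def pos_s = if (pos == NoPosition) "" else s" [@ $pos]"
        if (isDeveloper) warning(pos, "!!! " + msg)
        else log(s"!!!$pos_s $msg")
      }
      def devWarning(msg: => String): Unit = devWarning(NoPosition, msg)

      devWarning(Some(42), "unexpected tree shape")
    }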
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 66ed0c8fae..d7a32c3be0 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -83,6 +83,7 @@ trait TreeDSL {
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
+ def INT_- (other: Tree) = fn(target, getMember(IntClass, nme.MINUS), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 641ab9c279..381ffb1ed9 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -66,10 +66,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
*/
def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
- assert(
- mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
- ((mmap(vparamss)(_.symbol), sym))
- )
+ assert(mforall(vparamss)(_.symbol.owner == sym), (mmap(vparamss)(_.symbol), sym))
ClassDef(sym,
gen.mkTemplate(sym.info.parents map TypeTree,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 8479df512e..94270e4cf3 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -2693,7 +2693,7 @@ self =>
syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", skipIt = false)
classContextBounds = List()
}
- val constrAnnots = constructorAnnotations()
+ val constrAnnots = if (!mods.isTrait) constructorAnnotations() else Nil
val (constrMods, vparamss) =
if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 56191cc981..09095879bf 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -78,10 +78,10 @@ abstract class Inliners extends SubComponent {
assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
", most likely this reveals the TFA at fault (receiver and callee don't match).")
if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
+ else sym.overridingSymbol(clazz) orElse (
+ if (sym.owner.isTrait) sym
+ else lookup(clazz.superClass)
+ )
}
if (needsLookup) {
val concreteMethod = lookup(clazz)
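Aside on the orElse rewrite above (the Mixin.scala hunk further down uses the same idiom): Symbol#orElse keeps the receiver unless it is the NoSymbol sentinel, in which case it evaluates the by-name alternative, so the explicit NoSymbol match collapses into one expression. A toy sketch of that sentinel-plus-orElse shape (Sym and NoSym below are invented, not the compiler's Symbols API):

    object OrElseSketch extends App {
      sealed trait Sym {
        def name: String
        // Like Symbol#orElse: keep `this` unless it is the sentinel, else use the alternative.
        def orElse(alt: => Sym): Sym = if (this eq NoSym) alt else this
      }
      case object NoSym extends Sym { def name = "<none>" }
      final case class TermSym(name: String) extends Sym

      // A lookup that may come up empty, in the spirit of sym.overridingSymbol(clazz).
      def overridingSymbolIn(clazz: String): Sym =
        if (clazz == "Impl") TermSym("Impl.foo") else NoSym

      // Falls back to another candidate when the lookup returns the sentinel.
      val resolved = overridingSymbolIn("Base") orElse TermSym("Base.foo")
      println(resolved) // TermSym(Base.foo)
    }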
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 454c9db73c..14e3f5b642 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -659,14 +659,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0, tp)
- logResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
+ debuglogResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
}
// isMonomorphicType is false if the info is incomplete, as it usually is here
// so have to check unsafeTypeParams.isEmpty before worrying about raw type case below,
// or we'll create a boatload of needless existentials.
else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
// raw type - existentially quantify all type parameters
- else logResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
+ else debuglogResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 0dcf4d00b7..d9d08dde1e 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -367,29 +367,3 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
}
}
}
-/*
- val ensureNoEscapes = new TypeTraverser {
- def ensureNoEscape(sym: Symbol) {
- if (sym.hasFlag(PRIVATE)) {
- var o = currentOwner;
- while (o != NoSymbol && o != sym.owner && !o.isLocal && !o.hasFlag(PRIVATE))
- o = o.owner
- if (o == sym.owner) sym.makeNotPrivate(base);
- }
- }
- def traverse(t: Type): TypeTraverser = {
- t match {
- case TypeRef(qual, sym, args) =>
- ensureNoEscape(sym)
- mapOver(t)
- case ClassInfoType(parents, decls, clazz) =>
- parents foreach { p => traverse; () }
- traverse(t.typeOfThis)
- case _ =>
- mapOver(t)
- }
- this
- }
- }
-
-*/
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index ee687e56b0..0a66ba8a32 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -525,8 +525,7 @@ abstract class Erasure extends AddInterfaces
private def isDifferentErasedValueType(tpe: Type, other: Type) =
isErasedValueType(tpe) && (tpe ne other)
- private def isPrimitiveValueMember(sym: Symbol) =
- sym != NoSymbol && isPrimitiveValueClass(sym.owner)
+ private def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
@inline private def box(tree: Tree, target: => String): Tree = {
val result = box1(tree)
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index ce495ca8ca..515fa66cfa 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -489,7 +489,7 @@ abstract class LambdaLift extends InfoTransform {
treeCopy.Assign(tree, qual, rhs)
case Ident(name) =>
val tree1 =
- if (sym != NoSymbol && sym.isTerm && !sym.isLabel)
+ if (sym.isTerm && !sym.isLabel)
if (sym.isMethod)
atPos(tree.pos)(memberRef(sym))
else if (sym.isLocal && !isSameOwnerEnclosure(sym))
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 1c44e86aca..3ec4d16bf5 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -734,10 +734,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym
}
- if (sym ne NoSymbol)
- sym
- else
- createBitmap
+ sym orElse createBitmap
}
def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e2ce2743f7..16c803e2e8 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -64,7 +64,6 @@ abstract class UnCurry extends InfoTransform
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var needTryLift = false
- private var inPattern = false
private var inConstructorFlag = 0L
private val byNameArgs = mutable.HashSet[Tree]()
private val noApply = mutable.HashSet[Tree]()
@@ -79,12 +78,6 @@ abstract class UnCurry extends InfoTransform
@inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
f(newMembers.remove(owner).getOrElse(Nil).toList)
- @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
- inPattern = value
- try body
- finally inPattern = !value
- }
-
private def newFunction0(body: Tree): Tree = {
val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
@@ -119,16 +112,6 @@ abstract class UnCurry extends InfoTransform
&& (isByName(tree.symbol))
)
- /** Uncurry a type of a tree node.
- * This function is sensitive to whether or not we are in a pattern -- when in a pattern
- * additional parameter sections of a case class are skipped.
- */
- def uncurryTreeType(tp: Type): Type = tp match {
- case MethodType(params, MethodType(params1, restpe)) if inPattern =>
- uncurryTreeType(MethodType(params, restpe))
- case _ =>
- uncurry(tp)
- }
// ------- Handling non-local returns -------------------------------------------------
@@ -327,7 +310,7 @@ abstract class UnCurry extends InfoTransform
}
else {
def mkArray = mkArrayValue(args drop (formals.length - 1), varargsElemType)
- if (isJava || inPattern) mkArray
+ if (isJava) mkArray
else if (args.isEmpty) gen.mkNil // avoid needlessly double-wrapping an empty argument list
else arrayToSequence(mkArray, varargsElemType)
}
@@ -474,10 +457,10 @@ abstract class UnCurry extends InfoTransform
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = withInPattern(value = false)(transform(fn))
+ val fn1 = transform(fn)
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, localTyper.expectedPatternTypes(fn, args))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -510,7 +493,7 @@ abstract class UnCurry extends InfoTransform
else super.transform(tree)
case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(value = true)(transform(pat))
+ val pat1 = transform(pat)
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case fun @ Function(_, _) =>
@@ -532,7 +515,7 @@ abstract class UnCurry extends InfoTransform
}
)
assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
- result setType uncurryTreeType(result.tpe)
+ result modifyType uncurry
}
def postTransform(tree: Tree): Tree = exitingUncurry {
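
With pattern handling moved out of UnCurry (expectedPatternTypes is assumed to come from the new PatternTypers machinery added by this commit), the transformer only has to push every tree's type through the uncurry map; `result modifyType uncurry` reads as shorthand for setType(uncurry(result.tpe)). A toy sketch under that assumption:

    object UncurryDemo {
      // Illustrative only, not compiler API: modifyType is assumed to be shorthand
      // for setType(f(tpe)); a plain string rewrite stands in for the uncurry TypeMap.
      final class ToyTree(var tpe: String) {
        def setType(t: String): this.type = { tpe = t; this }
        def modifyType(f: String => String): this.type = setType(f(tpe))
      }

      val uncurry: String => String = _.replace(")(", ", ")
      def demo: String = new ToyTree("(Int)(String)Unit").modifyType(uncurry).tpe  // "(Int, String)Unit"
    }
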
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 069484ff65..45aa1106f0 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -13,7 +13,6 @@ import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
import scala.reflect.internal.util.HashSet
-
trait Logic extends Debugging {
import PatternMatchingStats._
@@ -494,7 +493,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
- import global.definitions.{AnyClass, UnitClass}
+ import global.definitions._
// all our variables range over types
@@ -549,7 +548,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
def tp: Type
def wideTp: Type
- def isAny = wideTp.typeSymbol == AnyClass
+ def isAny = wideTp =:= AnyTpe
def isValue: Boolean //= tp.isStable
// note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
@@ -564,6 +563,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
// (At least conceptually: `true` is an instance of class `Boolean`)
private def widenToClass(tp: Type): Type =
if (tp.typeSymbol.isClass) tp
+ else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp)
else tp.baseType(tp.baseClasses.head)
object TypeConst extends TypeConstExtractor {
@@ -606,7 +606,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (tp.isInstanceOf[SingletonType]) tp
else p match {
case Literal(c) =>
- if (c.tpe.typeSymbol == UnitClass) c.tpe
+ if (c.tpe =:= UnitTpe) c.tpe
else ConstantType(c)
case Ident(_) if p.symbol.isStable =>
// for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
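
widenToClass now fails loudly on a type with no base classes instead of letting baseClasses.head throw. The same shape can be tried outside the compiler with the public reflection API (assumption: scala-reflect on the classpath; typeSymbol, baseClasses and baseType are the runtime-universe counterparts of the internal calls above):

    import scala.reflect.runtime.universe._

    object WidenDemo {
      // Widen a type to its nearest class, mirroring widenToClass above: a type whose
      // symbol is already a class comes back unchanged; otherwise fall back to the
      // head of its base classes, failing loudly when there are none.
      def widenToClass(tp: Type): Type =
        if (tp.typeSymbol.isClass) tp
        else if (tp.baseClasses.isEmpty) sys.error("Bad type: " + tp)
        else tp.baseType(tp.baseClasses.head)
    }
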
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index f089c8f5a5..8feb87210e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -21,7 +21,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// unfortunately this is not true in general:
// SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
- val tpValue = tp.typeSymbol.isPrimitiveValueClass
+ val tpValue = isPrimitiveValueType(tp)
// pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
// (and the subtype is respectively a value type or not a value type)
@@ -59,17 +59,20 @@ trait TreeAndTypeAnalysis extends Debugging {
debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
None
case sym =>
- val subclasses = (
- sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ val subclasses = debug.patmatResult(s"enum $sym sealed, subclasses")(
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- debug.patmat("enum sealed -- subclasses: "+ ((sym, subclasses)))
+ sym.sealedDescendants.toList
+ sortBy (_.sealedSortName)
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ )
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
+
+ Some(debug.patmatResult(s"enum sealed tp=$tp, tpApprox=$tpApprox as") {
// valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- val validSubTypes = (subclasses flatMap {sym =>
+ subclasses flatMap { sym =>
// have to filter out children which cannot match: see ticket #3683 for an example
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
@@ -81,9 +84,8 @@ trait TreeAndTypeAnalysis extends Debugging {
// debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
- })
- debug.patmat("enum sealed "+ ((tp, tpApprox)) + " as "+ validSubTypes)
- Some(validSubTypes)
+ }
+ })
}
// approximate a type to the static type that is fully checkable at run time,
@@ -104,10 +106,7 @@ trait TreeAndTypeAnalysis extends Debugging {
mapOver(tp)
}
}
-
- val res = typeArgsToWildcardsExceptArray(tp)
- debug.patmat("checkable "+((tp, res)))
- res
+ debug.patmatResult(s"checkableType($tp)")(typeArgsToWildcardsExceptArray(tp))
}
// a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
@@ -136,20 +135,17 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
var currId = 0
}
case class Test(prop: Prop, treeMaker: TreeMaker) {
- // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ // private val reusedBy = new mutable.HashSet[Test]
var reuses: Option[Test] = None
def registerReuseBy(later: Test): Unit = {
assert(later.reuses.isEmpty, later.reuses)
// reusedBy += later
later.reuses = Some(this)
}
-
val id = { Test.currId += 1; Test.currId}
- override def toString =
- "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ override def toString = s"T${id}C($prop)"
}
-
class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
override def uniqueNonNullProp(p: Tree): Prop = True
}
@@ -158,9 +154,9 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
class TreeMakersToProps(val root: Symbol) {
prepareNewAnalysis() // reset hash consing for Var and Const
- private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
- private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
- private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+ private[this] val uniqueEqualityProps = new mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new mutable.HashMap[(Tree, Type), Eq]
def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
@@ -222,7 +218,7 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
// so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ t.isInstanceOf[Ident] && t.symbol.exists && pointsToBound(f)
}
val (boundFrom, boundTo) = boundSubst.unzip
val (unboundFrom, unboundTo) = unboundSubst.unzip
@@ -624,9 +620,9 @@ trait MatchAnalysis extends MatchApproximation {
private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
- private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
- private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
- private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+ private lazy val ctorParams = if (ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = ctor.safeOwner
+ private lazy val caseFieldAccs = cls.caseFieldAccessors
def addField(symbol: Symbol, assign: VariableAssignment) {
// SI-7669 Only register this field if if this class contains it.
@@ -686,8 +682,7 @@ trait MatchAnalysis extends MatchApproximation {
// TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
case _ => NoExample
}
- debug.patmat("described as: "+ res)
- res
+ debug.patmatResult("described as")(res)
}
override def toString = toCounterExample().toString
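
The repeated `debug.patmat(...); res` pairs collapse into debug.patmatResult, which by all appearances is a log-and-return helper, and the prop caches keep hash-consing through getOrElseUpdate so that reference equality suffices later on. A standalone sketch of both idioms (illustrative names; the real patmatResult signature is not shown in this diff):

    import scala.collection.mutable

    object PatmatHelpersDemo {
      // Log-and-return, in the spirit of debug.patmatResult: tag a computed value
      // with a message and hand it back unchanged, keeping call sites expression-shaped.
      def logResult[T](msg: String)(result: T): T = { println(s"$msg: $result"); result }

      // Hash-consing via getOrElseUpdate, as in the unique*Props caches above: equal
      // keys always map to the same cached instance, so later code can compare by reference.
      private val uniqueProps = mutable.HashMap.empty[(String, String), AnyRef]
      def uniqueEqualityProp(path: String, rhs: String): AnyRef =
        uniqueProps.getOrElseUpdate((path, rhs), new AnyRef)
    }
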
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 1e4c56529c..cf74f0fb11 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -62,45 +62,44 @@ trait MatchCodeGen extends Interface {
def codegen: AbsCodegen
abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+
+ // Right now this blindly calls drop on the result of the unapplySeq
+ // unless it verifiably has no drop method (this is the case in particular
+ // with Array.) You should not actually have to write a method called drop
+ // for name-based matching, but this was an expedient route for the basics.
+ def drop(tgt: Tree)(n: Int): Tree = {
+ def callDirect = fn(tgt, nme.drop, LIT(n))
+ def callRuntime = Apply(REF(traversableDropMethod), tgt :: LIT(n) :: Nil)
+ def needsRuntime = (tgt.tpe ne null) && (typeOfMemberNamedDrop(tgt.tpe) == NoType)
+
+ if (needsRuntime) callRuntime else callDirect
+ }
+
+ // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder)
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant(()))
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
+ def mkZero(tp: Type): Tree = gen.mkConstantZero(tp) match {
+ case Constant(null) => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ case const => Literal(const)
}
}
}
trait PureMatchMonadInterface extends MatchMonadInterface {
val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
import CODE._
def _match(n: Name): SelectStart = matchStrategy DOT n
- private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ // TODO: error message
+ private lazy val oneType = typer.typedOperator(_match(vpmName.one)).tpe
+ override def pureType(tp: Type): Type = firstParamType(appliedType(oneType, tp :: Nil))
}
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
@@ -132,13 +131,7 @@ trait MatchCodeGen extends Interface {
}
}
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with MatchMonadInterface {
override def codegen: AbsCodegen = optimizedCodegen
    // when we know we're targeting Option, do some inlining the optimizer won't do
@@ -154,9 +147,8 @@ trait MatchCodeGen extends Interface {
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = newSynthCaseLabel("matchEnd")
val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- matchEnd setInfo MethodType(List(matchRes), restpe)
+ val matchEnd = newSynthCaseLabel("matchEnd") setInfo MethodType(List(matchRes), restpe)
def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
var _currCase = newCaseSym
@@ -168,23 +160,22 @@ trait MatchCodeGen extends Interface {
LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
}
-
// must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
val catchAllDef = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ val scrutRef = scrutSym.fold(EmptyTree: Tree)(REF) // for alternatives
LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
} toList // at most 1 element
// scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ val scrutDef = scrutSym.fold(List[Tree]())(sym => (VAL(sym) === scrut) :: Nil) // for alternatives
// the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
Block(
scrutDef ++ caseDefs ++ catchAllDef,
LabelDef(matchEnd, List(matchRes), REF(matchRes))
@@ -206,15 +197,14 @@ trait MatchCodeGen extends Interface {
// next: MatchMonad[U]
// returns MatchMonad[U]
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
+ val prevSym = freshSym(prev.pos, prev.tpe, "o")
BLOCK(
VAL(prevSym) === prev,
// must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ ifThenElseZero(
+ NOT(prevSym DOT vpmName.isEmpty),
+ Substitution(b, prevSym DOT vpmName.get)(next)
+ )
)
}
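
After dropping inMatchMonad, flatMap binds the previous extractor result and branches on its isEmpty/get members directly, regardless of which monad-like type the extractor returned. Specialised to Option, the generated trees correspond to roughly the following (illustrative sketch, not the emitted code itself):

    object FlatMapInlinedDemo {
      // What the BLOCK / ifThenElseZero built above amounts to: bind the previous
      // extractor result once, branch on isEmpty, and feed .get to the continuation;
      // None plays the role of the match's zero.
      def flatMapInlined[A, B](prev: Option[A])(next: A => Option[B]): Option[B] = {
        val o = prev                  // VAL(prevSym) === prev
        if (!o.isEmpty) next(o.get)   // NOT(prevSym DOT vpmName.isEmpty) / prevSym DOT vpmName.get
        else None                     // the zero branch
      }
    }
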
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
new file mode 100644
index 0000000000..0d08120e43
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala
@@ -0,0 +1,37 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+/** Segregating this super hacky CPS code. */
+trait MatchCps {
+ self: PatternMatching =>
+
+ import global._
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private object CpsSymbols {
+ private def cpsSymbol(name: String) = rootMirror.getClassIfDefined(s"scala.util.continuations.$name")
+
+ val MarkerCPSAdaptPlus = cpsSymbol("cpsPlus")
+ val MarkerCPSAdaptMinus = cpsSymbol("cpsMinus")
+ val MarkerCPSSynth = cpsSymbol("cpsSynth")
+ val MarkerCPSTypes = cpsSymbol("cpsParam")
+ val stripTriggerCPSAnns = Set[Symbol](MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ val strippedCPSAnns = stripTriggerCPSAnns + MarkerCPSTypes
+
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ def removeCPSFromPt(pt: Type): Type = (
+ if (MarkerCPSAdaptPlus.exists && (stripTriggerCPSAnns exists pt.hasAnnotation))
+ pt filterAnnotations (ann => !(strippedCPSAnns exists ann.matches))
+ else
+ pt
+ )
+ }
+ def removeCPSFromPt(pt: Type): Type = CpsSymbols removeCPSFromPt pt
+}
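
Everything in MatchCps hinges on getClassIfDefined: when the continuations plugin is absent the marker classes simply do not exist and removeCPSFromPt returns pt unchanged. The optional-lookup idea can be reproduced with runtime reflection (assumption: scala-reflect on the classpath; staticClass throws when the class is missing):

    import scala.reflect.runtime.{universe => ru}
    import scala.util.Try

    object CpsLookupDemo {
      private val mirror = ru.runtimeMirror(getClass.getClassLoader)

      // Optional class lookup, in the spirit of rootMirror.getClassIfDefined:
      // None when the continuations library is not on the classpath.
      def classIfDefined(name: String): Option[ru.ClassSymbol] =
        Try(mirror.staticClass(name)).toOption

      val MarkerCPSAdaptPlus = classIfDefined("scala.util.continuations.cpsPlus")
    }
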
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index 9854e4ef62..ec45789687 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -210,7 +210,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
// }
//// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ trait SwitchEmission extends TreeMakers with MatchMonadInterface {
import treeInfo.isGuardedCase
abstract class SwitchMaker {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index fcee142932..75335f7920 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -12,86 +12,165 @@ import scala.reflect.internal.util.Statistics
/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
*/
-trait MatchTranslation { self: PatternMatching =>
+trait MatchTranslation {
+ self: PatternMatching =>
+
import PatternMatchingStats._
import global._
import definitions._
import global.analyzer.{ErrorUtils, formalTypes}
+ import treeInfo.{ WildcardStarArg, Unapplied, isStar, unbind }
+ import CODE._
+
+ // Always map repeated params to sequences
+ private def setVarInfo(sym: Symbol, info: Type) =
+ sym setInfo debug.patmatResult(s"changing ${sym.defString} to")(repeatedToSeq(info))
+
+ private def hasSym(t: Tree) = t.symbol != null && t.symbol != NoSymbol
- trait MatchTranslator extends TreeMakers {
+ trait MatchTranslator extends TreeMakers with TreeMakerWarnings {
import typer.context
- // Why is it so difficult to say "here's a name and a context, give me any
- // matching symbol in scope" ? I am sure this code is wrong, but attempts to
- // use the scopes of the contexts in the enclosing context chain discover
- // nothing. How to associate a name with a symbol would would be a wonderful
- // linkage for which to establish a canonical acquisition mechanism.
- def matchingSymbolInScope(pat: Tree): Symbol = {
- def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
- case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
- case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
- case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
- case _ => NoSymbol
- }
- pat match {
- case Bind(name, _) =>
- context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
- res orElse declarationOfName(ctx.owner.rawInfo, name))
- case _ => NoSymbol
+ object SymbolBound {
+ def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
+ case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr)
+ case _ => None
}
}
- // Issue better warnings than "unreachable code" when people mis-use
- // variable patterns thinking they bind to existing identifiers.
- //
- // Possible TODO: more deeply nested variable patterns, like
- // case (a, b) => 1 ; case (c, d) => 2
- // However this is a pain (at least the way I'm going about it)
- // and I have to think these detailed errors are primarily useful
- // for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(cases: List[CaseDef]) {
- // A string describing the first variable pattern
- var vpat: String = null
- // Using an iterator so we can recognize the last case
- val it = cases.iterator
-
- def addendum(pat: Tree) = {
- matchingSymbolInScope(pat) match {
- case NoSymbol => ""
- case sym =>
- val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
- s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match {
+ case SymbolBound(sym, expr) => BoundTree(setVarInfo(sym, pt), expr)
+ case _ => BoundTree(setVarInfo(freshSym(tree.pos, prefix = "p"), pt), tree)
+ }
+
+ final case class BoundTree(binder: Symbol, tree: Tree) {
+ private lazy val extractor = ExtractorCall(tree)
+
+ def pos = tree.pos
+ def tpe = binder.info.dealiasWiden // the type of the variable bound to the pattern
+ def pt = unbound match {
+ case Star(tpt) => this glbWith seqType(tpt.tpe)
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
+ def repeatedType = unbound match {
+ case Star(tpt) => tpt.tpe
+ case _ => NoType
+ }
+ def glbWith(other: Type) = glb(tpe :: other :: Nil).normalize
+
+ object SymbolAndTypeBound {
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ case SymbolBound(sym, SymbolAndTypeBound(_, tpe)) => Some(sym -> tpe)
+ case TypeBound(tpe) => Some(binder -> tpe)
+ case _ => None
}
}
- while (it.hasNext) {
- val cdef = it.next()
- // If a default case has been seen, then every succeeding case is unreachable.
- if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
- // If this is a default case and more cases follow, warn about this one so
- // we have a reason to mention its pattern variable name and any corresponding
- // symbol in scope. Errors will follow from the remaining cases, at least
- // once we make the above warning an error.
- else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
- val vpatName = cdef.pat match {
- case Bind(name, _) => s" '$name'"
- case _ => ""
- }
- vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ object TypeBound {
+ def unapply(tree: Tree): Option[Type] = unbind(tree) match {
+ case Typed(Ident(_), _) if tree.tpe != null => Some(tree.tpe)
+ case _ => None
}
}
+
+ private def rebindTo(pattern: Tree) = BoundTree(binder, pattern)
+ private def step(treeMakers: TreeMaker*)(subpatterns: BoundTree*): TranslationStep = TranslationStep(treeMakers.toList, subpatterns.toList)
+
+ private def bindingStep(sub: Symbol, subpattern: Tree) = step(SubstOnlyTreeMaker(sub, binder))(rebindTo(subpattern))
+ private def equalityTestStep() = step(EqualityTestTreeMaker(binder, tree, pos))()
+ private def typeTestStep(sub: Symbol, subPt: Type) = step(TypeTestTreeMaker(sub, binder, subPt, glbWith(subPt))(pos))()
+ private def alternativesStep(alts: List[Tree]) = step(AlternativesTreeMaker(binder, translatedAlts(alts), alts.head.pos))()
+ private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate())
+ private def noStep() = step()()
+
+ private def unsupportedPatternMsg = sm"""
+ |unsupported pattern: ${tree.shortClass} / $this (this is a scalac bug.)
+ |""".trim
+
+ // example check: List[Int] <:< ::[Int]
+ private def extractorStep(): TranslationStep = {
+ import extractor.{ paramType, treeMaker }
+ if (!extractor.isTyped)
+ ErrorUtils.issueNormalTypeError(tree, "Could not typecheck extractor call: "+ extractor)(context)
+
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ lazy val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true)
+ // check whether typetest implies binder is not null,
+ // even though the eventual null check will be on typeTest.nextBinder
+ // it'll be equal to binder casted to paramType anyway (and the type test is on binder)
+ def extraction: TreeMaker = treeMaker(typeTest.nextBinder, typeTest impliesBinderNonNull binder, pos)
+
+ // paramType = the type expected by the unapply
+ // TODO: paramType may contain unbound type params (run/t2800, run/t3530)
+ val makers = (
+ // Statically conforms to paramType
+ if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil
+ else typeTest :: extraction :: Nil
+ )
+ step(makers: _*)(extractor.subBoundTrees: _*)
+ }
+
+ // Summary of translation cases. I moved the excerpts from the specification further below so all
+ // the logic can be seen at once.
+ //
+ // [1] skip wildcard trees -- no point in checking them
+ // [2] extractor and constructor patterns
+ // [3] replace subpatBinder by patBinder, as if the Bind was not there.
+ // It must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type,
+ // this is not guaranteed until we cast
+ // [4] typed patterns - a typed pattern never has any subtrees
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ // [5] literal and stable id patterns
+ // [6] pattern alternatives
+ // [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
+ // don't fail here though (or should we?)
+ def nextStep(): TranslationStep = tree match {
+ case WildcardPattern() => noStep()
+ case _: UnApply | _: Apply => extractorStep()
+ case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
+ case TypeBound(tpe) => typeTestStep(binder, tpe)
+ case SymbolBound(sym, expr) => bindingStep(sym, expr)
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) => equalityTestStep()
+ case Alternative(alts) => alternativesStep(alts)
+ case _ => context.unit.error(pos, unsupportedPatternMsg) ; noStep()
+ }
+ def translate(): List[TreeMaker] = nextStep() merge (_.translate())
+
+ private def setInfo(paramType: Type): Boolean = {
+ devWarning(s"resetting info of $this to $paramType")
+ setVarInfo(binder, paramType)
+ true
+ }
+ // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having
+ // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary
+ // because apparently patBinder may have an unfortunate type (.decls don't have the case field
+ // accessors) TODO: get to the bottom of this -- I assume it happens when type checking
+ // infers a weird type for an unapply call. By going back to the parameterType for the
+ // extractor call we get a saner type, so let's just do that for now.
+ def ensureConformsTo(paramType: Type): Boolean = (
+ (tpe =:= paramType)
+ || (tpe <:< paramType) && setInfo(paramType)
+ )
+
+ private def concreteType = tpe.bounds.hi
+ private def unbound = unbind(tree)
+ private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
+ private def at_s = unbound match {
+ case WildcardPattern() => ""
+ case pat => s" @ $pat"
+ }
+ override def toString = s"${binder.name}: $tpe_s$at_s"
}
- // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
- private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
- private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
- private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
- private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
- private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
- private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
- private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ final case class TranslationStep(makers: List[TreeMaker], subpatterns: List[BoundTree]) {
+ def merge(f: BoundTree => List[TreeMaker]): List[TreeMaker] = makers ::: (subpatterns flatMap f)
+ override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")")
+ }
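
The rewritten translator turns each pattern node into a TranslationStep and merges that node's TreeMakers with the recursive translation of its subpatterns. Stripped of compiler types, the recursion scheme is roughly the following (toy pattern AST, illustrative names):

    object TranslationDemo {
      sealed trait Pat
      case object Wild extends Pat
      final case class Extractor(name: String, subs: List[Pat]) extends Pat

      // One step of translation: the makers produced at this node, plus the
      // subpatterns still to be translated -- mirroring TranslationStep.merge above.
      final case class Step(makers: List[String], subpatterns: List[Pat]) {
        def merge(f: Pat => List[String]): List[String] = makers ::: subpatterns.flatMap(f)
      }

      def nextStep(p: Pat): Step = p match {
        case Wild                 => Step(Nil, Nil)                       // noStep()
        case Extractor(name, sub) => Step(List(s"extract($name)"), sub)   // extractorStep()
      }

      def translate(p: Pat): List[String] = nextStep(p).merge(translate)
    }
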
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -107,10 +186,8 @@ trait MatchTranslation { self: PatternMatching =>
val Match(selector, cases) = match_
val (nonSyntheticCases, defaultOverride) = cases match {
- case init :+ last if treeInfo isSyntheticDefaultCase last =>
- (init, Some(((scrut: Tree) => last.body)))
- case _ =>
- (cases, None)
+ case init :+ last if treeInfo isSyntheticDefaultCase last => (init, Some(((scrut: Tree) => last.body)))
+ case _ => (cases, None)
}
checkMatchVariablePatterns(nonSyntheticCases)
@@ -127,18 +204,11 @@ trait MatchTranslation { self: PatternMatching =>
val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
- val origPt = match_.tpe
// when one of the internal cps-type-state annotations is present, strip all CPS annotations
- // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
- // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
- val ptUnCPS =
- if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
- removeCPSAdaptAnnotations(origPt)
- else origPt
-
+ val origPt = removeCPSFromPt(match_.tpe)
// relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
// pt is the skolemized version
- val pt = repeatedToSeq(ptUnCPS)
+ val pt = repeatedToSeq(origPt)
// val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -183,7 +253,7 @@ trait MatchTranslation { self: PatternMatching =>
CaseDef(
Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(REF(exSym))))
)
})
}
@@ -191,8 +261,6 @@ trait MatchTranslation { self: PatternMatching =>
typer.typedCases(catches, ThrowableTpe, WildcardType)
}
-
-
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
* 2) the combination of the extractor calls using `flatMap`.
@@ -221,166 +289,12 @@ trait MatchTranslation { self: PatternMatching =>
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+ val CaseDef(pattern, guard, body) = caseDef
+ translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
}
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- debug.patmat("translateExtractorPattern checking parameter type: "+ ((patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType)))
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
- debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
- b setInfo tp
- }
-
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
- val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
- if (patBinder.info.widen <:< extractor.paramType) {
- // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
- // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
- // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
- // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
- /* TODO: uncomment when `settings.developer` and `devWarning` become available
- if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
- devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
- */
- (Nil, patBinder setInfo extractor.paramType, false)
- } else {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
- // check whether typetest implies patBinder is not null,
- // even though the eventual null check will be on patBinderOrCasted
- // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
- (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
- }
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
- case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
- /* A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
- The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
- and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
- The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
- A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
- noFurtherSubPats()
- }
-
- /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- */
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
- /* A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- */
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /* 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- */
- case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
-
- case _ =>
- typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
+ def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate()
def translateGuard(guard: Tree): List[TreeMaker] =
if (guard == EmptyTree) Nil
@@ -395,28 +309,87 @@ trait MatchTranslation { self: PatternMatching =>
def translateBody(body: Tree, matchPt: Type): TreeMaker =
BodyTreeMaker(body, matchPt)
+ // Some notes from the specification
+
+ /*A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected
+ types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the
+ instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the
+ expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn)
+ where each element pattern pi matches the corresponding value vi .
+ A special case arises when c’s formal parameter types end in a repeated parameter.
+ This is further discussed in (§8.1.9).
+ **/
+
+ /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ */
+
+ /* A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ */
+
+ /* 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ */
+
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ // TODO: check unargs == args
+ def apply(tree: Tree): ExtractorCall = tree match {
+ case UnApply(unfun, args) => new ExtractorCallRegular(unfun, args) // extractor
+ case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class
+ }
}
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
+ abstract class ExtractorCall {
+ def fun: Tree
+ def args: List[Tree]
- // everything okay, captain?
- def isTyped : Boolean
+ val nbSubPats = args.length
+ val starLength = if (hasStar) 1 else 0
+ val nonStarLength = args.length - starLength
+ // everything okay, captain?
+ def isTyped: Boolean
def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ private def hasStar = nbSubPats > 0 && isStar(args.last)
+ private def isNonEmptySeq = nbSubPats > 0 && isSeq
+
+ /** This is special cased so that a single pattern will accept any extractor
+ * result, even if it's a tuple (SI-6675)
+ */
+ def isSingle = nbSubPats == 1 && !isSeq
// to which type should the previous binder be casted?
def paramType : Type
+ protected def rawSubPatTypes: List[Type]
+ protected def resultType: Type
+
/** Create the TreeMaker that embodies this extractor call
*
* `binder` has been casted to `paramType` if necessary
@@ -427,79 +400,91 @@ trait MatchTranslation { self: PatternMatching =>
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
+ // must set infos to `subPatTypes`, which are provided by extractor's result,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ def subPatBinders = subBoundTrees map (_.binder)
+ lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree
// never store these in local variables (for PreserveSubPatBinders)
- lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
- case (b, PatternBoundToUnderscore()) => b
- }.toSet
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
- if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
- else formalTypes(formalsWithRepeated, nbSubPats)
- } else rawSubPatTypes
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet
+
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ private def nonStarSubPatTypes = formalTypes(rawInit :+ repeatedType, nonStarLength)
+
+ def subPatTypes: List[Type] = (
+ if (rawSubPatTypes.isEmpty || !isSeq) rawSubPatTypes
+ else if (hasStar) nonStarSubPatTypes :+ sequenceType
+ else nonStarSubPatTypes
+ )
+
+ private def rawGet = typeOfMemberNamedGetOrSelf(resultType)
+ private def emptySub = rawSubPatTypes.isEmpty
+ private def rawInit = rawSubPatTypes dropRight 1
+ protected def sequenceType = typeOfLastSelectorOrSelf(rawGet)
+ protected def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
+ protected def repeatedType = scalaRepeatedType(elementType)
+
+ // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected def firstIndexingBinder = rawSubPatTypes.length - 1
+ protected def lastIndexingBinder = nbSubPats - 1 - starLength
+ protected def expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+
+ private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList
+ private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder))
+ private def genDrop(binder: Symbol, n: Int): List[Tree] = codegen.drop(seqTree(binder))(expectedLength) :: Nil
+
+ // codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder + 1)
protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
+ // the trees that select the subpatterns on the extractor's result,
+ // referenced by `binder`
protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
+ def lastTrees: List[Tree] = (
+ if (!hasStar) Nil
+ else if (expectedLength == 0) seqTree(binder) :: Nil
+ else genDrop(binder, expectedLength)
+ )
// this error-condition has already been checked by checkStarPatOK:
// if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+
+ // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ // [3] the last one -- if the last subpattern is a sequence wildcard:
+ // drop the prefix (indexed by the refs on the preceding line), return the remainder
+ ( productElemsToN(binder, firstIndexingBinder)
+ ++ genTake(binder, expectedLength)
+ ++ lastTrees
+ ).toList
}
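
subPatRefsSeq now reads as three pieces: the tuple elements before the Seq, the indexed prefix (genTake), and the dropped remainder for a trailing wildcard-star (genDrop). A toy rendering of the last two pieces (illustrative; it elides the productElemsToN prefix):

    object SubPatRefsDemo {
      // The non-star subpatterns index into the sequence, and a trailing
      // wildcard-star binds whatever remains after that indexed prefix.
      def subPatRefsSeq(seq: Seq[Any], expectedLength: Int, hasStar: Boolean): List[Any] = {
        val indexed = (0 until expectedLength).toList.map(seq(_))   // genTake
        val rest =
          if (!hasStar) Nil
          else if (expectedLength == 0) List(seq)                   // whole sequence for `xs @ _*`
          else List(seq.drop(expectedLength))                       // genDrop
        indexed ++ rest
      }
    }
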
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
+ if (isNonEmptySeq) subPatRefsSeq(binder) else productElemsToN(binder, nbSubPats)
+
+ private def compareInts(t1: Tree, t2: Tree) =
+ gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil)
protected def lengthGuard(binder: Symbol): Option[Tree] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- checkedLength map { expectedLength => import CODE._
+ checkedLength map { expectedLength =>
// `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+ // ...if binder has a lengthCompare method, otherwise
+ // `scala.math.signum(binder.length - expectedLength)`
+ def checkExpectedLength = sequenceType member nme.lengthCompare match {
+ case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength))
+ case lencmp => (seqTree(binder) DOT lencmp)(LIT(expectedLength))
+ }
// the comparison to perform
// when the last subpattern is a wildcard-star the expectedLength is but a lower bound
// (otherwise equality is required)
def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
+ if (hasStar) _ INT_>= _
+ else _ INT_== _
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
(seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
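
The length guard prefers a lengthCompare member and otherwise falls back to the signum of length minus the expected arity, comparing with >= when the pattern ends in a wildcard-star and == otherwise. A toy rendering of that check (illustrative only):

    object LengthGuardDemo {
      // Prefer lengthCompare, fall back to math.signum(length - expected) when no
      // such member exists (e.g. Array); compare with >= 0 for a trailing
      // wildcard-star, == 0 for an exact arity, after a null check on the sequence.
      def lengthGuard(seq: Seq[Any], expected: Int, hasStar: Boolean): Boolean =
        (seq != null) && {
          val cmp = seq.lengthCompare(expected)   // or: math.signum(seq.length - expected)
          if (hasStar) cmp >= 0 else cmp == 0
        }
    }
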
@@ -507,33 +492,29 @@ trait MatchTranslation { self: PatternMatching =>
def checkedLength: Option[Int] =
// no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
+ if (!isSeq || expectedLength < starLength) None
else Some(expectedLength)
-
}
// TODO: to be called when there's a def unapplyProd(x: T): U
// U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
// for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ class ExtractorCallProd(val fun: Tree, val args: List[Tree]) extends ExtractorCall {
// TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
/*override def equals(x$1: Any): Boolean = ...
val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
*/
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+
private def constructorTp = fun.tpe
def isTyped = fun.isTyped
// to which type should the previous binder be casted?
def paramType = constructorTp.finalResultType
+ def resultType = fun.tpe.finalResultType
+
+ def isSeq = isVarArgTypes(rawSubPatTypes)
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
/** Create the TreeMaker that embodies this extractor call
@@ -547,34 +528,36 @@ trait MatchTranslation { self: PatternMatching =>
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
// make an exception for classes under the scala package as they should be well-behaved,
// to optimize matching on List
- val mutableBinders =
+ val mutableBinders = (
if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
(paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
+ )
// checks binder ne null before chaining to the next extractor
ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = {
val accessors = binder.caseFieldAccessors
if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ override def toString() = s"ExtractorCallProd($fun:${fun.tpe} / ${fun.symbol} / args=$args)"
}
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall {
+ val Unapplied(fun) = extractorCallIncludingDummy
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
+ def tpe = fun.tpe
+ def paramType = firstParamType(tpe)
def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
+ def isTyped = (tpe ne NoType) && fun.isTyped && (resultInMonad ne ErrorType)
+ def isSeq = fun.symbol.name == nme.unapplySeq
+ def isBool = resultType =:= BooleanTpe
/** Create the TreeMaker that embodies this extractor call
*
@@ -587,49 +570,56 @@ trait MatchTranslation { self: PatternMatching =>
* Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
*/
def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ // the extractor call (applied to the binder bound by the flatMap corresponding
+ // to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
+ // wrong when isSeq, and resultInMonad should always be correct since it comes
+ // directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad))
+
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
+ subPatBinders,
+ subPatRefs(binder),
+ isBool,
+ checkedLength,
+ patBinderOrCasted,
+ ignoredSubPatBinders
+ )
}
override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
+ if (firstIndexingBinder == 0) REF(binder)
else super.seqTree(binder)
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ if (isSingle) REF(binder) :: Nil // special case for extractors
else super.subPatRefs(binder)
protected def spliceApply(binder: Symbol): Tree = {
object splice extends Transformer {
override def transform(t: Tree) = t match {
case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
- case _ => super.transform(t)
+ treeCopy.Apply(t, x, (REF(binder) setPos i.pos) :: Nil)
+ case _ =>
+ super.transform(t)
}
}
- splice.transform(extractorCallIncludingDummy)
+ splice transform extractorCallIncludingDummy
}
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitTpe
- else matchMonadResult(resultType)
- }
+ // what's the extractor's result type in the monad? It is the type of its nullary member `get`.
+ protected lazy val resultInMonad: Type = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType)
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(!isSeq && nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
+ protected lazy val rawSubPatTypes = (
+ if (isBool) Nil
+ else if (isSingle) resultInMonad :: Nil // don't go looking for selectors if we only expect one pattern
+ else typesOfSelectorsOrSelf(resultInMonad)
+ )
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ override def toString() = s"ExtractorCallRegular($fun: $tpe / ${fun.symbol})"
}
/** A conservative approximation of which patterns do not discern anything.
@@ -638,10 +628,9 @@ trait MatchTranslation { self: PatternMatching =>
object WildcardPattern {
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
case Star(WildcardPattern()) => true
case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case Alternative(ps) => ps forall unapply
case EmptyTree => true
case _ => false
}
@@ -651,7 +640,7 @@ trait MatchTranslation { self: PatternMatching =>
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Alternative(ps) => ps forall unapply
case Typed(PatternBoundToUnderscore(), _) => true
case _ => false
}
@@ -659,9 +648,8 @@ trait MatchTranslation { self: PatternMatching =>
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
+ case t@Bind(n, p) if t.hasExistingSymbol => Some((t.symbol, p)) // pos/t2429 does not satisfy these conditions
+ case _ => None
}
}
}
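For orientation, the length guard assembled by lengthGuard/checkExpectedLength above boils down to a check like the following hand-written sketch (elems, expected and hasStar are illustrative names, not compiler identifiers):

    object LengthGuardSketch {
      // With a trailing wildcard-star the expected length is only a lower bound;
      // otherwise the pattern requires an exact length (mirrors compareOp above).
      def lengthOk[A](elems: Seq[A], expected: Int, hasStar: Boolean): Boolean =
        (elems ne null) && {
          // lengthCompare is used when the member exists; the generated fallback is
          // the equivalent scala.math.signum(elems.length - expected).
          val cmp = elems.lengthCompare(expected)
          if (hasStar) cmp >= 0 else cmp == 0
        }

      def main(args: Array[String]): Unit = {
        println(lengthOk(List(1, 2, 3), 2, hasStar = true))  // true: at least two elements
        println(lengthOk(List(1, 2), 3, hasStar = false))    // false: exactly three required
      }
    }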
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index baccdcf544..942aa80c34 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -201,6 +201,16 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def extraStoredBinders: Set[Symbol] = Set()
+ debug.patmat(s"""
+ |ExtractorTreeMaker($extractor, $extraCond, $nextBinder) {
+ | $subPatBinders
+ | $subPatRefs
+ | $extractorReturnsBoolean
+ | $checkedLength
+ | $prevBinder
+ | $ignoredSubPatBinders
+ |}""".stripMargin)
+
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
case Some(cond) =>
@@ -426,7 +436,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case _ if testedBinder.info.widen <:< expectedTp =>
// if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
// since the types conform, no further checking is required
- if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ if (isPrimitiveValueType(expectedTp)) tru
// have to test outer and non-null only when it's a reference type
else if (expectedTp <:< AnyRefTpe) {
// do non-null check first to ensure we won't select outer on null
@@ -587,9 +597,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain)))
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
+ d.symbol.moduleClass andAlso (_.owner = currentOwner)
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)))
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
new file mode 100644
index 0000000000..a7d7680db1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala
@@ -0,0 +1,86 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+trait MatchWarnings {
+ self: PatternMatching =>
+
+ import global._
+
+ trait TreeMakerWarnings {
+ self: MatchTranslator =>
+
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope" ? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+ // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ private def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next()
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
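The new checkMatchVariablePatterns warning targets code like the following illustrative snippet (not part of the patch), where a lowercase identifier is a fresh variable pattern that matches anything rather than a reference to the existing value:

    object VariablePatternDemo {
      val defaultPort = 8080

      // Warned: `defaultPort` below binds any Int, so the second case is
      // unreachable ("patterns after a variable pattern cannot match").
      def describe(n: Int): String = n match {
        case defaultPort => "default"
        case other       => s"other: $other"
      }

      // The intended comparison needs backticks (or an uppercase stable identifier).
      def describeFixed(n: Int): String = n match {
        case `defaultPort` => "default"
        case other         => s"other: $other"
      }
    }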
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index 63834ae51e..a4944caa2b 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -40,10 +40,12 @@ trait PatternMatching extends Transform with TypingTransformers
with MatchTranslation
with MatchTreeMaking
with MatchCodeGen
+ with MatchCps
with ScalaLogic
with Solving
with MatchAnalysis
- with MatchOptimization {
+ with MatchOptimization
+ with MatchWarnings {
import global._
val phaseName: String = "patmat"
@@ -94,12 +96,17 @@ trait Debugging {
// TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
object debug {
val printPatmat = global.settings.Ypatmatdebug.value
- @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ @inline final def patmat(s: => String) = if (printPatmat) Console.err.println(s)
+ @inline final def patmatResult[T](s: => String)(result: T): T = {
+ if (printPatmat) Console.err.println(s + ": " + result)
+ result
+ }
}
}
trait Interface extends ast.TreeDSL {
- import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import global._
+ import definitions._
import analyzer.Typer
// 2.10/2.11 compatibility
@@ -166,6 +173,10 @@ trait Interface extends ast.TreeDSL {
trait MatchMonadInterface {
val typer: Typer
val matchOwner = typer.context.owner
+ def pureType(tp: Type): Type = tp
+
+ // Extracting from the monad: tp == { def get: T }, result == T
+ def matchMonadResult(tp: Type) = typeOfMemberNamedGet(tp)
def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
def reportMissingCases(pos: Position, counterExamples: List[String]) = {
@@ -175,16 +186,6 @@ trait Interface extends ast.TreeDSL {
typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
}
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index 31a31df764..67c5666f66 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -246,8 +246,8 @@ trait Checkable {
uncheckedOk(P0) || (P0.widen match {
case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
case RefinedType(_, decls) if !decls.isEmpty => false
- case p =>
- new CheckabilityChecker(AnyTpe, p) isCheckable
+ case RefinedType(parents, _) => parents forall isCheckable
+ case p => new CheckabilityChecker(AnyTpe, p) isCheckable
})
)
@@ -273,6 +273,8 @@ trait Checkable {
// Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
case RefinedType(_, decls) if !decls.isEmpty =>
getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+ case RefinedType(parents, _) =>
+ parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy))
case _ =>
val checker = new CheckabilityChecker(X, P)
log(checker.summaryString)
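The two RefinedType cases added above separate plain intersections, whose parents can each be checked on their own, from structural refinements, which stay unchecked; a rough illustration with invented types:

    trait Named { def name: String }
    trait Aged  { def age: Int }

    object RefinementCheckDemo {
      // `Named with Aged` is a RefinedType with no declarations: checkability
      // is now established by examining each parent separately.
      def classify(x: Any): String = x match {
        case _: Named with Aged => "both traits"
        case _: Named           => "named only"
        case _                  => "other"
      }

      def main(args: Array[String]): Unit =
        println(classify(new Named with Aged { val name = "n"; val age = 1 }))
    }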
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 81f5545695..1f4d5cbac2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -517,6 +517,9 @@ trait ContextErrors {
def TooManyArgsPatternError(fun: Tree) =
NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
+ def WrongShapeExtractorExpansion(fun: Tree) =
+ NormalTypeError(fun, "extractor macros can only expand into extractor calls")
+
def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
@@ -593,7 +596,12 @@ trait ContextErrors {
}
def CaseClassConstructorError(tree: Tree) = {
- issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ val baseMessage = tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method"
+ val addendum = directUnapplyMember(tree.symbol.info) match {
+ case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list"
+ case _ => ""
+ }
+ issueNormalTypeError(tree, baseMessage + addendum)
setError(tree)
}
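The addendum to CaseClassConstructorError fires for shapes like this invented extractor, whose unapply cannot back a pattern because of its second non-implicit parameter list:

    object Approx {
      // `case Approx(x) => ...` is rejected, and the new note explains that this
      // unapply has a second non-implicit parameter list.
      def unapply(x: Int)(tolerance: Int): Option[Int] =
        if (math.abs(x) <= tolerance) Some(x) else None
    }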
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index f3a22a2cee..86a0d33737 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -269,7 +269,7 @@ trait Contexts { self: Analyzer =>
/** The next enclosing context (potentially `this`) that is owned by a class or method */
def enclClassOrMethod: Context =
- if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+ if (!owner.exists || owner.isClass || owner.isMethod) this
else outer.enclClassOrMethod
/** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
@@ -653,13 +653,8 @@ trait Contexts { self: Analyzer =>
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
- def accessWithinLinked(ab: Symbol) = {
- val linked = ab.linkedClassOfClass
- // don't have access if there is no linked class
- // (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
- // since `accessWithin(NoSymbol) == true` whatever the symbol)
- (linked ne NoSymbol) && accessWithin(linked)
- }
+ // don't have access if there is no linked class (so exclude linkedClass=NoSymbol)
+ def accessWithinLinked(ab: Symbol) = ab.linkedClassOfClass.fold(false)(accessWithin)
/* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
@@ -957,7 +952,7 @@ trait Contexts { self: Analyzer =>
// 2) sym.owner is inherited by the correct package object class
// We try to establish 1) by inspecting the owners directly, and then we try
// to rule out 2), and only if both those fail do we resort to looking in the info.
- !sym.isPackage && (sym.owner ne NoSymbol) && (
+ !sym.isPackage && sym.owner.exists && (
if (sym.owner.isPackageObjectClass)
sym.owner.owner == pkgClass
else
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 0a2628b482..396f3407f3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -74,22 +74,19 @@ abstract class Duplicators extends Analyzer {
override def mapOver(tpe: Type): Type = tpe match {
case TypeRef(NoPrefix, sym, args) if sym.isTypeParameterOrSkolem =>
- var sym1 = context.scope.lookup(sym.name)
- if (sym1 eq NoSymbol) {
- // try harder (look in outer scopes)
- // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
- BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
- case SilentResultValue(t) =>
- sym1 = t.symbol
- debuglog("fixed by trying harder: "+((sym, sym1, context)))
- case _ =>
- }
- }
-// assert(sym1 ne NoSymbol, tpe)
- if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
- debuglog("fixing " + sym + " -> " + sym1)
+ val sym1 = (
+ context.scope lookup sym.name orElse {
+ // try harder (look in outer scopes)
+ // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but
+ // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
+ BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol)
+ } filter (_ ne sym)
+ )
+ if (sym1.exists) {
+ debuglog(s"fixing $sym -> $sym1")
typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
- } else super.mapOver(tpe)
+ }
+ else super.mapOver(tpe)
case TypeRef(pre, sym, args) =>
val newsym = updateSym(sym)
@@ -157,7 +154,7 @@ abstract class Duplicators extends Analyzer {
case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
invalidSyms(vdef.symbol) = vdef
- val newowner = if (owner != NoSymbol) owner else context.owner
+ val newowner = owner orElse context.owner
val newsym = vdef.symbol.cloneSymbol(newowner)
newsym.setInfo(fixType(vdef.symbol.info))
vdef.symbol = newsym
@@ -362,12 +359,11 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol.safeOwner == AnyClass)
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
- }
+
val ntree = castType(tree, pt)
- val res = super.typed(ntree, mode, pt)
- res
+ super.typed(ntree, mode, pt)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8ca0d82e93..50d88d7c4d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -25,18 +25,27 @@ trait Infer extends Checkable {
/** The formal parameter types corresponding to `formals`.
* If `formals` has a repeated last parameter, a list of
- * (nargs - params.length + 1) copies of its type is returned.
- * By-name types are replaced with their underlying type.
+ * (numArgs - numFormals + 1) copies of its type is appended
+ * to the other formals. By-name types are replaced with their
+ * underlying type.
*
* @param removeByName allows keeping ByName parameters. Used in NamesDefaults.
* @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
*/
- def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
- val formals1 = if (removeByName) formals mapConserve dropByName else formals
- if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.dealiasWiden.typeArgs.head
- formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
- } else formals1
+ def formalTypes(formals: List[Type], numArgs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
+ val numFormals = formals.length
+ val formals1 = if (removeByName) formals mapConserve dropByName else formals
+ val expandLast = (
+ (removeRepeated || numFormals != numArgs)
+ && isVarArgTypes(formals1)
+ )
+ def lastType = formals1.last.dealiasWiden.typeArgs.head
+ def expanded(n: Int) = (1 to n).toList map (_ => lastType)
+
+ if (expandLast)
+ formals1.init ::: expanded(numArgs - numFormals + 1)
+ else
+ formals1
}
/** Sorts the alternatives according to the given comparison function.
@@ -67,96 +76,6 @@ trait Infer extends Checkable {
override def complete(sym: Symbol) = ()
}
- /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
- * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
- * unapply[Seq] call is assumed to have result type `resTp`.
- *
- * `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
- *
- * @param nbSubPats The number of arguments to the extractor pattern
- * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
- * bind patterns, is a Tuple pattern, in which case it is the number of
- * elements. Used to issue warnings about binding a `TupleN` to a single value.
- * @throws TypeError when the unapply[Seq] definition is ill-typed
- * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
- *
- * This is the spec currently implemented -- TODO: update it.
- *
- * 8.1.8 ExtractorPatterns
- *
- * An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
- * However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
- *
- * An `unapply` method with result type `R` in an object `x` matches the
- * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
- * - `n = 0` and `R =:= Boolean`, or
- * - `n = 1` and `R <:< Option[T]`, for some type `T`.
- * The argument pattern `p1` is typed in turn with expected type `T`.
- * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
- * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
- * typed with expected types `T_1, ..., T_n`.
- *
- * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
- * if it takes exactly one argument and its result type is of the form `Option[S]`,
- * where either:
- * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
- * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
- *
- * The argument patterns `p_1, ..., p_n` are typed with expected types
- * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
- *
- */
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
- unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
- val isUnapplySeq = unappSym.name == nme.unapplySeq
- val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
-
- def seqToRepeatedChecked(tp: Type) = {
- val toRepeated = seqToRepeated(tp)
- if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
- else toRepeated
- }
-
- // empty list --> error, otherwise length == 1
- lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
- // empty list --> not a ProductN, otherwise product element types
- def productArgs = getProductArgs(optionArgs.head)
-
- val formals =
- // convert Seq[T] to the special repeated argument type
- // so below we can use formalTypes to expand formals to correspond to the number of actuals
- if (isUnapplySeq) {
- if (optionArgs.nonEmpty)
- productArgs match {
- case Nil => List(seqToRepeatedChecked(optionArgs.head))
- case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
- }
- else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
- } else {
- if (booleanExtractor && nbSubPats == 0) Nil
- else if (optionArgs.nonEmpty)
- if (nbSubPats == 1) {
- val productArity = productArgs.size
- if (productArity > 1 && productArity != effectiveNbSubPats && settings.lint)
- global.currentUnit.warning(pos,
- s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
- optionArgs
- }
- // TODO: update spec to reflect we allow any ProductN, not just TupleN
- else productArgs
- else
- throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
- }
-
- // for unapplySeq, replace last vararg by as many instances as required by nbSubPats
- val formalsExpanded =
- if (isUnapplySeq && formals.nonEmpty) formalTypes(formals, nbSubPats)
- else formals
-
- if (formalsExpanded.lengthCompare(nbSubPats) != 0) (null, null)
- else (formals, formalsExpanded)
- }
-
/** A fresh type variable with given type parameter as origin.
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
@@ -1190,6 +1109,17 @@ trait Infer extends Checkable {
val tparam = tvar.origin.typeSymbol
val TypeBounds(lo0, hi0) = tparam.info.bounds
val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+ val enclCase = context.enclosingCaseDef
+
+ log("\n" + sm"""
+ |-----
+ | enclCase: ${enclCase.tree}
+ | saved: ${enclCase.savedTypeBounds}
+ | tparam: ${tparam.shortSymbolClass}
+ | def_s: ${tparam.defString}
+ | seen_s: ${tparam.defStringSeenAs(tb)}
+ |-----
+ """.trim)
if (lo1 <:< hi1) {
if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
@@ -1197,7 +1127,7 @@ trait Infer extends Checkable {
else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
else {
- context.enclosingCaseDef pushTypeBounds tparam
+ enclCase pushTypeBounds tparam
tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
}
}
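The reworked formalTypes can be pictured with a toy version that expands a vararg-style last formal so there is one formal per argument (plain strings stand in for Types; the helper is made up for illustration):

    object FormalTypesSketch {
      // Mirrors `formals1.init ::: expanded(numArgs - numFormals + 1)` above.
      def expandFormals(formals: List[String], numArgs: Int): List[String] =
        formals match {
          case init :+ last if last.endsWith("*") =>
            init ::: List.fill(numArgs - formals.length + 1)(last.stripSuffix("*"))
          case _ => formals
        }

      def main(args: Array[String]): Unit = {
        // (Int, String*) applied to four arguments
        println(expandFormals(List("Int", "String*"), 4)) // List(Int, String, String, String)
      }
    }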
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 546186479f..3a5845c8ca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -25,7 +25,7 @@ trait MethodSynthesis {
type TT[T] = ru.TypeTag[T]
type CT[T] = ClassTag[T]
- def ValOrDefDef(sym: Symbol, body: Tree) =
+ def newValOrDefDef(sym: Symbol, body: Tree) =
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
@@ -67,7 +67,7 @@ trait MethodSynthesis {
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
- localTyper typed ValOrDefDef(method, f(method))
+ localTyper typed newValOrDefDef(method, f(method))
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
val name1 = name.toTermName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index cac6bd2ef2..2bb2cc1ab4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -390,9 +390,7 @@ trait Namers extends MethodSynthesis {
* has been defined in a separate file.
*/
private def validateCompanionDefs(tree: ImplDef) {
- val sym = tree.symbol
- if (sym eq NoSymbol) return
-
+ val sym = tree.symbol orElse { return }
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
new file mode 100644
index 0000000000..7120aeaaa6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -0,0 +1,475 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools
+package nsc
+package typechecker
+
+import scala.collection.mutable
+import symtab.Flags
+import Mode._
+
+ /**
+ *
+ * A pattern match such as
+ *
+ * x match { case Foo(a, b) => ...}
+ *
+ * Might match an instance of any of the following definitions of Foo.
+ * Note the analogous treatment between case classes and unapplies.
+ *
+ * case class Foo(xs: Int*)
+ * case class Foo(a: Int, xs: Int*)
+ * case class Foo(a: Int, b: Int)
+ * case class Foo(a: Int, b: Int, xs: Int*)
+ *
+ * object Foo { def unapplySeq(x: Any): Option[Seq[Int]] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Seq[Int])] }
+ * object Foo { def unapply(x: Any): Option[(Int, Int)] }
+ * object Foo { def unapplySeq(x: Any): Option[(Int, Int, Seq[Int])] }
+ */
+
+trait PatternTypers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ private object FixedAndRepeatedTypes {
+ def unapply(types: List[Type]) = types match {
+ case init :+ last if isRepeatedParamType(last) => Some((init, dropRepeated(last)))
+ case _ => Some((types, NoType))
+ }
+ }
+
+ // when true:
+ // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
+ // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
+ // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
+ protected def newPatternMatching = true // presently overridden in the presentation compiler
+
+ trait PatternTyper {
+ self: Typer =>
+
+ import TyperErrorGen._
+ import infer._
+
+ private def unit = context.unit
+
+ // If the tree's symbol's type does not define an extractor, maybe the tree's type does.
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call
+ // (rather than something that looks like a constructor call.) (for now, this only happens
+ // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+ // more commonplace)
+ private def hasUnapplyMember(tpe: Type): Boolean = reallyExists(unapplyMember(tpe))
+ private def hasUnapplyMember(sym: Symbol): Boolean = hasUnapplyMember(sym.tpe_*)
+ private def hasUnapplyMember(fun: Tree): Boolean = hasUnapplyMember(fun.symbol) || hasUnapplyMember(fun.tpe)
+
+ // ad-hoc overloading resolution to deal with unapplies and case class constructors
+ // If some but not all alternatives survive filtering the tree's symbol with `p`,
+ // then update the tree's symbol and type to exclude the filtered out alternatives.
+ private def inPlaceAdHocOverloadingResolution(fun: Tree)(p: Symbol => Boolean): Tree = fun.symbol filter p match {
+ case sym if sym.exists && (sym ne fun.symbol) => fun setSymbol sym modifyType (tp => filterOverloadedAlts(tp)(p))
+ case _ => fun
+ }
+ private def filterOverloadedAlts(tpe: Type)(p: Symbol => Boolean): Type = tpe match {
+ case OverloadedType(pre, alts) => overloadedType(pre, alts filter p)
+ case tp => tp
+ }
+
+ def typedConstructorPattern(fun0: Tree, pt: Type) = {
+ // Do some ad-hoc overloading resolution and update the tree's symbol and type
+ // do not update the symbol if the tree's symbol's type does not define an unapply member
+ // (e.g. since it's some method that returns an object with an unapply member)
+ val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember)
+ def caseClass = fun.tpe.typeSymbol.linkedClassOfClass
+
+ // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala
+ // A case class with 23+ params has no unapply method.
+ // A case class constructor may be overloaded with unapply methods in the companion.
+ if (caseClass.isCase && !unapplyMember(fun.tpe).isOverloaded)
+ convertToCaseConstructor(fun, caseClass, pt)
+ else if (hasUnapplyMember(fun))
+ fun
+ else
+ CaseClassConstructorError(fun)
+ }
+
+ def expectedPatternTypes(fun: Tree, args: List[Tree]): List[Type] =
+ newExtractorShape(fun, args).expectedPatternTypes
+
+ def typedPatternArgs(fun: Tree, args: List[Tree], mode: Mode): List[Tree] =
+ typedArgsForFormals(args, newExtractorShape(fun, args).formals, mode)
+
+ def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = {
+ def typedArgWithFormal(arg: Tree, pt: Type) = {
+ val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode
+ typedArg(arg, mode, newMode, dropByName(pt))
+ }
+ val FixedAndRepeatedTypes(fixed, elem) = formals
+ val front = (args, fixed).zipped map typedArgWithFormal
+ def rest = context withinStarPatterns (args drop front.length map (typedArgWithFormal(_, elem)))
+
+ elem match {
+ case NoType => front
+ case _ => front ::: rest
+ }
+ }
+
+ private def boundedArrayType(bound: Type): Type = {
+ val tparam = context.owner freshExistential "" setInfo (TypeBounds upper bound)
+ newExistentialType(tparam :: Nil, arrayType(tparam.tpe_*))
+ }
+
+ protected def typedStarInPattern(tree: Tree, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val exprTyped = typed(expr, mode)
+ val baseClass = exprTyped.tpe.typeSymbol match {
+ case ArrayClass => ArrayClass
+ case _ => SeqClass
+ }
+ val starType = baseClass match {
+ case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt)
+ case ArrayClass => boundedArrayType(pt)
+ case _ => seqType(pt)
+ }
+ val exprAdapted = adapt(exprTyped, mode, starType)
+ exprAdapted.tpe baseType baseClass match {
+ case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+ case _ => setError(tree)
+ }
+ }
+
+ protected def typedInPattern(tree: Typed, mode: Mode, pt: Type) = {
+ val Typed(expr, tpt) = tree
+ val tptTyped = typedType(tpt, mode)
+ val tpe = tptTyped.tpe
+ val exprTyped = typed(expr, mode, tpe.deconst)
+ val extractor = extractorForUncheckedType(tpt.pos, tpe)
+
+ val canRemedy = tpe match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy)
+ val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType
+
+ extractor match {
+ case EmptyTree => treeTyped
+ case _ => wrapClassTagUnapply(treeTyped, extractor, tpe)
+ }
+ }
+
+ def newExtractorShape(tree: Tree): ExtractorShape = tree match {
+ case Apply(fun, args) => ExtractorShape(fun, args)
+ case UnApply(fun, args) => ExtractorShape(fun, args)
+ }
+ def newExtractorShape(fun: Tree, args: List[Tree]): ExtractorShape = ExtractorShape(fun, args)
+
+ case class CaseClassInfo(clazz: Symbol, classType: Type) {
+ def constructor = clazz.primaryConstructor
+ def constructorType = classType.prefix memberType clazz memberType constructor
+ def paramTypes = constructorType.paramTypes
+ def accessors = clazz.caseFieldAccessors
+ def accessorTypes = accessors map (m => (classType memberType m).finalResultType)
+ // def inverted = MethodType(clazz :: Nil, tupleType(accessorTypes))
+ }
+ object NoCaseClassInfo extends CaseClassInfo(NoSymbol, NoType) {
+ override def toString = "NoCaseClassInfo"
+ }
+
+ case class UnapplyMethodInfo(unapply: Symbol, tpe: Type) {
+ def name = unapply.name
+ def isUnapplySeq = name == nme.unapplySeq
+ def unapplyType = tpe memberType method
+ def resultType = tpe.finalResultType
+ def method = unapplyMember(tpe)
+ def paramType = firstParamType(unapplyType)
+ def rawGet = if (isBool) UnitTpe else typeOfMemberNamedGetOrSelf(resultType)
+ def rawTypes = if (isBool) Nil else typesOfSelectorsOrSelf(rawGet)
+ def rawArity = rawTypes.size
+ def isBool = resultType =:= BooleanTpe // aka "Tuple0" or "Option[Unit]"
+ def isNothing = rawGet =:= NothingTpe
+ def isCase = method.isCase
+ }
+
+ object NoUnapplyMethodInfo extends UnapplyMethodInfo(NoSymbol, NoType) {
+ override def toString = "NoUnapplyMethodInfo"
+ }
+
+ case class ExtractorShape(fun: Tree, args: List[Tree]) {
+ def pos = fun.pos
+ private def symbol = fun.symbol
+ private def tpe = fun.tpe
+
+ val ccInfo = tpe.typeSymbol.linkedClassOfClass match {
+ case clazz if clazz.isCase => CaseClassInfo(clazz, tpe)
+ case _ => NoCaseClassInfo
+ }
+ val exInfo = UnapplyMethodInfo(symbol, tpe)
+ import exInfo.{ rawGet, rawTypes, isUnapplySeq }
+
+ override def toString = s"ExtractorShape($fun, $args)"
+
+ def unapplyMethod = exInfo.method
+ def unapplyType = exInfo.unapplyType
+ def unapplyParamType = exInfo.paramType
+ def caseClass = ccInfo.clazz
+ def enclClass = symbol.enclClass
+
+ // TODO - merge these. The difference between these two methods is that expectedPatternTypes
+ // expands the list of types so it is the same length as the number of patterns, whereas formals
+ // leaves the varargs type unexpanded.
+ def formals = (
+ if (isUnapplySeq) productTypes :+ varargsType
+ else if (elementArity == 0) productTypes
+ else if (isSingle) squishIntoOne()
+ else wrongArity(patternFixedArity)
+ )
+ def expectedPatternTypes = elementArity match {
+ case 0 => productTypes
+ case _ if elementArity > 0 && isUnapplySeq => productTypes ::: elementTypes
+ case _ if productArity > 1 && patternFixedArity == 1 => squishIntoOne()
+ case _ => wrongArity(patternFixedArity)
+ }
+
+ def elementType = elementTypeOfLastSelectorOrSelf(rawGet)
+
+ private def hasBogusExtractor = directUnapplyMember(tpe).exists && !unapplyMethod.exists
+ private def expectedArity = "" + productArity + ( if (isUnapplySeq) "+" else "")
+ private def wrongArityMsg(n: Int) = (
+ if (hasBogusExtractor) s"$enclClass does not define a valid extractor method"
+ else s"wrong number of patterns for $enclClass offering $rawTypes_s: expected $expectedArity, found $n"
+ )
+ private def rawTypes_s = rawTypes match {
+ case Nil => "()"
+ case tp :: Nil => "" + tp
+ case tps => tps.mkString("(", ", ", ")")
+ }
+
+ private def err(msg: String) = { unit.error(pos, msg) ; throw new TypeError(msg) }
+ private def wrongArity(n: Int) = err(wrongArityMsg(n))
+
+ def squishIntoOne() = {
+ if (settings.lint)
+ unit.warning(pos, s"$enclClass expects $expectedArity patterns to hold $rawGet but crushing into $productArity-tuple to fit single pattern (SI-6675)")
+
+ rawGet :: Nil
+ }
+ // elementArity is the number of non-sequence patterns minus the
+ // number of non-sequence product elements returned by the extractor.
+ // If it is zero, there is a perfect match between those parts, and
+ // if there is a wildcard star it will match any sequence.
+ // If it is positive, there are more patterns than products,
+ // so a sequence will have to fill in the elements. If it is negative,
+ // there are more products than patterns, which is a compile time error.
+ def elementArity = patternFixedArity - productArity
+ def patternFixedArity = treeInfo effectivePatternArity args
+ def productArity = productTypes.size
+ def isSingle = !isUnapplySeq && (patternFixedArity == 1)
+
+ def productTypes = if (isUnapplySeq) rawTypes dropRight 1 else rawTypes
+ def elementTypes = List.fill(elementArity)(elementType)
+ def varargsType = scalaRepeatedType(elementType)
+ }
+
+ private class VariantToSkolemMap extends TypeMap(trackVariance = true) {
+ private val skolemBuffer = mutable.ListBuffer[TypeSymbol]()
+
+ def skolems = try skolemBuffer.toList finally skolemBuffer.clear()
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+ case tp @ TypeRef(NoPrefix, tpSym, Nil) if tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ if (variance.isInvariant) {
+ // if (variance.isInvariant) tpSym.tpeHK.bounds
+ devWarning(s"variantToSkolem skipping rewrite of $tpSym due to invariance")
+ return tp
+ }
+ val bounds = (
+ if (variance.isPositive) TypeBounds.upper(tpSym.tpeHK)
+ else TypeBounds.lower(tpSym.tpeHK)
+ )
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ skolemBuffer += skolem
+ skolem.tpe_*
+ case tp1 => tp1
+ }
+ }
+ /*
+ * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+ * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+ *
+ * Consider the following example:
+ *
+ * class AbsWrapperCov[+A]
+ * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+ *
+ * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+ * wrapped // : Wrapped[_ <: T]
+ * }
+ *
+ * this method should type check if and only if Wrapped is covariant in its type parameter
+ *
+ * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+ *
+ * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+ * we can simply replace skolems that represent method type parameters as seen from the method's body
+ * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+ * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+ *
+ * see test/files/../t5189*.scala
+ */
+ private def convertToCaseConstructor(tree: Tree, caseClass: Symbol, pt: Type): Tree = {
+ val variantToSkolem = new VariantToSkolemMap
+ val caseConstructorType = tree.tpe.prefix memberType caseClass memberType caseClass.primaryConstructor
+ val tree1 = TypeTree(caseConstructorType) setOriginal tree
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
+ val freeVars = variantToSkolem.skolems
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ log(sm"""|convert to case constructor {
+ | tree: $tree: ${tree.tpe}
+ | ptSafe: $ptSafe
+ | context.tree: ${context.tree}: ${context.tree.tpe}
+ |}""".trim)
+
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, caseClass.typeParams, ptSafe)
+
+ // simplify types without losing safety,
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+ val extrapolator = new ExistentialExtrapolation(freeVars)
+ def extrapolate(tp: Type) = extrapolator extrapolate tp
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 modifyType {
+ case MethodType(ctorArgs, restpe) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ copyMethodType(tree1.tpe, ctorArgs map (_ modifyInfo extrapolate), extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type
+ case tp => tp
+ }
+ }
+
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ def freshArgType(tp: Type): Type = tp match {
+ case MethodType(param :: _, _) => param.tpe
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(polyType)
+ case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType
+ case _ => UnapplyWithSingleArgError(fun) ; ErrorType
+ }
+ val shape = newExtractorShape(fun, args)
+ import shape.{ unapplyParamType, unapplyType, unapplyMethod }
+
+ def extractor = extractorForUncheckedType(shape.pos, unapplyParamType)
+ def canRemedy = unapplyParamType match {
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case RefinedType(parents, _) if parents exists isUncheckable => false
+ case _ => extractor.nonEmpty
+ }
+
+ def freshUnapplyArgType(): Type = {
+ val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
+ val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy)
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ pattp.substSym(freeVars, skolems)
+ }
+
+ val unapplyArg = (
+ context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo (
+ if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt
+ else freshUnapplyArgType()
+ )
+ )
+ // clearing the type is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapplyMethod), Ident(unapplyArg) :: Nil))
+
+ def makeTypedUnApply() = {
+ // the union of the expected type and the inferred type of the argument to unapply
+ val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil)
+ val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass)
+ val args1 = typedPatternArgs(fun1, args, mode)
+ val result = UnApply(fun1, args1) setPos tree.pos setType glbType
+
+ if (wrapInTypeTest)
+ wrapClassTagUnapply(result, extractor, glbType)
+ else
+ result
+ }
+
+ if (fun1.tpe.isErroneous)
+ duplErrTree
+ else if (unapplyMethod.isMacro && !fun1.isInstanceOf[Apply])
+ duplErrorTree(WrongShapeExtractorExpansion(tree))
+ else
+ makeTypedUnApply()
+ }
+
+ def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
+ // TODO: disable when in unchecked match
+ // we don't create a new Context for a Match, so find the CaseDef,
+ // then go out one level and navigate back to the match that has this case
+ val args = List(uncheckedPattern)
+ val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
+ // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
+ // and re-typechecks of the target of the unapply call in PATTERNmode,
+ // this breaks down when the classTagExtractor (which defines the unapply member) is not a simple reference to an object,
+ // but an arbitrary tree as is the case here
+ val res = doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
+
+ log(sm"""
+ |wrapClassTagUnapply {
+ | pattern: $uncheckedPattern
+ | extract: $classTagExtractor
+ | pt: $pt
+ | res: $res
+ |}""".trim)
+
+ res
+ }
+
+ // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
+ // return the corresponding extractor (an instance of ClassTag[`pt`])
+ def extractorForUncheckedType(pos: Position, pt: Type): Tree = {
+ if (isPastTyper || (pt eq NoType)) EmptyTree else {
+ pt match {
+ case RefinedType(parents, decls) if !decls.isEmpty || (parents exists isUncheckable) => return EmptyTree
+ case _ =>
+ }
+ // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
+ // but at least make a proper type before passing it elsewhere
+ val pt1 = pt.dealiasWiden match {
+ case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+ case pt1 => pt1
+ }
+ if (isCheckable(pt1)) EmptyTree
+ else resolveClassTag(pos, pt1) match {
+ case tree if unapplyMember(tree.tpe).exists => tree
+ case _ => devWarning(s"Cannot create runtime type test for $pt1") ; EmptyTree
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
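To make the arity bookkeeping in ExtractorShape concrete, here is an invented unapplySeq of the Option[(T, Seq[U])] shape discussed in the header comment: the String is the single fixed product element, and the trailing Seq[Int] supplies the remaining element patterns:

    object Record {
      // Shape: Option[(String, Seq[Int])] -- one fixed element, then a sequence.
      def unapplySeq(s: String): Option[(String, Seq[Int])] =
        s.split(":").toList match {
          case name :: nums => Some((name, nums.map(_.toInt)))
          case Nil          => None
        }
    }

    object RecordDemo {
      def main(args: Array[String]): Unit =
        "tcp:80:443" match {
          // Three sub-patterns against one fixed product element: the other two
          // are element patterns drawn from the Seq[Int] (elementArity of 2).
          case Record(name, a, b) => println(s"$name: $a and $b")
          case _                  => println("no match")
        }
    }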
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 333d867797..5929cab1d1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -113,6 +113,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
+ @inline final def savingInPattern[A](body: => A): A = {
+ val saved = inPattern
+ try body finally inPattern = saved
+ }
+
var checkedCombinations = Set[List[Type]]()
// only one overloaded alternative is allowed to define default arguments
@@ -211,7 +216,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
// Delaying calling memberType as long as possible
- if (inherited ne NoSymbol) {
+ if (inherited.exists) {
val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
@@ -1424,6 +1429,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
def applyChecks(annots: List[AnnotationInfo]) = {
+ annots foreach (annot => checkCompileTimeOnly(annot.atp.typeSymbol, annot.pos))
checkAnnotations(annots map (_.atp), tree)
transformTrees(annots flatMap (_.args))
}
@@ -1526,7 +1532,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkCompileTimeOnly(sym, tree.pos)
checkDelayedInitSelect(qual, sym, tree.pos)
- if (sym eq NoSymbol)
+ if (!sym.exists)
devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
else if (sym.hasLocalFlag)
varianceValidator.checkForEscape(sym, currentClass)
@@ -1662,6 +1668,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
case Ident(name) =>
+ checkCompileTimeOnly(tree.symbol, tree.pos)
transformCaseApply(tree,
if (name != nme.WILDCARD && name != tpnme.WILDCARD_STAR) {
assert(sym != NoSymbol, "transformCaseApply: name = " + name.debugString + " tree = " + tree + " / " + tree.getClass) //debug
@@ -1681,19 +1688,33 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ => tree
}
+
// skip refchecks in patterns....
result = result match {
case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
+ val pat1 = savingInPattern {
+ inPattern = true
+ transform(pat)
+ }
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
- val old = inPattern
- inPattern = true
- val res = deriveLabelDef(result)(transform)
- inPattern = old
- res
+ savingInPattern {
+ inPattern = true
+ deriveLabelDef(result)(transform)
+ }
+ case Apply(fun, args) if fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol) =>
+ savingInPattern {
+ // SI-7756 If we were in a translated pattern, we can now switch out of pattern mode, as the label apply signals
+ // that we are in the user-supplied code in the case body.
+ //
+ // Relies on the translation of:
+ // (null: Any) match { case x: List[_] => x; x.reverse; case _ => }
+ // to:
+ // <synthetic> val x2: List[_] = (x1.asInstanceOf[List[_]]: List[_]);
+ // matchEnd4({ x2; x2.reverse}) // case body is an argument to a label apply.
+ inPattern = false
+ super.transform(result)
+ }
case _ =>
super.transform(result)
}
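The `savingInPattern` helper added above replaces the hand-rolled save/restore of the mutable `inPattern` flag in the cases that follow. A minimal standalone sketch of the same idiom (hypothetical object, not compiler code):

    object SaveRestoreSketch {
      private var inPattern = false

      @inline final def savingInPattern[A](body: => A): A = {
        val saved = inPattern
        try body finally inPattern = saved // restored even if body throws
      }

      def demo(): Boolean = {
        savingInPattern { inPattern = true /* work that needs the flag */ }
        inPattern // false: the previous value has been restored
      }
    }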
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 6933b10a0a..12d6bb2e6a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -269,8 +269,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
&& sym.enclClass != currentClass
&& !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
&& !sym.owner.isTrait
- && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
- && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass
+ && qual.symbol.info.member(sym.name).exists
&& !needsProtectedAccessor(sym, tree.pos)
)
if (shouldEnsureAccessor) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index b4a37f9943..0c5f798c23 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -143,7 +143,7 @@ trait TypeDiagnostics {
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if ((sym eq null) || (sym eq NoSymbol)) {
+ if (!tree.hasExistingSymbol) {
if (isTyperInPattern) patternMessage
else exprMessage
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index d2ff47626d..cccd0949a2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -26,7 +26,7 @@ import Mode._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Adaptations with Tags with TypersTracking {
+trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers {
self: Analyzer =>
import global._
@@ -36,7 +36,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
final def forArgMode(fun: Tree, mode: Mode) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
- // printResult(s"forArgMode($fun, $mode) gets SCCmode")(mode | SCCmode)
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
// Also used to cache imports type-checked by namer.
@@ -63,6 +62,10 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
sealed abstract class SilentResult[+T] {
+ @inline final def fold[U](none: => U)(f: T => U): U = this match {
+ case SilentResultValue(value) => f(value)
+ case _ => none
+ }
@inline final def map[U](f: T => U): SilentResult[U] = this match {
case SilentResultValue(value) => SilentResultValue(f(value))
case x: SilentTypeError => x
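The new `fold` on `SilentResult` collapses the common "if there is a value, transform it, otherwise fall back" pattern. A simplified standalone analogue (hypothetical classes, not the compiler's own):

    sealed abstract class Silent[+T] {
      @inline final def fold[U](none: => U)(f: T => U): U = this match {
        case SilentValue(value) => f(value)
        case _                  => none
      }
    }
    final case class SilentValue[+T](value: T)  extends Silent[T]
    final case class SilentFailure(msg: String) extends Silent[Nothing]

    object SilentFoldDemo extends App {
      println(SilentValue(41).fold("failed")(n => s"ok: ${n + 1}")) // ok: 42
      println(SilentFailure("boom").fold("failed")(n => s"ok: $n")) // failed
    }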
@@ -90,13 +93,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
private final val InterpolatorCodeRegex = """\$\{.*?\}""".r
private final val InterpolatorIdentRegex = """\$\w+""".r
- // when true:
- // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
- // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
- protected def newPatternMatching = true // presently overridden in the presentation compiler
-
- abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine, inGreen, inRed }
import TyperErrorGen._
@@ -912,121 +909,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
}
- /*
- * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
- * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
- *
- * Consider the following example:
- *
- * class AbsWrapperCov[+A]
- * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
- *
- * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
- * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
- * wrapped // : Wrapped[_ <: T]
- * }
- *
- * this method should type check if and only if Wrapped is covariant in its type parameter
- *
- * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
- * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
- * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
- *
- * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
- * we can simply replace skolems that represent method type parameters as seen from the method's body
- * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
- * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
- *
- * see test/files/../t5189*.scala
- */
- def adaptConstrPattern(): Tree = { // (5)
- def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
- val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
- // if the tree's symbol's type does not define an extractor, maybe the tree's type does.
- // this is the case when we encounter an arbitrary tree as the target of an unapply call
- // (rather than something that looks like a constructor call.) (for now, this only happens
- // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
- // more common place)
- val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
- def convertToCaseConstructor(clazz: Symbol): TypeTree = {
- // convert synthetic unapply of case class to case class constructor
- val prefix = tree.tpe.prefix
- val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
- .setOriginal(tree)
-
- val skolems = new mutable.ListBuffer[TypeSymbol]
- object variantToSkolem extends TypeMap(trackVariance = true) {
- def apply(tp: Type) = mapOver(tp) match {
- // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
- case TypeRef(NoPrefix, tpSym, Nil) if !variance.isInvariant && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
- // must initialize or tpSym.tpe might see random type params!!
- // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
- // TODO: why is that??
- tpSym.initialize
- val bounds = if (variance.isPositive) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- // origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
- skolems += skolem
- skolem.tpe
- case tp1 => tp1
- }
- }
-
- // have to open up the existential and put the skolems in scope
- // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
- val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
- val freeVars = skolems.toList
-
- // use "tree" for the context, not context.tree: don't make another CaseDef context,
- // as instantiateTypeVar's bounds would end up there
- val ctorContext = context.makeNewScope(tree, context.owner)
- freeVars foreach ctorContext.scope.enter
- newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
-
- // simplify types without losing safety,
- // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
- val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
- val extrapolated = tree1.tpe match {
- case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
- ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
- copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
- case tp => tp
- }
-
- // once the containing CaseDef has been type checked (see typedCase),
- // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
- tree1 setType extrapolated
- }
-
- if (extractor != NoSymbol) {
- // if we did some ad-hoc overloading resolution, update the tree's symbol
- // do not update the symbol if the tree's symbol's type does not define an unapply member
- // (e.g. since it's some method that returns an object with an unapply member)
- if (overloadedExtractorOfObject != NoSymbol)
- tree setSymbol overloadedExtractorOfObject
-
- tree.tpe match {
- case OverloadedType(pre, alts) => tree setType overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
- case _ =>
- }
- val unapply = unapplyMember(extractor.tpe)
- val clazz = unapplyParameterType(unapply)
-
- if (unapply.isCase && clazz.isCase) {
- convertToCaseConstructor(clazz)
- } else {
- tree
- }
- } else {
- val clazz = tree.tpe.typeSymbol.linkedClassOfClass
- if (clazz.isCase)
- convertToCaseConstructor(clazz)
- else
- CaseClassConstructorError(tree)
- }
- }
-
def insertApply(): Tree = {
assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
@@ -1213,7 +1095,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree))
macroExpandApply(this, tree, mode, pt)
else if (mode.typingConstructorPattern)
- adaptConstrPattern()
+ typedConstructorPattern(tree, pt)
else if (shouldInsertApply(tree))
insertApply()
else if (hasUndetsInMonoMode) { // (9)
@@ -2495,7 +2377,7 @@ trait Typers extends Adaptations with Tags with TypersTracking {
// list, so substitute the final result type of the method, i.e. the type
// of the case class.
if (pat1.tpe.paramSectionCount > 0)
- pat1 setType pat1.tpe.finalResultType
+ pat1 modifyType (_.finalResultType)
for (bind @ Bind(name, _) <- cdef.pat)
if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
@@ -2510,8 +2392,10 @@ trait Typers extends Adaptations with Tags with TypersTracking {
// insert a cast if something typechecked under the GADT constraints,
// but not in real life (i.e., now that's we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
- if (isFullyDefined(pt) && !(body1.tpe <:< pt))
+ if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
+ log(s"Adding cast to pattern because ${body1.tpe} does not conform to expected type $pt")
body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
+ }
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
@@ -3026,32 +2910,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
def typedArgs(args: List[Tree], mode: Mode) =
args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
- /** Type trees in `args0` against corresponding expected type in `adapted0`.
- *
- * The mode in which each argument is typed is derived from `mode` and
- * whether the arg was originally by-name or var-arg (need `formals0` for that)
- * the default is by-val, of course.
- *
- * (docs reverse-engineered -- AM)
- */
- def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
- if (args.isEmpty || adapted.isEmpty) Nil
- else {
- // No formals left or * indicates varargs.
- val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
- val isByName = formals.nonEmpty && isByNameParamType(formals.head)
- def typedMode = if (isByName) mode.onlySticky else mode.onlySticky | BYVALmode
- def body = typedArg(args.head, mode, typedMode, adapted.head)
- def arg1 = if (isVarArgs) context.withinStarPatterns(body) else body
-
- // formals may be empty, so don't call tail
- arg1 :: loop(args.tail, formals drop 1, adapted.tail)
- }
- }
- loop(args0, formals0, adapted0)
- }
-
/** Does function need to be instantiated, because a missing parameter
* in an argument closure overlaps with an uninstantiated formal?
*/
@@ -3288,22 +3146,20 @@ trait Typers extends Adaptations with Tags with TypersTracking {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
def handleMonomorphicCall: Tree = {
- // In order for checkDead not to be misled by the unfortunate special
- // case of AnyRef#synchronized (which is implemented with signature T => T
- // but behaves as if it were (=> T) => T) we need to know what is the actual
- // target of a call. Since this information is no longer available from
- // typedArg, it is recorded here.
- val args1 =
- // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
- // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
- // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
- // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
- // casting breaks SI-6145,
- // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
- if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+ // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+ // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+ // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+ // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+ // casting breaks SI-6145,
+ // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+ def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol)
+
+ val args1 = (
+ if (noExpectedType)
typedArgs(args, forArgMode(fun, mode))
else
- typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+ typedArgsForFormals(args, paramTypes, forArgMode(fun, mode))
+ )
// instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
// val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
@@ -3387,129 +3243,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
}
- def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
- def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
- def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
-
- val otpe = fun.tpe
-
- if (args.length > MaxTupleArity)
- return duplErrorTree(TooManyArgsPatternError(fun))
-
- //
- def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
- case MethodType(param :: _, _) =>
- (Nil, param.tpe)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
- // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
- case OverloadedType(_, _) =>
- OverloadedUnapplyError(fun)
- (Nil, ErrorType)
- case _ =>
- UnapplyWithSingleArgError(fun)
- (Nil, ErrorType)
- }
-
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
- val arg = Ident(argDummy) setType pt
-
- val uncheckedTypeExtractor =
- if (unappType.paramTypes.nonEmpty)
- extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
- else None
-
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println(s"UNAPP: need to typetest, arg: ${arg.tpe} unappType: $unappType")
- val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val unapplyContext = context.makeNewScope(context.tree, context.owner)
- freeVars foreach unapplyContext.scope.enter
-
- val typer1 = newTyper(unapplyContext)
- val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
-
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg setType pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
-
- // clearing the type is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapp), List(arg)))
-
- if (fun1.tpe.isErroneous) duplErrTree
- else {
- val resTp = fun1.tpe.finalResultType.dealiasWiden
- val nbSubPats = args.length
- val (formals, formalsExpanded) =
- extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
- if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
- else {
- val args1 = typedArgs(args, mode, formals, formalsExpanded)
- val pt1 = ensureFullyDefined(pt) // SI-1048
- val itype = glb(List(pt1, arg.tpe))
- arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
- val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
-
- // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
- // skip if the unapply's type is not a method type with (at least, but really it should be exactly) one argument
- // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
- if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
- else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
- }
- }
- }
-
- def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
- // TODO: disable when in unchecked match
- // we don't create a new Context for a Match, so find the CaseDef, then go out one level and navigate back to the match that has this case
- // val thisCase = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- // val unchecked = thisCase.outer.tree.collect{case Match(selector, cases) if cases contains thisCase => selector} match {
- // case List(Typed(_, tpt)) if tpt.tpe hasAnnotation UncheckedClass => true
- // case t => println("outer tree: "+ (t, thisCase, thisCase.outer.tree)); false
- // }
- // println("wrapClassTagUnapply"+ (!isPastTyper && infer.containsUnchecked(pt), pt, uncheckedPattern))
- // println("wrapClassTagUnapply: "+ extractor)
- // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true))
-
- val args = List(uncheckedPattern)
- val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
- // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
- // and re-typechecks of the target of the unapply call in PATTERNmode,
- // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
- // but an arbitrary tree as is the case here
- doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
- }
-
- // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
- // return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
- // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
- // but at least make a proper type before passing it elsewhere
- val pt1 = pt.dealiasWiden match {
- case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
- case pt1 => pt1
- }
- pt1 match {
- // if at least one of the types in an intersection is checkable, use the checkable ones
- // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
- // Coll is an abstract type, but SeqLike of course is not
- case RefinedType(ps, _) if ps.length > 1 && (ps exists infer.isCheckable) =>
- None
-
- case ptCheckable if infer isUncheckable ptCheckable =>
- val classTagExtractor = resolveClassTag(pos, ptCheckable)
-
- if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
- Some(classTagExtractor)
- else None
-
- case _ => None
- }
- }
-
/**
* Convert an annotation constructor call into an AnnotationInfo.
*/
@@ -3757,11 +3490,15 @@ trait Typers extends Adaptations with Tags with TypersTracking {
/** convert local symbols and skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
- def defines(tree: Tree, sym: Symbol) =
- sym.isExistentialSkolem && sym.unpackLocation == tree ||
- tree.isDef && tree.symbol == sym
- def isVisibleParameter(sym: Symbol) =
- sym.isParameter && (sym.owner == owner) && (sym.isType || !owner.isAnonymousFunction)
+ def defines(tree: Tree, sym: Symbol) = (
+ sym.isExistentialSkolem && sym.unpackLocation == tree
+ || tree.isDef && tree.symbol == sym
+ )
+ def isVisibleParameter(sym: Symbol) = (
+ sym.isParameter
+ && (sym.owner == owner)
+ && (sym.isType || !owner.isAnonymousFunction)
+ )
def containsDef(owner: Symbol, sym: Symbol): Boolean =
(!sym.hasPackageFlag) && {
var o = sym.owner
@@ -4992,14 +4729,6 @@ trait Typers extends Adaptations with Tags with TypersTracking {
treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
-
- def typedUnApply(tree: UnApply) = {
- val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
- val args1 = map2(tree.args, tpes)(typedPattern)
- treeCopy.UnApply(tree, fun1, args1) setType pt
- }
-
def issueTryWarnings(tree: Try): Try = {
def checkForCatchAll(cdef: CaseDef) {
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
@@ -5051,61 +4780,28 @@ trait Typers extends Adaptations with Tags with TypersTracking {
}
def typedTyped(tree: Typed) = {
- val expr = tree.expr
- val tpt = tree.tpt
- tpt match {
- case Function(List(), EmptyTree) =>
- // find out whether the programmer is trying to eta-expand a macro def
- // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
- // that typecheck must not trigger macro expansions, so we explicitly prohibit them
- // however we cannot do `context.withMacrosDisabled`
- // because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
- exprTyped match {
- case macroDef if treeInfo.isMacroApplication(macroDef) =>
- MacroEtaError(exprTyped)
- case _ =>
- typedEta(checkDead(exprTyped))
- }
-
- case t if treeInfo isWildcardStarType t =>
- val exprTyped = typed(expr, mode.onlySticky)
- def subArrayType(pt: Type) =
- if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
- else {
- val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- newExistentialType(List(tparam), arrayType(tparam.tpe))
- }
-
- val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
- case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
- }
- exprAdapted.tpe.baseType(baseClass) match {
- case TypeRef(_, _, List(elemtp)) =>
- treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
- case _ =>
- setError(tree)
+ if (treeInfo isWildcardStarType tree.tpt)
+ typedStarInPattern(tree, mode.onlySticky, pt)
+ else if (mode.inPatternMode)
+ typedInPattern(tree, mode.onlySticky, pt)
+ else tree match {
+ // find out whether the programmer is trying to eta-expand a macro def
+ // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+ // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+ // however we cannot do `context.withMacrosDisabled`
+ // because `expr` might contain nested macro calls (see SI-6673)
+ //
+ // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
+ // which means trailing underscore.
+ case Typed(expr, Function(Nil, EmptyTree)) =>
+ typed1(suppressMacroExpansion(expr), mode, pt) match {
+ case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
+ case exprTyped => typedEta(checkDead(exprTyped))
}
-
- case _ =>
- val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst)
- val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
-
- if (mode.inPatternMode) {
- val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
- // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- val ptDefined = ensureFullyDefined(pt) // FIXME this is probably redundant now that we don't dropExistenial in pattern mode.
- val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
- treeTyped setType ownType
-
- uncheckedTypeExtractor match {
- case None => treeTyped
- case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
- }
- } else
- treeTyped setType tptTyped.tpe
+ case Typed(expr, tpt) =>
+ val tpt1 = typedType(tpt, mode) // type the ascribed type first
+ val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type
+ treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe
}
}
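The first case above relies on the parser encoding `expr _` (a method value) as `Typed(expr, Function(Nil, EmptyTree))`. One way to see that marker, assuming a 2.11 REPL with scala-reflect on the classpath (output shown is approximate):

    import scala.reflect.runtime.universe._

    showRaw(q"f _")
    // Typed(Ident(TermName("f")), Function(List(), EmptyTree))
    // i.e. the tree shape matched by the eta-expansion case of typedTyped.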
@@ -5241,11 +4937,13 @@ trait Typers extends Adaptations with Tags with TypersTracking {
case _ => tree
}
}
- else
+ else {
// we should get here only when something before failed
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
+ devWarning(tree.pos, s"Assigning Any type to TypeTree because tree.original is null: tree is $tree/${System.identityHashCode(tree)}, sym=${tree.symbol}, tpe=${tree.tpe}")
tree setType AnyTpe
+ }
}
def typedFunction(fun: Function) = {
if (fun.symbol == NoSymbol)
@@ -5254,52 +4952,80 @@ trait Typers extends Adaptations with Tags with TypersTracking {
typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
}
- // begin typed1
- //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
- tree match {
- case tree: Ident => typedIdentOrWildcard(tree)
- case tree: Select => typedSelectOrSuperCall(tree)
- case tree: Apply => typedApply(tree)
+ // Trees only allowed during pattern mode.
+ def typedInPatternMode(tree: Tree): Tree = tree match {
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case _ => abort(s"unexpected tree in pattern mode: ${tree.getClass}\n$tree")
+ }
+
+ def typedTypTree(tree: TypTree): Tree = tree match {
case tree: TypeTree => typedTypeTree(tree)
- case tree: Literal => typedLiteral(tree)
- case tree: This => typedThis(tree)
- case tree: ValDef => typedValDef(tree)
- case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
- case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
- case tree: If => typedIf(tree)
- case tree: TypeApply => typedTypeApply(tree)
case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
- case tree: Bind => typedBind(tree)
- case tree: Function => typedFunction(tree)
- case tree: Match => typedVirtualizedMatch(tree)
- case tree: New => typedNew(tree)
- case tree: Assign => typedAssign(tree.lhs, tree.rhs)
- case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
- case tree: Super => typedSuper(tree)
case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
- case tree: Typed => typedTyped(tree)
- case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
- case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
- case tree: TypeDef => typedTypeDef(tree)
- case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
- case tree: PackageDef => typedPackageDef(tree)
- case tree: DocDef => typedDocDef(tree, mode, pt)
- case tree: Annotated => typedAnnotated(tree)
case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
- case tree: Return => typedReturn(tree)
- case tree: Try => typedTry(tree)
- case tree: Throw => typedThrow(tree)
- case tree: Alternative => typedAlternative(tree)
- case tree: Star => typedStar(tree)
- case tree: UnApply => typedUnApply(tree)
- case tree: ArrayValue => typedArrayValue(tree)
- case tree: ApplyDynamic => typedApplyDynamic(tree)
- case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree")
+ }
+
+ def typedMemberDef(tree: MemberDef): Tree = tree match {
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case _ => abort(s"unexpected member def: ${tree.getClass}\n$tree")
+ }
+
+ // Trees not allowed during pattern mode.
+ def typedOutsidePatternMode(tree: Tree): Tree = tree match {
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: DocDef => typedDocDef(tree, mode, pt)
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
+ }
+
+ // Trees allowed in or out of pattern mode.
+ def typedInAnyMode(tree: Tree): Tree = tree match {
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: This => typedThis(tree) // SI-6104
+ case tree: UnApply => abort(s"unexpected UnApply $tree") // turns out UnApply never reaches here
+ case _ =>
+ if (mode.inPatternMode)
+ typedInPatternMode(tree)
+ else
+ typedOutsidePatternMode(tree)
+ }
+
+ // begin typed1
+ tree match {
+ case tree: TypTree => typedTypTree(tree)
+ case tree: MemberDef => typedMemberDef(tree)
+ case _ => typedInAnyMode(tree)
}
}
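`typed1` now dispatches first on the tree's class and only afterwards on the current mode, instead of using one flat match. A toy sketch of the same structure over a hypothetical mini-AST (names are illustrative only):

    object DispatchSketch {
      sealed trait Node
      final case class TypNode(name: String) extends Node // type-representing trees
      final case class Lit(value: Any)       extends Node // allowed in any mode
      final case class Star(elem: Node)      extends Node // only legal in patterns

      def typedOne(node: Node, inPattern: Boolean): String = node match {
        case t: TypNode           => s"type tree: $t"    // cf. typedTypTree
        case l: Lit               => s"any mode: $l"     // cf. typedInAnyMode
        case s: Star if inPattern => s"pattern mode: $s" // cf. typedInPatternMode
        case other                => sys.error(s"unexpected tree in this mode: $other")
      }
    }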
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 47c859bb5c..5049fec65b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -12,8 +12,7 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Unapplies extends ast.TreeDSL
-{
+trait Unapplies extends ast.TreeDSL {
self: Analyzer =>
import global._
@@ -21,7 +20,8 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
- private val unapplyParamName = nme.x_0
+ private def unapplyParamName = nme.x_0
+ private def caseMods = Modifiers(SYNTHETIC | CASE)
// In the typeCompleter (templateSig) of a case class (resp it's module),
// synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute
@@ -30,39 +30,17 @@ trait Unapplies extends ast.TreeDSL
// moduleClass symbol of the companion module.
class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
- /** returns type list for return type of the extraction
- * @see extractorFormalTypes
+ /** Returns unapply or unapplySeq if available, without further checks.
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
- assert(ufn.isMethod, ufn)
- val nbSubPats = args.length
- //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
- ufn.name match {
- case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
- if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
- else formals
- case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
- }
- }
+ def directUnapplyMember(tp: Type): Symbol = (tp member nme.unapply) orElse (tp member nme.unapplySeq)
- /** returns unapply or unapplySeq if available */
- def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
- case NoSymbol => tp member nme.unapplySeq
- case unapp => unapp
- }
+ /** Filters out unapplies with multiple (non-implicit) parameter lists,
+ * as they cannot be used as extractors
+ */
+ def unapplyMember(tp: Type): Symbol = directUnapplyMember(tp) filter (sym => !hasMultipleNonImplicitParamLists(sym))
object ExtractorType {
- def unapply(tp: Type): Option[Symbol] = {
- val member = unapplyMember(tp)
- if (member.exists) Some(member) else None
- }
- }
-
- /** returns unapply member's parameter type. */
- def unapplyParameterType(extractor: Symbol) = extractor.tpe.params match {
- case p :: Nil => p.tpe.typeSymbol
- case _ => NoSymbol
+ def unapply(tp: Type): Option[Symbol] = unapplyMember(tp).toOption
}
def copyUntyped[T <: Tree](tree: T): T =
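For context on what `unapplyMember` now accepts: an extractor is a stable value whose type has a usable single-parameter-list `unapply` (or `unapplySeq`). A standalone example (not from this patch):

    object Email {
      def unapply(s: String): Option[(String, String)] = s.split("@") match {
        case Array(user, host) => Some((user, host))
        case _                 => None
      }
    }

    object ExtractorDemo extends App {
      "joe@example.com" match {
        case Email(user, host) => println(s"$user at $host") // joe at example.com
        case _                 => println("not an email")
      }
      // An unapply with several non-implicit parameter lists, e.g.
      //   def unapply(s: String)(radix: Int): Option[Int]
      // cannot be used in a pattern, which is what the new filter excludes.
    }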
@@ -93,25 +71,19 @@ trait Unapplies extends ast.TreeDSL
*/
private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
def caseFieldAccessorValue(selector: ValDef): Tree = {
- val accessorName = selector.name
- val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
- case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
- }
- privateLocalParamAccessor match {
- case None =>
- // Selecting by name seems to be the most straight forward way here to
- // avoid forcing the symbol of the case class in order to list the accessors.
- val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
- Ident(param) DOT maybeRenamedAccessorName
- case Some(sym) =>
- // But, that gives a misleading error message in neg/t1422.scala, where a case
- // class has an illegal private[this] parameter. We can detect this by checking
- // the modifiers on the param accessors.
- //
- // We just generate a call to that param accessor here, which gives us an inaccessible
- // symbol error, as before.
- Ident(param) DOT sym
+ // Selecting by name seems to be the most straightforward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ def selectByName = Ident(param) DOT caseAccessorName(caseclazz.symbol, selector.name)
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ def localAccessor = caseclazz.impl.body find {
+ case t @ ValOrDefDef(mods, selector.name, _, _) => mods.isPrivateLocal
+ case _ => false
}
+ localAccessor.fold(selectByName)(Ident(param) DOT _.symbol)
}
// Working with trees, rather than symbols, to avoid cycles like SI-5082
@@ -153,8 +125,6 @@ trait Unapplies extends ast.TreeDSL
gen.mkTemplate(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
}
- private val caseMods = Modifiers(SYNTHETIC | CASE)
-
/** The apply method corresponding to a case class
*/
def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 752aac5c8c..ea3c9d8dde 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -8,6 +8,7 @@ package tools
package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
+import scala.compat.Platform.EOL
package object util {
@@ -78,6 +79,14 @@ package object util {
s"$clazz$msg @ $frame"
}
+ def stackTracePrefixString(ex: Throwable)(p: StackTraceElement => Boolean): String = {
+ val frames = ex.getStackTrace takeWhile p map (" at " + _)
+ val msg = ex.getMessage match { case null => "" ; case s => s": $s" }
+ val clazz = ex.getClass.getName
+
+ s"$clazz$msg" +: frames mkString EOL
+ }
+
lazy val trace = new SimpleTracer(System.out)
@deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
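A REPL-style sketch of how the new helper might be called (assumes scala.tools.nsc on the classpath; the predicate is illustrative):

    import scala.tools.nsc.util.stackTracePrefixString

    val ex = new RuntimeException("boom")
    // Keep frames up to the first one coming from the typechecker package:
    println(stackTracePrefixString(ex)(frame =>
      !frame.getClassName.startsWith("scala.tools.nsc.typechecker")))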
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 462cbb9c94..5a4448e01a 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -1,14 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" path="partest"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scalap"/>
+ <classpathentry kind="src" path="partest-extras"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M4/1.0-RC3/scala-partest_2.11.0-M4-1.0-RC3.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="output" path="build-quick-partest"/>
+ <classpathentry kind="output" path="build-quick-partest-extras"/>
</classpath>
diff --git a/src/eclipse/partest/.project b/src/eclipse/partest/.project
index 45c24332ba..5f52d4bf8f 100644
--- a/src/eclipse/partest/.project
+++ b/src/eclipse/partest/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>partest</name>
+ <name>partest-extras</name>
<comment></comment>
<projects>
</projects>
@@ -17,9 +17,9 @@
</natures>
<linkedResources>
<link>
- <name>build-quick-partest</name>
+ <name>build-quick-partest-extras</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/build/quick/classes/partest</locationURI>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/partest-extras</locationURI>
</link>
<link>
<name>lib</name>
@@ -27,9 +27,9 @@
<locationURI>SCALA_BASEDIR/lib</locationURI>
</link>
<link>
- <name>partest</name>
+ <name>partest-extras</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/src/partest</locationURI>
+ <locationURI>SCALA_BASEDIR/src/partest-extras</locationURI>
</link>
</linkedResources>
</projectDescription>
diff --git a/src/intellij/partest.iml.SAMPLE b/src/intellij/partest.iml.SAMPLE
index 5b8cfa3f38..893236b621 100644
--- a/src/intellij/partest.iml.SAMPLE
+++ b/src/intellij/partest.iml.SAMPLE
@@ -12,17 +12,11 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../partest">
- <sourceFolder url="file://$MODULE_DIR$/../partest" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../partest-extras">
+ <sourceFolder url="file://$MODULE_DIR$/../partest-extras" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- <orderEntry type="module" module-name="xml" />
- <orderEntry type="module" module-name="reflect" />
- <orderEntry type="module" module-name="actors" />
- <orderEntry type="module" module-name="scalap" />
- <orderEntry type="module" module-name="compiler" />
<orderEntry type="library" name="partest-deps" level="project" />
<orderEntry type="module" module-name="repl" />
</component>
diff --git a/src/library/scala/annotation/compileTimeOnly.scala b/src/library/scala/annotation/compileTimeOnly.scala
new file mode 100644
index 0000000000..942e9cad8c
--- /dev/null
+++ b/src/library/scala/annotation/compileTimeOnly.scala
@@ -0,0 +1,22 @@
+package scala.annotation
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates that an annottee should not be referred to after
+ * type checking (which includes macro expansion).
+ *
+ * Examples of potential use:
+ * 1) The annottee can only appear in the arguments of some other macro
+ * that will eliminate it from the AST during expansion.
+ * 2) The annottee is a macro and should have been expanded away,
+ * so if it hasn't, something wrong has happened.
+ * (Comes in handy to provide better support for new macro flavors,
+ * e.g. macro annotations, that can't be expanded by the vanilla compiler).
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.11.0
+ */
+@getter @setter @beanGetter @beanSetter @companionClass @companionMethod
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
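A hedged usage sketch of the new annotation (identifiers below are illustrative, not part of the patch):

    import scala.annotation.compileTimeOnly

    object QueryDsl {
      // Must be rewritten away by a macro before type checking completes.
      @compileTimeOnly("`placeholder` may only be used inside a `query { ... }` block")
      def placeholder[T]: T = sys.error("unreachable: eliminated during macro expansion")
    }
    // Any reference to QueryDsl.placeholder that survives type checking is reported
    // as a compile error carrying the message above.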
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 77fe2eb1e1..315f56bd4e 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -12,7 +12,7 @@ package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
-import scala.collection.generic.{ Sorted }
+import scala.collection.generic.{ Sorted, IsTraversableLike }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import java.lang.{ Class => jClass }
@@ -48,6 +48,10 @@ object ScalaRunTime {
names.toSet
}
+ // A helper method to make my life in the pattern matcher a lot easier.
+ def drop[Repr](coll: Repr, num: Int)(implicit traversable: IsTraversableLike[Repr]): Repr =
+ traversable conversion coll drop num
+
/** Return the class object representing an array with element class `clazz`.
*/
def arrayClass(clazz: jClass[_]): jClass[_] = {
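A small sketch of how the new `drop` helper behaves: `IsTraversableLike` preserves the representation type, so each call returns its own `Repr` (the String case relies on the standard `stringRepr` instance):

    import scala.runtime.ScalaRunTime

    object DropDemo extends App {
      println(ScalaRunTime.drop(List(1, 2, 3), 1)) // List(2, 3)
      println(ScalaRunTime.drop("abcdef", 2))      // cdef
    }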
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala
index d618e086f4..d618e086f4 100644
--- a/src/partest/scala/tools/partest/ASMConverters.scala
+++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala
diff --git a/src/partest/scala/tools/partest/AsmNode.scala b/src/partest-extras/scala/tools/partest/AsmNode.scala
index d181436676..e6a91498d1 100644
--- a/src/partest/scala/tools/partest/AsmNode.scala
+++ b/src/partest-extras/scala/tools/partest/AsmNode.scala
@@ -16,10 +16,11 @@ sealed trait AsmNode[+T] {
def visibleAnnotations: List[AnnotationNode]
def invisibleAnnotations: List[AnnotationNode]
def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
+ def erasedCharacteristics = f"$name%15s $desc%-30s$accessString"
- private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
- private def sigString = if (signature == null) "" else " " + signature
- override def toString = characteristics
+ private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
+ private def sigString = if (signature == null) "" else " " + signature
+ override def toString = characteristics
}
object AsmNode {
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
index 2690b784d1..7650a892fd 100644
--- a/src/partest/scala/tools/partest/BytecodeTest.scala
+++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala
@@ -48,7 +48,18 @@ abstract class BytecodeTest extends ASMConverters {
// descriptors and generic signatures? Method bodies are not considered, and
// the names of the classes containing the methods are substituted so they do
// not appear as differences.
- def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode): Boolean = {
+ def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.characteristics)
+
+ // Same as sameMethodAndFieldSignatures, but ignoring generic signatures.
+ // This allows for methods which receive the same descriptor but differing
+ // generic signatures. In particular, this happens with value classes,
+ // which get a generic signature where a method written in terms of the
+ // underlying values does not.
+ def sameMethodAndFieldDescriptors(clazzA: ClassNode, clazzB: ClassNode) =
+ sameCharacteristics(clazzA, clazzB)(_.erasedCharacteristics)
+
+ private def sameCharacteristics(clazzA: ClassNode, clazzB: ClassNode)(f: AsmNode[_] => String): Boolean = {
val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
val name1 = clazzA.name
@@ -59,8 +70,8 @@ abstract class BytecodeTest extends ASMConverters {
false
}
else (ms1, ms2).zipped forall { (m1, m2) =>
- val c1 = m1.characteristics
- val c2 = m2.characteristics.replaceAllLiterally(name2, name1)
+ val c1 = f(m1)
+ val c2 = f(m2).replaceAllLiterally(name2, name1)
if (c1 == c2)
println(s"[ok] $m1")
else
diff --git a/src/partest/scala/tools/partest/JavapTest.scala b/src/partest-extras/scala/tools/partest/JavapTest.scala
index 3cb3dc6ca8..3cb3dc6ca8 100644
--- a/src/partest/scala/tools/partest/JavapTest.scala
+++ b/src/partest-extras/scala/tools/partest/JavapTest.scala
diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index 7381b8af54..7cc2dd39a9 100644
--- a/src/partest/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -7,7 +7,6 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
-import scala.tools.partest.nest.FileUtil
import java.lang.reflect.{ Method => JMethod, Field => JField }
/** A trait for testing repl code. It drops the first line
@@ -31,7 +30,7 @@ abstract class ReplTest extends DirectTest {
def show() = eval() foreach println
}
-abstract class SessionTest extends ReplTest with FileUtil {
+abstract class SessionTest extends ReplTest {
def session: String
override final def code = expected filter (_.startsWith(prompt)) map (_.drop(prompt.length)) mkString "\n"
def expected = session.stripMargin.lines.toList
@@ -39,6 +38,6 @@ abstract class SessionTest extends ReplTest with FileUtil {
override def show() = {
val out = eval().toList
if (out.size != expected.size) Console println s"Expected ${expected.size} lines, got ${out.size}"
- if (out != expected) Console print compareContents(expected, out, "expected", "actual")
+ if (out != expected) Console print nest.FileManager.compareContents(expected, out, "expected", "actual")
}
}
diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest-extras/scala/tools/partest/SigTest.scala
index fe233a4fb5..fe233a4fb5 100644
--- a/src/partest/scala/tools/partest/SigTest.scala
+++ b/src/partest-extras/scala/tools/partest/SigTest.scala
diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala
new file mode 100644
index 0000000000..114658b0cd
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/Util.scala
@@ -0,0 +1,52 @@
+package scala.tools.partest
+
+import scala.language.experimental.macros
+
+object Util {
+ /**
+ * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+ * {{{
+ * trace> "".isEmpty
+ * res: Boolean = true
+ *
+ * }}}
+ *
+ * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+ * test code in a string.
+ */
+ def trace[A](a: A) = macro traceImpl[A]
+
+ import scala.reflect.macros.Context
+ def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+ import c.universe._
+ import definitions._
+
+ // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
+ // because this impairs reflection refactorings
+ //
+ // val exprCode = c.literal(show(a.tree))
+ // val exprType = c.literal(show(a.actualType))
+ // reify {
+ // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+ // a.splice
+ // }
+
+ c.Expr(Block(
+ List(Apply(
+ Select(Ident(PredefModule), TermName("println")),
+ List(Apply(
+ Select(Apply(
+ Select(Ident(ScalaPackage), TermName("StringContext")),
+ List(
+ Literal(Constant("trace> ")),
+ Literal(Constant("\\nres: ")),
+ Literal(Constant(" = ")),
+ Literal(Constant("\\n")))),
+ TermName("s")),
+ List(
+ Literal(Constant(show(a.tree))),
+ Literal(Constant(show(a.actualType))),
+ a.tree))))),
+ a.tree))
+ }
+} \ No newline at end of file
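A usage sketch for the `trace` macro above, as it might appear in a partest run test (printed output is approximate):

    import scala.tools.partest.Util.trace

    object Test extends App {
      val xs = trace(List(1, 2, 3).reverse)
      // trace> List(1, 2, 3).reverse
      // res: List[Int] = List(3, 2, 1)
      assert(xs == List(3, 2, 1))
    }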
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
index 18dd740208..18dd740208 100644
--- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
+++ b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala
diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
new file mode 100644
index 0000000000..d6b62e1d9e
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java
@@ -0,0 +1,82 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A simple profiler class that counts method invocations. It is used in byte-code instrumentation by inserting
+ * a call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented method.
+ *
+ * WARNING: This class is an INTERNAL implementation detail and should never be used directly. It's made public only
+ * because it must be universally accessible for instrumentation needs. If you want to profile your test, use
+ * {@link Instrumentation} instead.
+ */
+public class Profiler {
+
+ private static boolean isProfiling = false;
+ private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
+
+ static public class MethodCallTrace {
+ final String className;
+ final String methodName;
+ final String methodDescriptor;
+
+ public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
+ this.className = className;
+ this.methodName = methodName;
+ this.methodDescriptor = methodDescriptor;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof MethodCallTrace)) {
+ return false;
+ } else {
+ MethodCallTrace that = (MethodCallTrace) obj;
+ return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
+ }
+ }
+ @Override
+ public int hashCode() {
+ return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
+ }
+ }
+
+ public static void startProfiling() {
+ isProfiling = true;
+ }
+
+ public static void stopProfiling() {
+ isProfiling = false;
+ }
+
+ public static boolean isProfiling() {
+ return isProfiling;
+ }
+
+ public static void resetProfiling() {
+ counts = new HashMap<MethodCallTrace, Integer>();
+ }
+
+ public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
+ if (isProfiling) {
+ MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
+ Integer counter = counts.get(trace);
+ if (counter == null) {
+ counts.put(trace, 1);
+ } else {
+ counts.put(trace, counter+1);
+ }
+ }
+ }
+
+ public static Map<MethodCallTrace, Integer> getStatistics() {
+ return new HashMap<MethodCallTrace, Integer>(counts);
+ }
+
+}
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
new file mode 100644
index 0000000000..86f5e64516
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ASMTransformer.java
@@ -0,0 +1,49 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.ClassFileTransformer;
+import java.security.ProtectionDomain;
+
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassWriter;
+
+public class ASMTransformer implements ClassFileTransformer {
+
+ private boolean shouldTransform(String className) {
+ return
+ // do not instrument instrumentation logic (in order to avoid infinite recursion)
+ !className.startsWith("scala/tools/partest/instrumented/") &&
+ !className.startsWith("scala/tools/partest/javaagent/") &&
+ // we instrument all classes from empty package
+ (!className.contains("/") ||
+ // we instrument all classes from scala package
+ className.startsWith("scala/") ||
+ // we instrument all classes from `instrumented` package
+ className.startsWith("instrumented/"));
+ }
+
+ public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+ if (shouldTransform(className)) {
+ ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+ @Override protected String getCommonSuperClass(final String type1, final String type2) {
+          // Since we are not recomputing the stack frame map, this should never be called. We override this method
+          // because the default implementation uses reflection to compute the common superclass and might try to
+          // load the class that we are currently processing. That leads to weird results like swallowed exceptions
+          // and classes not being transformed.
+ throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+ ") while transforming " + className);
+ }
+ };
+ ProfilerVisitor visitor = new ProfilerVisitor(writer);
+ ClassReader reader = new ClassReader(classfileBuffer);
+ reader.accept(visitor, 0);
+ return writer.toByteArray();
+ } else {
+ return classfileBuffer;
+ }
+ }
+}
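
To make the package filter above concrete, the following illustrative calls (class names chosen only for the example, not taken from the commit) show what shouldTransform would return:

    shouldTransform("Foo")                                           // true:  class in the empty package
    shouldTransform("scala/collection/immutable/List")               // true:  scala/ package
    shouldTransform("instrumented/SomeTest")                         // true:  instrumented/ package
    shouldTransform("scala/tools/partest/javaagent/ASMTransformer")  // false: the agent's own classes are excluded
    shouldTransform("java/lang/String")                              // false: any other package is left untouched
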
diff --git a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
index be0fee46a2..be0fee46a2 100644
--- a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
index 8306327b14..b1b100fbb0 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -10,7 +10,7 @@ import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;
public class ProfilerVisitor extends ClassVisitor implements Opcodes {
-
+
private static String profilerClass = "scala/tools/partest/instrumented/Profiler";
public ProfilerVisitor(final ClassVisitor cv) {
@@ -53,7 +53,7 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes {
"(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
}
}
- return mv;
+ return mv;
}
}
diff --git a/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
new file mode 100644
index 0000000000..819a5cc39b
--- /dev/null
+++ b/src/partest-javaagent/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -0,0 +1,25 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.Instrumentation;
+import java.lang.instrument.UnmodifiableClassException;
+
+/**
+ * Profiling agent that instruments byte-code to insert calls to
+ * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
+ * by using the ASM library for byte-code manipulation.
+ */
+public class ProfilingAgent {
+ public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
+    // NOTE: we are adding a transformer that won't be applied to classes that are already loaded.
+    // This should be ok because premain is executed before main, so the Scala library
+    // and the test case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
+    // not depend on the Scala library. In case our assumptions are wrong, we can always insert a call to
+    // inst.retransformClasses.
+ inst.addTransformer(new ASMTransformer(), false);
+ }
+}
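
For context, wiring such an agent follows the standard java.lang.instrument contract. The sketch below is illustrative: <agent-jar>, <test-classpath> and SomeInstrumentedTest are placeholders (the concrete jar name comes from the build), and it assumes the renamed MANIFEST.MF above declares the premain class.

    # MANIFEST.MF entry expected for the agent jar
    Premain-Class: scala.tools.partest.javaagent.ProfilingAgent

    # launching a test with the agent attached
    java -javaagent:<agent-jar> -cp <test-classpath> SomeInstrumentedTest
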
diff --git a/src/partest/README b/src/partest/README
deleted file mode 100644
index 17594dbb1e..0000000000
--- a/src/partest/README
+++ /dev/null
@@ -1,31 +0,0 @@
-How partest chooses the compiler / library:
-
- * ''-Dpartest.build=build/four-pack'' -> will search for libraries in
- ''lib'' directory of given path
- * ''--pack'' -> will set ''partest.build=build/pack'', and run all tests.
- add ''--[kind]'' to run a selected set of tests.
- * auto detection:
- - partest.build property -> ''bin'' / ''lib'' directories
- - distribution (''dists/latest'')
- - supersabbus pack (''build/pack'')
- - sabbus quick (''build/quick'')
- - installed dist (test files in ''misc/scala-test/files'')
-
-How partest choses test files: the test files must be accessible from
-the directory on which partest is run. So the test files must be either
-at:
- * ./test/files
- * ./files (cwd is "test")
- * ./misc/scala-test/files (installed scala distribution)
-
-Other arguments:
- * --pos next files test a compilation success
- * --neg next files test a compilation failure
- * --run next files test the interpreter and all backends
- * --jvm next files test the JVM backend
- * --res next files test the resident compiler
- * --shootout next files are shootout tests
- * --script next files test the script runner
- * ''-Dpartest.scalac_opts=...'' -> add compiler options
- * ''--verbose'' -> print verbose messages
- * ''-Dpartest.debug=true'' -> print debug messages
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
deleted file mode 100644
index df4a81dee2..0000000000
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.nsc._
-
-/** For testing compiler internals directly.
- * Each source code string in "sources" will be compiled, and
- * the check function will be called with the source code and the
- * resulting CompilationUnit. The check implementation should
- * test for what it wants to test and fail (via assert or other
- * exception) if it is not happy.
- */
-abstract class CompilerTest extends DirectTest {
- def check(source: String, unit: global.CompilationUnit): Unit
-
- lazy val global: Global = newCompiler()
- lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
- import global._
- import definitions.{ compilerTypeFromTag }
-
- override def extraSettings = "-usejavacp -d " + testOutput.path
-
- def show() = (sources, units).zipped foreach check
-
- // Override at least one of these...
- def code = ""
- def sources: List[String] = List(code)
-
- // Utility functions
- class MkType(sym: Symbol) {
- def apply[M](implicit t: ru.TypeTag[M]): Type =
- if (sym eq NoSymbol) NoType
- else appliedType(sym, compilerTypeFromTag(t))
- }
- implicit def mkMkType(sym: Symbol) = new MkType(sym)
-
- def allMembers(root: Symbol): List[Symbol] = {
- def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
- val latest = roots flatMap (_.info.members) filterNot (seen contains _)
- if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
- else loop(seen ++ latest, latest)
- }
- loop(Set(), List(root))
- }
-
- class SymsInPackage(pkgName: String) {
- def pkg = rootMirror.getPackage(pkgName)
- def classes = allMembers(pkg) filter (_.isClass)
- def modules = allMembers(pkg) filter (_.isModule)
- def symbols = classes ++ terms filterNot (_ eq NoSymbol)
- def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
- def tparams = classes flatMap (_.info.typeParams)
- def tpes = symbols map (_.tpe) distinct
- }
-}
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
deleted file mode 100644
index 2e6c3baa02..0000000000
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ /dev/null
@@ -1,128 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.nsc._
-import settings.ScalaVersion
-import util.{ SourceFile, BatchSourceFile }
-import reporters.{Reporter, ConsoleReporter}
-import scala.tools.cmd.CommandLineParser
-
-/** A class for testing code which is embedded as a string.
- * It allows for more complete control over settings, compiler
- * configuration, sequence of events, etc. than does partest.
- */
-abstract class DirectTest extends App {
- // The program being tested in some fashion
- def code: String
- // produce the output to be compared against a checkfile
- def show(): Unit
-
- // the test file or dir, and output directory
- def testPath = SFile(sys.props("partest.test-path"))
- def testOutput = Directory(sys.props("partest.output"))
-
- // override to add additional settings with strings
- def extraSettings: String = ""
- // a default Settings object
- def settings: Settings = newSettings(CommandLineParser tokenize extraSettings)
- // a custom Settings object
- def newSettings(args: List[String]) = {
- val s = new Settings
- val allArgs = args ++ (CommandLineParser tokenize debugSettings)
- log("newSettings: allArgs = " + allArgs)
- s processArguments (allArgs, true)
- s
- }
- // new compiler
- def newCompiler(args: String*): Global = {
- val settings = newSettings((CommandLineParser tokenize ("-d \"" + testOutput.path + "\" " + extraSettings)) ++ args.toList)
- newCompiler(settings)
- }
-
- def newCompiler(settings: Settings): Global = Global(settings, reporter(settings))
-
- def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
-
- private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
- codes.toList.zipWithIndex map {
- case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
- }
-
- def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
- def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
-
- def compileString(global: Global)(sourceCode: String): Boolean = {
- withRun(global)(_ compileSources newSources(sourceCode))
- !global.reporter.hasErrors
- }
-
- def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
- sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
- }
-
- def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
- withRun(global) { run =>
- run compileSources files
- run.units.toList
- }
- }
-
- def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
- val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
- if (global.reporter.hasErrors) {
- global.reporter.flush()
- sys.error("Compilation failure.")
- }
- units
- }
-
- def withRun[T](global: Global)(f: global.Run => T): T = {
- global.reporter.reset()
- f(new global.Run)
- }
-
- // compile the code, optionally first adding to the settings
- def compile(args: String*) = compileString(newCompiler(args: _*))(code)
-
- /** Constructor/main body **/
- try show()
- catch { case t: Exception => println(t.getMessage) ; t.printStackTrace ; sys.exit(1) }
-
- /** Debugger interest only below this line **/
- protected def isDebug = (sys.props contains "partest.debug") || (sys.env contains "PARTEST_DEBUG")
- protected def debugSettings = sys.props.getOrElse("partest.debug.settings", "")
-
- final def log(msg: => Any) {
- if (isDebug) Console.err println msg
- }
-
- /**
- * Run a test only if the current java version is at least the version specified.
- */
- def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
-
- class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
- val javaVersion = System.getProperty("java.specification.version")
-
- // the "ScalaVersion" class parses Java specification versions just fine
- val requiredJavaVersion = ScalaVersion(version)
- val executingJavaVersion = ScalaVersion(javaVersion)
- val shouldRun = executingJavaVersion >= requiredJavaVersion
- val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
-
- def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
-
- /*
- * If the current java version is at least 'version' then 'yesRun' is evaluated
- * otherwise 'fallback' is
- */
- def otherwise(fallback: =>A): A = {
- logInfo()
- if (shouldRun) yesRun else fallback
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
deleted file mode 100644
index b12ec0de61..0000000000
--- a/src/partest/scala/tools/partest/IcodeTest.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import scala.tools.partest.nest.FileUtil.compareContents
-
-/** A trait for testing icode. All you need is this in a
- * partest source file:
- * {{{
- * object Test extends IcodeTest
- * }}}
- * And then the generated output will be the icode for everything
- * in that file. See source for possible customizations.
- */
-abstract class IcodeTest extends DirectTest {
- // override to check icode at a different point.
- def printIcodeAfterPhase = "icode"
- // override to use source code other than the file being tested.
- def code = testPath.slurp()
-
- override def extraSettings: String = "-usejavacp -Xprint-icode:" + printIcodeAfterPhase
-
- // Compile, read in all the *.icode files, delete them, and return their contents
- def collectIcode(args: String*): List[String] = {
- compile("-d" :: testOutput.path :: args.toList : _*)
- val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode")
-
- try icodeFiles sortBy (_.name) flatMap (f => f.lines.toList)
- finally icodeFiles foreach (f => f.delete())
- }
-
- // Default show() compiles the code with and without optimization and
- // outputs the diff.
- def show() {
- val lines1 = collectIcode("")
- val lines2 = collectIcode("-optimise")
-
- println(compareContents(lines1, lines2))
- }
-}
diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala
deleted file mode 100644
index 58d25d2f01..0000000000
--- a/src/partest/scala/tools/partest/MemoryTest.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-abstract class MemoryTest {
- def maxDelta: Double
- def calcsPerIter: Int
- def calc(): Unit
-
- def main(args: Array[String]) {
- val rt = Runtime.getRuntime()
- def memUsage() = {
- import java.lang.management._
- import scala.collection.JavaConverters._
- val pools = ManagementFactory.getMemoryPoolMXBeans.asScala
- pools.map(_.getUsage.getUsed).sum / 1000000d
- }
-
- val history = scala.collection.mutable.ListBuffer[Double]()
- def stressTestIter() = {
- var i = 0
- while (i < calcsPerIter) { calc(); i += 1 }
- 1 to 5 foreach (_ => rt.gc())
- history += memUsage
- }
-
- 1 to 5 foreach (_ => stressTestIter())
- val reference = memUsage()
- 1 to 5 foreach (_ => stressTestIter())
- 1 to 5 foreach (_ => rt.gc())
- val result = memUsage()
- history += result
-
- val delta = result - reference
- if (delta > maxDelta) {
- println("FAILED")
- history foreach (mb => println(mb + " Mb"))
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
deleted file mode 100644
index 8478edeb4d..0000000000
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.tools
-package partest
-
-import scala.concurrent.duration.Duration
-import scala.tools.nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import java.lang.Runtime.{ getRuntime => runtime }
-
-object PartestDefaults {
-
- def testRootName = propOrNone("partest.root")
- def srcDirName = propOrElse("partest.srcdir", "files")
- def testRootDir = testRootName map (x => Directory(x))
-
- // def classPath = propOrElse("partest.classpath", "")
- def classPath = PathResolver.Environment.javaUserClassPath // XXX
-
- def javaCmd = propOrElse("partest.javacmd", "java")
- def javacCmd = propOrElse("partest.javac_cmd", "javac")
- def javaOpts = propOrElse("partest.java_opts", "")
- def scalacOpts = propOrElse("partest.scalac_opts", "")
-
- def testBuild = propOrNone("partest.build")
- def errorCount = propOrElse("partest.errors", "0").toInt
- def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse runtime.availableProcessors
- def waitTime = propOrNone("partest.timeout") map (Duration.apply) getOrElse Duration("4 hours")
-
- //def timeout = "1200000" // per-test timeout
-}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
deleted file mode 100644
index 8b88021dbf..0000000000
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ /dev/null
@@ -1,207 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import scala.util.Properties.setProp
-import scala.tools.ant.sabbus.CompilationPathProperty
-import java.lang.reflect.Method
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{ Reference, FileSet}
-import org.apache.tools.ant.types.Commandline.Argument
-import scala.tools.ant.ScalaTask
-
-/** An Ant task to execute the Scala test suite (NSC).
- *
- * This task can take the following parameters as attributes:
- * - `srcdir`,
- * - `classpath`,
- * - `classpathref`,
- * - `erroronfailed`,
- * - `javacmd`,
- * - `javaccmd`,
- * - `scalacopts`,
- * - `debug`,
- * - `junitreportdir`.
- *
- * It also takes the following parameters as nested elements:
- * - `compilationpath`.
- *
- * @author Philippe Haller
- */
-class PartestTask extends Task with CompilationPathProperty with ScalaTask {
- type Path = org.apache.tools.ant.types.Path
-
- private var kinds: List[String] = Nil
- private var classpath: Option[Path] = None
- private var debug = false
- private var errorOnFailed: Boolean = true
- private var jUnitReportDir: Option[File] = None
- private var javaccmd: Option[File] = None
- private var javacmd: Option[File] = Option(sys.props("java.home")) map (x => new File(x, "bin/java"))
- private var scalacArgs: Option[Seq[Argument]] = None
- private var srcDir: Option[String] = None
- private var colors: Int = 0
-
- def setSrcDir(input: String) {
- srcDir = Some(input)
- }
-
- def setColors(input: String) {
- try colors = input.toInt catch { case _: NumberFormatException => () }
- if (colors > 0)
- sys.props("partest.colors") = colors.toString
- }
-
- def setClasspath(input: Path) {
- if (classpath.isEmpty)
- classpath = Some(input)
- else
- classpath.get.append(input)
- }
-
- def createClasspath(): Path = {
- if (classpath.isEmpty) classpath = Some(new Path(getProject()))
- classpath.get.createPath()
- }
-
- def setClasspathref(input: Reference) {
- createClasspath().setRefid(input)
- }
- def setErrorOnFailed(input: Boolean) {
- errorOnFailed = input
- }
-
- def setJavaCmd(input: File) {
- javacmd = Some(input)
- }
-
- def setKinds(input: String) {
- kinds = words(input)
- }
-
- def setJavacCmd(input: File) {
- javaccmd = Some(input)
- }
-
- def setScalacOpts(input: String) {
- val s = input.split(' ').map { s => val a = new Argument; a.setValue(s); a }
- scalacArgs = Some(scalacArgs.getOrElse(Seq()) ++ s)
- }
-
- def createCompilerArg(): Argument = {
- val a = new Argument
- scalacArgs = Some(scalacArgs.getOrElse(Seq()) :+ a)
- a
- }
-
- def setDebug(input: Boolean) {
- debug = input
- }
-
- def setJUnitReportDir(input: File) {
- jUnitReportDir = Some(input)
- }
-
- override def execute() {
- if (debug || sys.props.contains("partest.debug")) {
- nest.NestUI.setDebug()
- }
-
- srcDir foreach (x => setProp("partest.srcdir", x))
-
- val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.")
- val cpfiles = classpath.list map { fs => new File(fs) } toList
- def findCp(name: String) = cpfiles find (f =>
- (f.getName == s"scala-$name.jar")
- || (f.absolutePathSegments endsWith Seq("classes", name))
- ) getOrElse sys.error(s"Provided classpath does not contain a Scala $name element.")
-
- val scalaLibrary = findCp("library")
- val scalaReflect = findCp("reflect")
- val scalaCompiler = findCp("compiler")
- val scalaPartest = findCp("partest")
- val scalaActors = findCp("actors")
-
- def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
- val parts = a.getParts
- if (parts eq null) Nil else parts.toSeq
- })
-
- val antRunner = new scala.tools.partest.nest.AntRunner
- val antFileManager = antRunner.fileManager
-
- // antFileManager.failed = runFailed
- antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
- antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
- antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
- antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
- antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
- antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
-
- javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
- javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
- scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
-
- def runSet(kind: String, files: Array[File]): (Int, Int, List[String]) = {
- if (files.isEmpty) (0, 0, List())
- else {
- log(s"Running ${files.length} tests in '$kind' at $now")
- // log(s"Tests: ${files.toList}")
- val results = antRunner.reflectiveRunTestsForFiles(files, kind)
- val (passed, failed) = results partition (_.isOk)
- val numPassed = passed.size
- val numFailed = failed.size
- def failedMessages = failed map (_.longStatus)
-
- log(s"Completed '$kind' at $now")
-
- // create JUnit Report xml files if directory was specified
- jUnitReportDir foreach { d =>
- d.mkdir
-
- val report = testReport(kind, results, numPassed, numFailed)
- scala.xml.XML.save(d.getAbsolutePath+"/"+kind+".xml", report)
- }
-
- (numPassed, numFailed, failedMessages)
- }
- }
-
- val _results = kinds map (k => runSet(k, TestKinds testsFor k map (_.jfile) toArray))
- val allSuccesses = _results map (_._1) sum
- val allFailures = _results map (_._2) sum
- val allFailedPaths = _results flatMap (_._3)
-
- def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0)
- "Test suite finished with %d case%s failing:\n".format(allFailures, s)+
- allFailedPaths.mkString("\n")
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
- }
-
- private def oneResult(res: TestState) =
- <testcase name={res.testIdent}>{
- if (res.isOk) scala.xml.NodeSeq.Empty
- else <failure message="Test failed"/>
- }</testcase>
-
- private def testReport(kind: String, results: Iterable[TestState], succs: Int, fails: Int) =
- <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- <properties/>
- {
- results map oneResult
- }
- </testsuite>
-}
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
deleted file mode 100644
index 1f1c8a95ea..0000000000
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.partest
-
-import java.security._
-import java.util._
-
-abstract class SecurityTest extends App {
- def throwIt(x: Any) = throw new AccessControlException("" + x)
- def propertyCheck(p: PropertyPermission): Unit = throwIt(p)
-
- def check(perm: Permission): Unit = perm match {
- case p: PropertyPermission => propertyCheck(p)
- case _ => ()
- }
-}
diff --git a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
deleted file mode 100644
index 7f3604c86c..0000000000
--- a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.reporters.StoreReporter
-import scala.collection.mutable
-
-trait StoreReporterDirectTest extends DirectTest {
- lazy val storeReporter: StoreReporter = new scala.tools.nsc.reporters.StoreReporter()
-
- /** Discards all but the first message issued at a given position. */
- def filteredInfos: Seq[storeReporter.Info] = storeReporter.infos.groupBy(_.pos).map(_._2.head).toList
-
- /** Hook into [[scala.tools.partest.DirectTest]] to install the custom reporter */
- override def reporter(settings: Settings) = storeReporter
-}
diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala
deleted file mode 100644
index b4e8afd0d2..0000000000
--- a/src/partest/scala/tools/partest/TestKinds.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package scala.tools
-package partest
-
-import nest.PathSettings.srcDir
-
-object TestKinds {
- val standardKinds = ("pos neg run jvm res scalacheck scalap specialized instrumented presentation ant" split "\\s+").toList
-
- def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml")
- def denotesTestDir(p: Path) = kindOf(p) match {
- case "res" => false
- case _ => p.isDirectory && p.extension == ""
- }
- def denotesTestPath(p: Path) = denotesTestDir(p) || denotesTestFile(p)
-
- // TODO
- def isTestForPartest(p: Path) = (
- (p.name == "intentional-failure.scala")
- || (p.path contains "test-for-partest")
- )
-
- def kindOf(p: Path) = {
- p.toAbsolute.segments takeRight 2 head
-
- // (srcDir relativize p.toCanonical).segments match {
- // case (".." :: "scaladoc" :: xs) => xs.head
- // case xs => xs.head
- // }
- }
- def logOf(p: Path) = {
- p.parent / s"${p.stripExtension}-${kindOf(p)}.log"
- // p.parent / s"${p.stripExtension}.log"
- }
-
- // true if a test path matches the --grep expression.
- private def pathMatchesExpr(path: Path, expr: String) = {
- // Matches the expression if any source file contains the expr,
- // or if the checkfile contains it, or if the filename contains
- // it (the last is case-insensitive.)
- def matches(p: Path) = (
- (p.path.toLowerCase contains expr.toLowerCase)
- || (p.fileContents contains expr)
- )
- def candidates = {
- (path changeExtension "check") +: {
- if (path.isFile) List(path)
- else path.toDirectory.deepList() filter (_.isJavaOrScala) toList
- }
- }
-
- (candidates exists matches)
- }
-
- def groupedTests(paths: List[Path]): List[(String, List[Path])] =
- (paths.distinct groupBy kindOf).toList sortBy (standardKinds indexOf _._1)
-
- /** Includes tests for testing partest. */
- private def allTestsForKind(kind: String): List[Path] =
- (srcDir / kind toDirectory).list.toList filter denotesTestPath
-
- def testsForPartest: List[Path] = standardKinds flatMap allTestsForKind filter isTestForPartest
- def testsFor(kind: String): List[Path] = allTestsForKind(kind) filterNot isTestForPartest
- def grepFor(expr: String): List[Path] = standardTests filter (t => pathMatchesExpr(t, expr))
- def standardTests: List[Path] = standardKinds flatMap testsFor
- def failedTests: List[Path] = standardTests filter (p => logOf(p).isFile)
-}
diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala
deleted file mode 100644
index e58b479e54..0000000000
--- a/src/partest/scala/tools/partest/TestState.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package scala.tools.partest
-
-import scala.tools.nsc.FatalError
-import scala.tools.nsc.util.stackTraceString
-
-sealed abstract class TestState {
- def testFile: File
- def what: String
- def reason: String
- def transcript: List[String]
-
- def isOk = false
- def isSkipped = false
- def testIdent = testFile.testIdent
- def transcriptString = transcript mkString EOL
-
- def identAndReason = testIdent + reasonString
- def status = s"$what - $identAndReason"
- def longStatus = status + transcriptString
- def reasonString = if (reason == "") "" else s" [$reason]"
-
- def shortStatus = if (isOk) "ok" else "!!"
-
- override def toString = status
-}
-
-object TestState {
- case class Uninitialized(testFile: File) extends TestState {
- def what = "uninitialized"
- def reason = what
- def transcript = Nil
- override def shortStatus = "??"
- }
- case class Pass(testFile: File) extends TestState {
- def what = "pass"
- override def isOk = true
- def transcript: List[String] = Nil
- def reason = ""
- }
- case class Updated(testFile: File) extends TestState {
- def what = "updated"
- override def isOk = true
- def transcript: List[String] = Nil
- def reason = "updated check file"
- override def shortStatus = "++"
- }
- case class Skip(testFile: File, reason: String) extends TestState {
- def what = "skip"
- override def isOk = true
- override def isSkipped = true
- def transcript: List[String] = Nil
- override def shortStatus = "--"
- }
- case class Fail(testFile: File, reason: String, transcript: List[String]) extends TestState {
- def what = "fail"
- }
- case class Crash(testFile: File, caught: Throwable, transcript: List[String]) extends TestState {
- def what = "crash"
- def reason = s"caught $caught_s - ${caught.getMessage}"
-
- private def caught_s = (caught.getClass.getName split '.').last
- override def transcriptString = nljoin(super.transcriptString, caught_s)
- }
-}
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
deleted file mode 100644
index 5c177ac962..0000000000
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package scala.tools.partest
-
-import scala.reflect.{ classTag, ClassTag }
-
-trait TestUtil {
- /** Given function and block of code, evaluates code block,
- * calls function with nanoseconds elapsed, and returns block result.
- */
- def timed[T](f: Long => Unit)(body: => T): T = {
- val start = System.nanoTime
- val result = body
- val end = System.nanoTime
-
- f(end - start)
- result
- }
- /** Times body and returns (nanos, result).
- */
- def alsoNanos[T](body: => T): (Long, T) = {
- var nanos = 0L
- val result = timed(nanos = _)(body)
-
- (nanos, result)
- }
- def nanos(body: => Unit): Long = alsoNanos(body)._1
-
- def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
- try {
- code
- assert(false, "did not throw " + classTag[T])
- } catch {
- case ex: Exception if classTag[T].runtimeClass isInstance ex =>
- }
-}
-
-// Used in tests.
-object TestUtil extends TestUtil {
-}
diff --git a/src/partest/scala/tools/partest/antlib.xml b/src/partest/scala/tools/partest/antlib.xml
deleted file mode 100644
index b3b98e853f..0000000000
--- a/src/partest/scala/tools/partest/antlib.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<antlib>
- <taskdef name="partest"
- classname="scala.tools.partest.PartestTask"/>
-</antlib>
diff --git a/src/partest/scala/tools/partest/instrumented/Profiler.java b/src/partest/scala/tools/partest/instrumented/Profiler.java
deleted file mode 100644
index e267e197e7..0000000000
--- a/src/partest/scala/tools/partest/instrumented/Profiler.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.instrumented;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting
- * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class.
- *
- * WARANING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
- * because it must be universally accessible for instrumentation needs. If you want to profile your test use
- * {@link Instrumentation} instead.
- */
-public class Profiler {
-
- private static boolean isProfiling = false;
- private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
-
- static public class MethodCallTrace {
- final String className;
- final String methodName;
- final String methodDescriptor;
-
- public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
- this.className = className;
- this.methodName = methodName;
- this.methodDescriptor = methodDescriptor;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof MethodCallTrace)) {
- return false;
- } else {
- MethodCallTrace that = (MethodCallTrace) obj;
- return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
- }
- }
- @Override
- public int hashCode() {
- return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
- }
- }
-
- public static void startProfiling() {
- isProfiling = true;
- }
-
- public static void stopProfiling() {
- isProfiling = false;
- }
-
- public static boolean isProfiling() {
- return isProfiling;
- }
-
- public static void resetProfiling() {
- counts = new HashMap<MethodCallTrace, Integer>();
- }
-
- public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
- if (isProfiling) {
- MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
- Integer counter = counts.get(trace);
- if (counter == null) {
- counts.put(trace, 1);
- } else {
- counts.put(trace, counter+1);
- }
- }
- }
-
- public static Map<MethodCallTrace, Integer> getStatistics() {
- return new HashMap<MethodCallTrace, Integer>(counts);
- }
-
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
deleted file mode 100644
index 878c8613d5..0000000000
--- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.ClassFileTransformer;
-import java.security.ProtectionDomain;
-
-import scala.tools.asm.ClassReader;
-import scala.tools.asm.ClassWriter;
-
-public class ASMTransformer implements ClassFileTransformer {
-
- private boolean shouldTransform(String className) {
- return
- // do not instrument instrumentation logic (in order to avoid infinite recursion)
- !className.startsWith("scala/tools/partest/instrumented/") &&
- !className.startsWith("scala/tools/partest/javaagent/") &&
- // we instrument all classes from empty package
- (!className.contains("/") ||
- // we instrument all classes from scala package
- className.startsWith("scala/") ||
- // we instrument all classes from `instrumented` package
- className.startsWith("instrumented/"));
- }
-
- public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
- if (shouldTransform(className)) {
- ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
- @Override protected String getCommonSuperClass(final String type1, final String type2) {
- // Since we are not recomputing stack frame map, this should never be called we override this method because
- // default implementation uses reflection for implementation and might try to load the class that we are
- // currently processing. That leads to weird results like swallowed exceptions and classes being not
- // transformed.
- throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
- ") while transforming " + className);
- }
- };
- ProfilerVisitor visitor = new ProfilerVisitor(writer);
- ClassReader reader = new ClassReader(classfileBuffer);
- reader.accept(visitor, 0);
- return writer.toByteArray();
- } else {
- return classfileBuffer;
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
deleted file mode 100644
index 3b18987040..0000000000
--- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Grzegorz Kossakowski
- */
-
-package scala.tools.partest.javaagent;
-
-import java.lang.instrument.Instrumentation;
-import java.lang.instrument.UnmodifiableClassException;
-
-/**
- * Profiling agent that instruments byte-code to insert calls to
- * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
- * by using ASM library for byte-code manipulation.
- */
-public class ProfilingAgent {
- public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
- // NOTE: we are adding transformer that won't be applied to classes that are already loaded
- // This should be ok because premain should be executed before main is executed so Scala library
- // and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
- // not depend on Scala library. In case our assumptions are wrong we can always insert call to
- // inst.retransformClasses.
- inst.addTransformer(new ASMTransformer(), false);
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
deleted file mode 100644
index 1d3b79171b..0000000000
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package nest
-
-class AntRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): List[TestState] =
- runTestsForFiles(kindFiles.toList, kind)
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
deleted file mode 100644
index b436675d3a..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-
-
-package scala.tools.partest
-package nest
-
-import java.io.{ FilenameFilter, IOException }
-import java.net.URI
-import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
-import scala.tools.nsc.{ io, util }
-import PathResolver.{ Environment, Defaults }
-
-class ConsoleFileManager extends FileManager {
- var testBuild: Option[String] = PartestDefaults.testBuild
- def testBuildFile = testBuild map (testParent / _)
-
- var testClasses: Option[String] = None
-
- def this(buildPath: String, rawClasses: Boolean) = {
- this()
- if (rawClasses)
- testClasses = Some(buildPath)
- else
- testBuild = Some(buildPath)
- // re-run because initialization of default
- // constructor must be updated
- findLatest()
- }
-
- def this(buildPath: String) = {
- this(buildPath, false)
- }
-
- def this(buildPath: String, rawClasses: Boolean, moreOpts: String) = {
- this(buildPath, rawClasses)
- SCALAC_OPTS = SCALAC_OPTS ++ moreOpts.split(' ').toSeq.filter(_.length > 0)
- }
-
- lazy val srcDir = PathSettings.srcDir
- lazy val testRootDir = PathSettings.testRoot
- lazy val testRootPath = testRootDir.toAbsolute.path
- def testParent = testRootDir.parent
-
- var CLASSPATH = PartestDefaults.classPath
- var JAVACMD = PartestDefaults.javaCmd
- var JAVAC_CMD = PartestDefaults.javacCmd
-
-
- vlog("CLASSPATH: "+CLASSPATH)
-
- if (!srcDir.isDirectory) {
- NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
- sys.exit(1)
- }
-
- CLASSPATH = {
- val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.toCanonical.path)
-
- // add all jars in libs
- (CLASSPATH :: libs.toList) mkString pathSeparator
- }
-
- def findLatest() {
- vlog("test parent: "+testParent)
-
- def prefixFileWith(parent: File, relPath: String) = (SFile(parent) / relPath).toCanonical
- def prefixFile(relPath: String) = (testParent / relPath).toCanonical
-
- if (!testClasses.isEmpty) {
- testClassesDir = Path(testClasses.get).toCanonical.toDirectory
- vlog("Running with classes in "+testClassesDir)
-
- latestLibFile = testClassesDir / "library"
- latestActorsFile = testClassesDir / "library" / "actors"
- latestReflectFile = testClassesDir / "reflect"
- latestCompFile = testClassesDir / "compiler"
- latestPartestFile = testClassesDir / "partest"
- }
- else if (testBuild.isDefined) {
- val dir = Path(testBuild.get)
- vlog("Running on "+dir)
- latestLibFile = dir / "lib/scala-library.jar"
- latestActorsFile = dir / "lib/scala-actors.jar"
- latestReflectFile = dir / "lib/scala-reflect.jar"
- latestCompFile = dir / "lib/scala-compiler.jar"
- latestPartestFile = dir / "lib/scala-partest.jar"
- }
- else {
- def setupQuick() {
- vlog("Running build/quick")
- latestLibFile = prefixFile("build/quick/classes/library")
- latestActorsFile = prefixFile("build/quick/classes/library/actors")
- latestReflectFile = prefixFile("build/quick/classes/reflect")
- latestCompFile = prefixFile("build/quick/classes/compiler")
- latestPartestFile = prefixFile("build/quick/classes/partest")
- }
-
- def setupInst() {
- vlog("Running dist (installed)")
- val p = testParent.getParentFile
- latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
- latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
- latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
- latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
- latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
- }
-
- def setupDist() {
- vlog("Running dists/latest")
- latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
- latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
- latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
- latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
- }
-
- def setupPack() {
- vlog("Running build/pack")
- latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
- latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
- latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
- latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
- }
-
- def mostRecentOf(base: String, names: String*) =
- names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
-
- // detect most recent build
- val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "reflect/reflect.properties", "library/library.properties")
- val packTime = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
- val distTime = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
- val instTime = mostRecentOf("lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
-
- val pairs = Map(
- (quickTime, () => setupQuick()),
- (packTime, () => setupPack()),
- (distTime, () => setupDist()),
- (instTime, () => setupInst())
- )
-
- // run setup based on most recent time
- pairs(pairs.keys max)()
- }
-
- LATEST_LIB = latestLibFile.getAbsolutePath
- LATEST_REFLECT = latestReflectFile.getAbsolutePath
- LATEST_COMP = latestCompFile.getAbsolutePath
- LATEST_PARTEST = latestPartestFile.getAbsolutePath
- LATEST_ACTORS = latestActorsFile.getAbsolutePath
- }
-
- var LATEST_LIB: String = ""
- var LATEST_REFLECT: String = ""
- var LATEST_COMP: String = ""
- var LATEST_PARTEST: String = ""
- var LATEST_ACTORS: String = ""
-
- var latestLibFile: File = _
- var latestActorsFile: File = _
- var latestReflectFile: File = _
- var latestCompFile: File = _
- var latestPartestFile: File = _
- //def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
- //def latestScalapFile: File = new File(latestLibFile.getParentFile, "scalap.jar")
- var testClassesDir: Directory = _
- // initialize above fields
- findLatest()
-
- /*
- def getFiles(kind: String, cond: Path => Boolean): List[File] = {
- def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
-
- val dir = Directory(srcDir / kind)
-
- if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
- else NestUI.failure("Directory '%s' not found" format dir)
-
- val files = dir.list filterNot ignoreDir filter cond toList
-
- ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
- }
- */
- var latestFjbgFile: File = _
-}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
deleted file mode 100644
index 8189446162..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ /dev/null
@@ -1,219 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-package nest
-
-import utils.Properties._
-import scala.tools.nsc.Properties.{ versionMsg, setProp }
-import scala.collection.{ mutable, immutable }
-import PathSettings.srcDir
-import TestKinds._
-import scala.reflect.internal.util.Collections.distinctBy
-import scala.tools.cmd.{ CommandLine, CommandLineParser, Instance }
-
-class ConsoleRunner(argstr: String) extends {
- val parsed = ConsoleRunnerSpec.creator(CommandLineParser tokenize argstr)
-} with DirectRunner with ConsoleRunnerSpec with Instance {
- import NestUI._
- import NestUI.color._
-
- // So we can ctrl-C a test run and still hear all
- // the buffered failure info.
- scala.sys addShutdownHook issueSummaryReport()
-
- var fileManager: ConsoleFileManager = _
-
- private var totalTests = 0
- private val passedTests = mutable.ListBuffer[TestState]()
- private val failedTests = mutable.ListBuffer[TestState]()
-
- def comment(s: String) = echo(magenta("# " + s))
- def levyJudgment() = {
- if (totalTests == 0) echoMixed("No tests to run.")
- else if (elapsedMillis == 0) echoMixed("Test Run ABORTED")
- else if (isSuccess) echoPassed("Test Run PASSED")
- else echoFailed("Test Run FAILED")
- }
-
- def passFailString(passed: Int, failed: Int, skipped: Int): String = {
- val total = passed + failed + skipped
- val isSuccess = failed == 0
- def p0 = s"$passed/$total"
- def p = ( if (isSuccess) bold(green(p0)) else p0 ) + " passed"
- def f = if (failed == 0) "" else bold(red("" + failed)) + " failed"
- def s = if (skipped == 0) "" else bold(yellow("" + skipped)) + " skipped"
-
- oempty(p, f, s) mkString ", "
- }
-
- private var summarizing = false
- private var elapsedMillis = 0L
- private var expectedFailures = 0
- private def isSuccess = failedTests.size == expectedFailures
-
- def issueSummaryReport() {
- // Don't run twice
- if (!summarizing) {
- summarizing = true
-
- val passed0 = passedTests.toList
- val failed0 = failedTests.toList
- val passed = passed0.size
- val failed = failed0.size
- val skipped = totalTests - (passed + failed)
- val passFail = passFailString(passed, failed, skipped)
- val elapsed = if (elapsedMillis > 0) " (elapsed time: " + elapsedString(elapsedMillis) + ")" else ""
- val message = passFail + elapsed
-
- if (failed0.nonEmpty) {
- if (isPartestVerbose) {
- echo(bold(cyan("##### Transcripts from failed tests #####\n")))
- failed0 foreach { state =>
- comment("partest " + state.testFile)
- echo(state.transcriptString + "\n")
- }
- }
-
- def files_s = failed0.map(_.testFile).mkString(""" \""" + "\n ")
- echo("# Failed test paths (this command will update checkfiles)")
- echo("test/partest --update-check \\\n " + files_s + "\n")
- }
-
- echo(message)
- levyJudgment()
- }
- }
-
- def run(): Unit = {
- // Early return on no args, version, or invalid args
- if (optVersion) return echo(versionMsg)
- if ((argstr == "") || optHelp) return NestUI.usage()
-
- val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath
- if (invalid.nonEmpty) {
- if (isPartestVerbose)
- invalid foreach (p => echoWarning(s"Discarding invalid test path " + p))
- else if (!isPartestTerse)
- echoWarning(s"Discarding ${invalid.size} invalid test paths")
- }
-
- optSourcePath foreach (x => setProp("partest.srcdir", x))
- optTimeout foreach (x => setProp("partest.timeout", x))
-
- fileManager =
- if (optBuildPath.isDefined) new ConsoleFileManager(optBuildPath.get)
- else if (optClassPath.isDefined) new ConsoleFileManager(optClassPath.get, true)
- else if (optPack) new ConsoleFileManager("build/pack")
- else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
-
- fileManager.updateCheck = optUpdateCheck
- fileManager.failed = optFailed
-
- val partestTests = (
- if (optSelfTest) TestKinds.testsForPartest
- else Nil
- )
-
- val grepExpr = optGrep getOrElse ""
-
- // If --grep is given we suck in every file it matches.
- val greppedTests = if (grepExpr == "") Nil else {
- val paths = grepFor(grepExpr)
- if (paths.isEmpty)
- echoWarning(s"grep string '$grepExpr' matched no tests.\n")
-
- paths.sortBy(_.toString)
- }
-
- val isRerun = optFailed
- val rerunTests = if (isRerun) TestKinds.failedTests else Nil
- def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests
-
- val givenKinds = standardKinds filter parsed.isSet
- val kinds = (
- if (optAll) standardKinds
- else if (givenKinds.nonEmpty) givenKinds
- else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all
- else Nil
- )
- val kindsTests = kinds flatMap testsFor
- val dir =
- if (fileManager.testClasses.isDefined) fileManager.testClassesDir
- else fileManager.testBuildFile getOrElse {
- fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
- }
-
- def testContributors = {
- List(
- if (partestTests.isEmpty) "" else "partest self-tests",
- if (rerunTests.isEmpty) "" else "previously failed tests",
- if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories",
- if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'",
- if (individualTests.isEmpty) "" else "specified tests"
- ) filterNot (_ == "") mkString ", "
- }
-
- def banner = {
- val vmBin = javaHome + fileSeparator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- s"""|Scala compiler classes in: $dir
- |Scala version is: $versionMsg
- |Scalac options are: ${fileManager.SCALAC_OPTS mkString " "}
- |Java binaries in: $vmBin
- |Java runtime is: $vmName
- |Java options are: $vmOpts
- |Source directory is: $srcDir
- |Available processors: ${Runtime.getRuntime().availableProcessors()}
- |Java Classpath: ${sys.props("java.class.path")}
- """.stripMargin
- }
-
- chatty(banner)
-
- val allTests: List[Path] = distinctBy(miscTests ++ kindsTests)(_.toCanonical) sortBy (_.toString)
- val grouped = (allTests groupBy kindOf).toList sortBy (x => standardKinds indexOf x._1)
-
- totalTests = allTests.size
- expectedFailures = propOrNone("partest.errors") match {
- case Some(num) => num.toInt
- case _ => 0
- }
- val expectedFailureMessage = if (expectedFailures == 0) "" else s" (expecting $expectedFailures to fail)"
- echo(s"Selected $totalTests tests drawn from $testContributors$expectedFailureMessage\n")
-
- val (_, millis) = timed {
- for ((kind, paths) <- grouped) {
- val num = paths.size
- val ss = if (num == 1) "" else "s"
- comment(s"starting $num test$ss in $kind")
- val results = runTestsForFiles(paths map (_.jfile.getAbsoluteFile), kind)
- val (passed, failed) = results partition (_.isOk)
-
- passedTests ++= passed
- failedTests ++= failed
- if (failed.nonEmpty) {
- comment(passFailString(passed.size, failed.size, 0) + " in " + kind)
- }
- echo("")
- }
- }
- this.elapsedMillis = millis
- issueSummaryReport()
- System exit ( if (isSuccess) 0 else 1 )
- }
-
- run()
-}
-
-object ConsoleRunner {
- def main(args: Array[String]): Unit = {
- new ConsoleRunner(args mkString " ")
- }
-}
-
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala b/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
deleted file mode 100644
index bb831a4964..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-package scala.tools.partest.nest
-
-import language.postfixOps
-
-import scala.tools.cmd.{ CommandLine, Interpolation, Meta, Reference, Spec }
-
-trait ConsoleRunnerSpec extends Spec with Meta.StdOpts with Interpolation {
- def referenceSpec = ConsoleRunnerSpec
- def programInfo = Spec.Info(
- "console-runner",
- "Usage: NestRunner [options] [test test ...]",
- "scala.tools.partest.nest.ConsoleRunner")
-
- heading("Test categories:")
- val optAll = "all" / "run all tests" --?
- val optPos = "pos" / "run compilation tests (success)" --?
- val optNeg = "neg" / "run compilation tests (failure)" --?
- val optRun = "run" / "run interpreter and backend tests" --?
- val optJvm = "jvm" / "run JVM backend tests" --?
- val optRes = "res" / "run resident compiler tests" --?
- val optAnt = "ant" / "run Ant tests" --?
- val optScalap = "scalap" / "run scalap tests" --?
- val optSpecialized = "specialized" / "run specialization tests" --?
- val optScalacheck = "scalacheck" / "run ScalaCheck tests" --?
- val optInstrumented = "instrumented" / "run instrumented tests" --?
- val optPresentation = "presentation" / "run presentation compiler tests" --?
-
- heading("Test runner options:")
- val optFailed = "failed" / "run only those tests that failed during the last run" --?
- val optTimeout = "timeout" / "aborts the test suite after the given amount of time" --|
- val optPack = "pack" / "pick compiler/reflect/library in build/pack, and run all tests" --?
- val optGrep = "grep" / "run all tests whose source file contains the expression given to grep" --|
- val optUpdateCheck = "update-check" / "instead of failing tests with output change, update checkfile (use with care!)" --?
- val optBuildPath = "buildpath" / "set (relative) path to build jars (ex.: --buildpath build/pack)" --|
- val optClassPath = "classpath" / "set (absolute) path to build classes" --|
- val optSourcePath = "srcpath" / "set (relative) path to test source files (ex.: --srcpath pending)" --|
-
- heading("Test output options:")
- val optShowDiff = "show-diff" / "show diffs for failed tests" --> NestUI.setDiffOnFail()
- val optVerbose = "verbose" / "show verbose progress information" --> NestUI.setVerbose()
- val optTerse = "terse" / "show terse progress information" --> NestUI.setTerse()
- val optDebug = "debug" / "enable debugging output" --> NestUI.setDebug()
-
- heading("Other options:")
- val optVersion = "version" / "show Scala version and exit" --?
- val optSelfTest = "self-test" / "run tests for partest itself" --?
- val optHelp = "help" / "show this page and exit" --?
-
-}
-
-object ConsoleRunnerSpec extends ConsoleRunnerSpec with Reference {
- type ThisCommandLine = CommandLine
- def creator(args: List[String]): ThisCommandLine = new CommandLine(ConsoleRunnerSpec, args)
-}
diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
deleted file mode 100644
index 8e5ff2abc4..0000000000
--- a/src/partest/scala/tools/partest/nest/DirectCompiler.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-import scala.tools.nsc.util.{ FakePos, stackTraceString }
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.reflect.io.AbstractFile
-import scala.reflect.internal.util.Position
-import java.io.{ BufferedReader, PrintWriter, FileReader, Writer, FileWriter }
-
-class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
- shortname = true
- // override def error(pos: Position, msg: String): Unit
-}
-
-class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
- def this(cp: String) = this(cp, _ => ())
-
- nowarnings.value = false
- encoding.value = "UTF-8"
- classpath.value = cp
-}
-
-class PartestGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) {
- // override def abort(msg: String): Nothing
- // override def globalError(msg: String): Unit
- // override def supplementErrorMessage(msg: String): String
-}
-class DirectCompiler(val fileManager: FileManager) {
- def newGlobal(settings: Settings, reporter: Reporter): PartestGlobal =
- new PartestGlobal(settings, reporter)
-
- def newGlobal(settings: Settings, logWriter: FileWriter): Global =
- newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
-
- def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
- def newSettings(outdir: String): TestSettings = {
- val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
- val s = new TestSettings(cp)
- s.outdir.value = outdir
- s
- }
-
- def compile(runner: Runner, opts0: List[String], sources: List[File]): TestState = {
- import runner.{ sources => _, _ }
-
- val testSettings = new TestSettings(ClassPath.join(fileManager.LATEST_LIB, outDir.getPath))
- val logWriter = new FileWriter(logFile)
- val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile
- val opts = fileManager.updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir)
- val command = new CompilerCommand(opts, testSettings)
- val global = newGlobal(testSettings, logWriter)
- val reporter = global.reporter.asInstanceOf[ExtConsoleReporter]
- def errorCount = reporter.ERROR.count
-
- def defineSettings(s: Settings) = {
- s.outputDirs setSingleOutput outDir.getPath
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- prependToClasspaths(s, codelib)
- s.classpath append fileManager.CLASSPATH // adding this why?
-
- // add the instrumented library version to classpath
- if (kind == "specialized")
- prependToClasspaths(s, speclib)
-
- // check that option processing succeeded
- opts0.isEmpty || command.ok
- }
-
- if (!defineSettings(testSettings))
- if (opts0.isEmpty)
- reporter.error(null, s"bad settings: $testSettings")
- else
- reporter.error(null, opts0.mkString("bad options: ", space, ""))
-
- def ids = sources.map(_.testIdent) mkString space
- vlog(s"% scalac $ids")
-
- def execCompile() =
- if (command.shouldStopWithInfo) {
- logWriter append (command getInfoMessage global)
- runner genFail "compilation stopped with info"
- } else {
- new global.Run compile sources.map(_.getPath)
- if (!reporter.hasErrors) runner.genPass()
- else {
- reporter.printSummary()
- reporter.writer.close()
- runner.genFail(s"compilation failed with $errorCount errors")
- }
- }
-
- try { execCompile() }
- catch { case t: Throwable => reporter.error(null, t.getMessage) ; runner.genCrash(t) }
- finally { logWriter.close() }
- }
-}
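Note: the removed DirectCompiler above is essentially a thin wrapper around an nsc batch compile. A minimal sketch of that flow, assuming the compiler and library jars are already on the JVM classpath (CompileSketch, outDir and sourcePaths are illustrative names, not part of the removed file):

  import scala.tools.nsc.{ Global, Settings }
  import scala.tools.nsc.reporters.ConsoleReporter

  object CompileSketch {
    def compile(outDir: String, sourcePaths: List[String]): Boolean = {
      val settings = new Settings(msg => Console.err.println(msg))
      settings.usejavacp.value = true               // assumption: scala-compiler/-library are on the JVM classpath
      settings.outputDirs setSingleOutput outDir    // same call the removed compile() uses
      val reporter = new ConsoleReporter(settings)
      val global   = new Global(settings, reporter)
      new global.Run compile sourcePaths            // mirrors `new global.Run compile sources.map(_.getPath)`
      !reporter.hasErrors
    }
  }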
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
deleted file mode 100644
index 208418047c..0000000000
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, FilenameFilter, IOException, StringWriter,
- FileInputStream, FileOutputStream, BufferedReader,
- FileReader, PrintWriter, FileWriter}
-import java.net.URI
-import scala.reflect.io.AbstractFile
-import scala.collection.mutable
-
-trait FileUtil {
- /**
- * Compares two files using difflib to produce a unified diff.
- *
- * @param f1 the first file to be compared
- * @param f2 the second file to be compared
- * @return the unified diff of the compared files or the empty string if they're equal
- */
- def compareFiles(f1: File, f2: File): String = {
- compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
- }
-
- /**
- * Compares two lists of lines using difflib to produce a unified diff.
- *
- * @param origLines the first seq of lines to be compared
- * @param newLines the second seq of lines to be compared
- * @param origName file name to be used in unified diff for `origLines`
- * @param newName file name to be used in unified diff for `newLines`
- * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal
- */
- def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
- import collection.JavaConverters._
-
- val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
- if (diff.getDeltas.isEmpty) ""
- else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
- }
-}
-object FileUtil extends FileUtil { }
-
-trait FileManager extends FileUtil {
-
- def testRootDir: Directory
- def testRootPath: String
-
- var JAVACMD: String
- var JAVAC_CMD: String
-
- var CLASSPATH: String
- var LATEST_LIB: String
- var LATEST_REFLECT: String
- var LATEST_COMP: String
- var LATEST_PARTEST: String
- var LATEST_ACTORS: String
-
- protected def relativeToLibrary(what: String): String = {
- def jarname = if (what startsWith "scala") s"$what.jar" else s"scala-$what.jar"
- if (LATEST_LIB endsWith ".jar")
- (SFile(LATEST_LIB).parent / jarname).toAbsolute.path
- else
- (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
- }
- def latestParserCBLib = relativeToLibrary("parser-combinators")
- def latestXmlLib = relativeToLibrary("xml")
- def latestScaladoc = relativeToLibrary("scaladoc")
- def latestInteractive = relativeToLibrary("interactive")
- def latestScalapFile = relativeToLibrary("scalap")
- def latestPaths = List(
- LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS,
- latestParserCBLib, latestXmlLib, latestScalapFile, latestScaladoc, latestInteractive
- )
- def latestFiles = latestPaths map (p => new java.io.File(p))
- def latestUrls = latestFiles map (_.toURI.toURL)
-
- var showDiff = false
- var updateCheck = false
- var showLog = false
- var failed = false
-
- var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
- var JAVA_OPTS = PartestDefaults.javaOpts
-
- /** Only when --debug is given. */
- lazy val testTimings = new mutable.HashMap[String, Long]
- def recordTestTiming(name: String, milliseconds: Long) =
- synchronized { testTimings(name) = milliseconds }
-
- def getLogFile(dir: File, fileBase: String, kind: String): File =
- new File(dir, fileBase + "-" + kind + ".log")
-
- def getLogFile(file: File, kind: String): File = {
- val dir = file.getParentFile
- val fileBase = basename(file.getName)
-
- getLogFile(dir, fileBase, kind)
- }
-
- def logFileExists(file: File, kind: String) =
- getLogFile(file, kind).canRead
-
- def overwriteFileWith(dest: File, file: File) =
- dest.isFile && copyFile(file, dest)
-
- def copyFile(from: File, dest: File): Boolean = {
- if (from.isDirectory) {
- assert(dest.isDirectory, "cannot copy directory to file")
- val subDir:Directory = Path(dest) / Directory(from.getName)
- subDir.createDirectory()
- from.listFiles.toList forall (copyFile(_, subDir))
- }
- else {
- val to = if (dest.isDirectory) new File(dest, from.getName) else dest
-
- try {
- SFile(to) writeAll SFile(from).slurp()
- true
- }
- catch { case _: IOException => false }
- }
- }
-
- def mapFile(file: File, replace: String => String) {
- val f = SFile(file)
-
- f.printlnAll(f.lines.toList map replace: _*)
- }
-
- /** Massage args to merge plugins and fix paths.
- * Plugin path can be relative to test root, or cwd is out.
- * While we're at it, mix in the baseline options, too.
- * That's how ant passes in the plugins dir.
- */
- def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): List[String] = {
- val dir = testRootDir
- // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual)
- def pathOrCwd(p: String) =
- if (p == ".") {
- val plugxml = "scalac-plugin.xml"
- val pout = if (out.isVirtual) Directory.makeTemp() else Path(out.path)
- val srcpath = Path(srcdir.path)
- val pd = (srcpath / plugxml).toFile
- if (pd.exists) pd copyTo (pout / plugxml)
- pout.toAbsolute
- } else Path(p)
- def absolutize(path: String) = pathOrCwd(path) match {
- case x if x.isAbsolute => x.path
- case x => (dir / x).toAbsolute.path
- }
-
- val xprefix = "-Xplugin:"
- val (xplugs, others) = args partition (_ startsWith xprefix)
- val Xplugin = if (xplugs.isEmpty) Nil else List(xprefix +
- (xplugs map (_ stripPrefix xprefix) flatMap (_ split pathSeparator) map absolutize mkString pathSeparator)
- )
- SCALAC_OPTS.toList ::: others ::: Xplugin
- }
-}
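Note: compareContents above is the whole of partest's diffing. The standalone sketch below (DiffSketch is an illustrative name) uses the same two difflib calls to produce a unified diff, or the empty string for identical input:

  import scala.collection.JavaConverters._

  object DiffSketch {
    def unifiedDiff(orig: Seq[String], updated: Seq[String]): String = {
      val patch = difflib.DiffUtils.diff(orig.asJava, updated.asJava)
      if (patch.getDeltas.isEmpty) ""               // identical inputs: empty diff
      else difflib.DiffUtils
             .generateUnifiedDiff("check", "log", orig.asJava, patch, 1)
             .asScala.mkString("\n")
    }
  }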
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
deleted file mode 100644
index e398d2ead9..0000000000
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object NestRunner {
- def main(args: Array[String]) {
- new ReflectiveRunner main (args mkString " ")
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
deleted file mode 100644
index d063c17ac0..0000000000
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import java.io.PrintWriter
-
-class Colors(enabled: => Boolean) {
- import Console._
-
- val bold = colored(BOLD)
- val yellow = colored(YELLOW)
- val green = colored(GREEN)
- val blue = colored(BLUE)
- val red = colored(RED)
- val red_b = colored(RED_B)
- val green_b = colored(GREEN_B)
- val cyan = colored(CYAN)
- val magenta = colored(MAGENTA)
-
- private def colored(code: String): String => String =
- s => if (enabled) code + s + RESET else s
-}
-
-object NestUI {
- private val testNum = new java.util.concurrent.atomic.AtomicInteger(1)
- @volatile private var testNumberFmt = "%3d"
- private def testNumber = testNumberFmt format testNum.getAndIncrement()
- def resetTestNumber(max: Int = -1) {
- testNum set 1
- val width = if (max > 0) max.toString.length else 3
- testNumberFmt = s"%${width}d"
- }
-
- var colorEnabled = sys.props contains "partest.colors"
- val color = new Colors(colorEnabled)
- import color._
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- private var _outline = ""
- private var _success = ""
- private var _failure = ""
- private var _warning = ""
- private var _default = ""
-
- private var dotCount = 0
- private val DotWidth = 72
-
- def leftFlush() {
- if (dotCount != 0) {
- normal("\n")
- dotCount = 0
- }
- }
-
- def statusLine(state: TestState) = {
- import state._
- import TestState._
- val colorizer = state match {
- case _: Skip => yellow
- case _: Updated => cyan
- case s if s.isOk => green
- case _ => red
- }
- val word = bold(colorizer(state.shortStatus))
- f"$word $testNumber - $testIdent%-40s$reasonString"
- }
-
- def reportTest(state: TestState) = {
- if (isTerse && state.isOk) {
- if (dotCount >= DotWidth) {
- outline("\n.")
- dotCount = 1
- }
- else {
- outline(".")
- dotCount += 1
- }
- }
- else {
- echo(statusLine(state))
- if (!state.isOk && isDiffy) {
- val differ = bold(red("% ")) + "diff "
- state.transcript find (_ startsWith differ) foreach (echo(_))
- }
- }
- }
-
- def echo(message: String): Unit = synchronized {
- leftFlush()
- print(message + "\n")
- }
- def chatty(msg: String) = if (isVerbose) echo(msg)
-
- def echoSkipped(msg: String) = echo(yellow(msg))
- def echoPassed(msg: String) = echo(bold(green(msg)))
- def echoFailed(msg: String) = echo(bold(red(msg)))
- def echoMixed(msg: String) = echo(bold(yellow(msg)))
- def echoWarning(msg: String) = echo(bold(red(msg)))
-
- def initialize(number: Int) = number match {
- case MANY =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.BOLD + Console.GREEN
- _failure = Console.BOLD + Console.RED
- _warning = Console.BOLD + Console.YELLOW
- _default = Console.RESET
- case SOME =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.RESET
- _failure = Console.BOLD + Console.BLACK
- _warning = Console.BOLD + Console.BLACK
- _default = Console.RESET
- case _ =>
- }
-
- def outline(msg: String) = print(_outline + msg + _default)
- def outline(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_outline + msg + _default)
- }
-
- def success(msg: String) = print(_success + msg + _default)
- def success(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_success + msg + _default)
- }
-
- def failure(msg: String) = print(_failure + msg + _default)
- def failure(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_failure + msg + _default)
- }
-
- def warning(msg: String) = print(_warning + msg + _default)
-
- def normal(msg: String) = print(_default + msg)
- def normal(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_default + msg)
- }
-
- def usage() {
- println(ConsoleRunnerSpec.programInfo.usage)
- println(ConsoleRunnerSpec.helpMsg)
- sys.exit(1)
- }
-
- var _verbose = false
- var _debug = false
- var _terse = false
- var _diff = false
-
- def isVerbose = _verbose
- def isDebug = _debug
- def isTerse = _terse
- def isDiffy = _diff
-
- def setVerbose() {
- _verbose = true
- }
- def setDebug() {
- _debug = true
- }
- def setTerse() {
- _terse = true
- }
- def setDiffOnFail() {
- _diff = true
- }
- def verbose(msg: String) {
- if (isVerbose)
- System.err.println(msg)
- }
- def debug(msg: String) {
- if (isDebug)
- System.err.println(msg)
- }
-}
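Note: a small usage sketch of the Colors helper defined at the top of the removed file; `enabled` is passed by name, so color support is re-checked on every formatting call (the status string here is made up):

  val colors = new Colors(sys.props contains "partest.colors")
  val passed = colors.bold(colors.green("!!"))
  println(s"$passed   1 - pos/t1234")   // carries ANSI codes only when partest.colors is set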
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
deleted file mode 100644
index 030c515947..0000000000
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io.{ Path, File, Directory }
-import Path._
-
-object PathSettings {
- import PartestDefaults.{ testRootDir, srcDirName }
-
- private def cwd = Directory.Current getOrElse sys.error("user.dir property not set")
- private def isPartestDir(d: Directory) = (d.name == "test") && (d / srcDirName isDirectory)
- private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
- private def findJar(files: Iterator[File], name: String): Option[File] =
- files filter (_ hasExtension "jar") find { _.name startsWith name }
- private def findJarOrFail(name: String, ds: Directory*): File = findJar(ds flatMap (_.files) iterator, name) getOrElse
- sys.error(s"'${name}.jar' not found in '${ds map (_.path) mkString ", "}'.")
-
- // Directory <root>/test
- lazy val testRoot: Directory = testRootDir getOrElse {
- val candidates: List[Directory] = (cwd :: cwd.parents) flatMap (d => List(d, Directory(d / "test")))
-
- candidates find isPartestDir getOrElse sys.error("Directory 'test' not found.")
- }
-
- // Directory <root>/test/files or .../scaladoc
- def srcDir = Directory(testRoot / srcDirName toCanonical)
-
- // Directory <root>/test/files/lib
- lazy val srcLibDir = Directory(srcDir / "lib")
-
- // Directory <root>/test/files/speclib
- lazy val srcSpecLibDir = Directory(srcDir / "speclib")
-
- lazy val srcSpecLib: File = findJar(srcSpecLibDir, "instrumented") getOrElse {
- sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
- }
-
- // Directory <root>/test/files/codelib
- lazy val srcCodeLibDir = Directory(srcDir / "codelib")
-
- lazy val srcCodeLib: File = (
- findJar(srcCodeLibDir, "code")
- orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
- getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
- )
-
- lazy val instrumentationAgentLib: File = {
- findJar(buildPackLibDir.files, "scala-partest-javaagent") getOrElse {
- sys.error("No partest-javaagent jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
- }
- }
-
- // Directory <root>/build
- lazy val buildDir: Directory = {
- val bases = testRoot :: testRoot.parents
- // In the classic "ant" build, the relevant subdirectory is called build,
- // but in the postmodern "sbt" build, it is called target. Look for both.
- val dirs = Path.onlyDirs(bases flatMap (x => List(x / "build", x / "target")))
-
- dirs.headOption getOrElse sys.error("Neither 'build' nor 'target' dir found under test root " + testRoot + ".")
- }
-
- // Directory <root>/build/pack/lib
- lazy val buildPackLibDir = Directory(buildDir / "pack" / "lib")
-
- lazy val scalaCheck: File =
- findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
- sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
- }
-
- lazy val testInterface: File = findJarOrFail("test-interface", buildPackLibDir, srcLibDir)
-
- lazy val diffUtils: File =
- findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
-
- /** The platform-specific support jar, `tools.jar`.
- */
- lazy val platformTools: Option[File] = PathResolver.SupplementalLocations.platformTools
-}
-
-class PathSettings() {
- // def classpathAsURLs: List[URL]
-}
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
deleted file mode 100644
index 3c77a03f1e..0000000000
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.Path
-import java.net.URLClassLoader
-
-/* This class is used to load an instance of DirectRunner using
- * a custom class loader.
- * The purpose is to "auto-detect" a good classpath for the
- * rest of the classes (Worker, CompileManager etc.), so that
- * the main NestRunner can be started merely by putting its
- * class on the classpath (ideally).
- */
-class ReflectiveRunner {
- // TODO: we might also use fileManager.CLASSPATH
- // to use the same classes as used by `scala` that
- // was used to start the runner.
- val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
-
- private def searchPath(option: String, as: List[String]): Option[String] = as match {
- case `option` :: r :: _ => Some(r)
- case _ :: rest => searchPath(option, rest)
- case Nil => None
- }
-
- def main(args: String) {
- val argList = (args.split("\\s")).toList
-
- if (isPartestDebug)
- showAllJVMInfo
-
- // find out which build to test
- val buildPath = searchPath("--buildpath", argList)
- val classPath = searchPath("--classpath", argList)
- val fileManager =
- if (!buildPath.isEmpty)
- new ConsoleFileManager(buildPath.get)
- else if (!classPath.isEmpty)
- new ConsoleFileManager(classPath.get, true)
- else if (argList contains "--pack")
- new ConsoleFileManager("build/pack")
- else // auto detection
- new ConsoleFileManager
-
- // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed
- // we hack into the classloader that will become parent classloader for scalac
- // this way we ensure that reflective macro lookup will pick correct Code.lift
- // it's also used to inject diffutils into the classpath when running partest from the test/partest script
- val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils, PathSettings.testInterface)
- val sepUrls = srcCodeLibAndDiff.map(_.toURI.toURL) ::: fileManager.latestUrls
- // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
- val sepLoader = new URLClassLoader(sepUrls.toArray, null)
-
- if (isPartestDebug)
- println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n "))
-
- // @partest maintainer: it seems to me that commented lines are incorrect
- // if classPath is not empty, then it has been provided by the --classpath option
- // which points to the root of Scala home (see ConsoleFileManager's testClasses and the true flag in the ctor for more information)
- // this doesn't mean that we had custom Java classpath set, so we don't have to override latestXXXFiles from the file manager
- //
- //val paths = classPath match {
- // case Some(cp) => Nil
- // case _ => files.toList map (_.path)
- //}
-
- setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*))
-
- // don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite
- //setProp("scala.home", latestLibFile.parent.parent.path)
- setProp("scala.home", "")
-
- if (isPartestDebug)
- for (prop <- List("java.class.path", "sun.boot.class.path", "java.ext.dirs"))
- println(prop + ": " + propOrEmpty(prop))
-
- try {
- val sepRunnerClass = sepLoader loadClass sepRunnerClassName
- val sepMainMethod = sepRunnerClass.getMethod("main", classOf[Array[String]])
- val cargs: Array[AnyRef] = Array(Array(args))
- sepMainMethod.invoke(null, cargs: _*)
- }
- catch {
- case cnfe: ClassNotFoundException =>
- cnfe.printStackTrace()
- NestUI.failure(sepRunnerClassName +" could not be loaded from:\n")
- sepUrls foreach (x => NestUI.failure(x + "\n"))
- }
- }
-}
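Note: the reflective bootstrap above boils down to loading a class in an isolated URLClassLoader and invoking its main method. A self-contained sketch (ReflectiveMainSketch and its parameters are illustrative):

  import java.net.{ URL, URLClassLoader }

  object ReflectiveMainSketch {
    def invokeMain(className: String, classpath: Seq[URL], args: Array[String]): Unit = {
      val loader = new URLClassLoader(classpath.toArray, null)   // null parent keeps the runner isolated
      val clazz  = loader loadClass className
      val main   = clazz.getMethod("main", classOf[Array[String]])
      main.invoke(null, args)   // args is passed as the single Array[String] argument of main
    }
  }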
diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala
deleted file mode 100644
index fa2fb99f2f..0000000000
--- a/src/partest/scala/tools/partest/nest/Runner.scala
+++ /dev/null
@@ -1,883 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-package scala.tools.partest
-package nest
-
-import java.io.{ Console => _, _ }
-import java.net.URL
-import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
-import java.util.concurrent.Executors
-import java.util.concurrent.TimeUnit.NANOSECONDS
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.duration.Duration
-import scala.io.Codec
-import scala.reflect.internal.FatalError
-import scala.sys.process.{ Process, ProcessLogger }
-import scala.tools.nsc.Properties.{ envOrElse, isWin, jdkHome, javaHome, propOrElse, propOrEmpty, setProp }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ Exceptional, ScalaClassLoader, stackTraceString }
-import scala.tools.scalap.Main.decompileScala
-import scala.tools.scalap.scalasig.ByteCode
-import scala.util.{ Try, Success, Failure }
-import ClassPath.{ join, split }
-import PartestDefaults.{ javaCmd, javacCmd }
-import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
-
-trait PartestRunSettings {
- def gitPath: Path
- def reportPath: Path
- def logPath: Path
-
- def testPaths: List[Path]
-
- def gitDiffOptions: List[String]
- def extraScalacOptions: List[String]
- def extraJavaOptions: List[String]
-}
-
-class TestTranscript {
- import NestUI.color._
- private val buf = ListBuffer[String]()
- private def pass(s: String) = bold(green("% ")) + s
- private def fail(s: String) = bold(red("% ")) + s
-
- def add(action: String): this.type = { buf += action ; this }
- def append(text: String) { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) }
-
- // Colorize prompts according to pass/fail
- def fail: List[String] = buf.toList match {
- case Nil => Nil
- case xs => (xs.init map pass) :+ fail(xs.last)
- }
-}
-
-/** Run a single test. Rubber meets road. */
-class Runner(val testFile: File, fileManager: FileManager, val testRunParams: TestRunParams) {
- import fileManager._
-
- // Override to true to have the outcome of this test displayed
- // whether it passes or not; in general only failures are reported,
- // except for a . per passing test to show progress.
- def isEnumeratedTest = false
-
- private var _lastState: TestState = null
- private var _transcript = new TestTranscript
-
- def lastState = if (_lastState == null) Uninitialized(testFile) else _lastState
- def setLastState(s: TestState) = _lastState = s
- def transcript: List[String] = _transcript.fail ++ logFile.fileLines
- def pushTranscript(msg: String) = _transcript add msg
-
- val parentFile = testFile.getParentFile
- val kind = parentFile.getName
- val fileBase = basename(testFile.getName)
- val logFile = new File(parentFile, s"$fileBase-$kind.log")
- val outFile = logFile changeExtension "obj"
- val checkFile = testFile changeExtension "check"
- val flagsFile = testFile changeExtension "flags"
- val testIdent = testFile.testIdent // e.g. pos/t1234
-
- lazy val outDir = { outFile.mkdirs() ; outFile }
-
- type RanOneTest = (Boolean, LogContext)
-
- def showCrashInfo(t: Throwable) {
- System.err.println(s"Crashed running test $testIdent: $t")
- if (!isPartestTerse)
- System.err.println(stackTraceString(t))
- }
- protected def crashHandler: PartialFunction[Throwable, TestState] = {
- case t: InterruptedException =>
- genTimeout()
- case t: Throwable =>
- showCrashInfo(t)
- logFile.appendAll(stackTraceString(t))
- genCrash(t)
- }
-
- def genPass() = Pass(testFile)
- def genFail(reason: String) = Fail(testFile, reason, _transcript.fail)
- def genTimeout() = Fail(testFile, "timed out", _transcript.fail)
- def genCrash(caught: Throwable) = Crash(testFile, caught, _transcript.fail)
- def genUpdated() = Updated(testFile)
-
- def speclib = PathSettings.srcSpecLib.toString // specialization lib
- def codelib = PathSettings.srcCodeLib.toString // reify lib
-
- // Prepend to a classpath, but without incurring duplicate entries
- def prependTo(classpath: String, path: String): String = {
- val segments = ClassPath split classpath
-
- if (segments startsWith path) classpath
- else ClassPath.join(path :: segments distinct: _*)
- }
-
- def prependToJavaClasspath(path: String) {
- val jcp = sys.props.getOrElse("java.class.path", "")
- prependTo(jcp, path) match {
- case `jcp` =>
- case cp => sys.props("java.class.path") = cp
- }
- }
- def prependToClasspaths(s: Settings, path: String) {
- prependToJavaClasspath(path)
- val scp = s.classpath.value
- prependTo(scp, path) match {
- case `scp` =>
- case cp => s.classpath.value = cp
- }
- }
-
- private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
-
- def javac(files: List[File]): TestState = {
- // compile using command-line javac compiler
- val args = Seq(
- javacCmd,
- "-d",
- outDir.getAbsolutePath,
- "-classpath",
- join(outDir.toString, CLASSPATH)
- ) ++ files.map(_.getAbsolutePath)
-
- pushTranscript(args mkString " ")
- val captured = StreamCapture(runCommand(args, logFile))
- if (captured.result) genPass() else {
- logFile appendAll captured.stderr
- genFail("java compilation failed")
- }
- }
-
- def testPrompt = kind match {
- case "res" => "nsc> "
- case _ => "% "
- }
-
- /** Evaluate an action body and update the test state.
- * @param failFn optionally map a result to a test state.
- */
- def nextTestAction[T](body: => T)(failFn: PartialFunction[T, TestState]): T = {
- val result = body
- setLastState( if (failFn isDefinedAt result) failFn(result) else genPass() )
- result
- }
- def nextTestActionExpectTrue(reason: String, body: => Boolean): Boolean = (
- nextTestAction(body) { case false => genFail(reason) }
- )
- def nextTestActionFailing(reason: String): Boolean = nextTestActionExpectTrue(reason, false)
-
- private def assembleTestCommand(outDir: File, logFile: File): List[String] = {
- // check whether there is a ".javaopts" file
- val argsFile = testFile changeExtension "javaopts"
- val argString = file2String(argsFile)
- if (argString != "")
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
- val testFullPath = testFile.getAbsolutePath
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // partest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
- val propertyOptions = List(
- "-Dfile.encoding=UTF-8",
- "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
- "-Dpartest.output="+outDir.getAbsolutePath,
- "-Dpartest.lib="+LATEST_LIB,
- "-Dpartest.reflect="+LATEST_REFLECT,
- "-Dpartest.cwd="+outDir.getParent,
- "-Dpartest.test-path="+testFullPath,
- "-Dpartest.testname="+fileBase,
- "-Djavacmd="+javaCmd,
- "-Djavaccmd="+javacCmd,
- "-Duser.language=en",
- "-Duser.country=US"
- ) ++ extras
-
- val classpath = if (extraClasspath != "") join(extraClasspath, CLASSPATH) else CLASSPATH
-
- javaCmd +: (
- (JAVA_OPTS.split(' ') ++ extraJavaOptions.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "").toList ++ Seq(
- "-classpath",
- join(outDir.toString, classpath)
- ) ++ propertyOptions ++ Seq(
- "scala.tools.nsc.MainGenericRunner",
- "-usejavacp",
- "Test",
- "jvm"
- )
- )
- }
-
- /** Runs command redirecting standard out and
- * error out to output file.
- */
- private def runCommand(args: Seq[String], outFile: File): Boolean = {
- //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
- val pl = ProcessLogger(outFile)
- val nonzero = 17 // rounding down from 17.3
- def run: Int = {
- val p = Process(args) run pl
- try p.exitValue
- catch {
- case e: InterruptedException =>
- NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
- p.destroy
- nonzero
- case t: Throwable =>
- NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
- p.destroy
- throw t
- }
- finally pl.close()
- }
- (pl buffer run) == 0
- }
-
- private def execTest(outDir: File, logFile: File): Boolean = {
- val cmd = assembleTestCommand(outDir, logFile)
-
- pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName)
- nextTestAction(runCommand(cmd, logFile)) {
- case false =>
- _transcript append EOL + logFile.fileContents
- genFail("non-zero exit code")
- }
- }
-
- override def toString = s"""Test($testIdent, lastState = $lastState)"""
-
- // result is unused
- def newTestWriters() = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- // diff = ""
-
- ((swr, wr))
- }
-
- def fail(what: Any) = {
- NestUI.verbose("scalac: compilation of "+what+" failed\n")
- false
- }
-
- /** Filter the check file for conditional blocks.
- * The check file can contain lines of the form:
- * `#partest java7`
- * where the line contains a conventional flag name.
- * If the flag tests true, succeeding lines are retained
- * (removed on false) until the next #partest flag.
- * A missing flag evaluates the same as true.
- */
- def filteredCheck: Seq[String] = {
- import scala.util.Properties.{javaVersion, isAvian}
- // use lines in block so labeled? Default to sorry, Charlie.
- def retainOn(expr: String) = {
- val f = expr.trim
- def flagWasSet(f: String) = fileManager.SCALAC_OPTS contains f
- val (invert, token) =
- if (f startsWith "!") (true, f drop 1) else (false, f)
- val cond = token.trim match {
- case "java7" => javaVersion startsWith "1.7"
- case "java6" => javaVersion startsWith "1.6"
- case "avian" => isAvian
- case "true" => true
- case "-optimise" | "-optimize"
- => flagWasSet("-optimise") || flagWasSet("-optimize")
- case flag if flag startsWith "-"
- => flagWasSet(flag)
- case rest => rest.isEmpty
- }
- if (invert) !cond else cond
- }
- val prefix = "#partest"
- val b = new ListBuffer[String]()
- var on = true
- for (line <- file2String(checkFile).lines) {
- if (line startsWith prefix) {
- on = retainOn(line stripPrefix prefix)
- } else if (on) {
- b += line
- }
- }
- b.toList
- }
-
- def currentDiff = {
- val logged = augmentString(file2String(logFile)).lines.toList
- val (other, othername) =
- if (checkFile.canRead) (filteredCheck, checkFile.getName) else (Nil, "empty")
- compareContents(logged, other, logFile.getName, othername)
- }
-
- val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead)
- val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options")
- // --color=always --word-diff
-
- def gitDiff(f1: File, f2: File): Option[String] = {
- try gitRunner map { git =>
- val cmd = s"$git diff $gitDiffOptions $f1 $f2"
- val diff = Process(cmd).lines_!.drop(4).map(_ + "\n").mkString
-
- "\n" + diff
- }
- catch { case t: Exception => None }
- }
-
- /** Normalize the log output by applying test-specific filters
- * and fixing filesystem-specific paths.
- *
- * Line filters are picked up from `filter: pattern` at the top of sources.
- * The filtered line is detected with a simple "contains" test,
- * and yes, "filter" means "filter out" in this context.
- *
- * File paths are detected using the absolute path of the test root.
- * A string that looks like a file path is normalized by replacing
- * the leading segments (the root) with "$ROOT" and by replacing
- * any Windows backslashes with the one true file separator char.
- */
- def normalizeLog() {
- // Apply judiciously; there are line comments in the "stub implementations" error output.
- val slashes = """[/\\]+""".r
- def squashSlashes(s: String) = slashes replaceAllIn (s, "/")
-
- // this string identifies a path and is also snipped from log output.
- // to preserve more of the path, could use fileManager.testRootPath
- val elided = parentFile.getAbsolutePath
-
- // something to mark the elision in the log file (disabled)
- val ellipsis = "" //".../" // using * looks like a comment
-
- // no spaces in test file paths below root, because otherwise how to detect end of path string?
- val pathFinder = raw"""(?i)\Q${elided}${File.separator}\E([\${File.separator}\w]*)""".r
- def canonicalize(s: String): String = (
- pathFinder replaceAllIn (s, m => ellipsis + squashSlashes(m group 1))
- )
-
- def masters = {
- val files = List(new File(parentFile, "filters"), new File(PathSettings.srcDir.path, "filters"))
- files filter (_.exists) flatMap (_.fileLines) map (_.trim) filter (s => !(s startsWith "#"))
- }
- val filters = toolArgs("filter", split = false) ++ masters
- val elisions = ListBuffer[String]()
- //def lineFilter(s: String): Boolean = !(filters exists (s contains _))
- def lineFilter(s: String): Boolean = (
- filters map (_.r) forall { r =>
- val res = (r findFirstIn s).isEmpty
- if (!res) elisions += s
- res
- }
- )
-
- logFile.mapInPlace(canonicalize)(lineFilter)
- if (isPartestVerbose && elisions.nonEmpty) {
- import NestUI.color._
- val emdash = bold(yellow("--"))
- pushTranscript(s"filtering ${logFile.getName}$EOL${elisions mkString (emdash, EOL + emdash, EOL)}")
- }
- }
-
- def diffIsOk: Boolean = {
- // always normalize the log first
- normalizeLog()
- val diff = currentDiff
- // if diff is not empty, is update needed?
- val updating: Option[Boolean] = (
- if (diff == "") None
- else Some(fileManager.updateCheck)
- )
- pushTranscript(s"diff $logFile $checkFile")
- nextTestAction(updating) {
- case Some(true) =>
- NestUI.verbose("Updating checkfile " + checkFile)
- checkFile writeAll file2String(logFile)
- genUpdated()
- case Some(false) =>
- // Get a word-highlighted diff from git if we can find it
- val bestDiff = if (updating.isEmpty) "" else {
- if (checkFile.canRead)
- gitDiff(logFile, checkFile) getOrElse {
- s"diff $logFile $checkFile\n$diff"
- }
- else diff
- }
- _transcript append bestDiff
- genFail("output differs")
- // TestState.fail("output differs", "output differs",
- // genFail("output differs")
- // TestState.Fail("output differs", bestDiff)
- case None => genPass() // redundant default case
- } getOrElse true
- }
-
- /** 1. Creates log file and output directory.
- * 2. Runs script function, providing log file and output directory as arguments.
- * 2b. or, just run the script without context and return a new context
- */
- def runInContext(body: => Boolean): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- val succeeded = body
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- /** Grouped files in group order, and lex order within each group. */
- def groupedFiles(sources: List[File]): List[List[File]] = (
- if (sources.tail.nonEmpty) {
- val grouped = sources groupBy (_.group)
- grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName))
- }
- else List(sources)
- )
-
- /** Source files for the given test file. */
- def sources(file: File): List[File] = (
- if (file.isDirectory)
- file.listFiles.toList filter (_.isJavaOrScala)
- else
- List(file)
- )
-
- def newCompiler = new DirectCompiler(fileManager)
-
- def attemptCompile(sources: List[File]): TestState = {
- val state = newCompiler.compile(this, flagsForCompilation(sources), sources)
- if (!state.isOk)
- _transcript append ("\n" + file2String(logFile))
-
- state
- }
-
- // snort or scarf all the contributing flags files
- def flagsForCompilation(sources: List[File]): List[String] = {
- def argsplitter(s: String) = words(s) filter (_.nonEmpty)
- val perTest = argsplitter(flagsFile.fileContents)
- val perGroup = if (testFile.isDirectory) {
- sources flatMap { f => SFile(Path(f) changeExtension "flags").safeSlurp map argsplitter getOrElse Nil }
- } else Nil
- perTest ++ perGroup
- }
-
- def toolArgs(tool: String, split: Boolean = true): List[String] = {
- def argsplitter(s: String) = if (split) words(s) filter (_.nonEmpty) else List(s)
- def argsFor(f: File): List[String] = {
- import scala.util.matching.Regex
- val p = new Regex(s"(?:.*\\s)?${tool}:(?:\\s*)(.*)?", "args")
- val max = 10
- val src = Path(f).toFile.chars(codec)
- val args = try {
- src.getLines take max collectFirst {
- case s if (p findFirstIn s).nonEmpty => for (m <- p findFirstMatchIn s) yield m group "args"
- }
- } finally src.close()
- args.flatten map argsplitter getOrElse Nil
- }
- sources(testFile) flatMap argsFor
- }
-
- abstract class CompileRound {
- def fs: List[File]
- def result: TestState
- def description: String
-
- def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " "
- def isOk = result.isOk
- def mkScalacString(): String = s"""scalac $fsString"""
- override def toString = description + ( if (result.isOk) "" else "\n" + result.status )
- }
- case class OnlyJava(fs: List[File]) extends CompileRound {
- def description = s"""javac $fsString"""
- lazy val result = { pushTranscript(description) ; javac(fs) }
- }
- case class OnlyScala(fs: List[File]) extends CompileRound {
- def description = mkScalacString()
- lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
- }
- case class ScalaAndJava(fs: List[File]) extends CompileRound {
- def description = mkScalacString()
- lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
- }
-
- def compilationRounds(file: File): List[CompileRound] = (
- (groupedFiles(sources(file)) map mixedCompileGroup).flatten
- )
- def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = {
- val (scalaFiles, javaFiles) = allFiles partition (_.isScala)
- val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty
- val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles))
- val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles))
- val round3 = if (!isMixed) None else Some(OnlyScala(scalaFiles))
-
- List(round1, round2, round3).flatten
- }
-
- def runNegTest() = runInContext {
- val rounds = compilationRounds(testFile)
-
- // failing means Does Not Compile
- val failing = rounds find (x => nextTestActionExpectTrue("compilation failed", x.isOk) == false)
-
- // which means passing if it checks and didn't crash the compiler
- // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
- def checked(r: CompileRound) = r.result match {
- case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => false
- case _ => diffIsOk
- }
-
- failing map (checked) getOrElse nextTestActionFailing("expected compilation failure")
- }
-
- def runTestCommon(andAlso: => Boolean): (Boolean, LogContext) = runInContext {
- compilationRounds(testFile).forall(x => nextTestActionExpectTrue("compilation failed", x.isOk)) && andAlso
- }
-
- // Apache Ant 1.6 or newer
- def ant(args: Seq[String], output: File): Boolean = {
- val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
- val antLibDir = Directory(antDir / "lib")
- val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
- val antOptions =
- if (NestUI._verbose) List("-verbose", "-noinput")
- else List("-noinput")
- val cmd = javaCmd +: (
- JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- antLauncherPath,
- "org.apache.tools.ant.launch.Launcher"
- ) ++ antOptions ++ args
- )
-
- runCommand(cmd, output)
- }
-
- def runAntTest(): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
-
- val succeeded = try {
- val binary = "-Dbinary="+(
- if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
- else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
- else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
- else "installed"
- )
- val args = Array(binary, "-logfile", logFile.getPath, "-file", testFile.getPath)
- NestUI.verbose("ant "+args.mkString(" "))
-
- pushTranscript(s"ant ${args.mkString(" ")}")
- nextTestActionExpectTrue("ant failed", ant(args, logFile)) && diffIsOk
- }
- catch { // *catch-all*
- case e: Exception =>
- NestUI.warning("caught "+e)
- false
- }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def extraClasspath = kind match {
- case "specialized" => PathSettings.srcSpecLib.toString
- case _ => ""
- }
- def extraJavaOptions = kind match {
- case "instrumented" => "-javaagent:"+PathSettings.instrumentationAgentLib
- case _ => ""
- }
-
- def runScalacheckTest() = runTestCommon {
- NestUI verbose f"compilation of $testFile succeeded%n"
-
- // this classloader is test specific: its parent contains library classes and others
- val loader = {
- import PathSettings.scalaCheck
- val locations = List(outDir, scalaCheck.jfile) map (_.getAbsoluteFile.toURI.toURL)
- ScalaClassLoader.fromURLs(locations, getClass.getClassLoader)
- }
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
- def runInFramework(): Boolean = {
- import org.scalatools.testing._
- val f: Framework = loader.instantiate[Framework]("org.scalacheck.ScalaCheckFramework")
- val logger = new Logger {
- def ansiCodesSupported = false //params.env.isSet("colors")
- def error(msg: String) = logWriter println msg
- def warn(msg: String) = logWriter println msg
- def info(msg: String) = logWriter println msg
- def debug(msg: String) = logWriter println msg
- def trace(t: Throwable) = t printStackTrace logWriter
- }
- var bad = 0
- val handler = new EventHandler {
- // testName, description, result, error
- // Result = Success, Failure, Error, Skipped
- def handle(event: Event): Unit = event.result match {
- case Result.Success =>
- //case Result.Skipped => // an exhausted test is skipped, therefore bad
- case _ => bad += 1
- }
- }
- val loggers = Array(logger)
- val r = f.testRunner(loader, loggers).asInstanceOf[Runner2] // why?
- val claas = "Test"
- val fingerprint = f.tests collectFirst { case x: SubclassFingerprint if x.isModule => x }
- val args = toolArgs("scalacheck")
- vlog(s"Run $testFile with args $args")
- // set the context class loader for scaladoc/scalacheck tests (FIX ME)
- ScalaClassLoader(testRunParams.scalaCheckParentClassLoader).asContext {
- r.run(claas, fingerprint.get, handler, args.toArray) // synchronous?
- }
- val ok = (bad == 0)
- if (!ok) _transcript append logFile.fileContents
- ok
- }
- try nextTestActionExpectTrue("ScalaCheck test failed", runInFramework()) finally logWriter.close()
- }
-
- def runResidentTest() = {
- // simulate resident compiler loop
- val prompt = "\nnsc> "
- val (swr, wr) = newTestWriters()
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = parentFile
- val resFile = new File(dir, fileBase + ".res")
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getAbsoluteFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argList = List(
- "-d", outDir.getAbsoluteFile.getPath,
- "-Xresident",
- "-sourcepath", sourcepath)
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut, true)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
- // create compiler
- val settings = new Settings(workerError)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings)
- object compiler extends Global(command.settings, reporter)
-
- def resCompile(line: String): Boolean = {
- // NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- // NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(workerError)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett)
- // "scalac " + command.files.mkString(" ")
- pushTranscript("scalac " + command.files.mkString(" "))
- nextTestActionExpectTrue(
- "compilation failed",
- command.ok && {
- (new compiler.Run) compile command.files
- !reporter.hasErrors
- }
- )
- }
- def loop(): Boolean = {
- logWriter.print(prompt)
- resReader.readLine() match {
- case null | "" => logWriter.close() ; true
- case line => resCompile(line) && loop()
- }
- }
- // res/t687.res depends on ignoring its compilation failure
- // and just looking at the diff, so I made them all do that
- // because this is long enough.
- if (!Output.withRedirected(logWriter)(try loop() finally resReader.close()))
- setLastState(genPass())
-
- (diffIsOk, LogContext(logFile, swr, wr))
- }
-
- def run(): TestState = {
- // javac runner, for one, would merely append to an existing log file, so just delete it before we start
- logFile.delete()
-
- if (kind == "neg" || (kind endsWith "-neg")) runNegTest()
- else kind match {
- case "pos" => runTestCommon(true)
- case "ant" => runAntTest()
- case "scalacheck" => runScalacheckTest()
- case "res" => runResidentTest()
- case "scalap" => runScalapTest()
- case "script" => runScriptTest()
- case _ => runTestCommon(execTest(outDir, logFile) && diffIsOk)
- }
-
- lastState
- }
-
- def runScalapTest() = runTestCommon {
- val isPackageObject = testFile.getName startsWith "package"
- val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package")
- val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader)
- val byteCode = ByteCode forClass (loader loadClass className)
- val result = decompileScala(byteCode.bytes, isPackageObject)
-
- logFile writeAll result
- diffIsOk
- }
- def runScriptTest() = {
- import scala.sys.process._
- val (swr, wr) = newTestWriters()
-
- val args = file2String(testFile changeExtension "args")
- val cmdFile = if (isWin) testFile changeExtension "bat" else testFile
- val succeeded = (((cmdFile + " " + args) #> logFile !) == 0) && diffIsOk
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def cleanup() {
- if (lastState.isOk)
- logFile.delete()
- if (!isPartestDebug)
- Directory(outDir).deleteRecursively()
- }
-}
-
-case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
-
-/** Extended by Ant- and ConsoleRunner for running a set of tests. */
-trait DirectRunner {
- def fileManager: FileManager
-
- import PartestDefaults.{ numThreads, waitTime }
-
- setUncaughtHandler
-
- def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = {
-
- NestUI.resetTestNumber(kindFiles.size)
-
- // this special class loader is for the benefit of scaladoc tests, which need a class path
- import PathSettings.{ testInterface, scalaCheck }
- val allUrls = scalaCheck.toURL :: testInterface.toURL :: fileManager.latestUrls
- val parentClassLoader = ScalaClassLoader fromURLs allUrls
- // add scalacheck.jar to a special classloader, but use our loader as parent with test-interface
- //val parentClassLoader = ScalaClassLoader fromURLs (List(scalaCheck.toURL), getClass().getClassLoader)
- val pool = Executors newFixedThreadPool numThreads
- val manager = new RunnerManager(kind, fileManager, TestRunParams(parentClassLoader))
- val futures = kindFiles map (f => pool submit callable(manager runTest f.getAbsoluteFile))
-
- pool.shutdown()
- Try (pool.awaitTermination(waitTime) {
- throw TimeoutException(waitTime)
- }) match {
- case Success(_) => futures map (_.get)
- case Failure(e) =>
- e match {
- case TimeoutException(d) =>
- NestUI warning "Thread pool timeout elapsed before all tests were complete!"
- case ie: InterruptedException =>
- NestUI warning "Thread pool was interrupted"
- ie.printStackTrace()
- }
- pool.shutdownNow() // little point in continuing
- // try to get as many completions as possible, in case someone cares
- val results = for (f <- futures) yield {
- try {
- Some(f.get(0, NANOSECONDS))
- } catch {
- case _: Throwable => None
- }
- }
- results.flatten
- }
- }
-}
-
-case class TimeoutException(duration: Duration) extends RuntimeException
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
- def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
- require (file != null)
- new LogContext(file, Some((swr, wr)))
- }
- def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-object Output {
- object outRedirect extends Redirecter(out)
- object errRedirect extends Redirecter(err)
-
- System.setOut(outRedirect)
- System.setErr(errRedirect)
-
- import scala.util.DynamicVariable
- private def out = java.lang.System.out
- private def err = java.lang.System.err
- private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
- class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
- def write(b: Int) = withStream(_ write b)
-
- private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
- override def write(b: Array[Byte]) = withStream(_ write b)
- override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
- override def flush = withStream(_.flush)
- override def close = withStream(_.close)
- })
-
- // this supports thread-safe nested output redirects
- def withRedirected[T](newstream: PrintStream)(func: => T): T = {
- // note down old redirect destination
- // this may be None in which case outRedirect and errRedirect print to stdout and stderr
- val saved = redirVar.value
- // set new redirecter
- // this one will redirect both out and err to newstream
- redirVar.value = Some(newstream)
-
- try func
- finally {
- newstream.flush()
- redirVar.value = saved
- }
- }
-}
-
-/** Use a Runner to run a test. */
-class RunnerManager(kind: String, fileManager: FileManager, params: TestRunParams) {
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
-
- def runTest(testFile: File): TestState = {
- val runner = new Runner(testFile, fileManager, params)
-
- // when option "--failed" is provided execute test only if log
- // is present (which means it failed before)
- if (fileManager.failed && !runner.logFile.canRead)
- runner.genPass()
- else {
- val (state, _) =
- try timed(runner.run())
- catch {
- case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
- }
- NestUI.reportTest(state)
- runner.cleanup()
- state
- }
- }
-}
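Note: the `#partest` conditional-block format handled by filteredCheck above is easiest to see with a concrete check file and a stripped-down filter; filterCheck and the holds predicate below are illustrative, with holds receiving the raw flag expression (including any leading "!"):

  // a check file might read:
  //   common line
  //   #partest java6
  //   kept only when the java6 flag holds
  //   #partest !java6
  //   kept only otherwise
  def filterCheck(lines: Seq[String], holds: String => Boolean): Seq[String] = {
    var on = true
    lines flatMap { line =>
      if (line startsWith "#partest") { on = holds(line stripPrefix "#partest"); Nil }
      else if (on) List(line) else Nil
    }
  }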
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
deleted file mode 100644
index 1cf3aa858f..0000000000
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-import scala.util.Properties.setProp
-import scala.collection.JavaConverters._
-
-object SBTRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): java.util.List[TestState] = {
- def failedOnlyIfRequired(files:List[File]):List[File]={
- if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
- }
- runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind).asJava
- }
-
- case class CommandLineOptions(classpath: Option[String] = None,
- tests: Map[String, Array[File]] = Map(),
- scalacOptions: Seq[String] = Seq(),
- justFailedTests: Boolean = false)
-
- def mainReflect(args: Array[String]): java.util.List[TestState] = {
- setProp("partest.debug", "true")
-
- val Argument = new scala.util.matching.Regex("-(.*)")
- def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
- case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
- case Seq("-cp", cp, rest @ _*) => parseArgs(rest, data.copy(classpath=Some(cp)))
- case Seq("-scalacoption", opt, rest @ _*) => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
- case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
- case Seq() => data
- case x => sys.error("Unknown command line options: " + x)
- }
- val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS = config.scalacOptions
- fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
-
- def findClasspath(jar: String, name: String): Option[String] = {
- val optJar = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+jar+".*\\.jar"))).headOption
- val optClassDir = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+name+File.separator+"classes"))).headOption
- optJar orElse optClassDir
- }
- // Find scala library jar file...
- fileManager.LATEST_LIB = findClasspath("scala-library", "scala-library") getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_REFLECT = findClasspath("scala-reflect", "scala-reflect") getOrElse sys.error("No scala-reflect found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
- fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
-
- // TODO - Do something useful here!!!
- fileManager.JAVAC_CMD = "javac"
- fileManager.failed = config.justFailedTests
- // TODO - Make this a flag?
- //fileManager.updateCheck = true
- // Now run and report...
- val runs = config.tests.filterNot(_._2.isEmpty)
- val result = runs.toList flatMap { case (kind, files) => reflectiveRunTestsForFiles(files, kind).asScala }
-
- result.asJava
- }
-
- def main(args: Array[String]): Unit = {
- val failures = mainReflect(args).asScala collect { case s if !s.isOk => s.longStatus }
- // Re-list all failures so we can go figure out what went wrong.
- failures foreach System.err.println
- if(!failures.isEmpty) sys.exit(1)
- }
-}
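Note: mainReflect's argument handling is a plain recursive match over the argument Seq. The sketch below (Opts and parse are illustrative stand-ins for CommandLineOptions and parseArgs) shows the shape on two of the options:

  case class Opts(classpath: Option[String] = None, failedOnly: Boolean = false)

  def parse(args: Seq[String], acc: Opts = Opts()): Opts = args match {
    case Seq("--failed", rest @ _*) => parse(rest, acc.copy(failedOnly = true))
    case Seq("-cp", cp, rest @ _*)  => parse(rest, acc.copy(classpath = Some(cp)))
    case Seq()                      => acc
    case other                      => sys.error("Unknown options: " + other)
  }

  assert(parse(Seq("-cp", "lib/scala-library.jar", "--failed"))
           == Opts(Some("lib/scala-library.jar"), failedOnly = true))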
diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala
deleted file mode 100644
index dc155b1787..0000000000
--- a/src/partest/scala/tools/partest/nest/StreamCapture.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-package scala.tools.partest
-package nest
-
-import java.io.{ Console => _, _ }
-
-object StreamCapture {
- case class Captured[T](stdout: String, stderr: String, result: T) {
- override def toString = s"""
- |result: $result
- |[stdout]
- |$stdout
- |[stderr]
- |$stderr""".stripMargin.trim
- }
-
- private def mkStream = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- val ostream = new PrintStream(new OutputStream { def write(b: Int): Unit = wr write b }, true) // autoFlush = true
-
- (ostream, () => { ostream.close() ; swr.toString })
- }
-
- def savingSystem[T](body: => T): T = {
- val savedOut = System.out
- val savedErr = System.err
- try body
- finally {
- System setErr savedErr
- System setOut savedOut
- }
- }
-
- def apply[T](body: => T): Captured[T] = {
- val (outstream, stdoutFn) = mkStream
- val (errstream, stderrFn) = mkStream
-
- val result = savingSystem {
- System setOut outstream
- System setErr errstream
- Console.withOut(outstream) {
- Console.withErr(errstream) {
- body
- }
- }
- }
- Captured(stdoutFn(), stderrFn(), result)
- }
-}
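
The removed StreamCapture swaps System.out/System.err and the Console streams around a thunk and restores them afterwards. A minimal sketch of the same idea for stdout only (a simplified approximation, not the partest helper itself):

    import java.io.{ ByteArrayOutputStream, PrintStream }

    object CaptureSketch {
      // Run `body`, returning its result together with everything it printed to stdout.
      def capturedOut[T](body: => T): (T, String) = {
        val buf   = new ByteArrayOutputStream()
        val ps    = new PrintStream(buf, true, "UTF-8")
        val saved = System.out
        System.setOut(ps)                        // redirect java.lang.System.out
        try {
          val result = Console.withOut(ps)(body) // redirect scala.Console.out for the same scope
          ps.flush()
          (result, buf.toString("UTF-8"))
        }
        finally System.setOut(saved)             // always restore the original stream
      }

      def main(args: Array[String]): Unit = {
        val (r, out) = capturedOut { println("hello"); 42 }
        println(s"result=$r captured=${out.trim}")
      }
    }
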
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
deleted file mode 100644
index 77400b1b9c..0000000000
--- a/src/partest/scala/tools/partest/package.scala
+++ /dev/null
@@ -1,241 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- */
-
-package scala.tools
-
-import java.util.concurrent.{ Callable, ExecutorService }
-import scala.concurrent.duration.Duration
-import scala.sys.process.javaVmArguments
-import scala.tools.partest.nest.NestUI
-import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
-
-package object partest {
- type File = java.io.File
- type SFile = scala.reflect.io.File
- type Directory = scala.reflect.io.Directory
- type Path = scala.reflect.io.Path
- type PathResolver = scala.tools.util.PathResolver
- type ClassPath[T] = scala.tools.nsc.util.ClassPath[T]
- type StringWriter = java.io.StringWriter
-
- val SFile = scala.reflect.io.File
- val Directory = scala.reflect.io.Directory
- val Path = scala.reflect.io.Path
- val PathResolver = scala.tools.util.PathResolver
- val ClassPath = scala.tools.nsc.util.ClassPath
-
- val space = "\u0020"
- val EOL = scala.compat.Platform.EOL
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: String*): String = oempty(xs: _*) mkString space
- def nljoin(xs: String*): String = oempty(xs: _*) mkString EOL
-
- implicit val codec = scala.io.Codec.UTF8
-
- def setUncaughtHandler() = {
- Thread.setDefaultUncaughtExceptionHandler(
- new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, t: Throwable) {
- val t1 = Exceptional unwrap t
- System.err.println(s"Uncaught exception on thread $thread: $t1")
- t1.printStackTrace()
- }
- }
- )
- }
-
- /** Sources have a numerical group, specified by name_7 and so on. */
- private val GroupPattern = """.*_(\d+)""".r
-
- implicit class FileOps(val f: File) {
- private def sf = SFile(f)
-
- def testIdent = {
- f.toString split """[/\\]+""" takeRight 2 mkString "/" // e.g. pos/t1234
- }
-
- def mapInPlace(mapFn: String => String)(filterFn: String => Boolean = _ => true): Unit =
- writeAll(fileLines filter filterFn map (x => mapFn(x) + EOL): _*)
-
- def appendAll(strings: String*): Unit = sf.appendAll(strings: _*)
- def writeAll(strings: String*): Unit = sf.writeAll(strings: _*)
- def absolutePathSegments: List[String] = f.getAbsolutePath split """[/\\]+""" toList
-
- def isJava = f.isFile && (sf hasExtension "java")
- def isScala = f.isFile && (sf hasExtension "scala")
- def isJavaOrScala = isJava || isScala
-
- def extension = sf.extension
- def hasExtension(ext: String) = sf hasExtension ext
- def changeExtension(ext: String): File = (sf changeExtension ext).jfile
-
- /** The group number for this source file, or -1 for no group. */
- def group: Int =
- sf.stripExtension match {
- case GroupPattern(g) if g.toInt >= 0 => g.toInt
- case _ => -1
- }
-
- def fileContents: String = try sf.slurp() catch { case _: java.io.FileNotFoundException => "" }
- def fileLines: List[String] = augmentString(fileContents).lines.toList
- }
-
- implicit class PathOps(p: Path) extends FileOps(p.jfile) { }
-
- implicit class Copier(val f: SFile) extends AnyVal {
- def copyTo(dest: Path): Unit = dest.toFile writeAll f.slurp(scala.io.Codec.UTF8)
- }
-
- implicit class LoaderOps(val loader: ClassLoader) extends AnyVal {
- import scala.util.control.Exception.catching
- /** Like ScalaClassLoader.create for the case where the result type is
- * available to the current class loader, implying that the current
- * loader is a parent of `loader`.
- */
- def instantiate[A >: Null](name: String): A = (
- catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
- (loader loadClass name).newInstance.asInstanceOf[A] orNull
- )
- }
-
- implicit class ExecutorOps(val executor: ExecutorService) {
- def awaitTermination[A](wait: Duration)(failing: => A = ()): Option[A] = (
- if (executor awaitTermination (wait.length, wait.unit)) None
- else Some(failing)
- )
- }
-
- implicit def temporaryPath2File(x: Path): File = x.jfile
- implicit def stringPathToJavaFile(path: String): File = new File(path)
-
- implicit lazy val postfixOps = scala.language.postfixOps
- implicit lazy val implicitConversions = scala.language.implicitConversions
-
- def fileSeparator = java.io.File.separator
- def pathSeparator = java.io.File.pathSeparator
-
- def pathToTestIdent(path: Path) = path.jfile.testIdent
-
- def canonicalizeSlashes(line: String) = line.replaceAll("""[/\\]+""", "/")
-
- def words(s: String): List[String] = (s.trim split "\\s+").toList
-
- def timed[T](body: => T): (T, Long) = {
- val t1 = System.currentTimeMillis
- val result = body
- val t2 = System.currentTimeMillis
-
- (result, t2 - t1)
- }
-
- def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
-
- def file2String(f: File): String = f.fileContents
-
- def basename(name: String): String = Path(name).stripExtension
-
- /** In order to allow for spaces in flags/options, this
- * parses .flags, .javaopts, javacopts etc files as follows:
- * If it is exactly one line, it is split (naively) on spaces.
- * If it contains more than one line, each line is its own
- * token, spaces and all.
- */
- def readOptionsFile(file: File): List[String] = {
- file.fileLines match {
- case x :: Nil => words(x)
- case xs => xs map (_.trim)
- }
- }
-
- def findProgram(name: String): Option[File] = {
- val pathDirs = sys.env("PATH") match {
- case null => List("/usr/local/bin", "/usr/bin", "/bin")
- case path => path split "[:;]" filterNot (_ == "") toList
- }
- pathDirs.iterator map (d => new File(d, name)) find (_.canExecute)
- }
-
- def now = (new java.util.Date).toString
- def elapsedString(millis: Long): String = {
- val elapsedSecs = millis/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
- }
-
- def vmArgString = javaVmArguments.mkString(
- "Java VM started with arguments: '",
- " ",
- "'"
- )
-
- def allPropertiesString = {
- import scala.collection.JavaConversions._
- System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
- }
-
- def showAllJVMInfo() {
- vlog(vmArgString)
- vlog(allPropertiesString)
- }
-
- import scala.language.experimental.macros
-
- /**
- * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
- * {{{
- * trace> "".isEmpty
- * res: Boolean = true
- *
- * }}}
- *
- * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
- * test code in a string.
- */
- def trace[A](a: A) = macro traceImpl[A]
-
- import scala.reflect.macros.Context
- def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
- import c.universe._
- import definitions._
-
- // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
- // because this impairs reflection refactorings
- //
- // val exprCode = c.literal(show(a.tree))
- // val exprType = c.literal(show(a.actualType))
- // reify {
- // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
- // a.splice
- // }
-
- c.Expr(Block(
- List(Apply(
- Select(Ident(PredefModule), TermName("println")),
- List(Apply(
- Select(Apply(
- Select(Ident(ScalaPackage), TermName("StringContext")),
- List(
- Literal(Constant("trace> ")),
- Literal(Constant("\\nres: ")),
- Literal(Constant(" = ")),
- Literal(Constant("\\n")))),
- TermName("s")),
- List(
- Literal(Constant(show(a.tree))),
- Literal(Constant(show(a.actualType))),
- a.tree))))),
- a.tree))
- }
-
- def isPartestTerse = NestUI.isTerse
- def isPartestDebug = NestUI.isDebug
- def isPartestVerbose = NestUI.isVerbose
-
- def vlog(msg: => String) = if (isPartestVerbose) System.err.println(msg)
-}
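
Among the helpers deleted above, readOptionsFile applies the rule spelled out in its comment: a one-line file is split on whitespace, while a multi-line file yields one token per line. A standalone sketch of just that rule (operating on a List[String] instead of a File; blank lines are dropped for convenience):

    object OptionsFileSketch {
      def readOptions(lines: List[String]): List[String] = lines match {
        case single :: Nil => single.trim.split("\\s+").toList.filter(_.nonEmpty)
        case many          => many.map(_.trim).filter(_.nonEmpty)
      }

      def main(args: Array[String]): Unit = {
        println(readOptions(List("-deprecation -unchecked")))  // one line: split on spaces
        println(readOptions(List("-J-Xmx1g", "-Dkey=a b c")))  // each line kept whole, spaces and all
      }
    }
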
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
deleted file mode 100644
index b9394b50c9..0000000000
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.partest
-package utils
-
-/** Loads partest.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait {
- protected def propCategory = "partest"
- protected def pickJarBasedOn = classOf[nest.RunnerManager]
- override def isAvian = super.isAvian
-}
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 009d9dbfdb..5b6ff2325c 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -8,6 +8,7 @@ package reflect
package api
import scala.reflect.runtime.{universe => ru}
+import scala.annotation.compileTimeOnly
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
@@ -91,7 +92,7 @@ trait Exprs { self: Universe =>
* }}}
* because expr of type Expr[T] itself does not have a method foo.
*/
- // @compileTimeOnly("Cannot use splice outside reify")
+ @compileTimeOnly("splice must be enclosed within a reify {} block")
def splice: T
/**
@@ -108,7 +109,7 @@ trait Exprs { self: Universe =>
* object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
* }}}
*/
- // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
+ @compileTimeOnly("cannot use value except for signatures of macro implementations")
val value: T
override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
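
The hunk above switches the commented-out internal annotation for the public scala.annotation.compileTimeOnly that the rest of this change migrates to. A sketch of how the annotation behaves on an ordinary user-defined member (the method and message here are illustrative, not from the Scala library):

    import scala.annotation.compileTimeOnly

    object Placeholders {
      // Any reference to `later` that survives type checking (i.e. is not rewritten
      // away by some enclosing macro) is reported with this message.
      @compileTimeOnly("`later` may only be used inside a `schedule { ... }` block")
      def later[T](body: => T): T = sys.error("stub")
    }

    // Placeholders.later { println("hi") }   // uncommenting this line fails at compile time
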
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6b7aa2dddf..92f2a64ce9 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -227,10 +227,7 @@ trait Definitions extends api.StandardDefinitions {
scope
}
/** Is this symbol a member of Object or Any? */
- def isUniversalMember(sym: Symbol) = (
- (sym ne NoSymbol)
- && (ObjectClass isSubClass sym.owner)
- )
+ def isUniversalMember(sym: Symbol) = ObjectClass isSubClass sym.owner
/** Is this symbol unimportable? Unimportable symbols include:
* - constructors, because <init> is not a real name
@@ -253,6 +250,13 @@ trait Definitions extends api.StandardDefinitions {
|| tp =:= AnyRefTpe
)
+ def hasMultipleNonImplicitParamLists(member: Symbol): Boolean = hasMultipleNonImplicitParamLists(member.info)
+ def hasMultipleNonImplicitParamLists(info: Type): Boolean = info match {
+ case PolyType(_, restpe) => hasMultipleNonImplicitParamLists(restpe)
+ case MethodType(_, MethodType(p :: _, _)) if !p.isImplicit => true
+ case _ => false
+ }
+
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
if (parents.head.typeSymbol == AnyClass) tpe
@@ -384,6 +388,7 @@ trait Definitions extends api.StandardDefinitions {
def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
+ def traversableDropMethod = getMemberMethod(ScalaRunTimeModule, nme.drop)
// classes with special meanings
lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
@@ -423,6 +428,15 @@ trait Definitions extends api.StandardDefinitions {
def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
def isVarArgTypes(formals: Seq[Type]) = formals.nonEmpty && isRepeatedParamType(formals.last)
+ def firstParamType(tpe: Type): Type = tpe.paramTypes match {
+ case p :: _ => p
+ case _ => NoType
+ }
+ def isImplicitParamss(paramss: List[List[Symbol]]) = paramss match {
+ case (p :: _) :: _ => p.isImplicit
+ case _ => false
+ }
+
def hasRepeatedParam(tp: Type): Boolean = tp match {
case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe)
case PolyType(_, restpe) => hasRepeatedParam(restpe)
@@ -430,7 +444,12 @@ trait Definitions extends api.StandardDefinitions {
}
// wrapping and unwrapping
- def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def dropByName(tp: Type): Type = elementExtract(ByNameParamClass, tp) orElse tp
+ def dropRepeated(tp: Type): Type = (
+ if (isJavaRepeatedParamType(tp)) elementExtract(JavaRepeatedParamClass, tp) orElse tp
+ else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp
+ else tp
+ )
def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse tp
def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp
def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp
@@ -659,21 +678,23 @@ trait Definitions extends api.StandardDefinitions {
def isExactProductType(tp: Type): Boolean = isProductNSymbol(tp.typeSymbol)
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
- def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
+ @deprecated("No longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match {
case Some(x) => tpe.baseType(x).typeArgs
case _ => Nil
}
- def dropNullaryMethod(tp: Type) = tp match {
- case NullaryMethodType(restpe) => restpe
- case _ => tp
- }
-
- def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ @deprecated("No longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
+ def getterMemberTypes(tpe: Type, getters: List[Symbol]): List[Type] =
+ getters map (m => dropNullaryMethod(tpe memberType m))
+
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
def abstractFunctionForFunctionType(tp: Type) = {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
@@ -696,6 +717,71 @@ trait Definitions extends api.StandardDefinitions {
def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
def seqType(arg: Type) = appliedType(SeqClass, arg)
+ // FYI the long clunky name is because it's really hard to put "get" into the
+ // name of a method without it sounding like the method "get"s something, whereas
+ // this method is about a type member which just happens to be named get.
+ def typeOfMemberNamedGet(tp: Type) = resultOfMatchingMethod(tp, nme.get)()
+ def typeOfMemberNamedHead(tp: Type) = resultOfMatchingMethod(tp, nme.head)()
+ def typeOfMemberNamedApply(tp: Type) = resultOfMatchingMethod(tp, nme.apply)(IntTpe)
+ def typeOfMemberNamedDrop(tp: Type) = resultOfMatchingMethod(tp, nme.drop)(IntTpe)
+ def typeOfMemberNamedGetOrSelf(tp: Type) = typeOfMemberNamedGet(tp) orElse tp
+ def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp))
+ def typesOfCaseAccessors(tp: Type) = getterMemberTypes(tp, tp.typeSymbol.caseFieldAccessors)
+
+ /** If this is a case class, the case field accessors (which may be an empty list.)
+ * Otherwise, if there are any product selectors, that list.
+ * Otherwise, a list containing only the type itself.
+ */
+ def typesOfSelectorsOrSelf(tp: Type): List[Type] = (
+ if (tp.typeSymbol.isCase)
+ typesOfCaseAccessors(tp)
+ else typesOfSelectors(tp) match {
+ case Nil => tp :: Nil
+ case tps => tps
+ }
+ )
+
+ /** If the given type has one or more product selectors, the type of the last one.
+ * Otherwise, the type itself.
+ */
+ def typeOfLastSelectorOrSelf(tp: Type) = typesOfSelectorsOrSelf(tp).last
+
+ def elementTypeOfLastSelectorOrSelf(tp: Type) = {
+ val last = typeOfLastSelectorOrSelf(tp)
+ ( typeOfMemberNamedHead(last)
+ orElse typeOfMemberNamedApply(last)
+ orElse elementType(ArrayClass, last)
+ )
+ }
+
+ /** Returns the method symbols for members _1, _2, ..., _N
+ * which exist in the given type.
+ */
+ def productSelectors(tpe: Type): List[Symbol] = {
+ def loop(n: Int): List[Symbol] = tpe member TermName("_" + n) match {
+ case NoSymbol => Nil
+ case m if m.paramss.nonEmpty => Nil
+ case m => m :: loop(n + 1)
+ }
+ loop(1)
+ }
+
+ /** If `tp` has a term member `name`, the first parameter list of which
+ * matches `paramTypes`, and which either has no further parameter
+ * lists or only an implicit one, then the result type of the matching
+ * method. Otherwise, NoType.
+ */
+ def resultOfMatchingMethod(tp: Type, name: TermName)(paramTypes: Type*): Type = {
+ def matchesParams(member: Symbol) = member.paramss match {
+ case Nil => paramTypes.isEmpty
+ case ps :: rest => (rest.isEmpty || isImplicitParamss(rest)) && (ps corresponds paramTypes)(_.tpe =:= _)
+ }
+ tp member name filter matchesParams match {
+ case NoSymbol => NoType
+ case member => (tp memberType member).finalResultType
+ }
+ }
+
def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg)
/** Can we tell by inspecting the symbol that it will never
@@ -904,7 +990,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
- lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.annotation.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
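
productSelectors above probes members named _1, _2, ... until one is missing or takes parameters, and resultOfMatchingMethod then asks for the result types of such members. The same probing can be illustrated with runtime reflection (a standalone approximation, not the compiler-internal Definitions code):

    import scala.reflect.runtime.universe._

    object SelectorsSketch {
      // Collect the parameterless members _1, _2, ... of `tpe`, stopping at the first gap.
      def productSelectors(tpe: Type): List[MethodSymbol] = {
        def loop(n: Int): List[MethodSymbol] = tpe.member(TermName("_" + n)) match {
          case NoSymbol                                         => Nil
          case m if m.isMethod && m.asMethod.paramLists.isEmpty => m.asMethod :: loop(n + 1)
          case _                                                => Nil
        }
        loop(1)
      }

      def main(args: Array[String]): Unit =
        println(productSelectors(typeOf[(Int, String, Boolean)]).map(_.name))  // List(_1, _2, _3)
    }
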
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 64713b8d41..7a2287664a 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -626,6 +626,7 @@ trait StdNames {
val clone_ : NameType = "clone"
val collection: NameType = "collection"
val conforms: NameType = "conforms"
+ val compare: NameType = "compare"
val copy: NameType = "copy"
val create: NameType = "create"
val currentMirror: NameType = "currentMirror"
@@ -657,6 +658,7 @@ trait StdNames {
val get: NameType = "get"
val hashCode_ : NameType = "hashCode"
val hash_ : NameType = "hash"
+ val head : NameType = "head"
val immutable: NameType = "immutable"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
@@ -725,6 +727,7 @@ trait StdNames {
val toArray: NameType = "toArray"
val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
+ val toSeq: NameType = "toSeq"
val TopScope: NameType = "TopScope"
val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index c340670635..a6f9dfc164 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -60,6 +60,7 @@ abstract class SymbolTable extends macros.Universe
def shouldLogAtThisPhase = false
def isPastTyper = false
+ protected def isDeveloper: Boolean = settings.debug
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
@@ -69,8 +70,12 @@ abstract class SymbolTable extends macros.Universe
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug) log(msg)
- def devWarning(msg: => String): Unit = if (settings.debug) Console.err.println(msg)
+ def devWarning(msg: => String): Unit = if (isDeveloper) Console.err.println(msg)
def throwableAsString(t: Throwable): String = "" + t
+ def throwableAsString(t: Throwable, maxFrames: Int): String = t.getStackTrace take maxFrames mkString "\n at "
+
+ @inline final def devWarningDumpStack(msg: => String, maxFrames: Int): Unit =
+ devWarning(msg + "\n" + throwableAsString(new Throwable, maxFrames))
/** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t))
@@ -111,6 +116,13 @@ abstract class SymbolTable extends macros.Universe
result
}
+ @inline
+ final private[scala] def debuglogResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
+ if (cond(result))
+ debuglog(msg + ": " + result)
+
+ result
+ }
// For too long have we suffered in order to sort NAMES.
// I'm pretty sure there's a reasonable default for that.
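
throwableAsString(t, maxFrames) and devWarningDumpStack above exist so that developer diagnostics can carry a truncated stack trace. A rough standalone equivalent (the gating flag is a plain parameter here, not the compiler's settings):

    object DevWarningSketch {
      // Render only the first `maxFrames` frames of a throwable.
      def throwableAsString(t: Throwable, maxFrames: Int): String =
        t.getStackTrace.take(maxFrames).mkString("\n  at ")

      // Print a warning plus a truncated stack capture of the current call site.
      def devWarningDumpStack(msg: => String, maxFrames: Int, isDeveloper: Boolean = true): Unit =
        if (isDeveloper)
          Console.err.println(msg + "\n  at " + throwableAsString(new Throwable, maxFrames))

      def main(args: Array[String]): Unit =
        devWarningDumpStack("NoSymbol.owner", maxFrames = 3)
    }
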
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index e41038cafc..a8efa938c8 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -153,7 +153,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def asNameType(n: Name): NameType
- private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ // The null check is for NoSymbol, which can't pass a reference to itself to the constructor and also
+ // can't call owner_= due to an assertion it contains.
+ private[this] var _rawowner = if (initOwner eq null) this else initOwner
private[this] var _rawflags: Long = _
def rawowner = _rawowner
@@ -610,7 +613,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
- final def isOverridableMember = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
+ final def isOverridableMember = !(isClass || isEffectivelyFinal) && safeOwner.isClass
/** Does this symbol denote a wrapper created by the repl? */
final def isInterpreterWrapper = (
@@ -999,13 +1002,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ owner attribute --------------------------------------------------------------
+ /** In general when seeking the owner of a symbol, one should call `owner`.
+ * The other possibilities include:
+ * - call `safeOwner` if it is expected that the target may be NoSymbol
+ * - call `assertOwner` if it is an unrecoverable error if the target is NoSymbol
+ *
+ * `owner` behaves like `safeOwner`, but logs NoSymbol.owner calls under -Xdev.
+ * `assertOwner` aborts compilation immediately if called on NoSymbol.
+ */
def owner: Symbol = {
if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
rawowner
}
-
- // Like owner, but NoSymbol.owner == NoSymbol instead of throwing an exception.
- final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+ final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+ final def assertOwner: Symbol = if (this eq NoSymbol) abort("no-symbol does not have an owner") else owner
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
@@ -1781,10 +1791,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
result
}
- @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
-
- final def toOption: Option[Symbol] = if (exists) Some(this) else None
-
// ------ cloneing -------------------------------------------------------------------
/** A clone of this symbol. */
@@ -2179,8 +2185,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* the recursive knot.
*/
private def canMatchInheritedSymbols = (
- (this ne NoSymbol)
- && owner.isClass
+ owner.isClass
&& !this.isClass
&& !this.isConstructor
)
@@ -2352,6 +2357,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
@inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
+ @inline final def fold[T](none: => T)(f: Symbol => T): T = if (this ne NoSymbol) f(this) else none
+ @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
// ------ toString -------------------------------------------------------------------
@@ -3340,7 +3350,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
override def associatedFile = NoAbstractFile
- override def ownerChain: List[Symbol] = List()
+ override def owner: Symbol = {
+ devWarningDumpStack("NoSymbol.owner", 15)
+ this
+ }
+ override def ownerChain: List[Symbol] = Nil
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
override def reset(completer: Type): this.type = this
@@ -3350,9 +3364,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
-
- override def owner: Symbol =
- abort("no-symbol does not have an owner")
}
protected def makeNoSymbol: NoSymbol = new NoSymbol
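
The fold/map/toOption/orElse combinators added above treat NoSymbol the way Option treats None, so symbol-returning code can be chained without explicit null-object checks. A self-contained sketch of that style on a toy hierarchy (hypothetical Sym/NoSym types, not the compiler's Symbol):

    object SymOptionSketch {
      sealed trait Sym {
        def exists: Boolean = this ne NoSym
        final def orElse(alt: => Sym): Sym          = if (exists) this else alt
        final def map(f: Sym => Sym): Sym           = if (exists) f(this) else this
        final def fold[T](none: => T)(f: Sym => T)  = if (exists) f(this) else none
        final def toOption: Option[Sym]             = if (exists) Some(this) else None
      }
      final case class Named(name: String) extends Sym
      case object NoSym extends Sym

      def main(args: Array[String]): Unit = {
        val s: Sym = Named("Foo")
        println(s.fold("<none>")(_.toString))            // Named(Foo)
        println((NoSym: Sym) orElse Named("fallback"))   // Named(fallback)
      }
    }
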
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 5c92512193..34fe0afb1a 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -488,7 +488,7 @@ abstract class TreeInfo {
}
object WildcardStarArg {
- def unapply(tree: Typed): Option[Tree] = tree match {
+ def unapply(tree: Tree): Option[Tree] = tree match {
case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => Some(expr)
case _ => None
}
@@ -628,11 +628,12 @@ abstract class TreeInfo {
* case Extractor(a @ (b, c)) => 2
* }}}
*/
- def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ def effectivePatternArity(args: List[Tree]): Int = flattenedPatternArgs(args).length
+
+ def flattenedPatternArgs(args: List[Tree]): List[Tree] = args map unbind match {
case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
case xs => xs
- }).length
-
+ }
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
// tailcalls, cps,... use this flag combination to detect translated matches
@@ -772,6 +773,17 @@ abstract class TreeInfo {
unapply(dissectApplied(tree))
}
+ /** Locates the synthetic Apply node corresponding to an extractor's call to
+ * unapply (unwrapping nested Applies) and returns the fun part of that Apply.
+ */
+ object Unapplied {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Apply(fun, Ident(nme.SELECTOR_DUMMY) :: Nil) => Some(fun)
+ case Apply(fun, _) => unapply(fun)
+ case _ => None
+ }
+ }
+
/** Is this file the body of a compilation unit which should not
* have Predef imported? This is the case iff the first import in the
* unit explicitly refers to Predef.
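
The Unapplied extractor added above recursively unwraps nested Apply nodes until it reaches the synthetic call to unapply. The general trick, an extractor object whose unapply calls itself, is shown below on a small toy AST (not the compiler's Tree types):

    object UnwrapSketch {
      sealed trait Expr
      final case class Call(fun: Expr, args: List[Expr]) extends Expr
      final case class Name(s: String) extends Expr

      // Peel nested Call nodes until the underlying callee is reached.
      object Callee {
        def unapply(e: Expr): Option[Expr] = e match {
          case Call(fun, _) => unapply(fun) orElse Some(fun)
          case _            => None
        }
      }

      def main(args: Array[String]): Unit =
        Call(Call(Name("f"), List(Name("x"))), List(Name("y"))) match {
          case Callee(callee) => println(callee)  // Name(f)
          case _              => println("not a call")
        }
    }
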
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 02bee5e369..fab1f45358 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -185,8 +185,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
def replace(from: Tree, to: Tree): Tree =
new TreeReplacer(from, to, positionAware = false) transform this
- def hasSymbolWhich(f: Symbol => Boolean) =
- (symbol ne null) && (symbol ne NoSymbol) && f(symbol)
+ def hasExistingSymbol = (symbol ne null) && (symbol ne NoSymbol)
+ def hasSymbolWhich(f: Symbol => Boolean) = hasExistingSymbol && f(symbol)
def isErroneous = (tpe ne null) && tpe.isErroneous
def isTyped = (tpe ne null) && !tpe.isErroneous
@@ -309,6 +309,14 @@ trait Trees extends api.Trees { self: SymbolTable =>
def rhs: Tree
}
+ object ValOrDefDef {
+ def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
+ case ValDef(mods, name, tpt, rhs) => Some((mods, name, tpt, rhs))
+ case DefDef(mods, name, _, _, tpt, rhs) => Some((mods, name, tpt, rhs))
+ case _ => None
+ }
+ }
+
case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi
object ValDef extends ValDefExtractor
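
ValOrDefDef above gives ValDef and DefDef a common extractor shape so callers can match on the fields the two forms share. The same technique on a toy ADT (illustrative types only):

    object CommonShapeSketch {
      sealed trait Defn
      final case class ValD(name: String, rhs: String)                       extends Defn
      final case class DefD(name: String, params: List[String], rhs: String) extends Defn

      // One extractor exposing the fields both definition forms have in common.
      object ValOrDef {
        def unapply(d: Defn): Option[(String, String)] = d match {
          case ValD(name, rhs)    => Some((name, rhs))
          case DefD(name, _, rhs) => Some((name, rhs))
        }
      }

      def main(args: Array[String]): Unit =
        List(ValD("x", "1"), DefD("f", List("a"), "a + 1")) foreach {
          case ValOrDef(name, rhs) => println(s"$name = $rhs")
        }
    }
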
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 9c1342e68e..fd64d98ca2 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -36,7 +36,7 @@ trait TypeDebugging {
case ObjectClass => true
case _ => sym.hasPackageFlag
}
- def skipType(tpe: Type): Boolean = skipSym(tpe.typeSymbolDirect)
+ def skipType(tpe: Type): Boolean = (tpe eq null) || skipSym(tpe.typeSymbolDirect)
def skip(t: Tree): Boolean = t match {
case EmptyTree => true
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 71f46fedb7..b4ae384594 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -704,7 +704,7 @@ trait Types
case OverloadedType(_, alts) =>
OverloadedType(this, alts)
case tp =>
- tp.asSeenFrom(this, sym.owner)
+ if (sym eq NoSymbol) NoType else tp.asSeenFrom(this, sym.owner)
}
/** Substitute types `to` for occurrences of references to
@@ -3475,7 +3475,7 @@ trait Types
def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
if ((parents eq original.parents) && (decls eq original.decls)) original
else {
- val owner = if (original.typeSymbol == NoSymbol) NoSymbol else original.typeSymbol.owner
+ val owner = original.typeSymbol.owner
val result = refinedType(parents, owner)
val syms1 = decls.toList
for (sym <- syms1)
@@ -4015,9 +4015,12 @@ trait Types
def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
+ /** This appears to be equivalent to tp.isInstanceOf[SingletonType],
+ * except it excludes ConstantTypes.
+ */
def isSingleType(tp: Type) = tp match {
case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
- case _ => false
+ case _ => false
}
def isConstantType(tp: Type) = tp match {
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
deleted file mode 100644
index 2c9f909629..0000000000
--- a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package scala
-package reflect
-package internal
-package annotations
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * Later on, this annotation should be removed and implemented with domain-specific macros.
- * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
- * then it should itself be declared as a macro.
- *
- * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
- * report an error if `outer` is not detected. In principle, we could use this approach right now,
- * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
- * track of the stack of the trees being typechecked.
- *
- * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
- * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
- * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
- * domain-specific logic.
- *
- * @param message the error message to print during compilation if a reference remains
- * after type checking
- * @since 2.10.1
- */
-@getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala
new file mode 100644
index 0000000000..ef299a600c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/package.scala
@@ -0,0 +1,6 @@
+package scala.reflect.internal
+
+package object annotations {
+ @deprecated("Use scala.annotation.compileTimeOnly instead", "2.11.0")
+ type compileTimeOnly = scala.annotation.compileTimeOnly
+} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index bebc419c7c..0d9bbfa5e0 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -395,7 +395,7 @@ private[internal] trait TypeMaps {
s"Widened lone occurrence of $tp1 inside existential to $word bound"
}
if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
- logResult(msg)(repl)
+ debuglogResult(msg)(repl)
else
tp1
case _ =>
@@ -524,7 +524,7 @@ private[internal] trait TypeMaps {
private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
val TypeRef(_, lhsSym, lhsArgs) = lhs
val TypeRef(_, rhsSym, rhsArgs) = rhs
- require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+ require(lhsSym.owner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
// Find the type parameter position; we'll use the corresponding argument.
// Why are we checking by name rather than by equality? Because for
@@ -539,7 +539,7 @@ private[internal] trait TypeMaps {
else {
// It's easy to get here when working on hardcore type machinery (not to
// mention when not doing so, see above) so let's provide a standout error.
- def own_s(s: Symbol) = s.nameString + " in " + s.safeOwner.nameString
+ def own_s(s: Symbol) = s.nameString + " in " + s.owner.nameString
def explain =
sm"""| sought ${own_s(lhsSym)}
| classSym ${own_s(rhsSym)}
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index 97cc19952c..f61c1f3c50 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -92,7 +92,7 @@ trait TraceSymbolActivity {
while (ph != NoPhase && ph.name != "erasure") {
ph = ph.prev
}
- ph
+ if (ph eq NoPhase) phase else ph
}
private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body)
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index ae318697ec..9596a360a9 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -7,6 +7,8 @@ package scala
package tools.nsc
package interpreter
+import PartialFunction.cond
+
import scala.language.implicitConversions
import scala.collection.mutable
@@ -20,7 +22,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import scala.tools.util.PathResolver
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, stackTracePrefixString }
import scala.tools.nsc.util.Exceptional.unwrap
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
@@ -297,19 +299,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
def translatePath(path: String) = {
val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
- sym match {
- case NoSymbol => None
- case _ => Some(flatPath(sym))
- }
- }
- def translateEnclosingClass(n: String) = {
- def enclosingClass(s: Symbol): Symbol =
- if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner)
- enclosingClass(symbolOfTerm(n)) match {
- case NoSymbol => None
- case c => Some(flatPath(c))
- }
+ sym.toOption map flatPath
}
+ def translateEnclosingClass(n: String) = symbolOfTerm(n).enclClass.toOption map flatPath
private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
/** Overridden here to try translating a simple name to the generated
@@ -728,10 +720,18 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
throw t
val unwrapped = unwrap(t)
+
+ // Example input: $line3.$read$$iw$$iw$
+ val classNameRegex = (naming.lineRegex + ".*").r
+ def isWrapperInit(x: StackTraceElement) = cond(x.getClassName) {
+ case classNameRegex() if x.getMethodName == nme.CONSTRUCTOR.decoded => true
+ }
+ val stackTrace = util.stackTracePrefixString(unwrapped)(!isWrapperInit(_))
+
withLastExceptionLock[String]({
directBind[Throwable]("lastException", unwrapped)(StdReplTags.tagOfThrowable, classTag[Throwable])
- util.stackTraceString(unwrapped)
- }, util.stackTraceString(unwrapped))
+ stackTrace
+ }, stackTrace)
}
// TODO: split it out into a package object and a regular
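
The REPL change above keeps only the leading frames of the reported stack trace, cutting it off once the synthetic line-wrapper constructors appear, and uses PartialFunction.cond to express the frame test. A standalone sketch of that prefix-trimming idea (the cut-off predicate here is hypothetical, not the REPL's lineRegex):

    import PartialFunction.cond

    object StackTrimSketch {
      // Keep the exception header plus the leading frames for which `keep` holds.
      def stackTracePrefix(t: Throwable)(keep: StackTraceElement => Boolean): String =
        (t.toString +: t.getStackTrace.takeWhile(keep).map("  at " + _)).mkString("\n")

      def main(args: Array[String]): Unit = {
        val t = new RuntimeException("boom")
        // Hypothetical cut-off: stop once frames from a test harness package appear.
        def isHarnessFrame(e: StackTraceElement) = cond(e.getClassName) {
          case name if name startsWith "sbt." => true
        }
        println(stackTracePrefix(t)(e => !isHarnessFrame(e)))
      }
    }
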
diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index 085a7c6065..c1faf30385 100644
--- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -71,7 +71,7 @@ trait MemberHandlers {
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
- override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
+ override def definedSymbols = if (symbol.exists) symbol :: Nil else Nil
}
/** Class to handle one member among all the members included
diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 7f577b3a8b..cf38a2ae3a 100644
--- a/src/repl/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -40,7 +40,7 @@ trait Naming {
// $line3.$read$$iw$$iw$Bippy@4a6a00ca
private def noMeta(s: String) = "\\Q" + s + "\\E"
- private lazy val lineRegex = {
+ lazy val lineRegex = {
val sn = sessionNames
val members = List(sn.read, sn.eval, sn.print) map noMeta mkString ("(?:", "|", ")")
debugging("lineRegex")(noMeta(sn.line) + """\d+[./]""" + members + """[$.]""")
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
deleted file mode 100644
index db4163c8af..0000000000
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ /dev/null
@@ -1,447 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-
-sealed abstract class Arbitrary[T] {
- val arbitrary: Gen[T]
-}
-
-/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
- * <p>
- * ScalaCheck
- * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
- * out of functions with the `Prop.property` method, and when
- * the `Arbitrary.arbitrary` method is used. For example, the
- * following code requires that there exists an implicit
- * `Arbitrary[MyClass]` instance:
- * </p>
- *
- * {{{
- * val myProp = Prop.forAll { myClass: MyClass =>
- * ...
- * }
- *
- * val myGen = Arbitrary.arbitrary[MyClass]
- * }}}
- *
- * <p>
- * The required implicit definition could look like this:
- * </p>
- *
- * {{{
- * implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- * }}}
- *
- * <p>
- * The factory method `Arbitrary(...)` takes a generator of type
- * `Gen[T]` and returns an instance of `Arbitrary[T]`.
- * </p>
- *
- * <p>
- * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
- * instances for common types, for convenient use in your properties and
- * generators.
- * </p>
- */
-object Arbitrary {
-
- import Gen.{value, choose, sized, listOf, listOf1,
- frequency, oneOf, containerOf, resize}
- import util.StdRand
- import scala.collection.{immutable, mutable}
- import java.util.Date
-
- /** Creates an Arbitrary instance */
- def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
- lazy val arbitrary = g
- }
-
- /** Returns an arbitrary generator for the type T. */
- def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
-
- /**** Arbitrary instances for each AnyVal ****/
-
- /** Arbitrary AnyVal */
- implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
- arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
- arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
- arbitrary[Double]
- ))
-
- /** Arbitrary instance of Boolean */
- implicit lazy val arbBool: Arbitrary[Boolean] =
- Arbitrary(oneOf(true, false))
-
- /** Arbitrary instance of Int */
- implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
- Gen.chooseNum(Int.MinValue, Int.MaxValue)
- )
-
- /** Arbitrary instance of Long */
- implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
- Gen.chooseNum(Long.MinValue, Long.MaxValue)
- )
-
- /** Arbitrary instance of Float */
- implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
- Gen.chooseNum(
- Float.MinValue, Float.MaxValue
- // I find that including these by default is a little TOO testy.
- // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Double */
- implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
- Gen.chooseNum(
- Double.MinValue / 2, Double.MaxValue / 2
- // As above. Perhaps behind some option?
- // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity
- )
- )
-
- /** Arbitrary instance of Char */
- implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
- Gen.frequency(
- (0xD800-Char.MinValue, Gen.choose(Char.MinValue,0xD800-1)),
- (Char.MaxValue-0xDFFF, Gen.choose(0xDFFF+1,Char.MaxValue))
- )
- )
-
- /** Arbitrary instance of Byte */
- implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
- Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
- )
-
- /** Arbitrary instance of Short */
- implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
- Gen.chooseNum(Short.MinValue, Short.MaxValue)
- )
-
- /** Absolutely, totally, 100% arbitrarily chosen Unit. */
- implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(value(()))
-
- /**** Arbitrary instances of other common types ****/
-
- /** Arbitrary instance of String */
- implicit lazy val arbString: Arbitrary[String] =
- Arbitrary(arbitrary[List[Char]] map (_.mkString))
-
- /** Arbitrary instance of Date */
- implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
- l <- arbitrary[Long]
- d = new Date
- } yield new Date(d.getTime + l))
-
- /** Arbitrary instance of Throwable */
- implicit lazy val arbThrowable: Arbitrary[Throwable] =
- Arbitrary(value(new Exception))
-
- /** Arbitrary BigInt */
- implicit lazy val arbBigInt: Arbitrary[BigInt] = {
- def chooseBigInt: Gen[BigInt] = sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
- def chooseReallyBigInt = chooseBigInt.combine(choose(32, 128))((x, y) => Some(x.get << y.get))
-
- Arbitrary(
- frequency(
- (5, chooseBigInt),
- (10, chooseReallyBigInt),
- (1, BigInt(0)),
- (1, BigInt(1)),
- (1, BigInt(-1)),
- (1, BigInt(Int.MaxValue) + 1),
- (1, BigInt(Int.MinValue) - 1),
- (1, BigInt(Long.MaxValue)),
- (1, BigInt(Long.MinValue)),
- (1, BigInt(Long.MaxValue) + 1),
- (1, BigInt(Long.MinValue) - 1)
- )
- )
- }
-
- /** Arbitrary BigDecimal */
- implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
- import java.math.MathContext._
- val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
- val bdGen = for {
- x <- arbBigInt.arbitrary
- mc <- mcGen
- limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
- scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
- } yield {
- try {
- BigDecimal(x, scale, mc)
- } catch {
- case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
- }
- }
- Arbitrary(bdGen)
- }
-
- /** Arbitrary java.lang.Number */
- implicit lazy val arbNumber: Arbitrary[Number] = {
- val gen = Gen.oneOf(
- arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
- arbitrary[Float], arbitrary[Double]
- )
- Arbitrary(gen map (_.asInstanceOf[Number]))
- // XXX TODO - restore BigInt and BigDecimal
- // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
- }
-
- /** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] = {
- import Prop._
- val undecidedOrPassed = forAll { b: Boolean =>
- b ==> true
- }
- Arbitrary(frequency(
- (4, falsified),
- (4, passed),
- (3, proved),
- (3, undecidedOrPassed),
- (2, undecided),
- (1, exception(null))
- ))
- }
-
- /** Arbitrary instance of test params
- * @deprecated (in 1.10.0) Use `arbTestParameters` instead.
- */
- @deprecated("Use 'arbTestParameters' instead", "1.10.0")
- implicit lazy val arbTestParams: Arbitrary[Test.Params] =
- Arbitrary(for {
- minSuccTests <- choose(10,200)
- maxDiscTests <- choose(100,500)
- mnSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- mxSize <- choose(mnSize, mnSize + sizeDiff)
- ws <- choose(1,4)
- } yield Test.Params(
- minSuccessfulTests = minSuccTests,
- maxDiscardedTests = maxDiscTests,
- minSize = mnSize,
- maxSize = mxSize,
- workers = ws
- ))
-
- /** Arbitrary instance of test parameters */
- implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
- Arbitrary(for {
- _minSuccTests <- choose(10,200)
- _maxDiscardRatio <- choose(0.2f,10f)
- _minSize <- choose(0,500)
- sizeDiff <- choose(0,500)
- _maxSize <- choose(_minSize, _minSize + sizeDiff)
- _workers <- choose(1,4)
- } yield new Test.Parameters.Default {
- override val minSuccessfulTests = _minSuccTests
- override val maxDiscardRatio = _maxDiscardRatio
- override val minSize = _minSize
- override val maxSize = _maxSize
- override val workers = _workers
- })
-
- /** Arbitrary instance of gen params */
- implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
- Arbitrary(for {
- size <- arbitrary[Int] suchThat (_ >= 0)
- } yield Gen.Params(size, StdRand))
-
- /** Arbitrary instance of prop params */
- implicit lazy val arbPropParams: Arbitrary[Prop.Params] =
- Arbitrary(for {
- genPrms <- arbitrary[Gen.Params]
- } yield Prop.Params(genPrms, FreqMap.empty[immutable.Set[Any]]))
-
-
- // Higher-order types //
-
- /** Arbitrary instance of Gen */
- implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
- Arbitrary(frequency(
- (5, arbitrary[T] map (value(_))),
- (1, Gen.fail)
- ))
-
- /** Arbitrary instance of option type */
- implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
- Arbitrary(sized(n => if(n == 0) value(None) else resize(n - 1, arbitrary[T]).map(Some(_))))
-
- implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
- Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
-
- /** Arbitrary instance of immutable map */
- implicit def arbImmutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
- ): Arbitrary[immutable.Map[T,U]] = Arbitrary(
- for(seq <- arbitrary[Stream[(T,U)]]) yield immutable.Map(seq: _*)
- )
-
- /** Arbitrary instance of mutable map */
- implicit def arbMutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
- ): Arbitrary[mutable.Map[T,U]] = Arbitrary(
- for(seq <- arbitrary[Stream[(T,U)]]) yield mutable.Map(seq: _*)
- )
-
- /** Arbitrary instance of any buildable container (such as lists, arrays,
- * streams, etc). The maximum size of the container depends on the size
- * generation parameter. */
- implicit def arbContainer[C[_],T](implicit a: Arbitrary[T], b: Buildable[T,C]
- ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
-
- /** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
- ): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
-
-
- // Functions //
-
- /** Arbitrary instance of Function1 */
- implicit def arbFunction1[T1,R](implicit a: Arbitrary[R]
- ): Arbitrary[T1 => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1) => r
- )
-
- /** Arbitrary instance of Function2 */
- implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r
- )
-
- /** Arbitrary instance of Function3 */
- implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r
- )
-
- /** Arbitrary instance of Function4 */
- implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r
- )
-
- /** Arbitrary instance of Function5 */
- implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R]
- ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary(
- for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r
- )
-
-
- // Tuples //
-
- /** Arbitrary instance of 2-tuple */
- implicit def arbTuple2[T1,T2](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Arbitrary[(T1,T2)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- } yield (t1,t2))
-
- /** Arbitrary instance of 3-tuple */
- implicit def arbTuple3[T1,T2,T3](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Arbitrary[(T1,T2,T3)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- } yield (t1,t2,t3))
-
- /** Arbitrary instance of 4-tuple */
- implicit def arbTuple4[T1,T2,T3,T4](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Arbitrary[(T1,T2,T3,T4)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- } yield (t1,t2,t3,t4))
-
- /** Arbitrary instance of 5-tuple */
- implicit def arbTuple5[T1,T2,T3,T4,T5](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Arbitrary[(T1,T2,T3,T4,T5)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- } yield (t1,t2,t3,t4,t5))
-
- /** Arbitrary instance of 6-tuple */
- implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- } yield (t1,t2,t3,t4,t5,t6))
-
- /** Arbitrary instance of 7-tuple */
- implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- } yield (t1,t2,t3,t4,t5,t6,t7))
-
- /** Arbitrary instance of 8-tuple */
- implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8))
-
- /** Arbitrary instance of 9-tuple */
- implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Arbitrary(for {
- t1 <- arbitrary[T1]
- t2 <- arbitrary[T2]
- t3 <- arbitrary[T3]
- t4 <- arbitrary[T4]
- t5 <- arbitrary[T5]
- t6 <- arbitrary[T6]
- t7 <- arbitrary[T7]
- t8 <- arbitrary[T8]
- t9 <- arbitrary[T9]
- } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9))
-
-}
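
Arbitrary.scala and the rest of the bundled ScalaCheck sources are deleted here, presumably so ScalaCheck is consumed as an external dependency instead. For reference, a user-defined instance of the kind described in the Scaladoc above is typically written as follows (API as in the ScalaCheck 1.10.x sources being removed; names like Temperature are illustrative):

    import org.scalacheck.{ Arbitrary, Gen, Prop }

    object ArbitrarySketch {
      final case class Temperature(celsius: Double)

      // An Arbitrary built from a Gen, as the factory method Arbitrary(...) expects.
      implicit val arbTemperature: Arbitrary[Temperature] =
        Arbitrary(Gen.choose(-90.0, 60.0).map(Temperature(_)))

      // Properties over Temperature can now be written with Prop.forAll.
      val inRange = Prop.forAll { (t: Temperature) => t.celsius >= -90.0 && t.celsius <= 60.0 }

      def main(args: Array[String]): Unit = inRange.check
    }
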
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
deleted file mode 100644
index 4961c78a26..0000000000
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-case class Arg[+T](
- label: String,
- arg: T,
- shrinks: Int,
- origArg: T
-)(implicit prettyPrinter: T => Pretty) {
- lazy val prettyArg: Pretty = prettyPrinter(arg)
- lazy val prettyOrigArg: Pretty = prettyPrinter(origArg)
-}
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
deleted file mode 100644
index 604b68cb36..0000000000
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Gen._
-import Prop._
-import Shrink._
-
-/** See User Guide for usage examples */
-trait Commands extends Prop {
-
-  /** The abstract state data type. It encodes the abstract state of the
-   *  system under test: it should model all the features we need for
-   *  specifying our pre- and postconditions, and leave out every detail
-   *  that isn't needed for them. The state type must be called State
-   *  and must be immutable. */
- type State <: AnyRef
-
- class Binding(private val key: State) {
- def get: Any = bindings.find(_._1 eq key) match {
- case None => sys.error("No value bound")
- case Some(x) => x._2
- }
- }
-
- /** Abstract commands are defined as subtypes of the traits Command or SetCommand.
- * Each command must have a run method and a method that returns the new abstract
- * state, as it should look after the command has been run.
- * A command can also define a precondition that states how the current
- * abstract state must look if the command should be allowed to run.
- * Finally, we can also define a postcondition which verifies that the
-   * system under test is in a correct state after the command execution. */
- trait Command {
-
- /** Used internally. */
- protected[Commands] def run_(s: State) = run(s)
-
- def run(s: State): Any
- def nextState(s: State): State
-
- /** Returns all preconditions merged into a single function */
- def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
-
- /** A precondition is a function that
-     *  takes the current abstract state as a parameter and returns a boolean
-     *  that says whether the precondition is fulfilled. You can add several
-     *  conditions to the preConditions list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
-
- /** Returns all postconditions merged into a single function */
- def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
-
- /** A postcondition is a function that
- * takes three parameters, s0, s1 and r. s0 is the abstract state before
- * the command was run, s1 is the abstract state after the command was
- * run, and r is the result from the command's run
- * method. The postcondition function should return a Boolean (or
- * a Prop instance) that says if the condition holds or not. You can add several
- * conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
- }
-
- /** A command that binds its result for later use */
- trait SetCommand extends Command {
- /** Used internally. */
- protected[Commands] final override def run_(s: State) = {
- val r = run(s)
- bindings += ((s,r))
- r
- }
-
- final def nextState(s: State) = nextState(s, new Binding(s))
- def nextState(s: State, b: Binding): State
- }
-
- private case class Cmds(cs: List[Command], ss: List[State]) {
- override def toString = cs.map(_.toString).mkString(", ")
- }
-
- private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
-
- private def initState() = {
- bindings.clear()
- initialState()
- }
-
- private def genCmds: Gen[Cmds] = {
- def sizedCmds(s: State)(sz: Int): Gen[Cmds] =
- if(sz <= 0) value(Cmds(Nil, Nil)) else for {
- c <- genCommand(s) suchThat (_.preCondition(s))
- Cmds(cs,ss) <- sizedCmds(c.nextState(s))(sz-1)
- } yield Cmds(c::cs, s::ss)
-
- for {
- s0 <- wrap(value(initialState()))
- cmds <- sized(sizedCmds(s0))
- } yield cmds
- }
-
- private def validCmds(s: State, cs: List[Command]): Option[Cmds] =
- cs match {
- case Nil => Some(Cmds(Nil, s::Nil))
- case c::_ if !c.preCondition(s) => None
- case c::cmds => for {
- Cmds(_, ss) <- validCmds(c.nextState(s), cmds)
- } yield Cmds(cs, s::ss)
- }
-
- private def runCommands(cmds: Cmds): Prop = cmds match {
- case Cmds(Nil, _) => proved
- case Cmds(c::cs, s::ss) =>
- c.postCondition(s,c.nextState(s),c.run_(s)) && runCommands(Cmds(cs,ss))
- case _ => sys.error("Should not be here")
- }
-
- private def commandsProp: Prop = {
- def shrinkCmds(cmds: Cmds) = cmds match { case Cmds(cs,_) =>
- shrink(cs)(shrinkContainer).flatMap(cs => validCmds(initialState(), cs).toList)
- }
-
- forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
- }
-
- def apply(p: Prop.Params) = commandsProp(p)
-
- /** initialState should reset the system under test to a well defined
- * initial state, and return the abstract version of that state. */
- def initialState(): State
-
- /** The command generator. Given an abstract state, the generator
- * should return a command that is allowed to run in that state. Note that
-   *  it is still necessary to define preconditions on the commands if there
-   *  are any. The generator just gives a hint about which commands are
-   *  suitable for a given state; the preconditions will still be checked before
-   *  a command runs. Sometimes you may want to adjust the distribution of
- * your command generator according to the state, or do other calculations
- * based on the state. */
- def genCommand(s: State): Gen[Command]
-
-}
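
To illustrate the stateful-testing API removed above, a minimal sketch of a Commands specification; the Counter class, CounterSpec object, Model state and Inc command are hypothetical examples, not part of ScalaCheck:

{{{
import org.scalacheck.{Commands, Gen, Prop}

// Hypothetical system under test: a tiny mutable counter.
class Counter { private var n = 0; def inc() = { n += 1; n }; def reset() = { n = 0 } }

object CounterSpec extends Commands {
  case class Model(count: Int)          // immutable abstract state
  type State = Model

  val sut = new Counter
  def initialState() = { sut.reset(); Model(0) }

  case object Inc extends Command {
    def run(s: State) = sut.inc()
    def nextState(s: State) = Model(s.count + 1)
    // Postcondition: the value returned by the real counter matches the model.
    postConditions += { (s0, s1, r) => Prop(r == s1.count) }
  }

  def genCommand(s: State): Gen[Command] = Gen.value(Inc)
}
}}}

Since Commands extends Prop, CounterSpec.check would then run randomly generated command sequences against the counter.
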
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
deleted file mode 100644
index d565322d99..0000000000
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import Pretty._
-import util.FreqMap
-
-class ConsoleReporter(val verbosity: Int) extends Test.TestCallback {
-
- private val prettyPrms = Params(verbosity)
-
- override def onTestResult(name: String, res: Test.Result) = {
- if(verbosity > 0) {
- if(name == "") {
- val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- } else {
- val s = (if(res.passed) "+ " else "! ") + name + ": " +
- pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- }
- }
- }
-
-}
-
-object ConsoleReporter {
-
-  /** Factory method that creates a ConsoleReporter with
-   *  the given verbosity */
- def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
-
- def testStatsEx(msg: String, res: Test.Result) = {
- lazy val m = if(msg.length == 0) "" else msg + ": "
- res.status match {
- case Test.Proved(_) => {}
- case Test.Passed => {}
- case f @ Test.Failed(_, _) => sys.error(m + f)
- case Test.Exhausted => {}
- case f @ Test.GenException(_) => sys.error(m + f)
- case f @ Test.PropException(_, _, _) => sys.error(m + f)
- }
- }
-
-}
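
A sketch of how the deleted reporter is typically wired up. It assumes that Test.check(Test.Parameters, Prop) returns a Test.Result (as the .passed call in Prop.mainRunner further below suggests) and that the default Arbitrary/Shrink instances from files not shown here are in scope; the "identity" label is just an example name:

{{{
import org.scalacheck.{ConsoleReporter, Prop, Test}

val prop = Prop.forAll { n: Int => n + 0 == n }

val res = Test.check(Test.Parameters.default, prop)    // assumed to be a Test.Result
ConsoleReporter(verbosity = 1).onTestResult("identity", res)
ConsoleReporter.testStatsEx("identity", res)            // calls sys.error if the property failed
}}}
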
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
deleted file mode 100644
index aec67159f1..0000000000
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ /dev/null
@@ -1,542 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import scala.collection.mutable.ListBuffer
-import util.Buildable
-import Prop._
-import Arbitrary._
-
-trait Choose[T] {
- def choose(min: T, max: T): Gen[T]
-}
-
-object Choose {
- import Gen.{fail, parameterized, value}
-
- implicit val chooseLong: Choose[Long] = new Choose[Long] {
- def choose(low: Long, high: Long) =
- if (low > high) fail
- else parameterized(prms => value(prms.choose(low,high)))
- }
-
- implicit val chooseDouble: Choose[Double] = new Choose[Double] {
- def choose(low: Double, high: Double) =
- if (low > high || (high-low > Double.MaxValue)) fail
- else parameterized(prms => value(prms.choose(low,high)))
- }
-
- implicit val chooseInt: Choose[Int] = new Choose[Int] {
- def choose(low: Int, high: Int) =
- chooseLong.choose(low, high).map(_.toInt)
- }
-
- implicit val chooseByte: Choose[Byte] = new Choose[Byte] {
- def choose(low: Byte, high: Byte) =
- chooseLong.choose(low, high).map(_.toByte)
- }
-
- implicit val chooseShort: Choose[Short] = new Choose[Short] {
- def choose(low: Short, high: Short) =
- chooseLong.choose(low, high).map(_.toShort)
- }
-
- implicit val chooseChar: Choose[Char] = new Choose[Char] {
- def choose(low: Char, high: Char) =
- chooseLong.choose(low, high).map(_.toChar)
- }
-
- implicit val chooseFloat: Choose[Float] = new Choose[Float] {
- def choose(low: Float, high: Float) =
- chooseDouble.choose(low, high).map(_.toFloat)
- }
-}
-
-case class FiniteGenRes[+T](
- r: T
-)
-
-sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
-
-
-/** Class that represents a generator. */
-sealed trait Gen[+T] {
-
- import Gen.choose
-
- var label = "" // TODO: Ugly mutable field
-
- /** Put a label on the generator to make test reports clearer */
- def label(l: String): Gen[T] = {
- label = l
- this
- }
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the generator to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the generator to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
- def apply(prms: Gen.Params): Option[T]
-
- def map[U](f: T => U): Gen[U] = Gen(prms => this(prms).map(f)).label(label)
-
- def map2[U, V](g: Gen[U])(f: (T, U) => V) =
- combine(g)((t, u) => t.flatMap(t => u.flatMap(u => Some(f(t, u)))))
-
- def map3[U, V, W](gu: Gen[U], gv: Gen[V])(f: (T, U, V) => W) =
- combine3(gu, gv)((t, u, v) => t.flatMap(t => u.flatMap(u => v.flatMap(v => Some(f(t, u, v))))))
-
- def map4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])(f: (T, U, V, W) => X) =
- combine4(gu, gv, gw)((t, u, v, w) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => Some(f(t, u, v, w)))))))
-
- def map5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])(f: (T, U, V, W, X) => Y) =
- combine5(gu, gv, gw, gx)((t, u, v, w, x) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => Some(f(t, u, v, w, x))))))))
-
- def map6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])(f: (T, U, V, W, X, Y) => Z) =
- combine6(gu, gv, gw, gx, gy)((t, u, v, w, x, y) => t.flatMap(t => u.flatMap(u => v.flatMap(v => w.flatMap(w => x.flatMap(x => y.flatMap(y => Some(f(t, u, v, w, x, y)))))))))
-
- def flatMap[U](f: T => Gen[U]): Gen[U] = Gen(prms => for {
- t <- this(prms)
- u <- f(t)(prms)
- } yield u)
-
- def filter(p: T => Boolean): Gen[T] = Gen(prms => for {
- t <- this(prms)
- u <- if (p(t)) Some(t) else None
- } yield u).label(label)
-
- def withFilter(p: T => Boolean) = new GenWithFilter[T](this, p)
-
- final class GenWithFilter[+A](self: Gen[A], p: A => Boolean) {
- def map[B](f: A => B): Gen[B] = self filter p map f
- def flatMap[B](f: A => Gen[B]): Gen[B] = self filter p flatMap f
- def withFilter(q: A => Boolean): GenWithFilter[A] = new GenWithFilter[A](self, x => p(x) && q(x))
- }
-
- def suchThat(p: T => Boolean): Gen[T] = filter(p)
-
- def combine[U,V](g: Gen[U])(f: (Option[T],Option[U]) => Option[V]): Gen[V] =
- Gen(prms => f(this(prms), g(prms)))
-
- def combine3[U, V, W](gu: Gen[U], gv: Gen[V])
- (f: (Option[T], Option[U], Option[V]) => Option[W]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms)))
-
- def combine4[U, V, W, X](gu: Gen[U], gv: Gen[V], gw: Gen[W])
- (f: (Option[T], Option[U], Option[V], Option[W]) => Option[X]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms)))
-
- def combine5[U, V, W, X, Y](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X])
- (f: (Option[T], Option[U], Option[V], Option[W], Option[X]) => Option[Y]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms)))
-
- def combine6[U, V, W, X, Y, Z](gu: Gen[U], gv: Gen[V], gw: Gen[W], gx: Gen[X], gy: Gen[Y])
- (f: (Option[T], Option[U], Option[V], Option[W], Option[X], Option[Y]) => Option[Z]) =
- Gen(prms => f(this(prms), gu(prms), gv(prms), gw(prms), gx(prms), gy(prms)))
-
- def ap[U](g: Gen[T => U]) = flatMap(t => g.flatMap(u => Gen(p => Some(u(t)))))
-
- override def toString =
- if(label.length == 0) "Gen()" else "Gen(\"" + label + "\")"
-
- /** Returns a new property that holds if and only if both this
-   *  and the given generator generate the same result, or both
- * generators generate no result. */
- def ==[U](g: Gen[U]) = Prop(prms =>
- (this(prms.genPrms), g(prms.genPrms)) match {
- case (None,None) => proved(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => proved(prms)
- case _ => falsified(prms)
- }
- )
-
- def !=[U](g: Gen[U]) = forAll(this)(r => forAll(g)(_ != r))
-
- def !==[U](g: Gen[U]) = Prop(prms =>
- (this(prms.genPrms), g(prms.genPrms)) match {
- case (None,None) => falsified(prms)
- case (Some(r1),Some(r2)) if r1 == r2 => falsified(prms)
- case _ => proved(prms)
- }
- )
-
- private var freq = 1
- def |[U >: T](g: Gen[U]): Gen[U] = {
- val h = Gen.frequency((freq, this), (1, g))
- h.freq = freq+1
- h
- }
-
- /** Generates a sample value by using default parameters */
- def sample: Option[T] = apply(Gen.Params())
-
-}
-
-
-/** Contains combinators for building generators. */
-object Gen {
-
- import Arbitrary._
- import Shrink._
-
- /** Record that encapsulates all parameters required for data generation */
- case class Params(
- size: Int = 100,
- rng: java.util.Random = util.StdRand
- ) {
- def resize(newSize: Int) = this.copy(size = newSize)
-
-    /** @throws IllegalArgumentException if l is greater than h. Ranges
-     *  too large to fit in a Long are handled by rejection sampling. */
- def choose(l: Long, h: Long): Long = {
- if (h < l) throw new IllegalArgumentException("Invalid range")
- val d = h - l + 1
- if (d <= 0) {
- var n = rng.nextLong
- while (n < l || n > h) {
- n = rng.nextLong
- }
- n
- } else {
- l + math.abs(rng.nextLong % d)
- }
- }
-
- /** @throws IllegalArgumentException if l is greater than h, or if
- * the range between l and h doesn't fit in a Double. */
- def choose(l: Double, h: Double) = {
- val d = h-l
- if (d < 0 || d > Double.MaxValue)
- throw new IllegalArgumentException("Invalid range")
- else if (d == 0) l
- else rng.nextDouble * (h-l) + l
- }
- }
-
- /* Generator factory method */
- def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
- def apply(p: Gen.Params) = g(p)
- }
-
- /* Convenience method for using the `frequency` method like this:
- * {{{
- * frequency((1, "foo"), (3, "bar"))
- * }}}
- */
- implicit def freqTuple[T](t: (Int, T)): (Int, Gen[T]) = (t._1, value(t._2))
-
-
- //// Various Generator Combinators ////
-
- /** Sequences generators. If any of the given generators fails, the
- * resulting generator will also fail. */
- def sequence[C[_],T](gs: Iterable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C[T]] = Gen(prms => {
- val builder = b.builder
- var none = false
- val xs = gs.iterator
- while(xs.hasNext && !none) xs.next.apply(prms) match {
- case None => none = true
- case Some(x) => builder += x
- }
- if(none) None else Some(builder.result())
- })
-
-  /** Wraps a generator lazily. The given parameter is only evaluated once,
- * and not until the wrapper generator is evaluated. */
- def lzy[T](g: => Gen[T]) = new Gen[T] {
- lazy val h = g
- def apply(prms: Params) = h(prms)
- }
-
- /** Wraps a generator for later evaluation. The given parameter is
- * evaluated each time the wrapper generator is evaluated. */
- def wrap[T](g: => Gen[T]) = Gen(p => g(p))
-
- /** A generator that always generates the given value */
- implicit def value[T](x: T) = Gen(p => Some(x))
-
- /** A generator that never generates a value */
- def fail[T]: Gen[T] = Gen(p => None)
-
- /** A generator that generates a random value in the given (inclusive)
- * range. If the range is invalid, the generator will not generate any value.
- */
- def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] = {
- c.choose(min, max)
- }
-
- /** Creates a generator that can access its generation parameters */
- def parameterized[T](f: Params => Gen[T]): Gen[T] = Gen(prms => f(prms)(prms))
-
- /** Creates a generator that can access its generation size */
- def sized[T](f: Int => Gen[T]) = parameterized(prms => f(prms.size))
-
- /** Creates a resized version of a generator */
- def resize[T](s: Int, g: Gen[T]) = Gen(prms => g(prms.resize(s)))
-
- /** Chooses one of the given generators with a weighted random distribution */
- def frequency[T](gs: (Int,Gen[T])*): Gen[T] = {
- lazy val tot = (gs.map(_._1) :\ 0) (_+_)
-
- def pick(n: Int, l: List[(Int,Gen[T])]): Gen[T] = l match {
- case Nil => fail
- case (k,g)::gs => if(n <= k) g else pick(n-k, gs)
- }
-
- for {
- n <- choose(1,tot)
- x <- pick(n,gs.toList)
- } yield x
- }
-
- /** Picks a random value from a list */
- def oneOf[T](xs: Seq[T]): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0, xs.size-1)
- } yield xs(i)
-
- /** Picks a random generator from a list */
- def oneOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
- i <- choose(0, gs.length+1)
- x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
- } yield x
-
-
- //// List Generators ////
-
- /** Generates a container of any type for which there exists an implicit
- * [[org.scalacheck.util.Buildable]] instance. The elements in the container will
- * be generated by the given generator. The size of the generated container
- * is given by `n`. */
- def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit b: Buildable[T,C]
- ): Gen[C[T]] = sequence[C,T](new Iterable[Gen[T]] {
- def iterator = new Iterator[Gen[T]] {
- var i = 0
- def hasNext = i < n
- def next = { i += 1; g }
- }
- })
-
- /** Generates a container of any type for which there exists an implicit
- * [[org.scalacheck.util.Buildable]] instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def containerOf[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
- sized(size => for(n <- choose(0,size); c <- containerOfN[C,T](n,g)) yield c)
-
- /** Generates a non-empty container of any type for which there exists an
- * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the container
- * will be generated by the given generator. The size of the container is
- * bounded by the size parameter used when generating values. */
- def containerOf1[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
- sized(size => for(n <- choose(1,size); c <- containerOfN[C,T](n,g)) yield c)
-
- /** Generates a list of random length. The maximum length depends on the
-   *  size parameter. This method is equivalent to calling
- * `containerOf[List,T](g)`. */
- def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
-
- /** Generates a non-empty list of random length. The maximum length depends
-   *  on the size parameter. This method is equivalent to calling
- * `containerOf1[List,T](g)`. */
- def listOf1[T](g: => Gen[T]) = containerOf1[List,T](g)
-
-  /** Generates a list of the given length. This method is equivalent to calling
- * `containerOfN[List,T](n,g)`. */
- def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
- /** A generator that picks a random number of elements from a list */
- def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
-
-  /** A generator that picks a random number of values from the given generators */
- def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = for {
- n <- choose(0, gs.length+2)
- x <- pick(n, g1, g2, gs: _*)
- } yield x
-
- /** A generator that picks a given number of elements from a list, randomly */
- def pick[T](n: Int, l: Iterable[T]): Gen[Seq[T]] =
- if(n > l.size || n < 0) fail
- else Gen(prms => {
- val buf = new ListBuffer[T]
- buf ++= l
- while(buf.length > n) {
- val g = choose(0, buf.length-1)
- buf.remove(g(prms).get)
- }
- Some(buf)
- })
-
- /** A generator that picks a given number of elements from a list, randomly */
- def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gs: Gen[T]*): Gen[Seq[T]] = for {
- is <- pick(n, 0 until (gs.size+2))
- allGs = gs ++ (g1::g2::Nil)
- xs <- sequence[List,T](is.toList.map(allGs(_)))
- } yield xs
-
-
- //// Character Generators ////
-
- /* Generates a numerical character */
- def numChar: Gen[Char] = choose(48,57) map (_.toChar)
-
- /* Generates an upper-case alpha character */
- def alphaUpperChar: Gen[Char] = choose(65,90) map (_.toChar)
-
- /* Generates a lower-case alpha character */
- def alphaLowerChar: Gen[Char] = choose(97,122) map (_.toChar)
-
- /* Generates an alpha character */
- def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar))
-
- /* Generates an alphanumerical character */
- def alphaNumChar = frequency((1,numChar), (9,alphaChar))
-
- //// String Generators ////
-
- /* Generates a string that starts with a lower-case alpha character,
- * and only contains alphanumerical characters */
- def identifier: Gen[String] = for {
- c <- alphaLowerChar
- cs <- listOf(alphaNumChar)
- } yield (c::cs).mkString
-
- /* Generates a string of alpha characters */
- def alphaStr: Gen[String] = for(cs <- listOf(Gen.alphaChar)) yield cs.mkString
-
- /* Generates a string of digits */
- def numStr: Gen[String] = for(cs <- listOf(Gen.numChar)) yield cs.mkString
-
- //// Number Generators ////
-
- /** Generates positive numbers of uniform distribution, with an
- * upper bound of the generation size parameter. */
- def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(one, fromInt(max)))
- }
-
-  /** Generates negative numbers of uniform distribution, with a
-   *  lower bound of the negated generation size parameter. */
- def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
- import num._
- sized(max => c.choose(-fromInt(max), -one))
- }
-
- /** Generates numbers within the given inclusive range, with
- * extra weight on zero, +/- unity, both extremities, and any special
- * numbers provided. The special numbers must lie within the given range,
- * otherwise they won't be included. */
- def chooseNum[T](minT: T, maxT: T, specials: T*)(
- implicit num: Numeric[T], c: Choose[T]
- ): Gen[T] = {
- import num._
- val basics = List(minT, maxT, zero, one, -one)
- val basicsAndSpecials = for {
- t <- specials ++ basics if t >= minT && t <= maxT
- } yield (1, value(t))
- val allGens = basicsAndSpecials ++ List(
- (basicsAndSpecials.length, c.choose(minT, maxT))
- )
- frequency(allGens: _*)
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] =
- arbitrary[T] map f
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
- ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,R](
- f: (T1,T2,T3,T4,T5,T6) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,R](
- f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
- a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))
- }
-
- /** Takes a function and returns a generator that generates arbitrary
- * results of that function by feeding it with arbitrarily generated input
- * parameters. */
- def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R](
- f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit
- a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
- a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
- a9: Arbitrary[T9]
- ): Gen[R] = arbitrary[T1] flatMap {
- t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9))
- }
-
-}
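
For reference, a brief sketch of the combinators deleted above, written against the ScalaCheck 1.10.x signatures shown in this file:

{{{
import org.scalacheck.Gen

// choose goes through the Choose type class instances defined above.
val digit: Gen[Int] = Gen.choose(0, 9)

// frequency weights generators 9:1; oneOf(Seq(...)) picks uniformly.
val vowelHeavy: Gen[Char] =
  Gen.frequency((9, Gen.oneOf(Seq('a', 'e', 'i', 'o', 'u'))), (1, Gen.alphaChar))

// listOfN delegates to containerOfN[List,T]; suchThat filters generated values.
val evens: Gen[List[Int]] = Gen.listOfN(5, Gen.choose(0, 100) suchThat (_ % 2 == 0))

// sample draws a single value using the default Params (None if generation failed).
val drawn: Option[List[Int]] = evens.sample
}}}
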
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
deleted file mode 100644
index 3e8f6de5f6..0000000000
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import math.round
-
-
-sealed trait Pretty {
- def apply(prms: Pretty.Params): String
-
- def map(f: String => String) = Pretty(prms => f(Pretty.this(prms)))
-
- def flatMap(f: String => Pretty) = Pretty(prms => f(Pretty.this(prms))(prms))
-}
-
-object Pretty {
-
- case class Params(verbosity: Int)
-
- val defaultParams = Params(0)
-
- def apply(f: Params => String) = new Pretty { def apply(p: Params) = f(p) }
-
- def pretty[T <% Pretty](t: T, prms: Params): String = t(prms)
-
- def pretty[T <% Pretty](t: T): String = t(defaultParams)
-
- implicit def strBreak(s1: String) = new {
- def /(s2: String) = if(s2 == "") s1 else s1+"\n"+s2
- }
-
- def pad(s: String, c: Char, length: Int) =
- if(s.length >= length) s
- else s + List.fill(length-s.length)(c).mkString
-
- def break(s: String, lead: String, length: Int): String =
- if(s.length <= length) s
- else s.substring(0, length) / break(lead+s.substring(length), lead, length)
-
- def format(s: String, lead: String, trail: String, width: Int) =
- s.lines.map(l => break(lead+l+trail, " ", width)).mkString("\n")
-
- implicit def prettyAny(t: Any) = Pretty { p => t.toString }
-
- implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
-
- implicit def prettyList(l: List[Any]) = Pretty { p =>
- l.map("\""+_+"\"").mkString("List(", ", ", ")")
- }
-
- implicit def prettyThrowable(e: Throwable) = Pretty { prms =>
- val strs = e.getStackTrace.map { st =>
- import st._
- getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")"
- }
-
- val strs2 =
- if(prms.verbosity <= 0) Array[String]()
- else if(prms.verbosity <= 1) strs.take(5)
- else strs
-
- e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n")
- }
-
- implicit def prettyArgs(args: List[Arg[Any]]): Pretty = Pretty { prms =>
- if(args.isEmpty) "" else {
- for((a,i) <- args.zipWithIndex) yield {
- val l = if(a.label == "") "ARG_"+i else a.label
- val s =
- if(a.shrinks == 0 || prms.verbosity <= 1) ""
- else " (orig arg: "+a.prettyOrigArg(prms)+")"
-
- "> "+l+": "+a.prettyArg(prms)+""+s
- }
- }.mkString("\n")
- }
-
- implicit def prettyFreqMap(fm: Prop.FM) = Pretty { prms =>
- if(fm.total == 0) ""
- else {
- "> Collected test data: " / {
- for {
- (xs,r) <- fm.getRatios
- ys = xs - ()
- if !ys.isEmpty
- } yield round(r*100)+"% " + ys.mkString(", ")
- }.mkString("\n")
- }
- }
-
- implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
- if(ls.isEmpty) ""
- else "> Labels of failing property: " / ls.mkString("\n")
- val s = res.status match {
- case Test.Proved(args) => "OK, proved property."/pretty(args,prms)
- case Test.Passed => "OK, passed "+res.succeeded+" tests."
- case Test.Failed(args, l) =>
- "Falsified after "+res.succeeded+" passed tests."/labels(l)/pretty(args,prms)
- case Test.Exhausted =>
- "Gave up after only "+res.succeeded+" passed tests. " +
- res.discarded+" tests were discarded."
- case Test.PropException(args,e,l) =>
- "Exception raised on property evaluation."/labels(l)/pretty(args,prms)/
- "> Exception: "+pretty(e,prms)
- case Test.GenException(e) =>
- "Exception raised on argument generation."/
- "> Exception: "+pretty(e,prms)
- }
- val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
- s/t/pretty(res.freqMap,prms)
- }
-
- def prettyTime(millis: Long): String = {
- val min = millis/(60*1000)
- val sec = (millis-(60*1000*min)) / 1000d
- if(min <= 0) "%.3f sec ".format(sec)
- else "%d min %.3f sec ".format(min, sec)
- }
-}
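
A short sketch of the pretty-printing helpers deleted above; the printed values follow directly from the implementations shown, and the exception is just an illustrative throwable:

{{{
import org.scalacheck.Pretty._

println(pretty(42))      // 42     (via prettyAny)
println(pretty("hi"))    // "hi"   (prettyString adds quotes)

// Verbosity controls how much of a stack trace prettyThrowable shows.
println(pretty(new RuntimeException("boom"), Params(verbosity = 2)))

println(prettyTime(90500))   // 1 min 30.500 sec
}}}
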
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
deleted file mode 100644
index 38e00f260f..0000000000
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ /dev/null
@@ -1,818 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.{FreqMap,Buildable}
-import scala.collection._
-import scala.annotation.tailrec
-
-/** A property is a generator that generates a property result */
-trait Prop {
-
- import Prop.{Result,Params,Proof,True,False,Exception,Undecided,provedToTrue}
- import Test.cmdLineParser.{Success, NoSuccess}
- import Result.merge
-
- def apply(prms: Params): Result
-
- def map(f: Result => Result): Prop = Prop(prms => f(this(prms)))
-
- def flatMap(f: Result => Prop): Prop = Prop(prms => f(this(prms))(prms))
-
- def combine(p: Prop)(f: (Result, Result) => Result) =
- for(r1 <- this; r2 <- p) yield f(r1,r2)
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead.
- * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
- */
- @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
- def check(prms: Test.Params): Unit = Test.check(
- prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks this property with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- def check(prms: Test.Parameters): Unit = Test.check(
- prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks this property and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- def mainRunner(args: Array[String]): Int = {
- Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- if (Test.check(params, this).passed) 0
- else 1
- case e: NoSuccess =>
- println("Incorrect options:"+"\n"+e+"\n")
- Test.cmdLineParser.printHelp
- -1
- }
- }
-
- /** Whether main should call System.exit with an exit code.
- * Defaults to true; override to change.
- */
- def mainCallsExit = true
-
- /** Convenience method that makes it possible to use this property
- * as an application that checks itself on execution */
- def main(args: Array[String]): Unit = {
- val code = mainRunner(args)
- if (mainCallsExit)
- System exit code
- }
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate false. */
- def &&(p: Prop) = combine(p)(_ && _)
-
- /** Returns a new property that holds if either this
- * or the given property (or both) hold. */
- def ||(p: Prop) = combine(p)(_ || _)
-
- /** Returns a new property that holds if and only if both this
- * and the given property hold. If one of the properties doesn't
- * generate a result, the new property will generate the same result
- * as the other property. */
- def ++(p: Prop): Prop = combine(p)(_ ++ _)
-
- /** Combines two properties through implication */
- def ==>(p: => Prop): Prop = flatMap { r1 =>
- if(r1.proved) p map { r2 => merge(r1,r2,r2.status) }
- else if(r1.success) p map { r2 => provedToTrue(merge(r1,r2,r2.status)) }
- else Prop(r1.copy(status = Undecided))
- }
-
- /** Returns a new property that holds if and only if both this
-   *  and the given property generate a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail. */
- def ==(p: Prop) = this.flatMap { r1 =>
- p.map { r2 =>
- Result.merge(r1, r2, if(r1.status == r2.status) True else False)
- }
- }
-
- override def toString = "Prop"
-
- /** Put a label on the property to make test reports clearer */
- def label(l: String) = map(_.label(l))
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: String) = label(l)
-
- /** Put a label on the property to make test reports clearer */
- def :|(l: Symbol) = label(l.toString.drop(1))
-
- /** Put a label on the property to make test reports clearer */
- def |:(l: Symbol) = label(l.toString.drop(1))
-
-}
-
-object Prop {
-
- import Gen.{value, fail, frequency, oneOf}
- import Arbitrary._
- import Shrink._
-
-
- // Types
-
- type Args = List[Arg[Any]]
- type FM = FreqMap[immutable.Set[Any]]
-
- /** Property parameters */
- case class Params(val genPrms: Gen.Params, val freqMap: FM)
-
- object Result {
- def apply(st: Status) = new Result(
- st,
- Nil,
- immutable.Set.empty[Any],
- immutable.Set.empty[String]
- )
-
- def merge(x: Result, y: Result, status: Status) = new Result(
- status,
- x.args ++ y.args,
- (x.collected.asInstanceOf[Set[AnyRef]] ++ y.collected).asInstanceOf[immutable.Set[Any]],
- x.labels ++ y.labels
- )
- }
-
- /** The result of evaluating a property */
- case class Result(
- status: Status,
- args: Args,
- collected: immutable.Set[Any],
- labels: immutable.Set[String]
- ) {
- def success = status match {
- case True => true
- case Proof => true
- case _ => false
- }
-
- def failure = status match {
- case False => true
- case Exception(_) => true
- case _ => false
- }
-
- def proved = status == Proof
-
- def addArg(a: Arg[Any]) = copy(args = a::args)
-
- def collect(x: Any) = copy(collected = collected+x)
-
- def label(l: String) = copy(labels = labels+l)
-
- import Result.merge
-
- def &&(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => this
- case (_,False) => r
-
- case (Undecided,_) => this
- case (_,Undecided) => r
-
- case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, r.status)
-
- case (True,True) => merge(this, r, True)
- }
-
- def ||(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,False) => merge(this, r, False)
- case (False,_) => r
- case (_,False) => this
-
- case (Proof,_) => this
- case (_,Proof) => r
-
- case (True,_) => this
- case (_,True) => r
-
- case (Undecided,Undecided) => merge(this, r, Undecided)
- }
-
- def ++(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (_, Undecided) => this
- case (Undecided, _) => r
-
- case (_, Proof) => this
- case (Proof, _) => r
-
- case (_, True) => this
- case (True, _) => r
-
- case (False, _) => this
- case (_, False) => r
- }
-
- def ==>(r: Result) = (this.status, r.status) match {
- case (Exception(_),_) => this
- case (_,Exception(_)) => r
-
- case (False,_) => merge(this, r, Undecided)
-
- case (Undecided,_) => this
-
- case (Proof,_) => merge(this, r, r.status)
- case (True,_) => merge(this, r, r.status)
- }
-
- }
-
- sealed trait Status
-
- /** The property was proved */
- case object Proof extends Status
-
- /** The property was true */
- case object True extends Status
-
- /** The property was false */
- case object False extends Status
-
- /** The property could not be falsified or proved */
- case object Undecided extends Status
-
- /** Evaluating the property raised an exception */
- sealed case class Exception(e: Throwable) extends Status {
- override def equals(o: Any) = o match {
- case Exception(_) => true
- case _ => false
- }
- }
-
- def apply(f: Params => Result): Prop = new Prop {
- def apply(prms: Params) = f(prms)
- }
-
- def apply(r: Result): Prop = Prop(prms => r)
-
- def apply(b: Boolean): Prop = if(b) proved else falsified
-
-
- // Implicits
-
- /** A collection of property operators on [[Any]] values.
- * Import [[Prop.AnyOperators]] to make the operators available. */
- class ExtendedAny[T <% Pretty](x: => T) {
- /** See [[Prop.imply]] */
- def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
- /** See [[Prop.iff]] */
- def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
- @deprecated("Use 'Prop.throws' instead", "1.10.1")
- def throws[U <: Throwable](c: Class[U]): Prop = Prop.throws(c)(x)
- /** See [[Prop.?=]] */
- def ?=(y: T) = Prop.?=(x, y)
- /** See [[Prop.=?]] */
- def =?(y: T) = Prop.=?(x, y)
- }
-
- /** A collection of property operators on [[Boolean]] values.
- * Import [[Prop.BooleanOperators]] to make the operators available. */
- class ExtendedBoolean(b: => Boolean) {
- /** See [[Prop.==>]] */
- def ==>(p: => Prop) = Prop(b) ==> p
- }
-
- /** Implicit method that makes a number of property operators on values of
- * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
- * documentation on the operators. */
- @deprecated("Use 'Prop.AnyOperators' instead", "1.10.1")
- implicit def extendedAny[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on values of
- * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
- * documentation on the operators. */
- implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x)
-
- /** Implicit method that makes a number of property operators on boolean
- * values available in the current scope. See [[Prop.ExtendedBoolean]] for
- * documentation on the operators. */
- implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b)
-
- /** Implicit conversion of Boolean values to Prop values. */
- implicit def propBoolean(b: Boolean): Prop = Prop(b)
-
-
- // Private support functions
-
- private def provedToTrue(r: Result) = r.status match {
- case Proof => new Result(True, r.args, r.collected, r.labels)
- case _ => r
- }
-
-
- // Property combinators
-
-  /** A property that is never proved or falsified */
-  lazy val undecided = Prop(Result(Undecided))
-
-  /** A property that is always false */
-  lazy val falsified = Prop(Result(False))
-
-  /** A property that is always proved */
-  lazy val proved = Prop(Result(Proof))
-
-  /** A property that always passes */
-  lazy val passed = Prop(Result(True))
-
- /** A property that denotes an exception */
- def exception(e: Throwable): Prop = Prop(Result(Exception(e)))
-
- /** A property that denotes an exception */
- lazy val exception: Prop = exception(null)
-
-  /** Create a property that compares two values. If the values aren't equal,
-   *  the property will fail and report that the first value doesn't match the
-   *  expected (second) value. */
- def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
- if(x == y) proved else falsified :| {
- val exp = Pretty.pretty[T](y, Pretty.Params(0))
- val act = Pretty.pretty[T](x, Pretty.Params(0))
- "Expected "+exp+" but got "+act
- }
-
-  /** Create a property that compares two values. If the values aren't equal,
-   *  the property will fail and report that the second value doesn't match the
-   *  expected (first) value. */
- def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
-
- /** A property that depends on the generator size */
- def sizedProp(f: Int => Prop): Prop = Prop { prms =>
- // provedToTrue since if the property is proved for
- // one size, it shouldn't be regarded as proved for
- // all sizes.
- provedToTrue(f(prms.genPrms.size)(prms))
- }
-
- /** Implication with several conditions */
- def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
- secure(if(f.isDefinedAt(x)) f(x) else undecided)
-
- /** Property holds only if the given partial function is defined at
- * `x`, and returns a property that holds */
- def iff[T](x: T, f: PartialFunction[T,Prop]): Prop =
- secure(if(f.isDefinedAt(x)) f(x) else falsified)
-
- /** Combines properties into one, which is true if and only if all the
- * properties are true */
- def all(ps: Prop*) = if(ps.isEmpty) proved else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ && _)
- )
-
- /** Combines properties into one, which is true if at least one of the
- * properties is true */
- def atLeastOne(ps: Prop*) = if(ps.isEmpty) falsified else Prop(prms =>
- ps.map(p => p(prms)).reduceLeft(_ || _)
- )
-
- /** A property that holds if at least one of the given generators
- * fails generating a value */
- def someFailing[T](gs: Seq[Gen[T]]) = atLeastOne(gs.map(_ == fail):_*)
-
- /** A property that holds iff none of the given generators
- * fails generating a value */
- def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
-
- /** A property that holds if the given statement throws an exception
- * of the specified type
- * @deprecated (in 1.10.1) Use `throws(...): Boolean` instead.
- */
- @deprecated("Use 'throws(...): Boolean' instead", "1.10.1")
- def throws[T <: Throwable](x: => Any, c: Class[T]): Prop = throws(c)(x)
-
- /** Returns true if the given statement throws an exception
- * of the specified type */
- def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean =
- try { x; false } catch { case e if c.isInstance(e) => true }
-
- /** Collect data for presentation in test report */
- def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
- val prop = f(t)
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def collect[T](t: T)(prop: Prop) = Prop { prms =>
- prop(prms).collect(t)
- }
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(())(prop)
-
- /** Collect data for presentation in test report */
- def classify(c: => Boolean, ifTrue: Any, ifFalse: Any)(prop: Prop): Prop =
- if(c) collect(ifTrue)(prop) else collect(ifFalse)(prop)
-
- /** Wraps and protects a property */
- def secure[P <% Prop](p: => P): Prop =
- try { p: Prop } catch { case e: Throwable => exception(e) }
-
-  /** Existential quantifier, using the implicit arbitrary generator. */
- def exists[A,P](f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty,
- aa: Arbitrary[A]
- ): Prop = exists(aa.arbitrary)(f)
-
- /** Existential quantifier for an explicit generator. */
- def exists[A,P](g: Gen[A])(f: A => P)(implicit
- pv: P => Prop,
- pp: A => Pretty
- ): Prop = Prop { prms =>
- g(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- val r = p(prms).addArg(Arg(g.label,x,0,x))
- r.status match {
- case True => new Result(Proof, r.args, r.collected, r.labels)
- case False => new Result(Undecided, r.args, r.collected, r.labels)
- case _ => r
- }
- }
- }
-
- /** Universal quantifier for an explicit generator. Does not shrink failed
- * test cases. */
- def forAllNoShrink[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- pv: P => Prop,
- pp1: T1 => Pretty
- ): Prop = Prop { prms =>
- g1(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) =>
- val p = secure(f(x))
- provedToTrue(p(prms)).addArg(Arg(g1.label,x,0,x))
- }
- }
-
- /** Universal quantifier for two explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators.
- * Does not shrink failed test cases. */
- def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- pp1: T1 => Pretty,
- pp2: T2 => Pretty,
- pp3: T3 => Pretty,
- pp4: T4 => Pretty,
- pp5: T5 => Pretty,
- pp6: T6 => Pretty,
- pp7: T7 => Pretty,
- pp8: T8 => Pretty
- ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the given shrink function */
- def forAllShrink[T <% Pretty, P <% Prop](g: Gen[T],
- shrink: T => Stream[T])(f: T => P
- ): Prop = Prop { prms =>
-
- /** Returns the first failed result in Left or success in Right */
- def getFirstFailure(xs: Stream[T]): Either[(T,Result),(T,Result)] = {
- assert(!xs.isEmpty, "Stream cannot be empty")
- val results = xs.map { x =>
- val p = secure(f(x))
- (x, provedToTrue(p(prms)))
- }
- results.dropWhile(!_._2.failure).headOption match {
- case None => Right(results.head)
- case Some(xr) => Left(xr)
- }
- }
-
- def shrinker(x: T, r: Result, shrinks: Int, orig: T): Result = {
- val xs = shrink(x)
- val res = r.addArg(Arg(g.label,x,shrinks,orig))
- if(xs.isEmpty) res else getFirstFailure(xs) match {
- case Right(_) => res
- case Left((x2,r2)) => shrinker(x2, r2, shrinks+1, orig)
- }
- }
-
- g(prms.genPrms) match {
- case None => undecided(prms)
- case Some(x) => getFirstFailure(Stream.cons(x, Stream.empty)) match {
- case Right((x,r)) => r.addArg(Arg(g.label,x,0,x))
- case Left((x,r)) => shrinker(x,r,0,x)
- }
- }
-
- }
-
- /** Universal quantifier for an explicit generator. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,P](
- g1: Gen[T1])(
- f: T1 => P)(implicit
- p: P => Prop,
- s1: Shrink[T1],
- pp1: T1 => Pretty
- ): Prop = forAllShrink(g1, shrink[T1])(f)
-
- /** Universal quantifier for two explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,P](
- g1: Gen[T1], g2: Gen[T2])(
- f: (T1,T2) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2)(f(t, _:T2)))
-
- /** Universal quantifier for three explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])(
- f: (T1,T2,T3) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3)(f(t, _:T2, _:T3)))
-
- /** Universal quantifier for four explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])(
- f: (T1,T2,T3,T4) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4)(f(t, _:T2, _:T3, _:T4)))
-
- /** Universal quantifier for five explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])(
- f: (T1,T2,T3,T4,T5) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5)))
-
- /** Universal quantifier for six explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])(
- f: (T1,T2,T3,T4,T5,T6) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)))
-
- /** Universal quantifier for seven explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])(
- f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)))
-
- /** Universal quantifier for eight explicit generators. Shrinks failed arguments
- * with the default shrink function for the type */
- def forAll[T1,T2,T3,T4,T5,T6,T7,T8,P](
- g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])(
- f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit
- p: P => Prop,
- s1: Shrink[T1], pp1: T1 => Pretty,
- s2: Shrink[T2], pp2: T2 => Pretty,
- s3: Shrink[T3], pp3: T3 => Pretty,
- s4: Shrink[T4], pp4: T4 => Pretty,
- s5: Shrink[T5], pp5: T5 => Pretty,
- s6: Shrink[T6], pp6: T6 => Pretty,
- s7: Shrink[T7], pp7: T7 => Pretty,
- s8: Shrink[T8], pp8: T8 => Pretty
- ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,P] (
- f: A1 => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty
- ): Prop = forAllShrink(arbitrary[A1],shrink[A1])(f andThen p)
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,P] (
- f: (A1,A2) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,P] (
- f: (A1,A2,A3) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,P] (
- f: (A1,A2,A3,A4) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,P] (
- f: (A1,A2,A3,A4,A5) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,P] (
- f: (A1,A2,A3,A4,A5,A6) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,P] (
- f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7)))
-
- /** Converts a function into a universally quantified property */
- def forAll[A1,A2,A3,A4,A5,A6,A7,A8,P] (
- f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit
- p: P => Prop,
- a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty,
- a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty,
- a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty,
- a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty,
- a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty,
- a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty,
- a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty,
- a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
- ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
-
- /** Ensures that the property expression passed in completes within the given time limit, in milliseconds. */
- def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
- @tailrec private def attempt(prms: Params, endTime: Long): Result = {
- val result = wrappedProp.apply(prms)
- if (System.currentTimeMillis > endTime) {
- (if (result.failure) result else Result(False)).label("Timeout")
- } else {
- if (result.success) result
- else attempt(prms, endTime)
- }
- }
- def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
- }
-}
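For orientation on the API being deleted here, a minimal usage sketch (not part of the diff). It assumes these combinators live in `org.scalacheck.Prop`, that the lower-arity explicit-generator `forAll` overloads which the deleted bodies delegate to exist, and that `Gen.choose` is available, as in standard ScalaCheck of this era:

import org.scalacheck.Gen
import org.scalacheck.Prop.{forAll, within}

object PropUsageSketch {
  // Quantify over two explicit generators; the Boolean result is lifted to Prop
  // through the implicit `P => Prop` parameter visible in the signatures above.
  val commutative = forAll(Gen.choose(0, 100), Gen.choose(0, 100)) {
    (a: Int, b: Int) => a + b == b + a
  }

  // Re-evaluates the wrapped property until it passes or 500 ms elapse;
  // on timeout the result is labelled "Timeout", matching the deleted `within`.
  val bounded = within(500)(commutative)
}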
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
deleted file mode 100644
index d4836d7420..0000000000
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-/** Represents a collection of properties, with convenient methods
- * for checking all properties at once. This class is itself a property, which
- * holds if and only if all of the contained properties hold.
- * <p>Properties are added in the following way:</p>
- *
- * {{{
- * object MyProps extends Properties("MyProps") {
- * property("myProp1") = forAll { (n:Int, m:Int) =>
- * n+m == m+n
- * }
- *
- * property("myProp2") = ((0/1) throws classOf[ArithmeticException])
- * }
- * }}}
- */
-class Properties(val name: String) extends Prop {
-
- import Test.cmdLineParser.{Success, NoSuccess}
-
- private val props = new scala.collection.mutable.ListBuffer[(String,Prop)]
-
- /** Returns one property which holds if and only if all of the
- * properties in this property collection hold */
- private def oneProperty: Prop = Prop.all((properties map (_._2)):_*)
-
- /** Returns all properties of this collection in a list of name/property
- * pairs. */
- def properties: Seq[(String,Prop)] = props
-
- def apply(p: Prop.Params) = oneProperty(p)
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead. */
- override def check(prms: Test.Parameters): Unit = Test.checkProperties(
- prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties with the given parameters
- * and reports the result on the console. If you need to get the results
- * from the test use the `check` methods in [[org.scalacheck.Test]]
- * instead.
- * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
- */
- @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
- override def check(prms: Test.Params): Unit = Test.checkProperties(
- prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
- )
-
- /** Convenience method that checks the properties and reports the
- * result on the console. If you need to get the results from the test use
- * the `check` methods in [[org.scalacheck.Test]] instead. */
- override def check: Unit = check(Test.Parameters.default)
-
- /** The logic for main, separated out to make it easier to
- * avoid System.exit calls. Returns exit code.
- */
- override def mainRunner(args: Array[String]): Int = {
- Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- val res = Test.checkProperties(params, this)
- val failed = res.filter(!_._2.passed).size
- failed
- case e: NoSuccess =>
- println("Incorrect options:"+"\n"+e+"\n")
- Test.cmdLineParser.printHelp
- -1
- }
- }
-
- /** Adds all properties from another property collection to this one. */
- def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
-
- /** Used for specifying properties. Usage:
- * {{{
- * property("myProp") = ...
- * }}}
- */
- class PropertySpecifier() {
- def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
- }
-
- lazy val property = new PropertySpecifier()
-}
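A minimal sketch (not part of the diff) of the removed `Properties` API, showing `property(...) = ...` registration and `include`; the object names are illustrative only:

import org.scalacheck.Properties
import org.scalacheck.Prop.forAll

object StringProps extends Properties("String") {
  property("concat length") = forAll { (a: String, b: String) =>
    (a + b).length == a.length + b.length
  }
}

object AllProps extends Properties("All") {
  // include re-registers each property, so the names gain this collection's
  // prefix, e.g. "All.String.concat length".
  include(StringProps)
}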
diff --git a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
deleted file mode 100644
index 7764101844..0000000000
--- a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-// vim: set ts=2 sw=2 et:
-
-package org.scalacheck
-
-import org.scalatools.testing._
-
-class ScalaCheckFramework extends Framework {
-
- private case object PropFingerprint extends TestFingerprint {
- val superClassName = "org.scalacheck.Prop"
- val isModule = false
- }
-
- private case object PropsFingerprint extends TestFingerprint {
- val superClassName = "org.scalacheck.Properties"
- val isModule = true
- }
-
- val name = "ScalaCheck"
-
-  val tests = Array[Fingerprint](PropFingerprint, PropsFingerprint)
-
- def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 {
-
- private def asEvent(nr: (String, Test.Result)) = nr match {
- case (n: String, r: Test.Result) => new Event {
- val testName = n
- val description = n
- val result = r.status match {
- case Test.Passed => Result.Success
- case _:Test.Proved => Result.Success
- case _:Test.Failed => Result.Failure
- case Test.Exhausted => Result.Skipped
- case _:Test.PropException | _:Test.GenException => Result.Error
- }
- val error = r.status match {
- case Test.PropException(_, e, _) => e
- case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0)))
- case _ => null
- }
- }
- }
-
- def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) {
-
- val testCallback = new Test.TestCallback {
- override def onPropEval(n: String, w: Int, s: Int, d: Int) = {}
-
- override def onTestResult(n: String, r: Test.Result) = {
- for (l <- loggers) {
- import Pretty._
- l.info(
- (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(0))
- )
- }
- handler.handle(asEvent((n,r)))
- }
- }
-
- import Test.cmdLineParser.{Success, NoSuccess}
- val prms = Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) =>
- params.copy(_testCallback = testCallback, _customClassLoader = Some(loader))
-      // TODO: Maybe handle this a bit better than throwing an exception?
- case e: NoSuccess => throw new Exception(e.toString)
- }
-
- fingerprint match {
- case fp: SubclassFingerprint =>
- if(fp.isModule) {
- val obj = Class.forName(testClassName + "$", true, loader)
- val ps = obj.getField("MODULE$").get(null).asInstanceOf[Properties]
- Test.checkProperties(prms, ps)
- } else {
- val p = Class.forName(testClassName, true, loader).newInstance.asInstanceOf[Prop]
- handler.handle(asEvent((testClassName, Test.check(prms, p))))
- }
- }
- }
-
- }
-
-}
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
deleted file mode 100644
index 4895171a35..0000000000
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-import util.Buildable
-import scala.collection.{ JavaConversions => jcl }
-
-sealed abstract class Shrink[T] {
- def shrink(x: T): Stream[T]
-}
-
-object Shrink {
-
- import Stream.{cons, empty}
- import scala.collection._
- import java.util.ArrayList
-
-  /** Interleaves two streams */
- private def interleave[T](xs: Stream[T], ys: Stream[T]): Stream[T] =
- if(xs.isEmpty) ys
- else if(ys.isEmpty) xs
- else Stream(xs.head, ys.head) append interleave(xs.tail, ys.tail)
-
- /** Shrink instance factory */
- def apply[T](s: T => Stream[T]): Shrink[T] = new Shrink[T] {
- override def shrink(x: T) = s(x)
- }
-
- /** Shrink a value */
- def shrink[T](x: T)(implicit s: Shrink[T]): Stream[T] = s.shrink(x)
-
- /** Default shrink instance */
- implicit def shrinkAny[T]: Shrink[T] = Shrink(x => empty)
-
- /** Shrink instance of container */
- implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T],
- b: Buildable[T,C]
- ): Shrink[C[T]] = Shrink { xs: C[T] =>
-
- def removeChunks(n: Int, xs: Stream[T]): Stream[Stream[T]] =
- if(xs.isEmpty) empty
- else if(xs.tail.isEmpty) cons(empty, empty)
- else {
- val n1 = n / 2
- val n2 = n - n1
- lazy val xs1 = xs.take(n1)
- lazy val xs2 = xs.drop(n1)
- lazy val xs3 =
- for(ys1 <- removeChunks(n1,xs1) if !ys1.isEmpty) yield ys1 append xs2
- lazy val xs4 =
- for(ys2 <- removeChunks(n2,xs2) if !ys2.isEmpty) yield xs1 append ys2
-
- cons(xs1, cons(xs2, interleave(xs3,xs4)))
- }
-
- def shrinkOne(zs: Stream[T]): Stream[Stream[T]] =
- if(zs.isEmpty) empty
- else {
- val x = zs.head
- val xs = zs.tail
- (for(y <- shrink(x)) yield cons(y,xs)) append
- (for(ys <- shrinkOne(xs)) yield cons(x,ys))
- }
-
- val ys = v(xs)
- val zs = ys.toStream
- removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable)
-
- }
-
- /** Shrink instance of integer */
- implicit lazy val shrinkInt: Shrink[Int] = Shrink { n =>
-
- def halfs(n: Int): Stream[Int] =
- if(n == 0) empty else cons(n, halfs(n/2))
-
- if(n == 0) empty else {
- val ns = halfs(n/2).map(n - _)
- cons(0, interleave(ns, ns.map(-1 * _)))
- }
- }
-
- /** Shrink instance of String */
- implicit lazy val shrinkString: Shrink[String] = Shrink { s =>
- shrinkContainer[List,Char].shrink(s.toList).map(_.mkString)
- }
-
- /** Shrink instance of Option */
- implicit def shrinkOption[T](implicit s: Shrink[T]): Shrink[Option[T]] =
- Shrink {
- case None => empty
- case Some(x) => cons(None, for(y <- shrink(x)) yield Some(y))
- }
-
- /** Shrink instance of 2-tuple */
- implicit def shrinkTuple2[T1,T2](implicit
- s1: Shrink[T1], s2: Shrink[T2]
- ): Shrink[(T1,T2)] =
- Shrink { case (t1,t2) =>
- (for(x1 <- shrink(t1)) yield (x1, t2)) append
- (for(x2 <- shrink(t2)) yield (t1, x2))
- }
-
- /** Shrink instance of 3-tuple */
- implicit def shrinkTuple3[T1,T2,T3](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3]
- ): Shrink[(T1,T2,T3)] =
- Shrink { case (t1,t2,t3) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3))
- }
-
- /** Shrink instance of 4-tuple */
- implicit def shrinkTuple4[T1,T2,T3,T4](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4]
- ): Shrink[(T1,T2,T3,T4)] =
- Shrink { case (t1,t2,t3,t4) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4))
- }
-
- /** Shrink instance of 5-tuple */
- implicit def shrinkTuple5[T1,T2,T3,T4,T5](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5]
- ): Shrink[(T1,T2,T3,T4,T5)] =
- Shrink { case (t1,t2,t3,t4,t5) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5))
- }
-
- /** Shrink instance of 6-tuple */
- implicit def shrinkTuple6[T1,T2,T3,T4,T5,T6](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6]
- ): Shrink[(T1,T2,T3,T4,T5,T6)] =
- Shrink { case (t1,t2,t3,t4,t5,t6) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6))
- }
-
- /** Shrink instance of 7-tuple */
- implicit def shrinkTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7))
- }
-
- /** Shrink instance of 8-tuple */
- implicit def shrinkTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8)) append
- (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8))
- }
-
- /** Shrink instance of 9-tuple */
- implicit def shrinkTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
- s1: Shrink[T1], s2: Shrink[T2], s3: Shrink[T3], s4: Shrink[T4],
- s5: Shrink[T5], s6: Shrink[T6], s7: Shrink[T7], s8: Shrink[T8],
- s9: Shrink[T9]
- ): Shrink[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
- Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) =>
- (for(x1 <- shrink(t1)) yield (x1, t2, t3, t4, t5, t6, t7, t8, t9)) append
- (for(x2 <- shrink(t2)) yield (t1, x2, t3, t4, t5, t6, t7, t8, t9)) append
- (for(x3 <- shrink(t3)) yield (t1, t2, x3, t4, t5, t6, t7, t8, t9)) append
- (for(x4 <- shrink(t4)) yield (t1, t2, t3, x4, t5, t6, t7, t8, t9)) append
- (for(x5 <- shrink(t5)) yield (t1, t2, t3, t4, x5, t6, t7, t8, t9)) append
- (for(x6 <- shrink(t6)) yield (t1, t2, t3, t4, t5, x6, t7, t8, t9)) append
- (for(x7 <- shrink(t7)) yield (t1, t2, t3, t4, t5, t6, x7, t8, t9)) append
- (for(x8 <- shrink(t8)) yield (t1, t2, t3, t4, t5, t6, t7, x8, t9)) append
- (for(x9 <- shrink(t9)) yield (t1, t2, t3, t4, t5, t6, t7, t8, x9))
- }
-
-}
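A minimal sketch (not part of the diff) of the removed `Shrink` machinery: consuming the default `Int` instance and supplying one for a hypothetical wrapper type:

import org.scalacheck.Shrink
import org.scalacheck.Shrink.shrink

object ShrinkSketch {
  // shrinkInt above walks towards zero: shrink(100) starts 0, 50, -50, 75, -75, ...
  val intCandidates: Stream[Int] = shrink(100)

  // A custom instance delegating to shrinkInt; Age is illustrative only.
  case class Age(years: Int)
  implicit val shrinkAge: Shrink[Age] =
    Shrink(a => shrink(a.years).filter(_ >= 0).map(Age(_)))
}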
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
deleted file mode 100644
index 6e9b6b88fd..0000000000
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ /dev/null
@@ -1,392 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck
-
-object Test {
-
- import util.FreqMap
- import scala.collection.immutable
- import Prop.FM
- import util.CmdLineParser
-
- /** Test parameters used by the `Test.check` method.
- */
- trait Parameters {
- /** The minimum number of tests that must succeed for ScalaCheck to
- * consider a property passed. */
- def minSuccessfulTests: Int
-
- /** The starting size given as parameter to the generators. */
- def minSize: Int
-
- /** The maximum size given as parameter to the generators. */
- def maxSize: Int
-
-    /** The random number generator used. */
- def rng: java.util.Random
-
-    /** The number of tests run in parallel. */
- def workers: Int
-
- /** A callback that ScalaCheck calls each time a test is executed. */
- def testCallback: TestCallback
-
- /** The maximum ratio between discarded and passed tests allowed before
- * ScalaCheck gives up and discards the property. At least
-     *  `minSuccessfulTests` will always be run, though. */
- def maxDiscardRatio: Float
-
- /** A custom class loader that should be used during test execution. */
- def customClassLoader: Option[ClassLoader]
-
- // private since we can't guarantee binary compatibility for this one
- private[scalacheck] def copy(
- _minSuccessfulTests: Int = Parameters.this.minSuccessfulTests,
- _minSize: Int = Parameters.this.minSize,
- _maxSize: Int = Parameters.this.maxSize,
- _rng: java.util.Random = Parameters.this.rng,
- _workers: Int = Parameters.this.workers,
- _testCallback: TestCallback = Parameters.this.testCallback,
- _maxDiscardRatio: Float = Parameters.this.maxDiscardRatio,
- _customClassLoader: Option[ClassLoader] = Parameters.this.customClassLoader
- ): Parameters = new Parameters {
- val minSuccessfulTests: Int = _minSuccessfulTests
- val minSize: Int = _minSize
- val maxSize: Int = _maxSize
- val rng: java.util.Random = _rng
- val workers: Int = _workers
- val testCallback: TestCallback = _testCallback
- val maxDiscardRatio: Float = _maxDiscardRatio
- val customClassLoader: Option[ClassLoader] = _customClassLoader
- }
- }
-
- /** Test parameters used by the `Test.check` method.
- *
- * To override default values, extend the
- * [[org.scalacheck.Test.Parameters.Default]] trait:
- *
- * {{{
- * val myParams = new Parameters.Default {
-   *     override val minSuccessfulTests = 600
- * override val maxDiscardRatio = 8
- * }
- * }}}
- */
- object Parameters {
-    /** Default test parameters trait. This can be overridden if you need to
- * tweak the parameters. */
- trait Default extends Parameters {
- val minSuccessfulTests: Int = 100
- val minSize: Int = 0
- val maxSize: Int = Gen.Params().size
- val rng: java.util.Random = Gen.Params().rng
- val workers: Int = 1
- val testCallback: TestCallback = new TestCallback {}
- val maxDiscardRatio: Float = 5
- val customClassLoader: Option[ClassLoader] = None
- }
-
- /** Default test parameters instance. */
- val default: Parameters = new Default {}
- }
-
- /** Test parameters
- * @deprecated (in 1.10.0) Use [[org.scalacheck.Test.Parameters]] instead.
- */
- @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
- case class Params(
- minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = -1,
- minSize: Int = 0,
- maxSize: Int = Gen.Params().size,
- rng: java.util.Random = Gen.Params().rng,
- workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
- )
-
- @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
- private def paramsToParameters(params: Params) = new Parameters {
- val minSuccessfulTests = params.minSuccessfulTests
- val minSize = params.minSize
- val maxSize = params.maxSize
- val rng = params.rng
- val workers = params.workers
- val testCallback = params.testCallback
-
- // maxDiscardedTests is deprecated, but if someone
- // uses it let it override maxDiscardRatio
- val maxDiscardRatio =
- if(params.maxDiscardedTests < 0) Parameters.default.maxDiscardRatio
- else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
-
- val customClassLoader = Parameters.default.customClassLoader
- }
-
- /** Test statistics */
- case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
- def passed = status match {
- case Passed => true
- case Proved(_) => true
- case _ => false
- }
- }
-
- /** Test status */
- sealed trait Status
-
- /** ScalaCheck found enough cases for which the property holds, so the
- * property is considered correct. (It is not proved correct, though). */
- case object Passed extends Status
-
- /** ScalaCheck managed to prove the property correct */
- sealed case class Proved(args: Prop.Args) extends Status
-
- /** The property was proved wrong with the given concrete arguments. */
- sealed case class Failed(args: Prop.Args, labels: Set[String]) extends Status
-
-  /** The property test was exhausted: it wasn't possible to generate enough
- * concrete arguments satisfying the preconditions to get enough passing
- * property evaluations. */
- case object Exhausted extends Status
-
- /** An exception was raised when trying to evaluate the property with the
- * given concrete arguments. */
- sealed case class PropException(args: Prop.Args, e: Throwable,
- labels: Set[String]) extends Status
-
- /** An exception was raised when trying to generate concrete arguments
- * for evaluating the property. */
- sealed case class GenException(e: Throwable) extends Status
-
- trait TestCallback { self =>
- /** Called each time a property is evaluated */
- def onPropEval(name: String, threadIdx: Int, succeeded: Int,
- discarded: Int): Unit = ()
-
- /** Called whenever a property has finished testing */
- def onTestResult(name: String, result: Result): Unit = ()
-
- def chain(testCallback: TestCallback) = new TestCallback {
- override def onPropEval(name: String, threadIdx: Int,
- succeeded: Int, discarded: Int
- ): Unit = {
- self.onPropEval(name,threadIdx,succeeded,discarded)
- testCallback.onPropEval(name,threadIdx,succeeded,discarded)
- }
-
- override def onTestResult(name: String, result: Result): Unit = {
- self.onTestResult(name,result)
- testCallback.onTestResult(name,result)
- }
- }
- }
-
- private def assertParams(prms: Parameters) = {
- import prms._
- if(
- minSuccessfulTests <= 0 ||
- maxDiscardRatio <= 0 ||
- minSize < 0 ||
- maxSize < minSize ||
- workers <= 0
- ) throw new IllegalArgumentException("Invalid test parameters")
- }
-
- private def secure[T](x: => T): Either[T,Throwable] =
- try { Left(x) } catch { case e: Throwable => Right(e) }
-
- private[scalacheck] lazy val cmdLineParser = new CmdLineParser {
- object OptMinSuccess extends IntOpt {
- val default = Parameters.default.minSuccessfulTests
- val names = Set("minSuccessfulTests", "s")
- val help = "Number of tests that must succeed in order to pass a property"
- }
- object OptMaxDiscarded extends IntOpt {
- val default = -1
- val names = Set("maxDiscardedTests", "d")
- val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property. NOTE: this option is deprecated, please use " +
- "the option maxDiscardRatio (-r) instead."
- }
- object OptMaxDiscardRatio extends FloatOpt {
- val default = Parameters.default.maxDiscardRatio
- val names = Set("maxDiscardRatio", "r")
- val help =
- "The maximum ratio between discarded and succeeded tests " +
- "allowed before ScalaCheck stops testing a property. At " +
- "least minSuccessfulTests will always be tested, though."
- }
- object OptMinSize extends IntOpt {
- val default = Parameters.default.minSize
- val names = Set("minSize", "n")
- val help = "Minimum data generation size"
- }
- object OptMaxSize extends IntOpt {
- val default = Parameters.default.maxSize
- val names = Set("maxSize", "x")
- val help = "Maximum data generation size"
- }
- object OptWorkers extends IntOpt {
- val default = Parameters.default.workers
- val names = Set("workers", "w")
- val help = "Number of threads to execute in parallel for testing"
- }
- object OptVerbosity extends IntOpt {
- val default = 1
- val names = Set("verbosity", "v")
- val help = "Verbosity level"
- }
-
- val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMaxDiscardRatio, OptMinSize,
- OptMaxSize, OptWorkers, OptVerbosity
- )
-
- def parseParams(args: Array[String]) = parseArgs(args) {
- optMap => Parameters.default.copy(
- _minSuccessfulTests = optMap(OptMinSuccess),
- _maxDiscardRatio =
- if (optMap(OptMaxDiscarded) < 0) optMap(OptMaxDiscardRatio)
- else optMap(OptMaxDiscarded).toFloat / optMap(OptMinSuccess),
- _minSize = optMap(OptMinSize),
- _maxSize = optMap(OptMaxSize),
- _workers = optMap(OptWorkers),
- _testCallback = ConsoleReporter(optMap(OptVerbosity))
- )
- }
- }
-
- /** Tests a property with the given testing parameters, and returns
- * the test results.
- * @deprecated (in 1.10.0) Use
- * `check(Parameters, Properties)` instead.
- */
- @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
- def check(params: Params, p: Prop): Result = {
- check(paramsToParameters(params), p)
- }
-
- /** Tests a property with the given testing parameters, and returns
- * the test results. */
- def check(params: Parameters, p: Prop): Result = {
- import params._
-
- assertParams(params)
- if(workers > 1) {
- assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- }
-
- val iterations = math.ceil(minSuccessfulTests / (workers: Double))
- val sizeStep = (maxSize-minSize) / (iterations*workers)
- var stop = false
-
- def worker(workerIdx: Int) =
- if (workers < 2) () => workerFun(workerIdx)
- else actors.Futures.future {
- params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
- workerFun(workerIdx)
- }
-
- def workerFun(workerIdx: Int) = {
- var n = 0 // passed tests
- var d = 0 // discarded tests
- var res: Result = null
- var fm = FreqMap.empty[immutable.Set[Any]]
- while(!stop && res == null && n < iterations) {
- val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
- val propPrms = Prop.Params(Gen.Params(size.round.toInt, params.rng), fm)
- secure(p(propPrms)) match {
- case Right(e) => res =
- Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
- case Left(propRes) =>
- fm =
- if(propRes.collected.isEmpty) fm
- else fm + propRes.collected
- propRes.status match {
- case Prop.Undecided =>
- d += 1
- testCallback.onPropEval("", workerIdx, n, d)
- // The below condition is kind of hacky. We have to have
- // some margin, otherwise workers might stop testing too
- // early because they have been exhausted, but the overall
- // test has not.
- if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
- res = Result(Exhausted, n, d, fm)
- case Prop.True =>
- n += 1
- testCallback.onPropEval("", workerIdx, n, d)
- case Prop.Proof =>
- n += 1
- res = Result(Proved(propRes.args), n, d, fm)
- stop = true
- case Prop.False =>
- res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
- stop = true
- case Prop.Exception(e) =>
- res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
- stop = true
- }
- }
- }
- if (res == null) {
- if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
- else Result(Exhausted, n, d, fm)
- } else res
- }
-
- def mergeResults(r1: () => Result, r2: () => Result) = {
- val Result(st1, s1, d1, fm1, _) = r1()
- val Result(st2, s2, d2, fm2, _) = r2()
- if (st1 != Passed && st1 != Exhausted)
- () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
- else if (st2 != Passed && st2 != Exhausted)
- () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
- else {
- if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
- () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
- else
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
- }
- }
-
- val start = System.currentTimeMillis
- val results = for(i <- 0 until workers) yield worker(i)
- val r = results.reduceLeft(mergeResults)()
- stop = true
- results foreach (_.apply())
- val timedRes = r.copy(time = System.currentTimeMillis-start)
- params.testCallback.onTestResult("", timedRes)
- timedRes
- }
-
- /** Check a set of properties.
- * @deprecated (in 1.10.0) Use
- * `checkProperties(Parameters, Properties)` instead.
- */
- @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
- def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
- checkProperties(paramsToParameters(prms), ps)
-
- /** Check a set of properties. */
- def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] =
- ps.properties.map { case (name,p) =>
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) =
- prms.testCallback.onPropEval(name,t,s,d)
- override def onTestResult(n: String, r: Result) =
- prms.testCallback.onTestResult(name,r)
- }
- val res = check(prms copy (_testCallback = testCallback), p)
- (name,res)
- }
-
-}
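A minimal sketch (not part of the diff) of driving the removed `Test.check` entry point with tweaked parameters, following the `Parameters.Default` pattern recommended in the scaladoc above:

import org.scalacheck.{Prop, Test}
import org.scalacheck.Prop.forAll

object TestUsageSketch {
  val prop: Prop = forAll { (n: Int) => n + 0 == n }

  // Override only the defaults that need changing.
  val params = new Test.Parameters.Default {
    override val minSuccessfulTests = 500
    override val workers = 2
  }

  val result: Test.Result = Test.check(params, prop)
  val ok: Boolean = result.passed
}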
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
deleted file mode 100644
index 140c541a95..0000000000
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.collection._
-
-trait Buildable[T,C[_]] {
- def builder: mutable.Builder[T,C[T]]
- def fromIterable(it: Traversable[T]): C[T] = {
- val b = builder
- b ++= it
- b.result()
- }
-}
-
-object Buildable {
-
- implicit def buildableList[T] = new Buildable[T,List] {
- def builder = new mutable.ListBuffer[T]
- }
-
- implicit def buildableStream[T] = new Buildable[T,Stream] {
- def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
- }
-
- implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
- new Buildable[T,Array] {
- def builder = mutable.ArrayBuilder.make[T]
- }
-
- implicit def buildableMutableSet[T] = new Buildable[T,mutable.Set] {
- def builder = new mutable.SetBuilder(mutable.Set.empty[T])
- }
-
- implicit def buildableImmutableSet[T] = new Buildable[T,immutable.Set] {
- def builder = new mutable.SetBuilder(immutable.Set.empty[T])
- }
-
- implicit def buildableSet[T] = new Buildable[T,Set] {
- def builder = new mutable.SetBuilder(Set.empty[T])
- }
-
- import java.util.ArrayList
- implicit def buildableArrayList[T] = new Buildable[T,ArrayList] {
- def builder = new mutable.Builder[T,ArrayList[T]] {
- val al = new ArrayList[T]
- def +=(x: T) = {
- al.add(x)
- this
- }
- def clear() = al.clear()
- def result() = al
- }
- }
-
-}
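A minimal sketch (not part of the diff): a `Buildable` instance for `Vector`, mirroring the `List`/`Stream` instances removed above (the `Vector` instance itself is an illustration, not something the deleted file provided):

import scala.collection.mutable
import org.scalacheck.util.Buildable

object BuildableSketch {
  implicit def buildableVector[T]: Buildable[T, Vector] = new Buildable[T, Vector] {
    def builder: mutable.Builder[T, Vector[T]] = Vector.newBuilder[T]
  }

  val v: Vector[Int] = buildableVector[Int].fromIterable(List(1, 2, 3)) // Vector(1, 2, 3)
}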
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
deleted file mode 100644
index eb3a91fe59..0000000000
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.Reader
-import scala.util.parsing.input.Position
-import scala.collection.Set
-import org.scalacheck.Test
-
-trait CmdLineParser extends Parsers {
-
- type Elem = String
-
- trait Opt[+T] {
- val default: T
- val names: Set[String]
- val help: String
- }
- trait Flag extends Opt[Unit]
- trait IntOpt extends Opt[Int]
- trait FloatOpt extends Opt[Float]
- trait StrOpt extends Opt[String]
-
- class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
- def apply(flag: Flag): Boolean = opts.contains(flag)
- def apply[T](opt: Opt[T]): T = opts.get(opt) match {
- case None => opt.default
- case Some(v) => v.asInstanceOf[T]
- }
- def update[T](opt: Opt[T], optVal: T) = opts.update(opt, optVal)
- }
-
- val opts: Set[Opt[_]]
-
- private class ArgsReader(args: Array[String], i: Int) extends Reader[String] {
- val pos = new Position {
- val column = (args take i).foldLeft(1)(_ + _.length + 1)
- val line = 1
- val lineContents = args.mkString(" ")
- }
- val atEnd = i >= args.length
- def first = if(atEnd) null else args(i)
- def rest = if(atEnd) this else new ArgsReader(args, i+1)
- }
-
- private def getOpt(s: String) = {
- if(s == null || s.length == 0 || s.charAt(0) != '-') None
- else opts.find(_.names.contains(s.drop(1)))
- }
-
- private val opt: Parser[Opt[Any]] = accept("option name", {
- case s if getOpt(s).isDefined => getOpt(s).get
- })
-
- private val strVal: Parser[String] = accept("string", {
- case s if s != null => s
- })
-
- private val intVal: Parser[Int] = accept("integer", {
- case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
- })
-
- private val floatVal: Parser[Float] = accept("float", {
- case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
- => s.toFloat
- })
-
- private case class OptVal[T](o: Opt[T], v: T)
-
- private val optVal: Parser[OptVal[Any]] = opt into {
- case o: Flag => success(OptVal(o, ()))
- case o: IntOpt => intVal ^^ (v => OptVal(o, v))
- case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
- case o: StrOpt => strVal ^^ (v => OptVal(o, v))
- }
-
- val options: Parser[OptMap] = rep(optVal) ^^ { xs =>
- val map = new OptMap
- xs.foreach { case OptVal(o,v) => map(o) = v }
- map
- }
-
- def printHelp = {
- println("Available options:")
- opts.foreach { opt =>
- println(" " + opt.names.map("-"+_).mkString(", ") + ": " + opt.help)
- }
- }
-
- def parseArgs[T](args: Array[String])(f: OptMap => T) =
- phrase(options map f)(new ArgsReader(args,0))
-}
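A minimal sketch (not part of the diff) of the removed `CmdLineParser` trait: one `IntOpt` and a `parseArgs` call; the option and object names are illustrative only:

import org.scalacheck.util.CmdLineParser

object VerbosityParser extends CmdLineParser {
  object OptVerbosity extends IntOpt {
    val default = 0
    val names = Set("verbosity", "v")
    val help = "Verbosity level"
  }

  val opts = Set[Opt[_]](OptVerbosity)

  // parse(Array("-v", "2")) yields Success(2, _); malformed input surfaces
  // as a NoSuccess from the underlying parser combinators.
  def parse(args: Array[String]) = parseArgs(args)(optMap => optMap(OptVerbosity))
}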
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
deleted file mode 100644
index d0686aec72..0000000000
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-trait FreqMap[T] {
- protected val underlying: scala.collection.immutable.Map[T,Int]
- val total: Int
-
- def +(t: T) = new FreqMap[T] {
- private val n = FreqMap.this.underlying.get(t) match {
- case None => 1
- case Some(n) => n+1
- }
- val underlying = FreqMap.this.underlying + (t -> n)
- val total = FreqMap.this.total + 1
- }
-
- def -(t: T) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying.get(t) match {
- case None => FreqMap.this.underlying
- case Some(n) => FreqMap.this.underlying + (t -> (n-1))
- }
- val total = FreqMap.this.total + 1
- }
-
- def ++(fm: FreqMap[T]) = new FreqMap[T] {
- private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet
- private val mappings = keys.toStream.map { x =>
- (x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0))
- }
- val underlying = scala.collection.immutable.Map(mappings: _*)
- val total = FreqMap.this.total + fm.total
- }
-
- def --(fm: FreqMap[T]) = new FreqMap[T] {
- val underlying = FreqMap.this.underlying transform {
- case (x,n) => n - fm.getCount(x).getOrElse(0)
- }
- lazy val total = (0 /: underlying.valuesIterator) (_ + _)
- }
-
- def getCount(t: T) = underlying.get(t)
-
- def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2)
-
- def getRatio(t: T) = for(c <- getCount(t)) yield (c: Float)/total
-
- def getRatios = for((t,c) <- getCounts) yield (t, (c: Float)/total)
-
- override def toString = underlying.toString
-}
-
-object FreqMap {
- def empty[T] = new FreqMap[T] {
- val underlying = scala.collection.immutable.Map.empty[T,Int]
- val total = 0
- }
-}
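A minimal sketch (not part of the diff) of the removed `FreqMap`, which the `Test` code above uses to tally collected values:

import org.scalacheck.util.FreqMap

object FreqMapSketch {
  val fm = FreqMap.empty[String] + "even" + "odd" + "even"

  val evens: Option[Int] = fm.getCount("even")      // Some(2)
  val ratios: List[(String, Float)] = fm.getRatios  // roughly List((even,0.667), (odd,0.333))
}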
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
deleted file mode 100644
index 7c1dc8dcc4..0000000000
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-/*-------------------------------------------------------------------------*\
-** ScalaCheck **
-** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
-** http://www.scalacheck.org **
-** **
-** This software is released under the terms of the Revised BSD License. **
-** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*------------------------------------------------------------------------ */
-
-package org.scalacheck.util
-
-object StdRand extends java.util.Random
diff --git a/starr.number b/starr.number
deleted file mode 100644
index d55aa7d7fc..0000000000
--- a/starr.number
+++ /dev/null
@@ -1 +0,0 @@
-starr.version=2.11.0-M4 \ No newline at end of file
diff --git a/test/build-partest.xml b/test/build-partest.xml
index 44502ffa61..22ad85ac03 100755
--- a/test/build-partest.xml
+++ b/test/build-partest.xml
@@ -7,18 +7,14 @@
<attribute name="srcdir" default="files"/> <!-- TODO: make targets for `pending` and other subdirs -->
<attribute name="colors" default="${partest.colors}"/>
<attribute name="scalacOpts" default="${scalac.args.optimise}"/>
- <attribute name="kinds" default="pos neg run jvm res scalap scalacheck specialized instrumented presentation"/>
+ <attribute name="kinds"/>
<sequential>
<property name="partest.dir" value="@{dir}" />
<partest srcdir="@{srcdir}"
kinds="@{kinds}"
colors="@{colors}"
scalacOpts="@{scalacOpts}"
- compilationpathref="partest.classpath">
- <compilationpath>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
- </partest>
+ compilationpathref="partest.compilation.path"/>
</sequential>
</macrodef>
</project>
diff --git a/test/files/ant/README b/test/files/ant/README
deleted file mode 100644
index 8cd8745970..0000000000
--- a/test/files/ant/README
+++ /dev/null
@@ -1,42 +0,0 @@
-README
-======
-
-Test cases in directory test/files/ant/ are executed by invoking an
-Ant script whose name ends with "build.xml" (e.g. "fsc001-build.xml").
-
-The Scala Ant tasks fsc/scalac/scaladoc are instantiated from various
-binaries (quick/pack/latest/installed) and are executed with different
-combinations of Ant attributes/elements:
-
- +---------------------------+--------------------------+
- | Attributes | Nested elements |
-------------+---------------------------+--------------------------+
-fsc001 | srcdir,classpath (1) | compilerarg |
-fsc002 | srcref,classpathref (1) | compilerarg |
-fsc003 | (2) | compilerarg,src,include |
-------------+---------------------------+--------------------------+
-scalac001 | srcdir,classpath (1) | |
-scalac002 | srcref,classpathref (1) | |
-scalac003 | (2) | src,include |
-scalac004 | deprecation,unchecked (3) | |
-------------+---------------------------+--------------------------+
-scaladoc | srcdir,classpathref | |
-------------+---------------------------+--------------------------+
-
-Other attributes:
-(1) includes,destdir
-(2) destdir,classpathref
-(3) srcdir,includes,destdir,classpath
-
-
-The above test cases can also be run from the command prompt using one of
-the following shell commands:
-
-1) For quick/pack/latest binaries (-Dbinary=quick|pack|latest)
-
-$ ant -Dbinary=quick -Dproject.dir=$HOME/workspace/scala -f scalac001-build.xml
-
-2) For installed binaries (-Dbinary=installed)
-
-$ ant -Dbinary=installed -Dinstalled.dir=/opt/scala -f scalac001-build.xml
-
diff --git a/test/files/ant/fsc001-build.check b/test/files/ant/fsc001-build.check
deleted file mode 100644
index b5141f587b..0000000000
--- a/test/files/ant/fsc001-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc001-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc001-ant.obj
diff --git a/test/files/ant/fsc001-build.xml b/test/files/ant/fsc001-build.xml
deleted file mode 100644
index 0130f3615c..0000000000
--- a/test/files/ant/fsc001-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc001" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <pathconvert property="classpath" refid="build.classpath"/>
- <fsc
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpath="${classpath}">
- </fsc>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc001.scala b/test/files/ant/fsc001.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/fsc001.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/fsc002-build.check b/test/files/ant/fsc002-build.check
deleted file mode 100644
index 0c9c30dbfa..0000000000
--- a/test/files/ant/fsc002-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc002-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc002-ant.obj
diff --git a/test/files/ant/fsc002-build.xml b/test/files/ant/fsc002-build.xml
deleted file mode 100644
index db91070fa1..0000000000
--- a/test/files/ant/fsc002-build.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc002" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <path id="source.ref">
- <pathelement location="${source.dir}"/>
- </path>
- <fsc
- srcref="source.ref"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpathref="build.classpath">
- </fsc>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc002.scala b/test/files/ant/fsc002.scala
deleted file mode 100644
index 47131daac6..0000000000
--- a/test/files/ant/fsc002.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]): Unit =
- Console.println(args.toList)
-}
diff --git a/test/files/ant/fsc003-build.check b/test/files/ant/fsc003-build.check
deleted file mode 100644
index c8c9ed857e..0000000000
--- a/test/files/ant/fsc003-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc003-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc003-ant.obj
diff --git a/test/files/ant/fsc003-build.xml b/test/files/ant/fsc003-build.xml
deleted file mode 100644
index 5f71770bf2..0000000000
--- a/test/files/ant/fsc003-build.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc003" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <fsc
- destdir="${build.dir}"
- classpathref="build.classpath">
- <src path="${source.dir}"/>
- <include name="**/${ant.project.name}*.scala"/>
- </fsc>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc003.scala b/test/files/ant/fsc003.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/fsc003.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/imported.xml b/test/files/ant/imported.xml
deleted file mode 100644
index 182c80aadf..0000000000
--- a/test/files/ant/imported.xml
+++ /dev/null
@@ -1,150 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="imported">
-
- <!-- This file is imported by the main Ant script. -->
-
- <!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
- <property name="source.dir" value="${basedir}"/>
-
- <property file="${basedir}/build.properties"/>
-
- <property name="build.dir" location="${source.dir}/${ant.project.name}-ant.obj"/>
- <property name="log.dir" location="${source.dir}"/>
- <property name="log.file" value="${log.dir}/${ant.project.name}-build-ant.log"/>
- <property name="project.dir" value="../../.."/>
-
- <condition property="quick.binary">
- <equals arg1="${binary}" arg2="quick"/>
- </condition>
- <condition property="pack.binary">
- <equals arg1="${binary}" arg2="pack"/>
- </condition>
- <condition property="latest.binary">
- <equals arg1="${binary}" arg2="latest"/>
- </condition>
- <condition property="installed.binary">
- <equals arg1="${binary}" arg2="installed"/>
- </condition>
-
- <fail message="Property 'binary' must be set to either 'quick', 'pack', 'latest' or 'installed'.">
- <condition><not><or>
- <isset property="quick.binary"/>
- <isset property="pack.binary"/>
- <isset property="latest.binary"/>
- <isset property="installed.binary"/>
- </or></not></condition>
- </fail>
- <echo level="verbose" message="binary=${binary}"/>
- <echo level="verbose" message="build.dir=${build.dir}"/>
-
-<!-- ===========================================================================
-INITIALISATION
-============================================================================ -->
-
- <target name="quick.init" if="quick.binary">
- <property name="quick.dir" value="${project.dir}/build/quick"/>
- <fail message="Quick build could not be found.">
- <condition><not><available file="${quick.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${quick.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/classes/library/"/>
- <property name="scala-compiler.lib" value="${scala.dir}/classes/compiler/"/>
- </target>
-
- <target name="pack.init" if="pack.binary">
- <property name="pack.dir" value="${project.dir}/build/pack"/>
- <fail message="Pack build could not be found.">
- <condition><not><available file="${pack.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${pack.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- </target>
-
- <target name="latest.init" if="latest.binary">
- <property name="latest.dir" value="${project.dir}/dists/latest"/>
- <fail message="Latest build could not be found.">
- <condition><not><available file="${latest.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${latest.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- </target>
-
- <target name="installed.init" if="installed.binary">
- <property name="installed.dir" value="/opt/scala"/>
- <fail message="Installed distribution could not be found.">
- <condition><not><available file="${installed.dir}"/></not></condition>
- </fail>
- <property name="scala.dir" value="${installed.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- </target>
-
- <target name="init" depends="quick.init, pack.init, latest.init, installed.init">
- <echo level="verbose" message="scala.dir=${scala.dir}"/>
-
- <path id="scala.classpath">
- <pathelement location="${scala-library.lib}"/>
- <pathelement location="${scala-compiler.lib}"/>
- </path>
-
- <fail message="Scala library '${scala-library.lib}' or '${scala-compiler.lib}' is missing/broken">
- <condition><not><and>
- <available classname="scala.Predef"
- classpathref="scala.classpath"/>
- <available classname="scala.Option"
- classpathref="scala.classpath"/>
- <available classname="scala.runtime.ObjectRef"
- classpathref="scala.classpath"/>
- <available classname="scala.tools.ant.Scalac"
- classpathref="scala.classpath"/>
- <available classname="scala.tools.nsc.Main"
- classpathref="scala.classpath"/>
- <available classname="scala.tools.util.StringOps"
- classpathref="scala.classpath"/>
- </and></not></condition>
- </fail>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scala.classpath"/>
-
- <path id="build.classpath">
- <!--<pathelement location="${scala-actors.lib}"/>-->
- <pathelement location="${scala-library.lib}"/>
- <pathelement location="${build.dir}"/>
- </path>
-
- <!-- make sure the log file exists when the Ant build scripts -->
- <!-- are run manually from the command prompt -->
- <touch file="${log.file}"/>
- </target>
-
-<!-- ===========================================================================
-RUN
-============================================================================ -->
-
- <target name="run" depends="build, clean"/>
-
-<!-- ===========================================================================
-CLEAN
-============================================================================ -->
-
- <macrodef name="remove">
- <attribute name="dir"/>
- <sequential>
- <delete dir="@{dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </sequential>
- </macrodef>
-
- <target name="clean">
- <remove dir="${build.dir}"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac001-build.check b/test/files/ant/scalac001-build.check
deleted file mode 100644
index 05a43ba572..0000000000
--- a/test/files/ant/scalac001-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac001-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac001-ant.obj
diff --git a/test/files/ant/scalac001-build.xml b/test/files/ant/scalac001-build.xml
deleted file mode 100644
index 4ec7fc833c..0000000000
--- a/test/files/ant/scalac001-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac001" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <pathconvert property="classpath" refid="build.classpath"/>
- <scalac
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpath="${classpath}"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac001.scala b/test/files/ant/scalac001.scala
deleted file mode 100644
index 47131daac6..0000000000
--- a/test/files/ant/scalac001.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]): Unit =
- Console.println(args.toList)
-}
diff --git a/test/files/ant/scalac002-build.check b/test/files/ant/scalac002-build.check
deleted file mode 100644
index e7b3670a0c..0000000000
--- a/test/files/ant/scalac002-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac002-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac002-ant.obj
diff --git a/test/files/ant/scalac002-build.xml b/test/files/ant/scalac002-build.xml
deleted file mode 100644
index 07628afa64..0000000000
--- a/test/files/ant/scalac002-build.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac002" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <path id="source.ref">
- <pathelement location="${source.dir}"/>
- </path>
- <scalac
- srcref="source.ref"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac002.scala b/test/files/ant/scalac002.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/scalac002.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/scalac003-build.check b/test/files/ant/scalac003-build.check
deleted file mode 100644
index 7b0d3367ed..0000000000
--- a/test/files/ant/scalac003-build.check
+++ /dev/null
@@ -1,14 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac003-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac003-ant.obj
diff --git a/test/files/ant/scalac003-build.xml b/test/files/ant/scalac003-build.xml
deleted file mode 100644
index 1d70aa115e..0000000000
--- a/test/files/ant/scalac003-build.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac003" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scalac
- destdir="${build.dir}"
- classpathref="build.classpath">
- <src path="${source.dir}"/>
- <include name="**/${ant.project.name}*.scala"/>
- </scalac>
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac003.scala b/test/files/ant/scalac003.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/scalac003.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/ant/scalac004-build.check b/test/files/ant/scalac004-build.check
deleted file mode 100644
index ffe9e8c79a..0000000000
--- a/test/files/ant/scalac004-build.check
+++ /dev/null
@@ -1,24 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac004-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac004-ant.obj
- [scalac] [...]/files/ant/scalac004.scala:9: warning: method exit in object Predef is deprecated: Use sys.exit(status) instead
- [scalac] Predef.exit(0) //deprecated in 2.9.0
- [scalac] ^
- [scalac] [...]/files/ant/scalac004.scala:6: warning: match is not exhaustive!
- [scalac] missing combination Nil
- [scalac]
- [scalac] xs match { //(xs: @unchecked) match {
- [scalac] ^
- [scalac] two warnings found
- [scalac] Compile succeeded with 2 warnings; see the compiler output for details.
diff --git a/test/files/ant/scalac004-build.xml b/test/files/ant/scalac004-build.xml
deleted file mode 100644
index 66c19a39fb..0000000000
--- a/test/files/ant/scalac004-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac004" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scalac
- deprecation="yes" unchecked="yes"
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac004.scala b/test/files/ant/scalac004.scala
deleted file mode 100644
index 66b2ba7985..0000000000
--- a/test/files/ant/scalac004.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- val xs = List(1, 2, 3, 4)
- xs match { //(xs: @unchecked) match {
- case x::xs => println(x)
- }
- Predef.exit(0) //deprecated in 2.9.0
- }
-}
diff --git a/test/files/ant/scaladoc-build.check b/test/files/ant/scaladoc-build.check
deleted file mode 100644
index 1c82456ad0..0000000000
--- a/test/files/ant/scaladoc-build.check
+++ /dev/null
@@ -1,15 +0,0 @@
-
-quick.init:
-
-pack.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scaladoc-ant.obj
- [scaladoc] Documenting 1 source file to [...]/files/ant/scaladoc-ant.obj
- [scaladoc] model contains 3 documentable templates
diff --git a/test/files/ant/scaladoc-build.xml b/test/files/ant/scaladoc-build.xml
deleted file mode 100644
index fb4dc6fe69..0000000000
--- a/test/files/ant/scaladoc-build.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scaladoc" default="run">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scaladoc
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- deprecation="yes" unchecked="yes"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <echo level="verbose" message="log.file=${log.file}"/>
- <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
- </target>
-
-</project>
-
diff --git a/test/files/ant/scaladoc.scala b/test/files/ant/scaladoc.scala
deleted file mode 100644
index 6ede5981ce..0000000000
--- a/test/files/ant/scaladoc.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package test
-
-object Main {
- def main(args: Array[String]) {
- println(args mkString " ")
- }
-}
diff --git a/test/files/jvm/opt_value_class.check b/test/files/jvm/opt_value_class.check
new file mode 100644
index 0000000000..a0c18c5ca0
--- /dev/null
+++ b/test/files/jvm/opt_value_class.check
@@ -0,0 +1,2 @@
+[ok] <init> ()V public
+[ok] unapply (Ljava/lang/Object;)Ljava/lang/String; public (Ljava/lang/Object;)Ljava/lang/String;
diff --git a/test/files/jvm/opt_value_class/Value_1.scala b/test/files/jvm/opt_value_class/Value_1.scala
new file mode 100644
index 0000000000..2440609b9e
--- /dev/null
+++ b/test/files/jvm/opt_value_class/Value_1.scala
@@ -0,0 +1,28 @@
+final class Opt[+A >: Null](val value: A) extends AnyVal {
+ def get: A = value
+ def isEmpty = value == null
+}
+object Opt {
+ final val None = new Opt[Null](null)
+ def unapply[A >: Null](x: A): Opt[A] = if (x == null) None else Opt(x)
+ def empty[A >: Null] = None
+ def apply[A >: Null](value: A): Opt[A] = if (value == null) None else new Opt[A](value)
+}
+
+class ValueExtract {
+ def unapply(x: Any): Opt[String] = x match {
+ case _: String => Opt("String")
+ case _: List[_] => Opt("List")
+ case _: Int => Opt("Int")
+ case _ => Opt.None
+ }
+}
+
+class Direct {
+ def unapply(x: Any): String = x match {
+ case _: String => "String"
+ case _: List[_] => "List"
+ case _: Int => "Int"
+ case _ => null
+ }
+}
diff --git a/test/files/jvm/opt_value_class/test.scala b/test/files/jvm/opt_value_class/test.scala
new file mode 100644
index 0000000000..7aea7deb99
--- /dev/null
+++ b/test/files/jvm/opt_value_class/test.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.BytecodeTest
+
+// import scala.tools.nsc.util.JavaClassPath
+// import java.io.InputStream
+// import scala.tools.asm
+// import asm.ClassReader
+// import asm.tree.{ClassNode, InsnList}
+// import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode1 = loadClassNode("ValueExtract")
+ val classNode2 = loadClassNode("Direct")
+ sameMethodAndFieldDescriptors(classNode1, classNode2)
+ }
+}
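
For readers of the patch, a minimal sketch of the single-result name-based unapply protocol that the opt_value_class test above exercises: the result type only needs isEmpty and get members, so a value class such as Opt can stand in for Option. The Lower/Res names below are invented for illustration and are not part of this commit.

// Illustrative sketch only; assumes the name-based unapply rules targeted above.
object Lower {
  // The result type needs only `isEmpty` and `get`; no Option boxing required.
  final class Res(val get: String) {
    def isEmpty: Boolean = get == null
  }
  def unapply(s: String): Res = new Res(if (s == null) null else s.toLowerCase)
}

object LowerDemo extends App {
  "ABC" match {
    case Lower(x) => println(x) // binds x to Res.get, prints "abc"
    case _        => println("no match")
  }
}
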
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
deleted file mode 100644
index 2f15402d18..0000000000
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b6f4dbb29f0c2ec1eba682414f60d52fea84f703 *scalacheck.jar
diff --git a/test/files/neg/compile-time-only-a.check b/test/files/neg/compile-time-only-a.check
new file mode 100644
index 0000000000..1c4c72171f
--- /dev/null
+++ b/test/files/neg/compile-time-only-a.check
@@ -0,0 +1,49 @@
+compile-time-only-a.scala:9: error: C3
+@compileTimeOnly("C3") case class C3(x: Int)
+ ^
+compile-time-only-a.scala:11: error: C4
+@compileTimeOnly("C4") case class C4(x: Int)
+ ^
+compile-time-only-a.scala:16: error: C5
+ implicit class C5(val x: Int) {
+ ^
+compile-time-only-a.scala:28: error: C1
+ new C1()
+ ^
+compile-time-only-a.scala:32: error: C2
+ C2
+ ^
+compile-time-only-a.scala:34: error: C3
+ new C3(2)
+ ^
+compile-time-only-a.scala:37: error: C4
+ new C4(2)
+ ^
+compile-time-only-a.scala:41: error: C5
+ 2.ext
+ ^
+compile-time-only-a.scala:42: error: C5
+ C5(2)
+ ^
+compile-time-only-a.scala:45: error: C6.x
+ val _ = c6.x
+ ^
+compile-time-only-a.scala:46: error: C6.foo
+ c6.foo
+ ^
+compile-time-only-a.scala:48: error: C6.y
+ c6.y = c6.y
+ ^
+compile-time-only-a.scala:48: error: C6.y
+ c6.y = c6.y
+ ^
+compile-time-only-a.scala:54: error: placebo
+@placebo
+ ^
+compile-time-only-a.scala:56: error: placebo
+ @placebo def x = (2: @placebo)
+ ^
+compile-time-only-a.scala:56: error: placebo
+ @placebo def x = (2: @placebo)
+ ^
+16 errors found
diff --git a/test/files/neg/compile-time-only-a.scala b/test/files/neg/compile-time-only-a.scala
new file mode 100644
index 0000000000..43d36dfab1
--- /dev/null
+++ b/test/files/neg/compile-time-only-a.scala
@@ -0,0 +1,57 @@
+import scala.annotation.compileTimeOnly
+
+@compileTimeOnly("C1") class C1
+object C1
+
+class C2
+@compileTimeOnly("C2") object C2
+
+@compileTimeOnly("C3") case class C3(x: Int)
+
+@compileTimeOnly("C4") case class C4(x: Int)
+object C4
+
+object pkg {
+ @compileTimeOnly("C5")
+ implicit class C5(val x: Int) {
+ def ext = ???
+ }
+}
+
+class C6(@compileTimeOnly("C6.x") val x: Int) {
+ @compileTimeOnly("C6.foo") def foo = 2
+ @compileTimeOnly("C6.Foo") type Foo = Int
+ @compileTimeOnly("C6.y") var y = 3
+}
+
+object Test extends App {
+ new C1()
+ C1
+
+ new C2()
+ C2
+
+ new C3(2)
+ C3(2)
+
+ new C4(2)
+ C4(2)
+
+ import pkg._
+ 2.ext
+ C5(2)
+
+ val c6 = new C6(2)
+ val _ = c6.x
+ c6.foo
+ type Foo = c6.Foo
+ c6.y = c6.y
+}
+
+@compileTimeOnly("placebo")
+class placebo extends scala.annotation.StaticAnnotation
+
+@placebo
+class Test {
+ @placebo def x = (2: @placebo)
+}
\ No newline at end of file
diff --git a/test/files/neg/compile-time-only-b.check b/test/files/neg/compile-time-only-b.check
new file mode 100644
index 0000000000..8292a0ddeb
--- /dev/null
+++ b/test/files/neg/compile-time-only-b.check
@@ -0,0 +1,7 @@
+compile-time-only-b.scala:13: error: splice must be enclosed within a reify {} block
+ val ignored3 = reify(fortyTwo).splice
+ ^
+compile-time-only-b.scala:14: error: cannot use value except for signatures of macro implementations
+ val ignored4 = reify(fortyTwo).value
+ ^
+two errors found
diff --git a/test/files/neg/compile-time-only-b.scala b/test/files/neg/compile-time-only-b.scala
new file mode 100644
index 0000000000..d5568dbe67
--- /dev/null
+++ b/test/files/neg/compile-time-only-b.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ // HAHA!!!
+ // no compileTimeOnly errors here, because scalac does constant folding
+ // the type of reify(42) is Expr[42.type]
+ // therefore the type of expr.splice is 42.type, which is then constfolded
+ val expr = reify(42)
+ val ignored1 = expr.splice
+ val ignored2 = expr.value
+
+ val fortyTwo = 42
+ val ignored3 = reify(fortyTwo).splice
+ val ignored4 = reify(fortyTwo).value
+}
\ No newline at end of file
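
As a rough illustration of the annotation these two negative tests assert on: a definition marked with scala.annotation.compileTimeOnly is expected to be referenced only from code that is eliminated before the end of typechecking, typically via macro expansion, and any surviving reference reports the supplied message. The Tokens/marker names below are invented for this sketch and are not part of the commit.

// Illustrative sketch only; uses the same import as compile-time-only-a.scala.
import scala.annotation.compileTimeOnly

object Tokens {
  @compileTimeOnly("Tokens.marker may only be used inside a macro expansion")
  def marker: Nothing = sys.error("unreachable at runtime")
}
// An ordinary reference such as `Tokens.marker` would be rejected at compile
// time with the message above, which is the behaviour the .check files assert
// for their own annotated definitions.
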
diff --git a/test/files/neg/javac-error.check b/test/files/neg/javac-error.check
deleted file mode 100644
index e7d1ccc1a1..0000000000
--- a/test/files/neg/javac-error.check
+++ /dev/null
@@ -1,10 +0,0 @@
-#partest java6
-javac-error/J.java:2: method does not override or implement a method from a supertype
- @Override public void foo() { }
- ^
-1 error
-#partest java7
-javac-error/J.java:2: error: method does not override or implement a method from a supertype
- @Override public void foo() { }
- ^
-1 error
diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check
index 0f2fe6f2d1..95b88a6b3d 100644
--- a/test/files/neg/t4425.check
+++ b/test/files/neg/t4425.check
@@ -1,4 +1,13 @@
-t4425.scala:3: error: isInstanceOf cannot test if value types are references.
+t4425.scala:3: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: Int)(y: Option[Int]): None.type exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
42 match { case _ X _ => () }
^
-one error found
+t4425.scala:8: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: Int)(y: Int): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ 42 match { case _ X _ => () }
+ ^
+t4425.scala:13: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Some[(Int, Int)] exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ "" match { case _ X _ => () }
+ ^
+three errors found
diff --git a/test/files/neg/t4425.scala b/test/files/neg/t4425.scala
index d8cc6922f7..1714955c27 100644
--- a/test/files/neg/t4425.scala
+++ b/test/files/neg/t4425.scala
@@ -2,3 +2,13 @@ object Foo {
object X { def unapply(x : Int)(y : Option[Int] = None) = None }
42 match { case _ X _ => () }
}
+
+object Foo2 {
+ object X { def unapply(x : Int)(y: Int) = Some((2,2)) }
+ 42 match { case _ X _ => () }
+}
+
+object Foo3 {
+ object X { def unapply(x : String)(y: String) = Some((2,2)) }
+ "" match { case _ X _ => () }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4425b.check b/test/files/neg/t4425b.check
new file mode 100644
index 0000000000..1186e8b609
--- /dev/null
+++ b/test/files/neg/t4425b.check
@@ -0,0 +1,61 @@
+t4425b.scala:5: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:6: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:7: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:8: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:9: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:10: error: object X is not a case class constructor, nor does it have an unapply/unapplySeq method
+Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cannot be used as an extractor due to its second non-implicit parameter list
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:18: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:19: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:22: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:22: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:23: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:23: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:31: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:32: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:35: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:35: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:36: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+t4425b.scala:36: error: wrong number of patterns for object X offering Nothing: expected 1, found 2
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ ^
+18 errors found
diff --git a/test/files/neg/t4425b.scala b/test/files/neg/t4425b.scala
new file mode 100644
index 0000000000..861e9521f6
--- /dev/null
+++ b/test/files/neg/t4425b.scala
@@ -0,0 +1,38 @@
+object Test1 {
+ object X { def unapply(x : String)(y: String) = throw new Exception }
+
+ def f1() {
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ }
+}
+
+object Test2 {
+ object X { def unapply(x : String) = throw new Exception }
+
+ def f1() {
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ }
+}
+
+object Test3 {
+ object X { def unapply(x : String) = None }
+
+ def f1() {
+ println( "" match { case _ X _ => "ok" ; case _ => "fail" })
+ println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" })
+ println( "" match { case X(_) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_) => "ok" ; case _ => "fail" })
+ println( "" match { case X(_, _) => "ok" ; case _ => "fail" })
+ println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" })
+ }
+}
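
For contrast with the deliberately ill-formed unapply methods above, a small sketch of an extractor shape the new error message accepts: a single non-implicit parameter list and a conventional Option result. Halve is an invented name used only for illustration.

// Illustrative sketch only: a well-formed extractor with one parameter list,
// unlike the two-parameter-list unapply that t4425b deliberately rejects.
object Halve {
  def unapply(n: Int): Option[Int] = if (n % 2 == 0) Some(n / 2) else None
}

object HalveDemo extends App {
  10 match {
    case Halve(k) => println(k) // prints 5
    case odd      => println(s"$odd is odd")
  }
}
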
diff --git a/test/files/neg/t5903a.check b/test/files/neg/t5903a.check
new file mode 100644
index 0000000000..cbdcfd1bdd
--- /dev/null
+++ b/test/files/neg/t5903a.check
@@ -0,0 +1,7 @@
+Test_2.scala:4: error: wrong number of patterns for <$anon: AnyRef> offering (SomeTree.type, SomeTree.type): expected 2, found 3
+ case nq"$x + $y + $z" => println((x, y))
+ ^
+Test_2.scala:4: error: not found: value x
+ case nq"$x + $y + $z" => println((x, y))
+ ^
+two errors found
diff --git a/test/files/neg/t5903a/Macros_1.scala b/test/files/neg/t5903a/Macros_1.scala
new file mode 100644
index 0000000000..e82be0fc68
--- /dev/null
+++ b/test/files/neg/t5903a/Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+ implicit class QuasiquoteInterpolation(c: StringContext) {
+ object nq {
+ def unapply(t: Tree) = macro QuasiquoteMacros.unapplyImpl
+ }
+ }
+}
+
+object QuasiquoteMacros {
+ def unapplyImpl(c: Context)(t: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = this
+ def _1 = SomeTree
+ def _2 = SomeTree
+ def unapply(t: Tree) = this
+ }.unapply($t)
+ """
+ }
+}
diff --git a/test/files/neg/t5903a/Test_2.scala b/test/files/neg/t5903a/Test_2.scala
new file mode 100644
index 0000000000..4d78dfb5e5
--- /dev/null
+++ b/test/files/neg/t5903a/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import NewQuasiquotes._
+ SomeTree match {
+ case nq"$x + $y + $z" => println((x, y))
+ }
+}
diff --git a/test/files/neg/t5903b.check b/test/files/neg/t5903b.check
new file mode 100644
index 0000000000..faeb73ad03
--- /dev/null
+++ b/test/files/neg/t5903b.check
@@ -0,0 +1,9 @@
+Test_2.scala:4: error: type mismatch;
+ found : Int
+ required: String
+ case t"$x" => println(x)
+ ^
+Test_2.scala:4: error: not found: value x
+ case t"$x" => println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t5903b/Macros_1.scala b/test/files/neg/t5903b/Macros_1.scala
new file mode 100644
index 0000000000..b1b875969d
--- /dev/null
+++ b/test/files/neg/t5903b/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = "2"
+ def unapply(x: String) = this
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/neg/t5903b/Test_2.scala b/test/files/neg/t5903b/Test_2.scala
new file mode 100644
index 0000000000..0f6f80d327
--- /dev/null
+++ b/test/files/neg/t5903b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 2 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t5903c.check b/test/files/neg/t5903c.check
new file mode 100644
index 0000000000..c9476edd11
--- /dev/null
+++ b/test/files/neg/t5903c.check
@@ -0,0 +1,7 @@
+Test_2.scala:4: error: String is not supported
+ case t"$x" => println(x)
+ ^
+Test_2.scala:4: error: not found: value x
+ case t"$x" => println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t5903c/Macros_1.scala b/test/files/neg/t5903c/Macros_1.scala
new file mode 100644
index 0000000000..70efab3101
--- /dev/null
+++ b/test/files/neg/t5903c/Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ if (!(c.weakTypeOf[Int] =:= c.weakTypeOf[T])) c.abort(c.enclosingPosition, s"${c.weakTypeOf[T]} is not supported")
+ else {
+ q"""
+ new {
+ def isEmpty = false
+ def get = 2
+ def unapply(x: Int) = this
+ }.unapply($x)
+ """
+ }
+ }
+}
diff --git a/test/files/neg/t5903c/Test_2.scala b/test/files/neg/t5903c/Test_2.scala
new file mode 100644
index 0000000000..a1fd31dd49
--- /dev/null
+++ b/test/files/neg/t5903c/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ "2" match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t5903d.check b/test/files/neg/t5903d.check
new file mode 100644
index 0000000000..d5d3fdcc28
--- /dev/null
+++ b/test/files/neg/t5903d.check
@@ -0,0 +1,7 @@
+Test_2.scala:4: error: extractor macros can only expand into extractor calls
+ case t"$x" => println(x)
+ ^
+Test_2.scala:4: error: not found: value x
+ case t"$x" => println(x)
+ ^
+two errors found
diff --git a/test/files/neg/t5903d/Macros_1.scala b/test/files/neg/t5903d/Macros_1.scala
new file mode 100644
index 0000000000..15ff226cff
--- /dev/null
+++ b/test/files/neg/t5903d/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply(x: Int) = macro Macros.unapplyImpl
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ class Match(x: Int) {
+ def isEmpty = false
+ def get = x
+ }
+ new { def unapply(x: Int) = new Match(x) }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/neg/t5903d/Test_2.scala b/test/files/neg/t5903d/Test_2.scala
new file mode 100644
index 0000000000..95c717a9d8
--- /dev/null
+++ b/test/files/neg/t5903d/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 42 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t5903e.check b/test/files/neg/t5903e.check
new file mode 100644
index 0000000000..3bdeb091a0
--- /dev/null
+++ b/test/files/neg/t5903e.check
@@ -0,0 +1,4 @@
+Test_2.scala:4: error: value class may not be a member of another class
+ case t"$x" => println(x)
+ ^
+one error found
diff --git a/test/files/neg/t5903e/Macros_1.scala b/test/files/neg/t5903e/Macros_1.scala
new file mode 100644
index 0000000000..4e1ce89c9f
--- /dev/null
+++ b/test/files/neg/t5903e/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply(x: Int) = macro Macros.unapplyImpl
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ class Match(x: Int) extends AnyVal {
+ def isEmpty = false
+ def get = x
+ }
+ def unapply(x: Int) = new Match(x)
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/neg/t5903e/Test_2.scala b/test/files/neg/t5903e/Test_2.scala
new file mode 100644
index 0000000000..d69d472436
--- /dev/null
+++ b/test/files/neg/t5903e/Test_2.scala
@@ -0,0 +1,6 @@
+class C {
+ import Interpolation._
+ 42 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/neg/t6289.check b/test/files/neg/t6289.check
new file mode 100644
index 0000000000..f6f43cabd3
--- /dev/null
+++ b/test/files/neg/t6289.check
@@ -0,0 +1,10 @@
+#partest java6
+t6289/J.java:2: method does not override or implement a method from a supertype
+ @Override public void foo() { }
+ ^
+1 error
+#partest java7
+t6289/J.java:2: error: method does not override or implement a method from a supertype
+ @Override public void foo() { }
+ ^
+1 error
diff --git a/test/files/neg/javac-error.flags b/test/files/neg/t6289.flags
index 85d8eb2ba2..85d8eb2ba2 100644
--- a/test/files/neg/javac-error.flags
+++ b/test/files/neg/t6289.flags
diff --git a/test/files/neg/javac-error/J.java b/test/files/neg/t6289/J.java
index 83f50c9ae2..83f50c9ae2 100644
--- a/test/files/neg/javac-error/J.java
+++ b/test/files/neg/t6289/J.java
diff --git a/test/files/neg/javac-error/SUT_5.scala b/test/files/neg/t6289/SUT_5.scala
index 0a996352c0..0a996352c0 100644
--- a/test/files/neg/javac-error/SUT_5.scala
+++ b/test/files/neg/t6289/SUT_5.scala
diff --git a/test/files/neg/t6675.check b/test/files/neg/t6675.check
index 3a277af866..aecf04cb68 100644
--- a/test/files/neg/t6675.check
+++ b/test/files/neg/t6675.check
@@ -1,4 +1,4 @@
-t6675.scala:10: warning: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
+t6675.scala:10: warning: object X expects 3 patterns to hold (Int, Int, Int) but crushing into 3-tuple to fit single pattern (SI-6675)
"" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t7214neg.check b/test/files/neg/t7214neg.check
new file mode 100644
index 0000000000..0660cccd02
--- /dev/null
+++ b/test/files/neg/t7214neg.check
@@ -0,0 +1,7 @@
+t7214neg.scala:28: error: wrong number of patterns for object Extractor offering Any: expected 1, found 0
+ case Extractor() =>
+ ^
+t7214neg.scala:28: error: wrong number of patterns for object Extractor offering Any: expected 1, found 0
+ case Extractor() =>
+ ^
+two errors found
diff --git a/test/files/neg/t7214neg.scala b/test/files/neg/t7214neg.scala
new file mode 100644
index 0000000000..ff1ea8082d
--- /dev/null
+++ b/test/files/neg/t7214neg.scala
@@ -0,0 +1,57 @@
+// pattern matcher crashes here trying to synthesize an unneeded outer test.
+// no-symbol does not have an owner
+// at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:49)
+// at scala.tools.nsc.Global.abort(Global.scala:253)
+// at scala.reflect.internal.Symbols$NoSymbol.owner(Symbols.scala:3248)
+// at scala.reflect.internal.Symbols$Symbol.effectiveOwner(Symbols.scala:678)
+// at scala.reflect.internal.Symbols$Symbol.isDefinedInPackage(Symbols.scala:664)
+// at scala.reflect.internal.TreeGen.mkAttributedSelect(TreeGen.scala:188)
+// at scala.reflect.internal.TreeGen.mkAttributedRef(TreeGen.scala:124)
+// at scala.tools.nsc.ast.TreeDSL$CODE$.REF(TreeDSL.scala:308)
+// at scala.tools.nsc.typechecker.PatternMatching$TreeMakers$TypeTestTreeMaker$treeCondStrategy$.outerTest(PatternMatching.scala:1209)
+class Crash {
+ type Alias = C#T
+
+ val c = new C
+ val t = new c.T
+
+ // Crash via a Typed Pattern...
+ (t: Any) match {
+ case e: Alias =>
+ }
+
+ // ... or via a Typed Extractor Pattern.
+ object Extractor {
+ def unapply(a: Alias): Option[Any] = None
+ }
+ (t: Any) match {
+ case Extractor() =>
+ case _ =>
+ }
+
+ // checking that correct outer tests are applied when
+ // aliases for path dependent types are involved.
+ val c2 = new C
+ type CdotT = c.T
+ type C2dotT = c2.T
+
+ val outerField = t.getClass.getDeclaredFields.find(_.getName contains ("outer")).get
+ outerField.setAccessible(true)
+
+ (t: Any) match {
+ case _: C2dotT =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c2 = $c2") // this matches on 2.10.0
+ case _: CdotT =>
+ case _ =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c = $c")
+ }
+}
+
+class C {
+ class T
+}
+
+object Test extends App {
+ new Crash
+}
+
diff --git a/test/files/neg/t7721.check b/test/files/neg/t7721.check
new file mode 100644
index 0000000000..e056b9a293
--- /dev/null
+++ b/test/files/neg/t7721.check
@@ -0,0 +1,21 @@
+t7721.scala:11: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Concrete => x.bippy + x.conco
+ ^
+t7721.scala:15: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+ case x: Concrete with Foo => x.bippy + x.conco
+ ^
+t7721.scala:19: warning: abstract type pattern A.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Bar => x.bippy + x.barry
+ ^
+t7721.scala:39: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Concrete => x.bippy + x.dingo + x.conco
+ ^
+t7721.scala:43: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+ case x: Concrete with Foo => x.bippy + x.dingo + x.conco
+ ^
+t7721.scala:47: warning: abstract type pattern B.this.Foo is unchecked since it is eliminated by erasure
+ case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t7721.flags b/test/files/neg/t7721.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7721.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t7721.scala b/test/files/neg/t7721.scala
new file mode 100644
index 0000000000..27884c9e35
--- /dev/null
+++ b/test/files/neg/t7721.scala
@@ -0,0 +1,140 @@
+import scala.language.reflectiveCalls
+
+trait A {
+ trait Concrete { def conco: Int = 1 }
+ type Foo <: { def bippy: Int }
+ type Bar <: { def barry: Int }
+
+ implicit def barTag: scala.reflect.ClassTag[Bar]
+
+ def f1(x: Any) = x match {
+ case x: Foo with Concrete => x.bippy + x.conco
+ case _ => -1
+ }
+ def f2(x: Any) = x match {
+ case x: Concrete with Foo => x.bippy + x.conco
+ case _ => -1
+ }
+ def f3(x: Any) = x match {
+ case x: Foo with Bar => x.bippy + x.barry
+ case _ => -1
+ }
+ def f4(x: Any) = x match {
+ case x: (Foo @unchecked) => x.bippy // warns, suppressed
+ case _ => -1
+ }
+ def f5(x: Any) = x match {
+ case x: (Bar @unchecked) => x.barry // warns (but about the "outer reference"), suppressed
+ case _ => -1
+ }
+}
+
+trait B extends A {
+ type Foo <: { def bippy: Int ; def dingo: Int }
+ type Bar <: { def barry: Int ; def bongo: Int }
+
+ override implicit def barTag: scala.reflect.ClassTag[Bar]
+
+ override def f1(x: Any) = x match {
+ case x: Foo with Concrete => x.bippy + x.dingo + x.conco
+ case _ => -1
+ }
+ override def f2(x: Any) = x match {
+ case x: Concrete with Foo => x.bippy + x.dingo + x.conco
+ case _ => -1
+ }
+ override def f3(x: Any) = x match {
+ case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo
+ case _ => -1
+ }
+ override def f4(x: Any) = x match {
+ case x: (Foo @unchecked) => x.bippy + x.dingo // warns, suppressed
+ case _ => -1
+ }
+ override def f5(x: Any) = x match {
+ case x: (Bar @unchecked) => x.barry + x.bongo // warns (but about the "outer reference"), suppressed
+ case _ => -1
+ }
+}
+
+object Test {
+ abstract class Base extends A {
+ trait Foo {
+ def bippy = 2
+ def dingo = 3
+ }
+ trait Bar {
+ def barry = 2
+ def bongo = 3
+ }
+ implicit def barTag: scala.reflect.ClassTag[Bar] = scala.reflect.ClassTag(classOf[Bar])
+
+ def run() {
+ println("f1")
+ wrap(f1(new Concrete {}))
+ wrap(f1(new Foo {}))
+ wrap(f1(new Bar {}))
+ wrap(f1(new Foo with Concrete {}))
+ wrap(f1(new Concrete with Foo {}))
+
+ println("\nf2")
+ wrap(f2(new Concrete {}))
+ wrap(f2(new Foo {}))
+ wrap(f2(new Bar {}))
+ wrap(f2(new Foo with Concrete {}))
+ wrap(f2(new Concrete with Foo {}))
+ wrap(f2(new Bar with Concrete {}))
+ wrap(f2(new Concrete with Bar {}))
+ wrap(f2(new Concrete with Foo with Bar {}))
+ wrap(f2(new Foo with Bar with Concrete {}))
+
+ println("\nf3")
+ wrap(f3(new Concrete {}))
+ wrap(f3(new Foo {}))
+ wrap(f3(new Bar {}))
+ wrap(f3(new Foo with Concrete {}))
+ wrap(f3(new Concrete with Foo {}))
+ wrap(f3(new Bar with Concrete {}))
+ wrap(f3(new Concrete with Bar {}))
+ wrap(f3(new Concrete with Foo with Bar {}))
+ wrap(f3(new Foo with Bar with Concrete {}))
+
+ println("\nf4")
+ wrap(f4(new Concrete {}))
+ wrap(f4(new Foo {}))
+ wrap(f4(new Bar {}))
+ wrap(f4(new Foo with Concrete {}))
+ wrap(f4(new Concrete with Foo {}))
+ wrap(f4(new Bar with Concrete {}))
+ wrap(f4(new Concrete with Bar {}))
+ wrap(f4(new Concrete with Foo with Bar {}))
+ wrap(f4(new Foo with Bar with Concrete {}))
+
+ println("\nf5")
+ wrap(f5(new Concrete {}))
+ wrap(f5(new Foo {}))
+ wrap(f5(new Bar {}))
+ wrap(f5(new Foo with Concrete {}))
+ wrap(f5(new Concrete with Foo {}))
+ wrap(f5(new Bar with Concrete {}))
+ wrap(f5(new Concrete with Bar {}))
+ wrap(f5(new Concrete with Foo with Bar {}))
+ wrap(f5(new Foo with Bar with Concrete {}))
+ }
+ }
+
+ object ao extends Base
+ object bo extends Base with B
+
+ private def wrap(body: => Any) {
+ try println(body)
+ catch { case ex: NoSuchMethodException => println(ex) }
+ }
+
+ def main(args: Array[String]) {
+ ao.run()
+ bo.run()
+ }
+}
+
+// java.lang.NoSuchMethodException: Test$$anon$1.bippy()
\ No newline at end of file
diff --git a/test/files/neg/t7756a.check b/test/files/neg/t7756a.check
new file mode 100644
index 0000000000..8d42717e47
--- /dev/null
+++ b/test/files/neg/t7756a.check
@@ -0,0 +1,7 @@
+t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence]
+ locally(null: TA[Object])
+ ^
+t7756a.scala:7: error: type arguments [Object] do not conform to trait TA's type parameter bounds [X <: CharSequence]
+ locally(null: TA[Object])
+ ^
+two errors found
diff --git a/test/files/neg/t7756a.scala b/test/files/neg/t7756a.scala
new file mode 100644
index 0000000000..4453e84963
--- /dev/null
+++ b/test/files/neg/t7756a.scala
@@ -0,0 +1,11 @@
+object Test {
+ def test: Unit = {
+ trait TA[X <: CharSequence]
+ 0 match {
+ case _ =>
+ // the bounds violation isn't reported. RefChecks seems to be too broadly disabled under virtpatmat: see 65340ed4ad2e
+ locally(null: TA[Object])
+ ()
+ }
+ }
+}
diff --git a/test/files/neg/t7756b.check b/test/files/neg/t7756b.check
new file mode 100644
index 0000000000..2817a7e230
--- /dev/null
+++ b/test/files/neg/t7756b.check
@@ -0,0 +1,6 @@
+t7756b.scala:3: warning: comparing values of types Int and String using `==' will always yield false
+ case _ => 0 == ""
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7756b.flags b/test/files/neg/t7756b.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t7756b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t7756b.scala b/test/files/neg/t7756b.scala
new file mode 100644
index 0000000000..a2de29c8e7
--- /dev/null
+++ b/test/files/neg/t7756b.scala
@@ -0,0 +1,5 @@
+object Test {
+ 0 match {
+ case _ => 0 == ""
+ }
+}
diff --git a/test/files/neg/t7757a.check b/test/files/neg/t7757a.check
new file mode 100644
index 0000000000..de24e23004
--- /dev/null
+++ b/test/files/neg/t7757a.check
@@ -0,0 +1,4 @@
+t7757a.scala:1: error: ';' expected but '@' found.
+trait Foo @annot
+ ^
+one error found
diff --git a/test/files/neg/t7757a.scala b/test/files/neg/t7757a.scala
new file mode 100644
index 0000000000..24f6c16cb4
--- /dev/null
+++ b/test/files/neg/t7757a.scala
@@ -0,0 +1 @@
+trait Foo @annot
\ No newline at end of file
diff --git a/test/files/neg/t7757b.check b/test/files/neg/t7757b.check
new file mode 100644
index 0000000000..3e5a0f1fa6
--- /dev/null
+++ b/test/files/neg/t7757b.check
@@ -0,0 +1,4 @@
+t7757b.scala:2: error: expected start of definition
+@annot2
+ ^
+one error found
diff --git a/test/files/neg/t7757b.scala b/test/files/neg/t7757b.scala
new file mode 100644
index 0000000000..e9a537dba1
--- /dev/null
+++ b/test/files/neg/t7757b.scala
@@ -0,0 +1,2 @@
+trait Foo2
+@annot2
\ No newline at end of file
diff --git a/test/files/neg/t997.check b/test/files/neg/t997.check
index 186095f44a..be1e92c369 100644
--- a/test/files/neg/t997.check
+++ b/test/files/neg/t997.check
@@ -1,7 +1,10 @@
-t997.scala:13: error: wrong number of arguments for object Foo
+t997.scala:13: error: wrong number of patterns for object Foo offering (String, String): expected 2, found 3
+"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
+ ^
+t997.scala:13: error: wrong number of patterns for object Foo offering (String, String): expected 2, found 3
"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
^
t997.scala:13: error: not found: value a
"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
^
-two errors found
+three errors found
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index cf58bc3dfd..ecf8916c46 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -22,7 +22,7 @@ object Macros {
var b1 = new Transformer {
override def transform(tree: Tree): Tree = tree match {
case Ident(x) if (x==n) => Ident(TermName("_arg"))
- case tt @ TypeTree() if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
+ case tt: TypeTree if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
// without the fix to LazyTreeCopier.Annotated, we would need to uncomment the line below to make the macro work
// that's because the pattern match in the input expression gets expanded into Typed(<x>, TypeTree(<Int @unchecked>))
// with the original of the TypeTree being Annotated(<@unchecked>, Ident(<x>))
diff --git a/test/files/pos/extractor-types.scala b/test/files/pos/extractor-types.scala
new file mode 100644
index 0000000000..bb9659a13c
--- /dev/null
+++ b/test/files/pos/extractor-types.scala
@@ -0,0 +1,30 @@
+package p1 {
+ object Ex { def unapply(p: Any): Option[_ <: Int] = null }
+ object Foo { val Ex(_) = null }
+}
+// a.scala:2: error: error during expansion of this match (this is a scalac bug).
+// The underlying error was: type mismatch;
+// found : Some[_$1(in value x$1)] where type _$1(in value x$1)
+// required: Some[_$1(in method unapply)]
+// object Foo { val Ex(_) = null }
+// ^
+// one error found
+
+package p2 {
+ trait Other {
+ class Quux
+ object Baz { def unapply(x: Any): Option[Quux] = None }
+ }
+ trait Reifiers {
+ def f() {
+ val u2: Other = null
+ (null: Any) match { case u2.Baz(x) => println(x) } //: u2.Quux) }
+ // The underlying error was: type mismatch;
+ // found : Other#Quux
+ // required: u2.Quux
+ // x match { case u2.Baz(x) => println(x: u2.Quux) }
+ // ^
+ // one error found
+ }
+ }
+}
diff --git a/test/files/pos/optmatch.scala b/test/files/pos/optmatch.scala
new file mode 100644
index 0000000000..354be65da7
--- /dev/null
+++ b/test/files/pos/optmatch.scala
@@ -0,0 +1,33 @@
+// final case class NonZeroLong(value: Long) extends AnyVal {
+// def get: Long = value
+// def isEmpty: Boolean = get == 0l
+// }
+
+class NonZeroLong(val value: Long) extends AnyVal {
+ def get: Long = value
+ def isEmpty: Boolean = get == 0l
+}
+object NonZeroLong {
+ def unapply(value: Long): NonZeroLong = new NonZeroLong(value)
+}
+
+
+object Foo {
+ def unapply(x: Int): NonZeroLong = new NonZeroLong(1L << x)
+ // public long unapply(int);
+ // 0: lconst_1
+ // 1: iload_1
+ // 2: lshl
+ // 3: lreturn
+}
+
+object Test {
+ def f(x: Int): Int = x match {
+ case Foo(1024l) => 1
+ case _ => 2
+ }
+ def main(args: Array[String]): Unit = {
+ println(f(10))
+ println(f(11))
+ }
+}
diff --git a/test/files/pos/overloaded-unapply.scala b/test/files/pos/overloaded-unapply.scala
new file mode 100644
index 0000000000..4105a25f10
--- /dev/null
+++ b/test/files/pos/overloaded-unapply.scala
@@ -0,0 +1,8 @@
+trait Baz {
+ type Type >: Null
+
+ case class HoleType(a: String, b: String, c: String)
+ object HoleType { def unapply(tpe: Type): Option[HoleType] = ??? }
+
+ (null: Type) match { case HoleType(holeTpe) => holeTpe }
+}
diff --git a/test/files/pos/patmat-extract-tparam.scala b/test/files/pos/patmat-extract-tparam.scala
new file mode 100644
index 0000000000..6417b49c2b
--- /dev/null
+++ b/test/files/pos/patmat-extract-tparam.scala
@@ -0,0 +1,13 @@
+trait Bip[T] { def h: T }
+trait BoolBip extends Bip[Boolean]
+
+class A {
+ def g(x: Boolean): Unit = ()
+ def f(xs: List[Bip[_]]) = xs foreach { case x: BoolBip => g(x.h) }
+}
+
+class B {
+ def g(x: Boolean): Unit = ()
+ def g(x: Int): Unit = ()
+ def f(xs: List[Bip[_]]) = xs foreach { case x: BoolBip => g(x.h) }
+}
diff --git a/test/files/pos/t6797.scala b/test/files/pos/t6797.scala
new file mode 100644
index 0000000000..ef1afa1eb3
--- /dev/null
+++ b/test/files/pos/t6797.scala
@@ -0,0 +1,4 @@
+object Test extends App /* workaround: don't extend App */ {
+ private class Matcher(aParam: Option[String] = None)
+ private val stringMatcher = new Matcher
+}
diff --git a/test/files/run/matchonseq.scala b/test/files/run/matchonseq.scala
index 49b406a6ec..f6f320245a 100644
--- a/test/files/run/matchonseq.scala
+++ b/test/files/run/matchonseq.scala
@@ -1,8 +1,8 @@
-object Test extends App{
- Vector(1,2,3) match {
- case head +: tail => println("It worked! head=" + head)
+object Test extends App {
+ Vector(1,2,3) match {
+ case head +: tail => println("It worked! head=" + head)
}
- Vector(1,2,3) match {
- case init :+ last => println("It worked! last=" + last)
+ Vector(1,2,3) match {
+ case init :+ last => println("It worked! last=" + last)
}
}
diff --git a/test/files/run/name-based-patmat.check b/test/files/run/name-based-patmat.check
new file mode 100644
index 0000000000..1cc605ea3d
--- /dev/null
+++ b/test/files/run/name-based-patmat.check
@@ -0,0 +1,10 @@
+catdog
+2 catdogs! A ha ha!
+3 catdogs! A ha ha!
+catdog
+2 catdogs! A ha ha!
+3 catdogs! A ha ha!
+1
+1
+2
+3
diff --git a/test/files/run/name-based-patmat.scala b/test/files/run/name-based-patmat.scala
new file mode 100644
index 0000000000..2c429c141f
--- /dev/null
+++ b/test/files/run/name-based-patmat.scala
@@ -0,0 +1,75 @@
+final class MiniSome[T](val get: T) extends AnyVal { def isEmpty = false }
+
+package p1 {
+ class Triple(val x: Any) extends AnyRef with Product3[String, String, String] {
+ private def s = "" + x
+ override def canEqual(x: Any) = this eq x.asInstanceOf[AnyRef]
+ def isEmpty = false
+ def get = this
+ def _1 = s
+ def _2 = "2 " + s + "s! A ha ha!"
+ def _3 = "3 " + s + "s! A ha ha!"
+
+ override def toString = s"Triple(${_1}, ${_2}, ${_3})"
+ }
+
+ object Triple {
+ def unapply(x: Any): Triple = new Triple(x)
+ }
+}
+
+package p2 {
+ class Triple(val x: Any) {
+ private def s = "" + x
+ def isEmpty = false
+ def get = this
+ def _1 = s
+ def _2 = "2 " + s + "s! A ha ha!"
+ def _3 = "3 " + s + "s! A ha ha!"
+ override def toString = s"Triple(${_1}, ${_2}, ${_3})"
+ }
+
+ object Triple {
+ def unapply(x: Any): Triple = new Triple(x)
+ }
+}
+
+package p3 {
+ case class Foo(x: Int, y: Int, zs: Int*)
+
+ object Bar {
+ def f(x: Foo) = x match {
+ case Foo(5, 10, 15, 20, _*) => 1
+ case Foo(5, 10, 15, _*) => 2
+ case Foo(5, 10, _*) => 3
+ case Foo(5, 10) => 4 // should warn unreachable
+ case _ => 5
+ }
+ }
+}
+
+object Test {
+
+ // def f(x: Any) = x match {
+ // case p1.Foo(x, y, z) => println((x, y, z))
+ // case x => println(x)
+ // }
+
+ def main(args: Array[String]): Unit = {
+ "catdog" match {
+ case p1.Triple(x, y, z) => List(x, y, z) foreach println
+ case x => println("fail: " + x)
+ }
+ // TODO
+ "catdog" match {
+ case p2.Triple(x, y, z) => List(x, y, z) foreach println
+ case x => println("fail: " + x)
+ }
+
+ println(p3.Bar.f(p3.Foo(5, 10, 15, 20, 25)))
+ println(p3.Bar.f(p3.Foo(5, 10, 15, 20)))
+ println(p3.Bar.f(p3.Foo(5, 10, 15)))
+ println(p3.Bar.f(p3.Foo(5, 10)))
+ // println(p3.Bar.f(p3.Foo(5)))
+ }
+}
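
To make the p2.Triple case above easier to follow, a short sketch of the same idea at arity two: with several sub-patterns, the type of get needs only _1 through _N members and no ProductN parent, under the name-based rules this test targets. Pair is an invented name, not code from this commit.

// Illustrative sketch only; mirrors p2.Triple above at arity 2.
class Pair(private val s: String) {
  def isEmpty = false
  def get     = this          // get exposes _1 and _2 for the two sub-patterns
  def _1      = s.toUpperCase
  def _2      = s.length
}
object Pair { def unapply(s: String): Pair = new Pair(s) }

object PairDemo extends App {
  "hello" match {
    case Pair(up, len) => println(s"$up has length $len") // HELLO has length 5
  }
}
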
diff --git a/test/files/run/patmat-behavior-2.check b/test/files/run/patmat-behavior-2.check
new file mode 100644
index 0000000000..a928fe7918
--- /dev/null
+++ b/test/files/run/patmat-behavior-2.check
@@ -0,0 +1,24 @@
+f1(Foo(1)) == true
+f1(Foo(1, 2)) == false
+f1(Foo(1, 2, 3)) == false
+
+f2(Foo(1)) == false
+f2(Foo(1, 2)) == true
+f2(Foo(1, 2, 3)) == false
+
+f3(Foo(1)) == false
+f3(Foo(1, 2)) == false
+f3(Foo(1, 2, 3)) == true
+
+f1seq(Foo(1)) == true
+f1seq(Foo(1, 2)) == true
+f1seq(Foo(1, 2, 3)) == true
+
+f2seq(Foo(1)) == false
+f2seq(Foo(1, 2)) == true
+f2seq(Foo(1, 2, 3)) == true
+
+f3seq(Foo(1)) == false
+f3seq(Foo(1, 2)) == false
+f3seq(Foo(1, 2, 3)) == true
+
diff --git a/test/files/run/patmat-behavior-2.scala b/test/files/run/patmat-behavior-2.scala
new file mode 100644
index 0000000000..b31f773772
--- /dev/null
+++ b/test/files/run/patmat-behavior-2.scala
@@ -0,0 +1,50 @@
+case class Foo(x: Int, ys: Int*) {
+ // We write our own toString because of SI-7735
+ override def toString = (x +: ys).mkString("Foo(", ", ", ")")
+}
+
+object Test {
+ def f1(x: Any) = x match {
+ case Foo(x) => true
+ case _ => false
+ }
+ def f2(x: Any) = x match {
+ case Foo(x, y) => true
+ case _ => false
+ }
+ def f3(x: Any) = x match {
+ case Foo(x, y, z) => true
+ case _ => false
+ }
+ def f1seq(x: Any) = x match {
+ case Foo(x, ys @ _*) => true
+ case _ => false
+ }
+ def f2seq(x: Any) = x match {
+ case Foo(x, y, zs @ _*) => true
+ case _ => false
+ }
+ def f3seq(x: Any) = x match {
+ case Foo(x, y, z, qs @ _*) => true
+ case _ => false
+ }
+
+ val x1 = Foo(1)
+ val x2 = Foo(1, 2)
+ val x3 = Foo(1, 2, 3)
+
+ val fs = List[Any => Boolean](f1, f2, f3)
+ val fseqs = List[Any => Boolean](f1seq, f2seq, f3seq)
+ val xs = List[Foo](x1, x2, x3)
+
+ def main(args: Array[String]): Unit = {
+ for ((f, i) <- fs.zipWithIndex) {
+ xs foreach (x => println(s"f${i+1}($x) == ${f(x)}"))
+ println("")
+ }
+ for ((f, i) <- fseqs.zipWithIndex) {
+ xs foreach (x => println(s"f${i+1}seq($x) == ${f(x)}"))
+ println("")
+ }
+ }
+}
diff --git a/test/files/run/patmat-behavior.check b/test/files/run/patmat-behavior.check
new file mode 100644
index 0000000000..273a1434fb
--- /dev/null
+++ b/test/files/run/patmat-behavior.check
@@ -0,0 +1,90 @@
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[A]
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[A]
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[A]
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[A]
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[A]
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
+patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[A]
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ ^
diff --git a/test/files/run/patmat-behavior.scala b/test/files/run/patmat-behavior.scala
new file mode 100644
index 0000000000..8b6370d796
--- /dev/null
+++ b/test/files/run/patmat-behavior.scala
@@ -0,0 +1,95 @@
+package s {
+ sealed trait C[+A]
+
+ case class C00[+A]() extends C[A]
+ case class C10[+A](x: A) extends C[A]
+ case class C20[+A](x: A, y: A) extends C[A]
+ case class C01[+A](xs: A*) extends C[A]
+ case class C11[+A](x: A, ys: A*) extends C[A]
+ case class C21[+A](x: A, y: A, zs: A*) extends C[A]
+
+ object E00 { def unapply[A](x: Any): Boolean = ??? }
+ object E10 { def unapply[A](x: Any): Option[A] = ??? }
+ object E20 { def unapply[A](x: Any): Option[(A, A)] = ??? }
+ object E01 { def unapplySeq[A](x: Any): Option[Seq[A]] = ??? }
+ object E11 { def unapplySeq[A](x: Any): Option[(A, Seq[A])] = ??? }
+ object E21 { def unapplySeq[A](x: Any): Option[(A, A, Seq[A])] = ??? }
+
+ object F00 { def unapply[A](x: C[A]): Boolean = ??? }
+ object F10 { def unapply[A](x: C[A]): Option[A] = ??? }
+ object F20 { def unapply[A](x: C[A]): Option[(A, A)] = ??? }
+ object F01 { def unapplySeq[A](x: C[A]): Option[Seq[A]] = ??? }
+ object F11 { def unapplySeq[A](x: C[A]): Option[(A, Seq[A])] = ??? }
+ object F21 { def unapplySeq[A](x: C[A]): Option[(A, A, Seq[A])] = ??? }
+
+ object G00 { def unapply[A](x: C00[A]): Boolean = ??? }
+ object G10 { def unapply[A](x: C10[A]): Option[A] = ??? }
+ object G20 { def unapply[A](x: C20[A]): Option[(A, A)] = ??? }
+ object G01 { def unapplySeq[A](x: C01[A]): Option[Seq[A]] = ??? }
+ object G11 { def unapplySeq[A](x: C11[A]): Option[(A, Seq[A])] = ??? }
+ object G21 { def unapplySeq[A](x: C21[A]): Option[(A, A, Seq[A])] = ??? }
+}
+import s._
+
+package pos {
+ object Test {
+ def ga1(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga2(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga3(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga4(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga5(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+ def ga6(x: Any) = x match { case C00() => 1 ; case C10(x) => 2 ; case C20(x, y) => 3 ; case C01(xs) => 4 ; case C11(x, ys) => 5 ; case C21(x, y, zs) => 6 }
+
+ def gb1[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb2[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb3[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb4[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb5[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb6[A](x: C[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+
+ def gc1[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc2[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc3[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc4[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc5[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc6[A](x: C[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+
+ def gd1[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd2[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd3[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd4[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd5[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gd6[A, B <: C[A]](x: B) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ }
+}
+
+package neg {
+ object Fail {
+ def gb1[A](x: C00[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb2[A](x: C10[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb3[A](x: C20[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb4[A](x: C01[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb5[A](x: C11[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+ def gb6[A](x: C21[A]) = x match { case E00() => ??? ; case E10(x) => x ; case E20(x, y) => x ; case E01(xs @ _*) => xs.head ; case E11(x, ys @ _*) => x ; case E21(x, y, zs @ _*) => x }
+
+ def gc1[A](x: C00[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc2[A](x: C10[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc3[A](x: C20[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc4[A](x: C01[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc5[A](x: C11[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+ def gc6[A](x: C21[A]) = x match { case F00() => ??? ; case F10(x) => x ; case F20(x, y) => x ; case F01(xs @ _*) => xs.head ; case F11(x, ys @ _*) => x ; case F21(x, y, zs @ _*) => x }
+
+ def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/run/patmat-bind-typed.check b/test/files/run/patmat-bind-typed.check
new file mode 100644
index 0000000000..8baef1b4ab
--- /dev/null
+++ b/test/files/run/patmat-bind-typed.check
@@ -0,0 +1 @@
+abc
diff --git a/test/files/run/patmat-bind-typed.scala b/test/files/run/patmat-bind-typed.scala
new file mode 100644
index 0000000000..10de921c51
--- /dev/null
+++ b/test/files/run/patmat-bind-typed.scala
@@ -0,0 +1,8 @@
+object Test {
+ def f(xs: List[Any]) = for (key @ (dummy: String) <- xs) yield key
+
+ def main(args: Array[String]): Unit = {
+ f("abc" :: Nil) foreach println
+ f(5 :: Nil) foreach println
+ }
+}
diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala
new file mode 100644
index 0000000000..bbf46f2f19
--- /dev/null
+++ b/test/files/run/repl-trim-stack-trace.scala
@@ -0,0 +1,33 @@
+
+import scala.tools.partest.SessionTest
+
+// SI-7740
+object Test extends SessionTest {
+ def session =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def f = throw new Exception("Uh-oh")
+f: Nothing
+
+scala> f
+java.lang.Exception: Uh-oh
+ at .f(<console>:7)
+
+scala> def f = throw new Exception("")
+f: Nothing
+
+scala> f
+java.lang.Exception:
+ at .f(<console>:7)
+
+scala> def f = throw new Exception
+f: Nothing
+
+scala> f
+java.lang.Exception
+ at .f(<console>:7)
+
+scala> """
+
+}
diff --git a/test/files/run/string-extractor.check b/test/files/run/string-extractor.check
new file mode 100644
index 0000000000..47f3722c86
--- /dev/null
+++ b/test/files/run/string-extractor.check
@@ -0,0 +1,9 @@
+by
+BY
+oTheClown
+nope
+1: ob
+2: obby
+2: OBBY
+3: BOBO
+3: TomTomTheClown
diff --git a/test/files/run/string-extractor.scala b/test/files/run/string-extractor.scala
new file mode 100644
index 0000000000..c0fe911ff3
--- /dev/null
+++ b/test/files/run/string-extractor.scala
@@ -0,0 +1,60 @@
+final class StringExtract(val s: String) extends AnyVal {
+ def isEmpty = (s eq null) || (s == "")
+ def get = this
+ def length = s.length
+ def lengthCompare(n: Int) = s.length compare n
+ def apply(idx: Int): Char = s charAt idx
+ def head: Char = s charAt 0
+ def tail: String = s drop 1
+ def drop(n: Int): StringExtract = new StringExtract(s drop n)
+
+ override def toString = s
+}
+
+final class ThreeStringExtract(val s: String) extends AnyVal {
+ def isEmpty = (s eq null) || (s == "")
+ def get: (List[Int], Double, ThreeStringExtract) = ((s.length :: Nil, s.length.toDouble, this))
+ def length = s.length
+ def lengthCompare(n: Int) = s.length compare n
+ def apply(idx: Int): Char = s charAt idx
+ def head: Char = s charAt 0
+ def tail: String = s drop 1
+ def drop(n: Int): ThreeStringExtract = new ThreeStringExtract(s drop n)
+
+ override def toString = s
+}
+
+
+object Bippy {
+ def unapplySeq(x: Any): StringExtract = new StringExtract("" + x)
+}
+object TripleBippy {
+ def unapplySeq(x: Any): ThreeStringExtract = new ThreeStringExtract("" + x)
+}
+
+object Test {
+ def f(x: Any) = x match {
+ case Bippy('B' | 'b', 'O' | 'o', 'B' | 'b', xs @ _*) => xs
+ case _ => "nope"
+ }
+
+ def g(x: Any): String = x match {
+ case TripleBippy(3 :: Nil, 3.0, 'b', chars @ _*) => "1: " + chars
+ case TripleBippy(5 :: Nil, 5.0, 'b' | 'B', chars @ _*) => "2: " + chars
+ case TripleBippy(_, _, chars @ _*) => "3: " + chars
+ case _ => "nope"
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f("Bobby"))
+ println(f("BOBBY"))
+ println(f("BoBoTheClown"))
+ println(f("TomTomTheClown"))
+
+ println(g("bob"))
+ println(g("bobby"))
+ println(g("BOBBY"))
+ println(g("BOBO"))
+ println(g("TomTomTheClown"))
+ }
+}
diff --git a/test/files/run/t5903a.check b/test/files/run/t5903a.check
new file mode 100644
index 0000000000..ce6efd812d
--- /dev/null
+++ b/test/files/run/t5903a.check
@@ -0,0 +1 @@
+(SomeTree,SomeTree)
diff --git a/test/files/run/t5903a.flags b/test/files/run/t5903a.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903a.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903a/Macros_1.scala b/test/files/run/t5903a/Macros_1.scala
new file mode 100644
index 0000000000..e82be0fc68
--- /dev/null
+++ b/test/files/run/t5903a/Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Tree
+case object SomeTree extends Tree
+
+object NewQuasiquotes {
+ implicit class QuasiquoteInterpolation(c: StringContext) {
+ object nq {
+ def unapply(t: Tree) = macro QuasiquoteMacros.unapplyImpl
+ }
+ }
+}
+
+object QuasiquoteMacros {
+ def unapplyImpl(c: Context)(t: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = this
+ def _1 = SomeTree
+ def _2 = SomeTree
+ def unapply(t: Tree) = this
+ }.unapply($t)
+ """
+ }
+}
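A minimal, non-macro sketch of the protocol the quasiquote above relies on: for a two-hole pattern the matcher only needs the unapply result to expose isEmpty, get, and _1/_2 on the value returned by get — no Option or tuple is involved. The names Tree2, SomeTree2, PairMatch, Pair and Demo are illustrative only and are not part of the patch.

trait Tree2
case object SomeTree2 extends Tree2

class PairMatch(t: Tree2) {
  def isEmpty = false           // signals whether the match failed
  def get     = this            // the value the sub-patterns are read from
  def _1: Tree2 = SomeTree2     // bound to the first pattern variable
  def _2: Tree2 = SomeTree2     // bound to the second pattern variable
}

object Pair {
  def unapply(t: Tree2) = new PairMatch(t)   // name-based result, no Some(...) allocation required
}

object Demo extends App {
  SomeTree2 match { case Pair(x, y) => println((x, y)) }   // prints (SomeTree2,SomeTree2)
}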
diff --git a/test/files/run/t5903a/Test_2.scala b/test/files/run/t5903a/Test_2.scala
new file mode 100644
index 0000000000..3a0b68b568
--- /dev/null
+++ b/test/files/run/t5903a/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import NewQuasiquotes._
+ SomeTree match {
+ case nq"$x + $y" => println((x, y))
+ }
+}
diff --git a/test/files/run/t5903b.check b/test/files/run/t5903b.check
new file mode 100644
index 0000000000..75891bc672
--- /dev/null
+++ b/test/files/run/t5903b.check
@@ -0,0 +1 @@
+oops
diff --git a/test/files/run/t5903b.flags b/test/files/run/t5903b.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903b.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903b/Macros_1.scala b/test/files/run/t5903b/Macros_1.scala
new file mode 100644
index 0000000000..c0124850b8
--- /dev/null
+++ b/test/files/run/t5903b/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = this
+ def _1 = 2
+ def unapply(x: Int) = this
+ override def toString = "oops"
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/t5903b/Test_2.scala b/test/files/run/t5903b/Test_2.scala
new file mode 100644
index 0000000000..0f6f80d327
--- /dev/null
+++ b/test/files/run/t5903b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 2 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/run/t5903c.check b/test/files/run/t5903c.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/run/t5903c.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5903c.flags b/test/files/run/t5903c.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903c.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903c/Macros_1.scala b/test/files/run/t5903c/Macros_1.scala
new file mode 100644
index 0000000000..f8baa2275b
--- /dev/null
+++ b/test/files/run/t5903c/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply[T](x: T) = macro Macros.unapplyImpl[T]
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl[T: c.WeakTypeTag](c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ def isEmpty = false
+ def get = 2
+ def unapply(x: Int) = this
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/t5903c/Test_2.scala b/test/files/run/t5903c/Test_2.scala
new file mode 100644
index 0000000000..0f6f80d327
--- /dev/null
+++ b/test/files/run/t5903c/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 2 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/run/t5903d.check b/test/files/run/t5903d.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/run/t5903d.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/t5903d.flags b/test/files/run/t5903d.flags
new file mode 100644
index 0000000000..02ecab49e7
--- /dev/null
+++ b/test/files/run/t5903d.flags
@@ -0,0 +1 @@
+-Xlog-reflective-calls \ No newline at end of file
diff --git a/test/files/run/t5903d/Macros_1.scala b/test/files/run/t5903d/Macros_1.scala
new file mode 100644
index 0000000000..88d714e17b
--- /dev/null
+++ b/test/files/run/t5903d/Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Interpolation {
+ implicit class TestInterpolation(c: StringContext) {
+ object t {
+ def unapply(x: Int) = macro Macros.unapplyImpl
+ }
+ }
+}
+
+object Macros {
+ def unapplyImpl(c: Context)(x: c.Tree) = {
+ import c.universe._
+ q"""
+ new {
+ class Match(x: Int) {
+ def isEmpty = false
+ def get = x
+ }
+ def unapply(x: Int) = new Match(x)
+ }.unapply($x)
+ """
+ }
+}
diff --git a/test/files/run/t5903d/Test_2.scala b/test/files/run/t5903d/Test_2.scala
new file mode 100644
index 0000000000..95c717a9d8
--- /dev/null
+++ b/test/files/run/t5903d/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Interpolation._
+ 42 match {
+ case t"$x" => println(x)
+ }
+}
diff --git a/test/files/run/t6331b.scala b/test/files/run/t6331b.scala
index 3e09965ee8..3a560ea64b 100644
--- a/test/files/run/t6331b.scala
+++ b/test/files/run/t6331b.scala
@@ -1,4 +1,4 @@
-import scala.tools.partest.trace
+import scala.tools.partest.Util.trace
import scala.util.control.Exception.allCatch
diff --git a/test/files/run/t7214.scala b/test/files/run/t7214.scala
index ff1ea8082d..15c2c24fa0 100644
--- a/test/files/run/t7214.scala
+++ b/test/files/run/t7214.scala
@@ -25,7 +25,7 @@ class Crash {
def unapply(a: Alias): Option[Any] = None
}
(t: Any) match {
- case Extractor() =>
+ case Extractor(_) =>
case _ =>
}
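The hunk above appears to track an arity rule of the reworked matcher: an unapply returning Option[Any] binds exactly one value, so the zero-hole pattern Extractor() is evidently no longer accepted and the test switches to Extractor(_). A standalone sketch of the same shape, with Wrapper and Demo as illustrative names that are not part of the patch:

object Wrapper {
  def unapply(a: Any): Option[Any] = Some(a)
}

object Demo {
  def describe(x: Any): String = x match {
    case Wrapper(v) => "bound " + v   // one sub-pattern, matching the single value in Option[Any]
    // case Wrapper() => ...          // zero sub-patterns: presumably why the test above was updated
    case _          => "no match"
  }
}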
diff --git a/test/files/run/value-class-extractor-2.check b/test/files/run/value-class-extractor-2.check
new file mode 100644
index 0000000000..5903b996b6
--- /dev/null
+++ b/test/files/run/value-class-extractor-2.check
@@ -0,0 +1,8 @@
+String
+List
+Int
+Something else
+String
+List
+Int
+Something else
diff --git a/test/files/run/value-class-extractor-2.scala b/test/files/run/value-class-extractor-2.scala
new file mode 100644
index 0000000000..d776c35eda
--- /dev/null
+++ b/test/files/run/value-class-extractor-2.scala
@@ -0,0 +1,108 @@
+final class Opt[+A >: Null](val value: A) extends AnyVal {
+ def get: A = value
+ def isEmpty = value == null
+}
+object Opt {
+ final val None = new Opt[Null](null)
+ def apply[A >: Null](value: A): Opt[A] = if (value == null) None else new Opt[A](value)
+}
+
+object ValueOpt {
+ // public java.lang.String unapply(java.lang.Object);
+ // 0: aload_1
+ // 1: instanceof #16 // class java/lang/String
+ // 4: ifeq 21
+ // 7: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 10: astore_2
+ // 11: ldc #23 // String String
+ // 13: checkcast #16 // class java/lang/String
+ // 16: astore 5
+ // 18: goto 71
+ // 21: aload_1
+ // 22: instanceof #25 // class scala/collection/immutable/List
+ // 25: ifeq 42
+ // 28: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 31: astore_3
+ // 32: ldc #27 // String List
+ // 34: checkcast #16 // class java/lang/String
+ // 37: astore 5
+ // 39: goto 71
+ // 42: aload_1
+ // 43: instanceof #29 // class java/lang/Integer
+ // 46: ifeq 64
+ // 49: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 52: astore 4
+ // 54: ldc #31 // String Int
+ // 56: checkcast #16 // class java/lang/String
+ // 59: astore 5
+ // 61: goto 71
+ // 64: getstatic #21 // Field Opt$.MODULE$:LOpt$;
+ // 67: pop
+ // 68: aconst_null
+ // 69: astore 5
+ // 71: aload 5
+ // 73: areturn
+ def unapply(x: Any): Opt[String] = x match {
+ case _: String => Opt("String")
+ case _: List[_] => Opt("List")
+ case _: Int => Opt("Int")
+ case _ => Opt.None
+ }
+}
+object RegularOpt {
+ // public scala.Option<java.lang.String> unapply(java.lang.Object);
+ // 0: aload_1
+ // 1: instanceof #16 // class java/lang/String
+ // 4: ifeq 20
+ // 7: new #18 // class scala/Some
+ // 10: dup
+ // 11: ldc #20 // String String
+ // 13: invokespecial #23 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 16: astore_2
+ // 17: goto 64
+ // 20: aload_1
+ // 21: instanceof #25 // class scala/collection/immutable/List
+ // 24: ifeq 40
+ // 27: new #18 // class scala/Some
+ // 30: dup
+ // 31: ldc #27 // String List
+ // 33: invokespecial #23 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 36: astore_2
+ // 37: goto 64
+ // 40: aload_1
+ // 41: instanceof #29 // class java/lang/Integer
+ // 44: ifeq 60
+ // 47: new #18 // class scala/Some
+ // 50: dup
+ // 51: ldc #31 // String Int
+ // 53: invokespecial #23 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 56: astore_2
+ // 57: goto 64
+ // 60: getstatic #36 // Field scala/None$.MODULE$:Lscala/None$;
+ // 63: astore_2
+ // 64: aload_2
+ // 65: areturn
+ def unapply(x: Any): Option[String] = x match {
+ case _: String => Some("String")
+ case _: List[_] => Some("List")
+ case _: Int => Some("Int")
+ case _ => None
+ }
+}
+
+object Test {
+ def f(x: Any) = x match {
+ case ValueOpt(s) => s
+ case _ => "Something else"
+ }
+ def g(x: Any) = x match {
+ case RegularOpt(s) => s
+ case _ => "Something else"
+ }
+ val xs = List("abc", Nil, 5, Test)
+
+ def main(args: Array[String]): Unit = {
+ xs map f foreach println
+ xs map g foreach println
+ }
+}
diff --git a/test/files/run/value-class-extractor-seq.check b/test/files/run/value-class-extractor-seq.check
new file mode 100644
index 0000000000..84552a7aa5
--- /dev/null
+++ b/test/files/run/value-class-extractor-seq.check
@@ -0,0 +1,3 @@
+Bip(1, 2, 3)
+Bip(1, 2, c @ Array(3, 4, 5): _*)
+class [I
diff --git a/test/files/run/value-class-extractor-seq.scala b/test/files/run/value-class-extractor-seq.scala
new file mode 100644
index 0000000000..f17a5314f2
--- /dev/null
+++ b/test/files/run/value-class-extractor-seq.scala
@@ -0,0 +1,59 @@
+import scala.runtime.ScalaRunTime.stringOf
+
+final class ArrayOpt[T](val xs: Array[T]) extends AnyVal {
+ def isEmpty = xs == null
+ def get = xs
+}
+
+object Bip {
+ def mkInts(xs: Array[Short]) = xs map (_.toInt)
+ def unapplySeq(x: Any): ArrayOpt[Int] = x match {
+ case xs: Array[Int] => new ArrayOpt(xs)
+ case xs: Array[Short] => new ArrayOpt(mkInts(xs))
+ case _ => new ArrayOpt(null)
+ }
+ // public int[] unapplySeq(java.lang.Object);
+ // 0: aload_1
+ // 1: astore_2
+ // 2: aload_2
+ // 3: instanceof #52 // class "[I"
+ // 6: ifeq 20
+ // 9: aload_2
+ // 10: checkcast #52 // class "[I"
+ // 13: astore_3
+ // 14: aload_3
+ // 15: astore 4
+ // 17: goto 47
+ // 20: aload_2
+ // 21: instanceof #58 // class "[S"
+ // 24: ifeq 44
+ // 27: aload_2
+ // 28: checkcast #58 // class "[S"
+ // 31: astore 5
+ // 33: aload_0
+ // 34: aload 5
+ // 36: invokevirtual #60 // Method mkInts:([S)[I
+ // 39: astore 4
+ // 41: goto 47
+ // 44: aconst_null
+ // 45: astore 4
+ // 47: aload 4
+ // 49: areturn
+}
+
+object Test {
+ def f(x: Any) = x match {
+ case Bip(a, b, c) => s"Bip($a, $b, $c)"
+ case Bip(a, b, c @ _*) => s"Bip($a, $b, c @ ${stringOf(c)}: _*)"
+ case _ => "" + x.getClass
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f(Array[Int](1,2,3)))
+ println(f(Array[Int](1,2,3,4,5)))
+ println(f(Array[Int](1)))
+ }
+ // Bip(1, 2, 3)
+ // Bip(1, 2, c @ [I@782be20e: _*)
+ // class [I
+}
diff --git a/test/files/run/value-class-extractor.check b/test/files/run/value-class-extractor.check
new file mode 100644
index 0000000000..e16447118c
--- /dev/null
+++ b/test/files/run/value-class-extractor.check
@@ -0,0 +1,9 @@
+'a'
+'b'
+'c'
+NoChar
+Some(a)
+Some(b)
+Some(c)
+None
+9
diff --git a/test/files/run/value-class-extractor.scala b/test/files/run/value-class-extractor.scala
new file mode 100644
index 0000000000..3eaffa0c23
--- /dev/null
+++ b/test/files/run/value-class-extractor.scala
@@ -0,0 +1,91 @@
+final class NonNullChar(val get: Char) extends AnyVal {
+ def isEmpty = get == 0.toChar
+ override def toString = if (isEmpty) "NoChar" else s"'$get'"
+}
+object NonNullChar {
+ @inline final val None = new NonNullChar(0.toChar)
+}
+
+final class SomeProduct extends Product3[String, Int, List[String]] {
+ def canEqual(x: Any) = x.isInstanceOf[SomeProduct]
+ def _1 = "abc"
+ def _2 = 5
+ def _3 = List("bippy")
+ def isEmpty = false
+ def get = this
+}
+object SomeProduct {
+ def unapply(x: SomeProduct) = x
+}
+
+object Test {
+ def prod(x: SomeProduct): Int = x match {
+ case SomeProduct(x, y, z) => x.length + y + z.length
+ case _ => -1
+ }
+
+ def f(x: Char): NonNullChar = x match {
+ case 'a' => new NonNullChar('a')
+ case 'b' => new NonNullChar('b')
+ case 'c' => new NonNullChar('c')
+ case _ => NonNullChar.None
+ }
+ // public char f(char);
+ // 0: iload_1
+ // 1: tableswitch { // 97 to 99
+ // 97: 47
+ // 98: 42
+ // 99: 37
+ // default: 28
+ // }
+ // 28: getstatic #19 // Field NonNullChar$.MODULE$:LNonNullChar$;
+ // 31: invokevirtual #23 // Method NonNullChar$.None:()C
+ // 34: goto 49
+ // 37: bipush 99
+ // 39: goto 49
+ // 42: bipush 98
+ // 44: goto 49
+ // 47: bipush 97
+ // 49: ireturn
+ def g(x: Char): Option[Char] = x match {
+ case 'a' => Some('a')
+ case 'b' => Some('b')
+ case 'c' => Some('c')
+ case _ => None
+ }
+ // public scala.Option<java.lang.Object> g(char);
+ // 0: iload_1
+ // 1: tableswitch { // 97 to 99
+ // 97: 64
+ // 98: 49
+ // 99: 34
+ // default: 28
+ // }
+ // 28: getstatic #33 // Field scala/None$.MODULE$:Lscala/None$;
+ // 31: goto 76
+ // 34: new #35 // class scala/Some
+ // 37: dup
+ // 38: bipush 99
+ // 40: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+ // 43: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 46: goto 76
+ // 49: new #35 // class scala/Some
+ // 52: dup
+ // 53: bipush 98
+ // 55: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+ // 58: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 61: goto 76
+ // 64: new #35 // class scala/Some
+ // 67: dup
+ // 68: bipush 97
+ // 70: invokestatic #41 // Method scala/runtime/BoxesRunTime.boxToCharacter:(C)Ljava/lang/Character;
+ // 73: invokespecial #44 // Method scala/Some."<init>":(Ljava/lang/Object;)V
+ // 76: areturn
+ def main(args: Array[String]): Unit = {
+ "abcd" foreach (ch => println(f(ch)))
+ "abcd" foreach (ch => println(g(ch)))
+ println(prod(new SomeProduct))
+ }
+}
+
+
diff --git a/test/partest b/test/partest
index 8d94facba3..f396459c6d 100755
--- a/test/partest
+++ b/test/partest
@@ -46,29 +46,40 @@ if $cygwin; then
SCALA_HOME=`cygpath --unix "$SCALA_HOME"`
fi
-# Constructing the extension classpath
-EXT_CLASSPATH=""
-if [ -z "$EXT_CLASSPATH" ] ; then
- if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
- for ext in "$SCALA_HOME"/lib/* ; do
- if [ -z "$EXT_CLASSPATH" ] ; then
- EXT_CLASSPATH="$ext"
- else
- EXT_CLASSPATH="$EXT_CLASSPATH:$ext"
- fi
- done
- elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
- for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
- ext="$SCALA_HOME/build/pack/lib/$lib.jar"
- if [ -z "$EXT_CLASSPATH" ] ; then
- EXT_CLASSPATH="$ext"
- else
- EXT_CLASSPATH="$EXT_CLASSPATH:$ext"
- fi
- done
+# Let ant construct the classpath used to run partest (downloading partest from maven if necessary)
+# PARTEST_CLASSPATH=""
+if [ -z "$PARTEST_CLASSPATH" ] ; then
+ if [ ! -f "$SCALA_HOME/build/pack/partest.properties" ] ; then
+ (cd "$SCALA_HOME" && ant -q test.suite.init) # builds pack, downloads partest and writes classpath to build/pack/partest.properties
fi
+
+ PARTEST_CLASSPATH=$( cat "$SCALA_HOME/build/pack/partest.properties" | grep partest.classpath | sed -e 's/\\:/:/g' | cut -f2- -d= )
+
+ # sanity check, disabled to save time
+ # $( javap -classpath $PARTEST_CLASSPATH scala.tools.partest.nest.NestRunner &> /dev/null ) || unset PARTEST_CLASSPATH
fi
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+# if [ -f "$SCALA_HOME/lib/scala-partest.jar" ] ; then
+# for ext in "$SCALA_HOME"/lib/* ; do
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+# PARTEST_CLASSPATH="$ext"
+# else
+# PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
+# fi
+# done
+# elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
+# for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
+# ext="$SCALA_HOME/build/pack/lib/$lib.jar"
+# if [ -z "$PARTEST_CLASSPATH" ] ; then
+# PARTEST_CLASSPATH="$ext"
+# else
+# PARTEST_CLASSPATH="$PARTEST_CLASSPATH:$ext"
+# fi
+# done
+# fi
+# fi
+
# Locate a javac command
# Try: JAVA_HOME, sibling to specific JAVACMD, or PATH
# Don't fail if there is no javac, since not all tests require it.
@@ -101,7 +112,7 @@ if $cygwin; then
JAVAC_CMD=`cygpath --$format "$JAVAC_CMD"`
fi
SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
- EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
+ PARTEST_CLASSPATH=`cygpath --path --$format "$PARTEST_CLASSPATH"`
fi
# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
@@ -123,7 +134,7 @@ fi
# be quoted: otherwise an empty string will appear as a command line
# argument, and java will think that is the program to run.
"${JAVACMD:=java}" \
- $JAVA_OPTS -cp "$EXT_CLASSPATH" \
+ $JAVA_OPTS -cp "$PARTEST_CLASSPATH" \
${partestDebugStr} \
${color_opts} \
-Dfile.encoding=UTF-8 \
@@ -132,4 +143,4 @@ fi
-Dpartest.java_opts="${JAVA_OPTS}" \
-Dpartest.scalac_opts="${SCALAC_OPTS}" \
-Dpartest.javac_cmd="${JAVAC_CMD}" \
- scala.tools.partest.nest.NestRunner "$@"
+ scala.tools.partest.nest.ConsoleRunner "$@"
diff --git a/test/pending/neg/t6680a.scala b/test/pending/neg/t6680a.scala
new file mode 100644
index 0000000000..745334b1cd
--- /dev/null
+++ b/test/pending/neg/t6680a.scala
@@ -0,0 +1,13 @@
+case class Cell[A](var x: A)
+object Test {
+ def f1(x: Any) = x match { case y @ Cell(_) => y } // Inferred type is Cell[Any]
+ // def f2(x: Cell[_]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[_]
+ // def f3[A](x: Cell[A]) = x match { case y @ Cell(_) => y } // Inferred type is Cell[A]
+
+ def main(args: Array[String]): Unit = {
+ // val x = new Cell(1)
+ // val y = f1(x)
+ // y.x = "abc"
+ // println(x.x + 1)
+ }
+} \ No newline at end of file
diff --git a/test/pending/neg/t6680b.check b/test/pending/neg/t6680b.check
new file mode 100644
index 0000000000..a16812d91d
--- /dev/null
+++ b/test/pending/neg/t6680b.check
@@ -0,0 +1,6 @@
+t6680b.scala:8: error: type mismatch;
+ found : String("not what you\'d expect")
+ required: ?Hidden1 where type ?Hidden1 (this is a GADT skolem)
+ case Concrete(f) => f("not what you'd expect")
+ ^
+one error found
diff --git a/test/pending/neg/t6680b.scala b/test/pending/neg/t6680b.scala
new file mode 100644
index 0000000000..e9f6468315
--- /dev/null
+++ b/test/pending/neg/t6680b.scala
@@ -0,0 +1,10 @@
+trait Super[+A]
+// `Hidden` must occur in both variance positions (covariant/contravariant) for the sneakiness to work
+// this way type inference will infer Any for `Hidden` and `A` in the pattern below
+case class Concrete[Hidden, +A](havoc: Hidden => Hidden) extends Super[A]
+
+object Test extends App {
+ (Concrete((x: Int) => x): Super[Any]) match {
+ case Concrete(f) => f("not what you'd expect")
+ }
+} \ No newline at end of file
diff --git a/test/pending/neg/t6680c.scala b/test/pending/neg/t6680c.scala
new file mode 100644
index 0000000000..f69663a71b
--- /dev/null
+++ b/test/pending/neg/t6680c.scala
@@ -0,0 +1,17 @@
+package s
+
+trait Stream[+A]
+case class Unfold[S,+A](s: S, f: S => Option[(A,S)]) extends Stream[A]
+
+object Stream {
+ def fromList[A](a: List[A]): Stream[A] =
+ Unfold(a, (l:List[A]) => l.headOption.map((_,l.tail)))
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val res = Stream.fromList(List(1,2,3,4))
+
+ res match { case Unfold(s, f) => f("a string!") }
+ }
+}
diff --git a/versions.properties b/versions.properties
new file mode 100644
index 0000000000..044c57bb0f
--- /dev/null
+++ b/versions.properties
@@ -0,0 +1,7 @@
+starr.version=2.11.0-M4
+
+# the below is used for depending on dependencies like partest
+scala.binary.version=2.11.0-M4
+partest.version.number=1.0-RC4
+scala-xml.version.number=1.0-RC2
+scala-parser-combinators.version.number=1.0-RC1