diff options
Diffstat (limited to 'src')
103 files changed, 2787 insertions, 1479 deletions
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml index 412d7caab6..a2c3eefbca 100644 --- a/src/build/maven/maven-deploy.xml +++ b/src/build/maven/maven-deploy.xml @@ -1,21 +1,34 @@ <?xml version="1.0" encoding="UTF-8"?> +<!-- +THIS FILE WILL SOON SELF DESTRUCT; DO NOT USE +see publish.* targets in /build.xml +--> <project name="sabbus-maven-deploy" xmlns:artifact="urn:maven-artifact-ant"> <description> SuperSabbus extension for deploying a distribution to Maven. THIS FILE IS MEANT TO BE RUN STANDALONE IN THE MAVEN "distpack" DIRECTORY </description> + <macrodef name="deploy-remote"> <attribute name="jar" default=""/> <attribute name="pom"/> <element name="artifacts" implicit="true" optional="true"/> <sequential> - <artifact:deploy file="@{jar}" settingsFile="${settings.file}"> - <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" /> - <artifact:pom refid="@{pom}" /> - <artifacts/> - </artifact:deploy> + <if><equals arg1="@{jar}" arg2="true"/><then> + <artifact:deploy settingsFile="${settings.file}"> + <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" /> + <artifact:pom refid="@{pom}" /> + <artifacts/> + </artifact:deploy> + </then><else> + <artifact:deploy file="@{jar}" settingsFile="${settings.file}"> + <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" /> + <artifact:pom refid="@{pom}" /> + <artifacts/> + </artifact:deploy> + </else></if> </sequential> </macrodef> @@ -24,11 +37,19 @@ <attribute name="pom"/> <element name="artifacts" implicit="true" optional="true"/> <sequential> - <artifact:install file="@{jar}"> - <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" /> - <artifact:pom refid="@{pom}" /> - <artifacts/> - </artifact:install> + <if><equals arg1="@{jar}" arg2="true"/><then> + <artifact:install> + <artifact:localRepository path="${local.repository}" 
id="${repository.credentials.id}" /> + <artifact:pom refid="@{pom}" /> + <artifacts/> + </artifact:install> + </then><else> + <artifact:install file="@{jar}"> + <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" /> + <artifact:pom refid="@{pom}" /> + <artifacts/> + </artifact:install> + </else></if> </sequential> </macrodef> @@ -46,26 +67,25 @@ </sequential> </macrodef> - <macrodef name="deploy-one"> - <attribute name="dir" default=""/> + <macrodef name="filter-pom"> + <attribute name="path" /> <attribute name="name" /> - <attribute name="local" default="false"/> - <attribute name="signed" default="false"/> <sequential> - <local name="path"/> <property name="path" value="@{dir}@{name}/@{name}"/> - - <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo> - <copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true"> <filterset> <filter token="VERSION" value="${maven.version.number}" /> <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" /> <filter token="XML_VERSION" value="${scala-xml.version.number}" /> <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" /> + <filter token="CONTINUATIONS_PLUGIN_VERSION" value="${scala-continuations-plugin.version.number}" /> + <filter token="CONTINUATIONS_LIBRARY_VERSION" value="${scala-continuations-library.version.number}" /> + <filter token="SCALA_SWING_VERSION" value="${scala-swing.version.number}" /> <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" /> <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" /> <filter token="JLINE_VERSION" value="${jline.version}" /> + <filter token="AKKA_ACTOR_VERSION" value="${akka-actor.version.number}" /> + <filter token="ACTORS_MIGRATION_VERSION" value="${actors-migration.version.number}" /> <!-- TODO modularize compiler. 
<filter token="SCALA_COMPILER_DOC_VERSION" value="${scala-compiler-doc.version.number}" /> @@ -74,6 +94,20 @@ </filterset> </copy> <artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" /> + </sequential> + </macrodef> + + <macrodef name="deploy-one"> + <attribute name="name" /> + <attribute name="local" default="false"/> + <attribute name="signed" default="false"/> + + <sequential> + <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/> + + <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo> + + <filter-pom name="@{name}" path="@{path}"/> <if><equals arg1="@{signed}" arg2="false"/><then> <if><isset property="docs.skip"/><then> @@ -108,26 +142,94 @@ </sequential> </macrodef> + <macrodef name="deploy-jar"> + <attribute name="name" /> + <attribute name="local" default="false"/> + <attribute name="signed" default="false"/> + + <sequential> + <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/> + + <echo>Deploying ${path}.jar with ${path}-pom.xml.</echo> + + <filter-pom name="@{name}" path="@{path}"/> + + <if><equals arg1="@{signed}" arg2="false"/><then> + <deploy-to local="@{local}" jar="${path}.jar" pom="@{name}.pom"/> + </then><else> + <local name="repo"/> + <if><equals arg1="@{local}" arg2="false"/><then> + <property name="repo" value="${remote.repository}"/> + </then><else> + <property name="repo" value="${local.repository}"/> + </else></if> + <artifact:mvn failonerror="true"> + <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" /> + <arg value="-Durl=${repo}" /> + <arg value="-DrepositoryId=${repository.credentials.id}" /> + <arg value="-DpomFile=${path}-pom-filtered.xml" /> + <arg value= "-Dfile=${path}.jar" /> + <arg value="-Pgpg" /> + <arg value="-Dgpg.useagent=true" /> + </artifact:mvn> + </else></if> + </sequential> + </macrodef> + + <macrodef name="deploy-pom"> + <attribute name="name" /> + <attribute name="local" default="false"/> + <attribute 
name="signed" default="false"/> + + <sequential> + <local name="path"/> <property name="path" value="${dist.maven}/@{name}/@{name}"/> + + <echo>Deploying ${path}-pom.xml.</echo> + + <filter-pom name="@{name}" path="@{path}"/> + + <if><equals arg1="@{signed}" arg2="false"/><then> + <deploy-to local="@{local}" pom="@{name}.pom"/> + </then><else> + <local name="repo"/> + <if><equals arg1="@{local}" arg2="false"/><then> + <property name="repo" value="${remote.repository}"/> + </then><else> + <property name="repo" value="${local.repository}"/> + </else></if> + <artifact:mvn failonerror="true"> + <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" /> + <arg value="-Durl=${repo}" /> + <arg value="-DrepositoryId=${repository.credentials.id}" /> + <arg value="-DpomFile=${path}-pom-filtered.xml" /> + <arg value= "-Dfile=${path}-pom-filtered.xml" /> + <arg value="-Pgpg" /> + <arg value="-Dgpg.useagent=true" /> + </artifact:mvn> + </else></if> + </sequential> + </macrodef> + <macrodef name="deploy"> - <attribute name="dir" default=""/> <attribute name="local" default="false"/> <attribute name="signed" default="false"/> <sequential> - <deploy-one dir="@{dir}" name="scala-library" local="@{local}" signed="@{signed}"/> - <deploy-one dir="@{dir}" name="scala-reflect" local="@{local}" signed="@{signed}"/> - <deploy-one dir="@{dir}" name="scala-compiler" local="@{local}" signed="@{signed}"/> + <deploy-one name="scala-library" local="@{local}" signed="@{signed}"/> + <deploy-one name="scala-reflect" local="@{local}" signed="@{signed}"/> + <deploy-one name="scala-compiler" local="@{local}" signed="@{signed}"/> <!-- TODO modularize compiler. 
- <deploy-one dir="@{dir}" name="scala-compiler-doc" local="@{local}" signed="@{signed}"/> - <deploy-one dir="@{dir}" name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/> + <deploy-one name="scala-compiler-doc" local="@{local}" signed="@{signed}"/> + <deploy-one name="scala-compiler-interactive" local="@{local}" signed="@{signed}"/> --> - <deploy-one dir="@{dir}" name="scala-actors" local="@{local}" signed="@{signed}"/> - <deploy-one dir="@{dir}" name="scalap" local="@{local}" signed="@{signed}"/> + <deploy-one name="scala-actors" local="@{local}" signed="@{signed}"/> + <deploy-one name="scalap" local="@{local}" signed="@{signed}"/> </sequential> </macrodef> + <target name="boot.maven"> <!-- Pull in properties from build --> <property file="build.properties" /> @@ -162,7 +264,18 @@ </echo> </target> - <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo."> <deploy/> </target> - <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo."> <deploy local="true"/> </target> - <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo."> <deploy signed="true"/> </target> + <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo."> + <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish` FROM /build.xml"/> + <deploy/> + </target> + + <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo."> + <echo message="WARNING!1! THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish.local` FROM /build.xml"/> + <deploy local="true"/> + </target> + + <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo."> + <echo message="WARNING!1! 
THIS TARGET HAS BEEN DEPRECATED -- CALL `ant publish.signed` FROM /build.xml"/> + <deploy signed="true"/> + </target> </project> diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml index a16fe22343..4a000b27a1 100644 --- a/src/build/maven/scala-compiler-pom.xml +++ b/src/build/maven/scala-compiler-pom.xml @@ -50,7 +50,7 @@ <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId> <version>@PARSER_COMBINATORS_VERSION@</version> </dependency> - <dependency> <!-- for scala-compiler-repl--> + <dependency> <!-- for scala-compiler-repl; once it moves there, make it required --> <groupId>jline</groupId> <artifactId>jline</artifactId> <version>@JLINE_VERSION@</version> diff --git a/src/build/maven/scala-dist-pom.xml b/src/build/maven/scala-dist-pom.xml new file mode 100644 index 0000000000..413da928bb --- /dev/null +++ b/src/build/maven/scala-dist-pom.xml @@ -0,0 +1,75 @@ +<?xml version="1.0"?> +<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + <modelVersion>4.0.0</modelVersion> + <groupId>org.scala-lang</groupId> + <artifactId>scala-dist</artifactId> + <packaging>jar</packaging> + <version>@VERSION@</version> + <name>Scala Distribution Artifacts</name> + <description>The Artifacts Distributed with Scala</description> + <url>http://www.scala-lang.org/</url> + <inceptionYear>2002</inceptionYear> + <organization> + <name>LAMP/EPFL</name> + <url>http://lamp.epfl.ch/</url> + </organization> + <licenses> + <license> + <name>BSD 3-Clause</name> + <url>http://www.scala-lang.org/license.html</url> + <distribution>repo</distribution> + </license> + </licenses> + <scm> + <connection>scm:git:git://github.com/scala/scala.git</connection> + <url>https://github.com/scala/scala.git</url> + </scm> + <issueManagement> + <system>JIRA</system> + <url>https://issues.scala-lang.org/</url> + </issueManagement> + <dependencies> + 
<dependency> + <groupId>org.scala-lang</groupId> + <artifactId>scala-library-all</artifactId> + <version>@VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang</groupId> + <artifactId>scala-compiler</artifactId> + <version>@VERSION@</version> + </dependency> + <!-- duplicated from scala-compiler, where it's optional, + so that resolving scala-dist's transitive dependencies does not include jline, + even though we need to include it in the dist, but macros depending on the compiler + shouldn't have to require jline... + another reason to modularize and move the dependency to scala-compiler-repl + TODO: remove duplication once we have the scala-compiler-repl module --> + <dependency> + <groupId>jline</groupId> + <artifactId>jline</artifactId> + <version>@JLINE_VERSION@</version> + </dependency> + </dependencies> + <distributionManagement> + <repository> + <id>scala-tools.org</id> + <url>@RELEASE_REPOSITORY@</url> + </repository> + <snapshotRepository> + <id>scala-tools.org</id> + <url>@SNAPSHOT_REPOSITORY@</url> + <uniqueVersion>false</uniqueVersion> + </snapshotRepository> + </distributionManagement> + <developers> + <developer> + <id>lamp</id> + <name>EPFL LAMP</name> + </developer> + <developer> + <id>Typesafe</id> + <name>Typesafe, Inc.</name> + </developer> + </developers> +</project> diff --git a/src/build/maven/scala-library-all-pom.xml b/src/build/maven/scala-library-all-pom.xml new file mode 100644 index 0000000000..f34a28e79a --- /dev/null +++ b/src/build/maven/scala-library-all-pom.xml @@ -0,0 +1,99 @@ +<?xml version="1.0"?> +<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + <modelVersion>4.0.0</modelVersion> + <groupId>org.scala-lang</groupId> + <artifactId>scala-library-all</artifactId> + <packaging>pom</packaging> + <version>@VERSION@</version> + <name>Scala Library Powerpack</name> + <description>The Scala Standard 
Library and Official Modules</description> + <url>http://www.scala-lang.org/</url> + <inceptionYear>2002</inceptionYear> + <organization> + <name>LAMP/EPFL</name> + <url>http://lamp.epfl.ch/</url> + </organization> + <licenses> + <license> + <name>BSD 3-Clause</name> + <url>http://www.scala-lang.org/license.html</url> + <distribution>repo</distribution> + </license> + </licenses> + <scm> + <connection>scm:git:git://github.com/scala/scala.git</connection> + <url>https://github.com/scala/scala.git</url> + </scm> + <issueManagement> + <system>JIRA</system> + <url>https://issues.scala-lang.org/</url> + </issueManagement> + <dependencies> + <dependency> + <groupId>org.scala-lang</groupId> + <artifactId>scala-library</artifactId> + <version>@VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang</groupId> + <artifactId>scala-reflect</artifactId> + <version>@VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang.modules</groupId> + <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId> + <version>@XML_VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang.modules</groupId> + <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId> + <version>@PARSER_COMBINATORS_VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang.plugins</groupId> + <artifactId>scala-continuations-plugin_@SCALA_BINARY_VERSION@</artifactId> + <version>@CONTINUATIONS_PLUGIN_VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang.plugins</groupId> + <artifactId>scala-continuations-library_@SCALA_BINARY_VERSION@</artifactId> + <version>@CONTINUATIONS_LIBRARY_VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang.modules</groupId> + <artifactId>scala-swing_@SCALA_BINARY_VERSION@</artifactId> + <version>@SCALA_SWING_VERSION@</version> + </dependency> + <dependency> + <groupId>com.typesafe.akka</groupId> + <artifactId>akka-actor_@SCALA_BINARY_VERSION@</artifactId> + 
<version>@AKKA_ACTOR_VERSION@</version> + </dependency> + <dependency> + <groupId>org.scala-lang</groupId> + <artifactId>scala-actors-migration_@SCALA_BINARY_VERSION@</artifactId> + <version>@ACTORS_MIGRATION_VERSION@</version> + </dependency> + </dependencies> + <distributionManagement> + <repository> + <id>scala-tools.org</id> + <url>@RELEASE_REPOSITORY@</url> + </repository> + <snapshotRepository> + <id>scala-tools.org</id> + <url>@SNAPSHOT_REPOSITORY@</url> + <uniqueVersion>false</uniqueVersion> + </snapshotRepository> + </distributionManagement> + <developers> + <developer> + <id>lamp</id> + <name>EPFL LAMP</name> + </developer> + <developer> + <id>Typesafe</id> + <name>Typesafe, Inc.</name> + </developer> + </developers> +</project> diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala index 4c30a9a85c..280baa2a42 100644 --- a/src/compiler/scala/reflect/macros/compiler/Errors.scala +++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala @@ -51,7 +51,7 @@ trait Errors extends Traces { def MacroBundleNonStaticError() = bundleRefError("macro bundles must be static") - def MacroBundleWrongShapeError() = bundleRefError("macro bundles must be monomorphic traits extending either BlackboxMacro or WhiteboxMacro and not implementing their `val c: BlackboxContext/WhiteboxContext` member") + def MacroBundleWrongShapeError() = bundleRefError("macro bundles must be concrete classes having a single constructor with a `val c: Context` parameter") // compatibility errors diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala index e4851632a5..d35f1c32a9 100644 --- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala +++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala @@ -40,36 +40,11 @@ trait Resolvers { } val untypedImplRef = typer.silent(_.typedTypeConstructor(maybeBundleRef)) match { - case 
SilentResultValue(result) if mightBeMacroBundleType(result.tpe) => - val bundleProto = result.tpe.typeSymbol - val bundlePkg = bundleProto.enclosingPackageClass - if (!isMacroBundleProtoType(bundleProto.tpe)) MacroBundleWrongShapeError() - if (!bundleProto.owner.isStaticOwner) MacroBundleNonStaticError() - - // synthesize the bundle, i.e. given a static `trait Foo extends Macro { def expand = ... } ` - // create a top-level definition `class Foo$Bundle(val c: BlackboxContext/WhiteboxContext) extends Foo` in a package next to `Foo` - val bundlePid = gen.mkUnattributedRef(bundlePkg) - val bundlePrefix = - if (bundlePkg == EmptyPackageClass) bundleProto.fullName('$') - else bundleProto.fullName('$').substring(bundlePkg.fullName('$').length + 1) - val bundleName = TypeName(bundlePrefix + tpnme.MACRO_BUNDLE_SUFFIX) - val existingBundle = bundleProto.enclosingPackageClass.info.decl(bundleName) - if (!currentRun.compiles(existingBundle)) { - val contextType = if (isBlackboxMacroBundleType(bundleProto.tpe)) BlackboxContextClass.tpe else WhiteboxContextClass.tpe - def mkContextValDef(flags: Long) = ValDef(Modifiers(flags), nme.c, TypeTree(contextType), EmptyTree) - val contextField = mkContextValDef(PARAMACCESSOR) - val contextParam = mkContextValDef(PARAM | PARAMACCESSOR) - val bundleCtor = DefDef(Modifiers(), nme.CONSTRUCTOR, Nil, List(List(contextParam)), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))) - val bundleParent = gen.mkAppliedTypeTree(Ident(bundleProto), bundleProto.typeParams.map(sym => Ident(sym.name))) - val bundleTemplate = Template(List(bundleParent), noSelfType, List(contextField, bundleCtor)) - val bundle = atPos(bundleProto.pos)(ClassDef(NoMods, bundleName, bundleProto.typeParams.map(TypeDef(_)), bundleTemplate)) - currentRun.compileLate(bundleName + ".scala", PackageDef(bundlePid, List(bundle))) - } - - // synthesize the macro impl reference, which is going to look like: - // `new FooBundle(???).macroName` plus the optional type 
arguments - val bundleInstance = New(Select(bundlePid, bundleName), List(List(Ident(Predef_???)))) - atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(bundleInstance, methName), targs)) + case SilentResultValue(result) if looksLikeMacroBundleType(result.tpe) => + val bundle = result.tpe.typeSymbol + if (!isMacroBundleType(bundle.tpe)) MacroBundleWrongShapeError() + if (!bundle.owner.isStaticOwner) MacroBundleNonStaticError() + atPos(macroDdef.rhs.pos)(gen.mkTypeApply(Select(New(bundle, Ident(Predef_???)), methName), targs)) case _ => macroDdef.rhs } diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala index 5936b52890..02c1f7c431 100644 --- a/src/compiler/scala/reflect/macros/compiler/Validators.scala +++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala @@ -26,9 +26,9 @@ trait Validators { if (macroImpl.isOverloaded) MacroImplOverloadedError() val implicitParams = aparamss.flatten filter (_.isImplicit) if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams) - val declaredInStaticObject = isImplMethod && (macroImplOwner.isStaticOwner || macroImplOwner.moduleClass.isStaticOwner) - val declaredInTopLevelClass = isImplBundle && macroImplOwner.owner.isPackageClass - if (!declaredInStaticObject && !declaredInTopLevelClass) MacroImplReferenceWrongShapeError() + val effectiveOwner = if (isImplMethod) macroImplOwner else macroImplOwner.owner + val declaredInStaticObject = effectiveOwner.isStaticOwner || effectiveOwner.moduleClass.isStaticOwner + if (!declaredInStaticObject) MacroImplReferenceWrongShapeError() } private def checkMacroDefMacroImplCorrespondence() = { @@ -93,20 +93,20 @@ trait Validators { * * For the following macro impl: * def fooBar[T: c.WeakTypeTag] - * (c: scala.reflect.macros.BlackboxContext) + * (c: scala.reflect.macros.blackbox.Context) * (xs: c.Expr[List[T]]) * : c.Expr[T] = ... 
* * This function will return: - * (c: scala.reflect.macros.BlackboxContext)(xs: c.Expr[List[T]])c.Expr[T] + * (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T] * * Note that type tag evidence parameters are not included into the result. * Type tag context bounds for macro impl tparams are optional. * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here. * * This method cannot be reduced to just macroImpl.info, because macro implementations might - * come in different shapes. If the implementation is an apply method of a BlackboxMacro/WhiteboxMacro-compatible object, - * then it won't have (c: BlackboxContext/WhiteboxContext) in its parameters, but will rather refer to BlackboxMacro/WhiteboxMacro.c. + * come in different shapes. If the implementation is an apply method of a *box.Macro-compatible object, + * then it won't have (c: *box.Context) in its parameters, but will rather refer to *boxMacro.c. * * @param macroImpl The macro implementation symbol */ @@ -123,8 +123,8 @@ trait Validators { * def foo[T](xs: List[T]): T = macro fooBar * * This function will return: - * (c: scala.reflect.macros.BlackboxContext)(xs: c.Expr[List[T]])c.Expr[T] or - * (c: scala.reflect.macros.WhiteboxContext)(xs: c.Expr[List[T]])c.Expr[T] + * (c: scala.reflect.macros.blackbox.Context)(xs: c.Expr[List[T]])c.Expr[T] or + * (c: scala.reflect.macros.whitebox.Context)(xs: c.Expr[List[T]])c.Expr[T] * * Note that type tag evidence parameters are not included into the result. * Type tag context bounds for macro impl tparams are optional. 
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala index 7b79b52a18..87dac18849 100644 --- a/src/compiler/scala/reflect/macros/contexts/Context.scala +++ b/src/compiler/scala/reflect/macros/contexts/Context.scala @@ -3,8 +3,8 @@ package contexts import scala.tools.nsc.Global -abstract class Context extends scala.reflect.macros.BlackboxContext - with scala.reflect.macros.WhiteboxContext +abstract class Context extends scala.reflect.macros.blackbox.Context + with scala.reflect.macros.whitebox.Context with Aliases with Enclosures with Names diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala index bb88c8d5e1..5e931817b5 100644 --- a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala +++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala @@ -8,10 +8,6 @@ trait Enclosures { import universe._ - type MacroRole = analyzer.MacroRole - def APPLY_ROLE = analyzer.APPLY_ROLE - def macroRole: MacroRole - private lazy val site = callsiteTyper.context private lazy val enclTrees = site.enclosingContextChain map (_.tree) private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition) diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala index 450cb4d9ea..ecdd48db22 100644 --- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala @@ -2,7 +2,9 @@ package scala.reflect.macros package runtime import scala.reflect.runtime.ReflectionUtils -import scala.reflect.macros.{Context => ApiContext} +import scala.reflect.macros.blackbox.{Context => BlackboxContext} +import scala.reflect.macros.whitebox.{Context => WhiteboxContext} +import java.lang.reflect.{Constructor => jConstructor} 
trait JavaReflectionRuntimes { self: scala.tools.nsc.typechecker.Analyzer => @@ -19,8 +21,15 @@ trait JavaReflectionRuntimes { macroLogVerbose(s"successfully loaded macro impl as ($implClass, $implMeth)") args => { val implObj = - if (isBundle) implClass.getConstructor(classOf[ApiContext]).newInstance(args.c) - else ReflectionUtils.staticSingletonInstance(implClass) + if (isBundle) { + def isMacroContext(clazz: Class[_]) = clazz == classOf[BlackboxContext] || clazz == classOf[WhiteboxContext] + def isBundleCtor(ctor: jConstructor[_]) = ctor.getParameterTypes match { + case Array(param) if isMacroContext(param) => true + case _ => false + } + val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor) + bundleCtor.newInstance(args.c) + } else ReflectionUtils.staticSingletonInstance(implClass) val implArgs = if (isBundle) args.others else args.c +: args.others implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*) } diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 7de3341304..5fd9c0db34 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -4,7 +4,7 @@ package runtime import scala.reflect.internal.Flags._ import scala.reflect.runtime.ReflectionUtils -trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes { +trait MacroRuntimes extends JavaReflectionRuntimes { self: scala.tools.nsc.typechecker.Analyzer => import global._ @@ -19,8 +19,14 @@ trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors, * `null` otherwise. */ + def macroRuntime(expandee: Tree): MacroRuntime = pluginsMacroRuntime(expandee) + + /** Default implementation of `macroRuntime`. 
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroRuntime for more details) + */ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime] - def macroRuntime(macroDef: Symbol): MacroRuntime = { + def standardMacroRuntime(expandee: Tree): MacroRuntime = { + val macroDef = expandee.symbol macroLogVerbose(s"looking for macro implementation: $macroDef") if (fastTrack contains macroDef) { macroLogVerbose("macro expansion is serviced by a fast track") @@ -43,8 +49,7 @@ trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes /** Abstracts away resolution of macro runtimes. */ type MacroRuntime = MacroArgs => Any - class MacroRuntimeResolver(val macroDef: Symbol) extends JavaReflectionResolvers - with ScalaReflectionResolvers { + class MacroRuntimeResolver(val macroDef: Symbol) extends JavaReflectionResolvers { val binding = loadMacroImplBinding(macroDef).get val isBundle = binding.isBundle val className = binding.className @@ -57,7 +62,6 @@ trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes try { macroLogVerbose(s"resolving macro implementation as $className.$methName (isBundle = $isBundle)") macroLogVerbose(s"classloader is: ${ReflectionUtils.show(defaultMacroClassloader)}") - // resolveScalaReflectionRuntime(defaultMacroClassloader) resolveJavaReflectionRuntime(defaultMacroClassloader) } catch { case ex: Exception => diff --git a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala deleted file mode 100644 index 50f64310f8..0000000000 --- a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala +++ /dev/null @@ -1,31 +0,0 @@ -package scala.reflect.macros -package runtime - -import scala.reflect.runtime.{universe => ru} - -trait ScalaReflectionRuntimes { - self: scala.tools.nsc.typechecker.Analyzer => - - trait ScalaReflectionResolvers { - self: MacroRuntimeResolver 
=> - - def resolveScalaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = { - val macroMirror: ru.JavaMirror = ru.runtimeMirror(classLoader) - val implContainerSym = macroMirror.classSymbol(Class.forName(className, true, classLoader)) - val implMethSym = implContainerSym.typeSignature.member(ru.TermName(methName)).asMethod - macroLogVerbose(s"successfully loaded macro impl as ($implContainerSym, $implMethSym)") - args => { - val implContainer = - if (isBundle) { - val implCtorSym = implContainerSym.typeSignature.member(ru.nme.CONSTRUCTOR).asMethod - macroMirror.reflectClass(implContainerSym).reflectConstructor(implCtorSym)(args.c) - } else { - macroMirror.reflectModule(implContainerSym.module.asModule).instance - } - val implMeth = macroMirror.reflect(implContainer).reflectMethod(implMethSym) - val implArgs = if (isBundle) args.others else args.c +: args.others - implMeth(implArgs: _*) - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 5f079a428b..5492e563dd 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1710,25 +1710,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - /** Create and compile a synthetic compilation unit from the provided tree. - * - * This needs to create a virtual file underlying the compilation unit in order to appease SBT. - * However this file cannot have a randomly generated name, because then SBT 0.13 goes into a vicious loop - * as described on the mailing list: https://groups.google.com/forum/#!msg/scala-user/r1SgSoVfs0U/Wv4av0LOKukJ - * Therefore I have introduced an additional parameter that makes everyone specify meaningful file names. 
- */ - def compileLate(virtualFileName: String, code: PackageDef) { - // compatibility with SBT - // on the one hand, we need to specify some jfile here, otherwise sbt crashes with an NPE (SI-6870) - // on the other hand, we can't specify the obvious enclosingUnit, because then sbt somehow fails to run tests using type macros - val fakeJfile = new java.io.File(virtualFileName) - val virtualFile = new VirtualFile(virtualFileName) { override def file = fakeJfile } - val sourceFile = new BatchSourceFile(virtualFile, code.toString) - val unit = new CompilationUnit(sourceFile) - unit.body = code - compileLate(unit) - } - /** Reset package class to state at typer (not sure what this * is needed for?) */ diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 61ea9230a7..d122a1a207 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -623,15 +623,6 @@ self => syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false) } - /** Check that tree is a legal clause of a forSome. 
*/ - def checkLegalExistential(t: Tree) = t match { - case TypeDef(_, _, _, TypeBoundsTree(_, _)) | - ValDef(_, _, _, EmptyTree) | EmptyTree => - ; - case _ => - syntaxError(t.pos, "not a legal existential clause", skipIt = false) - } - /* -------------- TOKEN CLASSES ------------------------------------------- */ def isModifier: Boolean = in.token match { @@ -885,9 +876,14 @@ self => } } private def makeExistentialTypeTree(t: Tree) = { - val whereClauses = refinement() - whereClauses foreach checkLegalExistential - ExistentialTypeTree(t, whereClauses) + // EmptyTrees in the result of refinement() stand for parse errors + // so it's okay for us to filter them out here + ExistentialTypeTree(t, refinement() flatMap { + case t @ TypeDef(_, _, _, TypeBoundsTree(_, _)) => Some(t) + case t @ ValDef(_, _, _, EmptyTree) => Some(t) + case EmptyTree => None + case _ => syntaxError(t.pos, "not a legal existential clause", skipIt = false); None + }) } /** {{{ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala index 5be5abd895..dd2d63ad17 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala @@ -835,6 +835,7 @@ abstract class BCodeTypes extends BCodeIdiomatic { if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0, if (sym.isArtifact) ACC_SYNTHETIC else 0, if (sym.isClass && !sym.isInterface) ACC_SUPER else 0, + if (sym.hasEnumFlag) ACC_ENUM else 0, if (sym.isVarargsMethod) ACC_VARARGS else 0, if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0 ) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index e92f8c2541..7e1a82a155 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -244,6 +244,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters 
with GenJVMASM { if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0, if (sym.isArtifact) ACC_SYNTHETIC else 0, if (sym.isClass && !sym.isInterface) ACC_SUPER else 0, + if (sym.hasEnumFlag) ACC_ENUM else 0, if (sym.isVarargsMethod) ACC_VARARGS else 0, if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0 ) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 7932dd3459..9875d27047 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -792,7 +792,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val superclazz = AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType)) addCompanionObject(consts ::: statics ::: predefs, atPos(pos) { - ClassDef(mods, name, List(), + ClassDef(mods | Flags.ENUM, name, List(), makeTemplate(superclazz :: interfaces, body)) }) } @@ -811,10 +811,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { skipAhead() accept(RBRACE) } - // The STABLE flag is to signal to namer that this was read from a - // java enum, and so should be given a Constant type (thereby making - // it usable in annotations.) - ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr) + ValDef(Modifiers(Flags.ENUM | Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr) } } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 183752d4a2..7837f9a11a 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -126,10 +126,11 @@ object Plugin { } /** Load all plugins specified by the arguments. - * Each of `jars` must be a valid plugin archive or exploded archive. + * Each location of `paths` must be a valid plugin archive or exploded archive. 
+ * Each of `paths` must define one plugin. * Each of `dirs` may be a directory containing arbitrary plugin archives. * Skips all plugins named in `ignoring`. - * A single classloader is created and used to load all of them. + * A classloader is created to load each plugin. */ def loadAllFrom( paths: List[List[Path]], diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 7568c789fb..6ec364bcb6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -94,11 +94,11 @@ trait ScalaSettings extends AbsScalaSettings val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.") val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.") val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)") - val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.") - val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).") + val plugin = MultiStringSetting("-Xplugin", "paths", "Load a plugin from each classpath.") + val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable plugins by name.") val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") - val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.") - val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath) + val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") + val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) val Xprint = PhasesSetting ("-Xprint", "Print out program after") val writeICode = PhasesSetting 
("-Xprint-icode", "Log internal icode to *.icode files after", "icode") val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.") @@ -172,7 +172,8 @@ trait ScalaSettings extends AbsScalaSettings val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.") val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true) val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") - val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") + val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal) + val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. 
Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None) val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") @@ -249,4 +250,9 @@ trait ScalaSettings extends AbsScalaSettings def isBCodeAskedFor = (Ybackend.value != "GenASM") def isICodeAskedFor = ((Ybackend.value == "GenASM") || optimiseSettings.exists(_.value) || writeICode.isSetByUser) + object MacroExpand { + val None = "none" + val Normal = "normal" + val Discard = "discard" + } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 2b96961291..664645e53e 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -515,7 +515,7 @@ abstract class ClassfileParser { val info = readType() val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags) - // Note: the info may be overrwritten later with a generic signature + // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR sym setInfo { if (jflags.isEnum) ConstantType(Constant(sym)) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 9738769db9..f14fce5de9 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -481,18 +481,33 @@ abstract class CleanUp 
extends Statics with Transform with ast.TreeDSL { * For instance, say we have a Scala class: * * class Cls { - * // ... - * def someSymbol = `symbolic - * // ... + * def someSymbol1 = 'Symbolic1 + * def someSymbol2 = 'Symbolic2 + * def sameSymbol1 = 'Symbolic1 + * val someSymbol3 = 'Symbolic3 * } * * After transformation, this class looks like this: * * class Cls { - * private "static" val <some_name>$symbolic = Symbol("symbolic") - * // ... - * def someSymbol = <some_name>$symbolic - * // ... + * private <static> var symbol$1: scala.Symbol + * private <static> var symbol$2: scala.Symbol + * private <static> var symbol$3: scala.Symbol + * private val someSymbol3: scala.Symbol + * + * private <static> def <clinit> = { + * symbol$1 = Symbol.apply("Symbolic1") + * symbol$2 = Symbol.apply("Symbolic2") + * } + * + * private def <init> = { + * someSymbol3 = symbol$3 + * } + * + * def someSymbol1 = symbol$1 + * def someSymbol2 = symbol$2 + * def sameSymbol1 = symbol$1 + * val someSymbol3 = someSymbol3 * } * * The reasoning behind this transformation is the following. Symbols get interned - they are stored @@ -502,17 +517,17 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { * is accessed only once during class loading, and after that, the unique symbol is in the static * member. Hence, it is cheap to both reach the unique symbol and do equality checks on it. * - * And, finally, be advised - scala symbol literal and the Symbol class of the compiler + * And, finally, be advised - Scala's Symbol literal (scala.Symbol) and the Symbol class of the compiler * have little in common. 
*/ case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply => def transformApply = { - // add the symbol name to a map if it's not there already - val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil) - val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree) - // create a reference to a static field - val ntree = typedWithPos(tree.pos)(REF(staticFieldSym)) - super.transform(ntree) + // add the symbol name to a map if it's not there already + val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil) + val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree) + // create a reference to a static field + val ntree = typedWithPos(tree.pos)(REF(staticFieldSym)) + super.transform(ntree) } transformApply diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 844774e75f..ef50ae276f 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -457,12 +457,11 @@ abstract class UnCurry extends InfoTransform else super.transform(tree) case UnApply(fn, args) => - val fn1 = transform(fn) - val args1 = transformTrees(fn.symbol.name match { - case nme.unapply => args - case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, localTyper.expectedPatternTypes(fn, args)) - case _ => sys.error("internal error: UnApply node has wrong symbol") - }) + val fn1 = transform(fn) + val args1 = fn.symbol.name match { + case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, patmat.alignPatterns(tree).expectedTypes) + case _ => args + } treeCopy.UnApply(tree, fn1, args1) case Apply(fn, args) => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 63f4a4bf25..699e98f963 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ 
b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -31,6 +31,30 @@ trait MatchTranslation { trait MatchTranslator extends TreeMakers with TreeMakerWarnings { import typer.context + /** A conservative approximation of which patterns do not discern anything. + * They are discarded during the translation. + */ + object WildcardPattern { + def unapply(pat: Tree): Boolean = pat match { + case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol! + case Star(WildcardPattern()) => true + case x: Ident => treeInfo.isVarPattern(x) + case Alternative(ps) => ps forall unapply + case EmptyTree => true + case _ => false + } + } + + object PatternBoundToUnderscore { + def unapply(pat: Tree): Boolean = pat match { + case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol! + case Ident(nme.WILDCARD) => true + case Alternative(ps) => ps forall unapply + case Typed(PatternBoundToUnderscore(), _) => true + case _ => false + } + } + object SymbolBound { def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match { case Bind(_, expr) if hasSym(tree) => Some(tree.symbol -> expr) @@ -86,10 +110,8 @@ trait MatchTranslation { // example check: List[Int] <:< ::[Int] private def extractorStep(): TranslationStep = { - import extractor.{ paramType, treeMaker } - if (!extractor.isTyped) - ErrorUtils.issueNormalTypeError(tree, "Could not typecheck extractor call: "+ extractor)(context) - + def paramType = extractor.aligner.wholeType + import extractor.treeMaker // chain a type-testing extractor before the actual extractor call // it tests the type, checks the outer pointer and casts to the expected type // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] @@ -355,36 +377,20 @@ trait MatchTranslation { object ExtractorCall { // TODO: check unargs == args def apply(tree: Tree): ExtractorCall = tree match { - case UnApply(unfun, args) => 
new ExtractorCallRegular(unfun, args) // extractor - case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class + case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(tree), unfun, args) // extractor + case Apply(fun, args) => new ExtractorCallProd(alignPatterns(tree), fun, args) // case class } } - abstract class ExtractorCall { + abstract class ExtractorCall(val aligner: PatternAligned) { + import aligner._ def fun: Tree def args: List[Tree] - val nbSubPats = args.length - val starLength = if (hasStar) 1 else 0 - val nonStarLength = args.length - starLength - - // everything okay, captain? - def isTyped: Boolean - def isSeq: Boolean - - private def hasStar = nbSubPats > 0 && isStar(args.last) - private def isNonEmptySeq = nbSubPats > 0 && isSeq - - /** This is special cased so that a single pattern will accept any extractor - * result, even if it's a tuple (SI-6675) - */ - def isSingle = nbSubPats == 1 && !isSeq - - // to which type should the previous binder be casted? 
- def paramType : Type - - protected def rawSubPatTypes: List[Type] - protected def resultType: Type + // don't go looking for selectors if we only expect one pattern + def rawSubPatTypes = aligner.extractedTypes + def resultInMonad = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType) + def resultType = fun.tpe.finalResultType /** Create the TreeMaker that embodies this extractor call * @@ -407,24 +413,14 @@ trait MatchTranslation { lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns) - private def nonStarSubPatTypes = formalTypes(rawInit :+ repeatedType, nonStarLength) + private def nonStarSubPatTypes = aligner.typedNonStarPatterns map (_.tpe) - def subPatTypes: List[Type] = ( - if (rawSubPatTypes.isEmpty || !isSeq) rawSubPatTypes - else if (hasStar) nonStarSubPatTypes :+ sequenceType - else nonStarSubPatTypes - ) - - private def rawGet = typeOfMemberNamedGetOrSelf(resultType) - private def rawInit = rawSubPatTypes dropRight 1 - protected def sequenceType = typeOfLastSelectorOrSelf(rawGet) - protected def elementType = elementTypeOfLastSelectorOrSelf(rawGet) - protected def repeatedType = scalaRepeatedType(elementType) + def subPatTypes: List[Type] = typedPatterns map (_.tpe) - // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple - protected def firstIndexingBinder = rawSubPatTypes.length - 1 - protected def lastIndexingBinder = nbSubPats - 1 - starLength - protected def expectedLength = lastIndexingBinder - firstIndexingBinder + 1 + // there are `productArity` non-seq elements in the tuple. 
+ protected def firstIndexingBinder = productArity + protected def expectedLength = elementArity + protected def lastIndexingBinder = totalArity - starArity - 1 private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder)) @@ -438,12 +434,12 @@ trait MatchTranslation { // referenced by `binder` protected def subPatRefsSeq(binder: Symbol): List[Tree] = { def lastTrees: List[Tree] = ( - if (!hasStar) Nil + if (!aligner.isStar) Nil else if (expectedLength == 0) seqTree(binder) :: Nil else genDrop(binder, expectedLength) ) // this error-condition has already been checked by checkStarPatOK: - // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) + // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) // [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq // [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for... 
@@ -457,8 +453,10 @@ trait MatchTranslation { // the trees that select the subpatterns on the extractor's result, referenced by `binder` // require (nbSubPats > 0 && (!lastIsStar || isSeq)) - protected def subPatRefs(binder: Symbol): List[Tree] = - if (isNonEmptySeq) subPatRefsSeq(binder) else productElemsToN(binder, nbSubPats) + protected def subPatRefs(binder: Symbol): List[Tree] = ( + if (totalArity > 0 && isSeq) subPatRefsSeq(binder) + else productElemsToN(binder, totalArity) + ) private def compareInts(t1: Tree, t2: Tree) = gen.mkMethodCall(termMember(ScalaPackage, "math"), TermName("signum"), Nil, (t1 INT_- t2) :: Nil) @@ -478,7 +476,7 @@ trait MatchTranslation { // when the last subpattern is a wildcard-star the expectedLength is but a lower bound // (otherwise equality is required) def compareOp: (Tree, Tree) => Tree = - if (hasStar) _ INT_>= _ + if (aligner.isStar) _ INT_>= _ else _ INT_== _ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero` @@ -487,26 +485,14 @@ trait MatchTranslation { def checkedLength: Option[Int] = // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied - if (!isSeq || expectedLength < starLength) None + if (!isSeq || expectedLength < starArity) None else Some(expectedLength) } // TODO: to be called when there's a def unapplyProd(x: T): U // U must have N members _1,..., _N -- the _i are type checked, call their type Ti, // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it) - class ExtractorCallProd(val fun: Tree, val args: List[Tree]) extends ExtractorCall { - private def constructorTp = fun.tpe - - def isTyped = fun.isTyped - - // to which type should the previous binder be casted? 
- def paramType = constructorTp.finalResultType - def resultType = fun.tpe.finalResultType - - def isSeq = isVarArgTypes(rawSubPatTypes) - - protected def rawSubPatTypes = constructorTp.paramTypes - + class ExtractorCallProd(aligner: PatternAligned, val fun: Tree, val args: List[Tree]) extends ExtractorCall(aligner) { /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary @@ -535,20 +521,11 @@ trait MatchTranslation { if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1) else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN } - - override def toString() = s"ExtractorCallProd($fun:${fun.tpe} / ${fun.symbol} / args=$args)" } - class ExtractorCallRegular(extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall { + class ExtractorCallRegular(aligner: PatternAligned, extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall(aligner) { val Unapplied(fun) = extractorCallIncludingDummy - def tpe = fun.tpe - def paramType = firstParamType(tpe) - def resultType = tpe.finalResultType - def isTyped = (tpe ne NoType) && fun.isTyped && (resultInMonad ne ErrorType) - def isSeq = fun.symbol.name == nme.unapplySeq - def isBool = resultType =:= BooleanTpe - /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary @@ -571,7 +548,7 @@ trait MatchTranslation { ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)( subPatBinders, subPatRefs(binder), - isBool, + aligner.isBool, checkedLength, patBinderOrCasted, ignoredSubPatBinders @@ -583,9 +560,9 @@ trait MatchTranslation { else super.seqTree(binder) // the trees that select the subpatterns on the extractor's result, referenced by `binder` - // require (nbSubPats > 0 && (!lastIsStar || isSeq)) + // require (totalArity > 0 && (!lastIsStar || isSeq)) override protected def subPatRefs(binder: Symbol): 
List[Tree] = - if (isSingle) REF(binder) :: Nil // special case for extractors + if (aligner.isSingle) REF(binder) :: Nil // special case for extractors else super.subPatRefs(binder) protected def spliceApply(binder: Symbol): Tree = { @@ -606,40 +583,7 @@ trait MatchTranslation { splice transform extractorCallIncludingDummy } - // what's the extractor's result type in the monad? It is the type of its nullary member `get`. - protected lazy val resultInMonad: Type = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType) - - protected lazy val rawSubPatTypes = ( - if (isBool) Nil - else if (isSingle) resultInMonad :: Nil // don't go looking for selectors if we only expect one pattern - else typesOfSelectorsOrSelf(resultInMonad) - ) - - override def toString() = s"ExtractorCallRegular($fun: $tpe / ${fun.symbol})" - } - - /** A conservative approximation of which patterns do not discern anything. - * They are discarded during the translation. - */ - object WildcardPattern { - def unapply(pat: Tree): Boolean = pat match { - case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol! - case Star(WildcardPattern()) => true - case x: Ident => treeInfo.isVarPattern(x) - case Alternative(ps) => ps forall unapply - case EmptyTree => true - case _ => false - } - } - - object PatternBoundToUnderscore { - def unapply(pat: Tree): Boolean = pat match { - case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol! 
- case Ident(nme.WILDCARD) => true - case Alternative(ps) => ps forall unapply - case Typed(PatternBoundToUnderscore(), _) => true - case _ => false - } + override def rawSubPatTypes = aligner.extractor.varargsTypes } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 7df03044aa..a80f158949 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -395,8 +395,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))) lazy val outerTestNeeded = ( - !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass) - && needsOuterTest(expectedTp, testedBinder.info, matchOwner)) + (expectedTp.prefix ne NoPrefix) + && !expectedTp.prefix.typeSymbol.isPackageClass + && needsOuterTest(expectedTp, testedBinder.info, matchOwner) + ) // the logic to generate the run-time test that follows from the fact that // a `prevBinder` is expected to have type `expectedTp` @@ -406,44 +408,52 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def renderCondition(cs: TypeTestCondStrategy): cs.Result = { import cs._ - def default = - // do type test first to ensure we won't select outer on null - if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp)) - else typeTest(testedBinder, expectedTp) - // propagate expected type def expTp(t: Tree): t.type = t setType expectedTp + def testedWide = testedBinder.info.widen + def expectedWide = expectedTp.widen + def isAnyRef = testedWide <:< AnyRefTpe + def isAsExpected = testedWide <:< expectedTp + def isExpectedPrimitiveType = isAsExpected && isPrimitiveValueType(expectedTp) + def isExpectedReferenceType = isAsExpected && (expectedTp <:< AnyRefTpe) + def mkNullTest = 
nonNullTest(testedBinder) + def mkOuterTest = outerTest(testedBinder, expectedTp) + def mkTypeTest = typeTest(testedBinder, expectedWide) + + def mkEqualsTest(lhs: Tree): cs.Result = equalsTest(lhs, testedBinder) + def mkEqTest(lhs: Tree): cs.Result = eqTest(lhs, testedBinder) + def addOuterTest(res: cs.Result): cs.Result = if (outerTestNeeded) and(res, mkOuterTest) else res + + // If we conform to expected primitive type: + // it cannot be null and cannot have an outer pointer. No further checking. + // If we conform to expected reference type: + // have to test outer and non-null + // If we do not conform to expected type: + // have to test type and outer (non-null is implied by successful type test) + def mkDefault = ( + if (isExpectedPrimitiveType) tru + else addOuterTest( + if (isExpectedReferenceType) mkNullTest + else mkTypeTest + ) + ) + // true when called to type-test the argument to an extractor // don't do any fancy equality checking, just test the type - if (extractorArgTypeTest) default + // TODO: verify that we don't need to special-case Array + // I think it's okay: + // - the isInstanceOf test includes a test for the element type + // - Scala's arrays are invariant (so we don't drop type tests unsoundly) + if (extractorArgTypeTest) mkDefault else expectedTp match { - // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types - // this implies sym.isStable - case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen)) - // must use == to support e.g. 
List() == Nil - case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen)) - case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefTpe - => eqTest(expTp(CODE.NULL), testedBinder) - case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder) - case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder) - - // TODO: verify that we don't need to special-case Array - // I think it's okay: - // - the isInstanceOf test includes a test for the element type - // - Scala's arrays are invariant (so we don't drop type tests unsoundly) - case _ if testedBinder.info.widen <:< expectedTp => - // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer - // since the types conform, no further checking is required - if (isPrimitiveValueType(expectedTp)) tru - // have to test outer and non-null only when it's a reference type - else if (expectedTp <:< AnyRefTpe) { - // do non-null check first to ensure we won't select outer on null - if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp)) - else nonNullTest(testedBinder) - } else default - - case _ => default + // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types - this implies sym.isStable + case SingleType(_, sym) => and(mkEqualsTest(gen.mkAttributedQualifier(expectedTp)), mkTypeTest) + case ThisType(sym) if sym.isModule => and(mkEqualsTest(CODE.REF(sym)), mkTypeTest) // must use == to support e.g. 
List() == Nil + case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(CODE.NULL)) + case ConstantType(const) => mkEqualsTest(expTp(Literal(const))) + case ThisType(sym) => mkEqTest(expTp(This(sym))) + case _ => mkDefault } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala new file mode 100644 index 0000000000..e84ccbf754 --- /dev/null +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala @@ -0,0 +1,155 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala +package tools +package nsc +package transform +package patmat + +/** An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*] + * A case matches: P1, P2, ..., Pj, opt[Seq[E]] + * Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]] + * + * Here Pm/Fi is the last pattern to match the fixed arity section. + * + * productArity: the value of i, i.e. the number of non-sequence types in the extractor + * nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition + * elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements + * starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern + * totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition + * + * Note that productArity is a function only of the extractor, and + * nonStar/star/totalArity are all functions of the patterns. The key + * value for aligning and typing the patterns is elementArity, as it + * is derived from both sets of information. + */ +trait PatternExpander[Pattern, Type] { + /** You'll note we're not inside the cake. "Pattern" and "Type" are + * arbitrary types here, and NoPattern and NoType arbitrary values. 
+ */ + def NoPattern: Pattern + def NoType: Type + + /** It's not optimal that we're carrying both sequence and repeated + * type here, but the implementation requires more unraveling before + * it can be avoided. + * + * sequenceType is Seq[T], elementType is T, repeatedType is T*. + */ + sealed case class Repeated(sequenceType: Type, elementType: Type, repeatedType: Type) { + def exists = elementType != NoType + + def elementList = if (exists) elementType :: Nil else Nil + def sequenceList = if (exists) sequenceType :: Nil else Nil + def repeatedList = if (exists) repeatedType :: Nil else Nil + + override def toString = s"${elementType}*" + } + object NoRepeated extends Repeated(NoType, NoType, NoType) { + override def toString = "<none>" + } + + final case class Patterns(fixed: List[Pattern], star: Pattern) { + def hasStar = star != NoPattern + def starArity = if (hasStar) 1 else 0 + def nonStarArity = fixed.length + def totalArity = nonStarArity + starArity + def starPatterns = if (hasStar) star :: Nil else Nil + def all = fixed ::: starPatterns + + override def toString = all mkString ", " + } + + /** An 'extractor' can be a case class or an unapply or unapplySeq method. + * Decoding what it is that they extract takes place before we arrive here, + * so that this class can concentrate only on the relationship between + * patterns and types. + * + * In a case class, the class is the unextracted type and the fixed and + * repeated types are derived from its constructor parameters. + * + * In an unapply, this is reversed: the parameter to the unapply is the + * unextracted type, and the other types are derived based on the return + * type of the unapply method. 
+ * + * In other words, this case class and unapply are encoded the same: + * + * case class Foo(x: Int, y: Int, zs: Char*) + * def unapplySeq(x: Foo): Option[(Int, Int, Seq[Char])] + * + * Both are Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*)) + * + * @param whole The type in its unextracted form + * @param fixed The non-sequence types which are extracted + * @param repeated The sequence type which is extracted + */ + final case class Extractor(whole: Type, fixed: List[Type], repeated: Repeated) { + require(whole != NoType, s"expandTypes($whole, $fixed, $repeated)") + + def productArity = fixed.length + def hasSeq = repeated.exists + def elementType = repeated.elementType + def sequenceType = repeated.sequenceType + def allTypes = fixed ::: repeated.sequenceList + def varargsTypes = fixed ::: repeated.repeatedList + def isErroneous = allTypes contains NoType + + private def typeStrings = fixed.map("" + _) ::: ( if (hasSeq) List("" + repeated) else Nil ) + + def offeringString = if (isErroneous) "<error>" else typeStrings match { + case Nil => "Boolean" + case tp :: Nil => tp + case tps => tps.mkString("(", ", ", ")") + } + override def toString = "%s => %s".format(whole, offeringString) + } + + final case class TypedPat(pat: Pattern, tpe: Type) { + override def toString = s"$pat: $tpe" + } + + /** If elementArity is... + * 0: A perfect match between extractor and the fixed patterns. + * If there is a star pattern it will match any sequence. + * > 0: There are more patterns than products. There will have to be a + * sequence which can populate at least <elementArity> patterns. + * < 0: There are more products than patterns: compile time error. 
+ */ + final case class Aligned(patterns: Patterns, extractor: Extractor) { + def elementArity = patterns.nonStarArity - productArity + def productArity = extractor.productArity + def starArity = patterns.starArity + def totalArity = patterns.totalArity + + def wholeType = extractor.whole + def sequenceType = extractor.sequenceType + def productTypes = extractor.fixed + def extractedTypes = extractor.allTypes + def typedNonStarPatterns = products ::: elements + def typedPatterns = typedNonStarPatterns ::: stars + + def isBool = !isSeq && productArity == 0 + def isSingle = !isSeq && totalArity == 1 + def isStar = patterns.hasStar + def isSeq = extractor.hasSeq + + private def typedAsElement(pat: Pattern) = TypedPat(pat, extractor.elementType) + private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType) + private def productPats = patterns.fixed take productArity + private def elementPats = patterns.fixed drop productArity + private def products = (productPats, productTypes).zipped map TypedPat + private def elements = elementPats map typedAsElement + private def stars = patterns.starPatterns map typedAsSequence + + override def toString = s""" + |Aligned { + | patterns $patterns + | extractor $extractor + | arities $productArity/$elementArity/$starArity // product/element/star + | typed ${typedPatterns mkString ", "} + |}""".stripMargin.trim + } +} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 394ba98f17..f6c960d089 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -34,7 +34,8 @@ import scala.reflect.internal.util.Position * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?) 
* - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure */ -trait PatternMatching extends Transform with TypingTransformers +trait PatternMatching extends Transform + with TypingTransformers with Debugging with Interface with MatchTranslation @@ -45,7 +46,8 @@ trait PatternMatching extends Transform with TypingTransformers with Solving with MatchAnalysis with MatchOptimization - with MatchWarnings { + with MatchWarnings + with ScalacPatternExpanders { import global._ val phaseName: String = "patmat" diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala new file mode 100644 index 0000000000..7858cb5586 --- /dev/null +++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala @@ -0,0 +1,154 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala +package tools +package nsc +package transform +package patmat + +/** This is scalac-specific logic layered on top of the scalac-agnostic + * "matching products to patterns" logic defined in PatternExpander. 
+ */ +trait ScalacPatternExpanders { + val global: Global + + import global._ + import definitions._ + import treeInfo._ + + type PatternAligned = ScalacPatternExpander#Aligned + + implicit class AlignedOps(val aligned: PatternAligned) { + import aligned._ + def expectedTypes = typedPatterns map (_.tpe) + def unexpandedFormals = extractor.varargsTypes + } + trait ScalacPatternExpander extends PatternExpander[Tree, Type] { + def NoPattern = EmptyTree + def NoType = global.NoType + + def newPatterns(patterns: List[Tree]): Patterns = patterns match { + case init :+ last if isStar(last) => Patterns(init, last) + case _ => Patterns(patterns, NoPattern) + } + def elementTypeOf(tpe: Type) = { + val seq = repeatedToSeq(tpe) + + ( typeOfMemberNamedHead(seq) + orElse typeOfMemberNamedApply(seq) + orElse definitions.elementType(ArrayClass, seq) + ) + } + def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor = + logResult(s"newExtractor($whole, $fixed, $repeated")(Extractor(whole, fixed, repeated)) + + // Turn Seq[A] into Repeated(Seq[A], A, A*) + def repeatedFromSeq(seqType: Type): Repeated = { + val elem = elementTypeOf(seqType) + val repeated = scalaRepeatedType(elem) + + Repeated(seqType, elem, repeated) + } + // Turn A* into Repeated(Seq[A], A, A*) + def repeatedFromVarargs(repeated: Type): Repeated = + Repeated(repeatedToSeq(repeated), repeatedToSingle(repeated), repeated) + + /** In this case we are basing the pattern expansion on a case class constructor. + * The argument is the MethodType carried by the primary constructor. + */ + def applyMethodTypes(method: Type): Extractor = { + val whole = method.finalResultType + + method.paramTypes match { + case init :+ last if isScalaRepeatedParamType(last) => newExtractor(whole, init, repeatedFromVarargs(last)) + case tps => newExtractor(whole, tps, NoRepeated) + } + } + + /** In this case, expansion is based on an unapply or unapplySeq method. 
+ * Unfortunately the MethodType does not carry the information of whether + * it was unapplySeq, so we have to funnel that information in separately. + */ + def unapplyMethodTypes(method: Type, isSeq: Boolean): Extractor = { + val whole = firstParamType(method) + val result = method.finalResultType + val expanded = ( + if (result =:= BooleanTpe) Nil + else typeOfMemberNamedGet(result) match { + case rawGet if !hasSelectors(rawGet) => rawGet :: Nil + case rawGet => typesOfSelectors(rawGet) + } + ) + expanded match { + case init :+ last if isSeq => newExtractor(whole, init, repeatedFromSeq(last)) + case tps => newExtractor(whole, tps, NoRepeated) + } + } + } + object alignPatterns extends ScalacPatternExpander { + /** Converts a T => (A, B, C) extractor to a T => ((A, B, CC)) extractor. + */ + def tupleExtractor(extractor: Extractor): Extractor = + extractor.copy(fixed = tupleType(extractor.fixed) :: Nil) + + private def validateAligned(tree: Tree, aligned: Aligned): Aligned = { + import aligned._ + + def owner = tree.symbol.owner + def offering = extractor.offeringString + def symString = tree.symbol.fullLocationString + def offerString = if (extractor.isErroneous) "" else s" offering $offering" + def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity + + def err(msg: String) = currentUnit.error(tree.pos, msg) + def warn(msg: String) = currentUnit.warning(tree.pos, msg) + def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity") + + if (isStar && !isSeq) + err("Star pattern must correspond with varargs or unapplySeq") + else if (elementArity < 0) + arityError("not enough") + else if (elementArity > 0 && !extractor.hasSeq) + arityError("too many") + + aligned + } + + def apply(sel: Tree, args: List[Tree]): Aligned = { + val fn = sel match { + case Unapplied(fn) => fn + case _ => sel + } + val patterns = newPatterns(args) + val isSeq = sel.symbol.name == nme.unapplySeq + val 
isUnapply = sel.symbol.name == nme.unapply + val extractor = sel.symbol.name match { + case nme.unapply => unapplyMethodTypes(fn.tpe, isSeq = false) + case nme.unapplySeq => unapplyMethodTypes(fn.tpe, isSeq = true) + case _ => applyMethodTypes(fn.tpe) + } + + /** Rather than let the error that is SI-6675 pollute the entire matching + * process, we will tuple the extractor before creation Aligned so that + * it contains known good values. + */ + def productArity = extractor.productArity + def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}" + val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1 + + if (settings.lint && requiresTupling && effectivePatternArity(args) == 1) + currentUnit.warning(sel.pos, s"${sel.symbol.owner} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)") + + val normalizedExtractor = if (requiresTupling) tupleExtractor(extractor) else extractor + validateAligned(fn, Aligned(patterns, normalizedExtractor)) + } + + def apply(tree: Tree): Aligned = tree match { + case Apply(fn, args) => apply(fn, args) + case UnApply(fn, args) => apply(fn, args) + } + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index 54e4fefc15..fa6e5399eb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -13,7 +13,6 @@ package typechecker trait AnalyzerPlugins { self: Analyzer => import global._ - trait AnalyzerPlugin { /** * Selectively activate this analyzer plugin, e.g. according to the compiler phase. 
@@ -156,6 +155,117 @@ trait AnalyzerPlugins { self: Analyzer => def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe } + /** + * @define nonCumulativeReturnValueDoc Returns `None` if the plugin doesn't want to customize the default behavior + * or something else if the plugin knows better that the implementation provided in scala-compiler.jar. + * If multiple plugins return a non-empty result, it's going to be a compilation error. + */ + trait MacroPlugin { + /** + * Selectively activate this analyzer plugin, e.g. according to the compiler phase. + * + * Note that the current phase can differ from the global compiler phase (look for `enteringPhase` + * invocations in the compiler). For instance, lazy types created by the UnPickler are completed + * at the phase in which their symbol is created. Observations show that this can even be the + * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might + * need to be active also in phases other than namer and typer. + * + * Typically, this method can be implemented as + * + * global.phase.id < global.currentRun.picklerPhase.id + */ + def isActive(): Boolean = true + + /** + * Typechecks the right-hand side of a macro definition (which typically features + * a mere reference to a macro implementation). + * + * Default implementation provided in `self.standardTypedMacroBody` makes sure that the rhs + * resolves to a reference to a method in either a static object or a macro bundle, + * verifies that the referred method is compatible with the macro def and upon success + * attaches a macro impl binding to the macro def's symbol. + * + * $nonCumulativeReturnValueDoc. + */ + def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Option[Tree] = None + + /** + * Expands an application of a def macro (i.e. of a symbol that has the MACRO flag set), + * possibly using the current typer mode and the provided prototype. 
+ * + * Default implementation provided in `self.standardMacroExpand` figures out whether the `expandee` + * needs to be expanded right away or its expansion has to be delayed until all undetermined + * parameters are inferred, then loads the macro implementation using `self.pluginsMacroRuntime`, + * prepares the invocation arguments for the macro implementation using `self.pluginsMacroArgs`, + * and finally calls into the macro implementation. After the call returns, it typechecks + * the expansion and performs some bookkeeping. + * + * This method is typically implemented if your plugin requires significant changes to the macro engine. + * If you only need to customize the macro context, consider implementing `pluginsMacroArgs`. + * If you only need to customize how macro implementation are invoked, consider going for `pluginsMacroRuntime`. + * + * $nonCumulativeReturnValueDoc. + */ + def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Option[Tree] = None + + /** + * Computes the arguments that need to be passed to the macro impl corresponding to a particular expandee. + * + * Default implementation provided in `self.standardMacroArgs` instantiates a `scala.reflect.macros.contexts.Context`, + * gathers type and value arguments of the macro application and throws them together into `MacroArgs`. + * + * $nonCumulativeReturnValueDoc. + */ + def pluginsMacroArgs(typer: Typer, expandee: Tree): Option[MacroArgs] = None + + /** + * Summons a function that encapsulates macro implementation invocations for a particular expandee. + * + * Default implementation provided in `self.standardMacroRuntime` returns a function that + * loads the macro implementation binding from the macro definition symbol, + * then uses either Java or Scala reflection to acquire the method that corresponds to the impl, + * and then reflectively calls into that method. + * + * $nonCumulativeReturnValueDoc. 
+ */ + def pluginsMacroRuntime(expandee: Tree): Option[MacroRuntime] = None + + /** + * Creates a symbol for the given tree in lexical context encapsulated by the given namer. + * + * Default implementation provided in `namer.standardEnterSym` handles MemberDef's and Imports, + * doing nothing for other trees (DocDef's are seen through and rewrapped). Typical implementation + * of `enterSym` for a particular tree flavor creates a corresponding symbol, assigns it to the tree, + * enters the symbol into scope and then might even perform some code generation. + * + * $nonCumulativeReturnValueDoc. + */ + def pluginsEnterSym(namer: Namer, tree: Tree): Boolean = false + + /** + * Makes sure that for the given class definition, there exists a companion object definition. + * + * Default implementation provided in `namer.standardEnsureCompanionObject` looks up a companion symbol for the class definition + * and then checks whether the resulting symbol exists or not. If it exists, then nothing else is done. + * If not, a synthetic object definition is created using the provided factory, which is then entered into namer's scope. + * + * $nonCumulativeReturnValueDoc. + */ + def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Option[Symbol] = None + + /** + * Prepares a list of statements for being typechecked by performing domain-specific type-agnostic code synthesis. + * + * Trees passed into this method are going to be named, but not typed. + * In particular, you can rely on the compiler having called `enterSym` on every stat prior to passing calling this method. + * + * Default implementation does nothing. Current approaches to code syntheses (generation of underlying fields + * for getters/setters, creation of companion objects for case classes, etc) are too disparate and ad-hoc + * to be treated uniformly, so I'm leaving this for future work. 
+ */ + def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = stats + } + /** A list of registered analyzer plugins */ @@ -167,59 +277,158 @@ trait AnalyzerPlugins { self: Analyzer => analyzerPlugins = plugin :: analyzerPlugins } + private abstract class CumulativeOp[T] { + def default: T + def accumulate: (T, AnalyzerPlugin) => T + } + + private def invoke[T](op: CumulativeOp[T]): T = { + if (analyzerPlugins.isEmpty) op.default + else analyzerPlugins.foldLeft(op.default)((current, plugin) => + if (!plugin.isActive()) current else op.accumulate(current, plugin)) + } /** @see AnalyzerPlugin.pluginsPt */ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = + // performance opt if (analyzerPlugins.isEmpty) pt - else analyzerPlugins.foldLeft(pt)((pt, plugin) => - if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode)) + else invoke(new CumulativeOp[Type] { + def default = pt + def accumulate = (pt, p) => p.pluginsPt(pt, typer, tree, mode) + }) /** @see AnalyzerPlugin.pluginsTyped */ - def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = { - // support deprecated methods in annotation checkers - val annotCheckersTpe = addAnnotations(tree, tpe) - if (analyzerPlugins.isEmpty) annotCheckersTpe - else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) => - if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt)) - } + def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = + // performance opt + if (analyzerPlugins.isEmpty) addAnnotations(tree, tpe) + else invoke(new CumulativeOp[Type] { + // support deprecated methods in annotation checkers + def default = addAnnotations(tree, tpe) + def accumulate = (tpe, p) => p.pluginsTyped(tpe, typer, tree, mode, pt) + }) /** @see AnalyzerPlugin.pluginsTypeSig */ - def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = - if (analyzerPlugins.isEmpty) tpe - else 
analyzerPlugins.foldLeft(tpe)((tpe, plugin) => - if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt)) + def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = invoke(new CumulativeOp[Type] { + def default = tpe + def accumulate = (tpe, p) => p.pluginsTypeSig(tpe, typer, defTree, pt) + }) /** @see AnalyzerPlugin.pluginsTypeSigAccessor */ - def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = - if (analyzerPlugins.isEmpty) tpe - else analyzerPlugins.foldLeft(tpe)((tpe, plugin) => - if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym)) + def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = invoke(new CumulativeOp[Type] { + def default = tpe + def accumulate = (tpe, p) => p.pluginsTypeSigAccessor(tpe, typer, tree, sym) + }) /** @see AnalyzerPlugin.canAdaptAnnotations */ - def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = { + def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = invoke(new CumulativeOp[Boolean] { // support deprecated methods in annotation checkers - val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt) - annotCheckersExists || { - if (analyzerPlugins.isEmpty) false - else analyzerPlugins.exists(plugin => - plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt)) - } - } + def default = global.canAdaptAnnotations(tree, mode, pt) + def accumulate = (curr, p) => curr || p.canAdaptAnnotations(tree, typer, mode, pt) + }) /** @see AnalyzerPlugin.adaptAnnotations */ - def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = { + def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = invoke(new CumulativeOp[Tree] { // support deprecated methods in annotation checkers - val annotCheckersTree = global.adaptAnnotations(tree, mode, pt) - if (analyzerPlugins.isEmpty) annotCheckersTree - 
else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) => - if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt)) - } + def default = global.adaptAnnotations(tree, mode, pt) + def accumulate = (tree, p) => p.adaptAnnotations(tree, typer, mode, pt) + }) /** @see AnalyzerPlugin.pluginsTypedReturn */ - def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = { - val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe) - if (analyzerPlugins.isEmpty) annotCheckersType - else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) => - if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt)) + def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = invoke(new CumulativeOp[Type] { + def default = adaptTypeOfReturn(tree.expr, pt, tpe) + def accumulate = (tpe, p) => p.pluginsTypedReturn(tpe, typer, tree, pt) + }) + + /** A list of registered macro plugins */ + private var macroPlugins: List[MacroPlugin] = Nil + + /** Registers a new macro plugin */ + def addMacroPlugin(plugin: MacroPlugin) { + if (!macroPlugins.contains(plugin)) + macroPlugins = plugin :: macroPlugins + } + + private abstract class NonCumulativeOp[T] { + def position: Position + def description: String + def default: T + def custom(plugin: MacroPlugin): Option[T] + } + + private def invoke[T](op: NonCumulativeOp[T]): T = { + if (macroPlugins.isEmpty) op.default + else { + val results = macroPlugins.filter(_.isActive()).map(plugin => (plugin, op.custom(plugin))) + results.flatMap { case (p, Some(result)) => Some((p, result)); case _ => None } match { + case (p1, _) :: (p2, _) :: _ => typer.context.error(op.position, s"both $p1 and $p2 want to ${op.description}"); op.default + case (_, custom) :: Nil => custom + case Nil => op.default + } + } + } + + /** @see MacroPlugin.pluginsTypedMacroBody */ + def pluginsTypedMacroBody(typer: Typer, ddef: DefDef): Tree = invoke(new NonCumulativeOp[Tree] { + 
def position = ddef.pos + def description = "typecheck this macro definition" + def default = standardTypedMacroBody(typer, ddef) + def custom(plugin: MacroPlugin) = plugin.pluginsTypedMacroBody(typer, ddef) + }) + + /** @see MacroPlugin.pluginsMacroExpand */ + def pluginsMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = invoke(new NonCumulativeOp[Tree] { + def position = expandee.pos + def description = "expand this macro application" + def default = standardMacroExpand(typer, expandee, mode, pt) + def custom(plugin: MacroPlugin) = plugin.pluginsMacroExpand(typer, expandee, mode, pt) + }) + + /** @see MacroPlugin.pluginsMacroArgs */ + def pluginsMacroArgs(typer: Typer, expandee: Tree): MacroArgs = invoke(new NonCumulativeOp[MacroArgs] { + def position = expandee.pos + def description = "compute macro arguments for this macro application" + def default = standardMacroArgs(typer, expandee) + def custom(plugin: MacroPlugin) = plugin.pluginsMacroArgs(typer, expandee) + }) + + /** @see MacroPlugin.pluginsMacroRuntime */ + def pluginsMacroRuntime(expandee: Tree): MacroRuntime = invoke(new NonCumulativeOp[MacroRuntime] { + def position = expandee.pos + def description = "compute macro runtime for this macro application" + def default = standardMacroRuntime(expandee) + def custom(plugin: MacroPlugin) = plugin.pluginsMacroRuntime(expandee) + }) + + /** @see MacroPlugin.pluginsEnterSym */ + def pluginsEnterSym(namer: Namer, tree: Tree): Context = + if (macroPlugins.isEmpty) namer.standardEnterSym(tree) + else invoke(new NonCumulativeOp[Context] { + def position = tree.pos + def description = "enter a symbol for this tree" + def default = namer.standardEnterSym(tree) + def custom(plugin: MacroPlugin) = { + val hasExistingSym = tree.symbol != NoSymbol + val result = plugin.pluginsEnterSym(namer, tree) + if (result && hasExistingSym) Some(namer.context) + else if (result && tree.isInstanceOf[Import]) Some(namer.context.make(tree)) + else if (result) 
Some(namer.context) + else None + } + }) + + /** @see MacroPlugin.pluginsEnsureCompanionObject */ + def pluginsEnsureCompanionObject(namer: Namer, cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = invoke(new NonCumulativeOp[Symbol] { + def position = cdef.pos + def description = "enter a companion symbol for this tree" + def default = namer.standardEnsureCompanionObject(cdef, creator) + def custom(plugin: MacroPlugin) = plugin.pluginsEnsureCompanionObject(namer, cdef, creator) + }) + + /** @see MacroPlugin.pluginsEnterStats */ + def pluginsEnterStats(typer: Typer, stats: List[Tree]): List[Tree] = { + // performance opt + if (macroPlugins.isEmpty) stats + else macroPlugins.foldLeft(stats)((current, plugin) => + if (!plugin.isActive()) current else plugin.pluginsEnterStats(typer, stats)) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 719d04a7f9..4d0eda2377 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -625,8 +625,7 @@ trait ContextErrors { setError(tree) } - def CaseClassConstructorError(tree: Tree) = { - val baseMessage = tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method" + def CaseClassConstructorError(tree: Tree, baseMessage: String) = { val addendum = directUnapplyMember(tree.symbol.info) match { case sym if hasMultipleNonImplicitParamLists(sym) => s"\nNote: ${sym.defString} exists in ${tree.symbol}, but it cannot be used as an extractor due to its second non-implicit parameter list" case _ => "" @@ -726,8 +725,9 @@ trait ContextErrors { NormalTypeError(expandee, "too many argument lists for " + fun) } - def MacroInvalidExpansionError(expandee: Tree, role: String, allowedExpansions: String) = { - issueNormalTypeError(expandee, s"macro in $role role can only expand into $allowedExpansions") + def 
MacroIncompatibleEngineError(macroEngine: String) = { + val message = s"macro cannot be expanded, because it was compiled by an incompatible macro engine $macroEngine" + issueNormalTypeError(lastTreeToTyper, message) } case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index fdec1edcc0..19fba639e3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1145,7 +1145,7 @@ trait Implicits { gen.mkAttributedThis(thisSym) case _ => // if `pre` is not a PDT, e.g. if someone wrote - // implicitly[scala.reflect.macros.BlackboxContext#TypeTag[Int]] + // implicitly[scala.reflect.macros.blackbox.Context#TypeTag[Int]] // then we need to fail, because we don't know the prefix to use during type reification // upd. we also need to fail silently, because this is a very common situation // e.g. quite often we're searching for BaseUniverse#TypeTag, e.g. for a type tag in any universe diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 0d46a96b81..cf82d6baac 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -29,7 +29,7 @@ import Fingerprint._ * Then fooBar needs to point to a static method of the following form: * * def fooBar[T: c.WeakTypeTag] // type tag annotation is optional - * (c: scala.reflect.macros.BlackboxContext) + * (c: scala.reflect.macros.blackbox.Context) * (xs: c.Expr[List[T]]) * : c.Expr[T] = { * ... @@ -67,7 +67,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { * * This solution is very simple, but unfortunately it's also lacking. 
If we use it, then * signatures of macro defs become transitively dependent on scala-reflect.jar - * (because they refer to macro impls, and macro impls refer to scala.reflect.macros.BlackboxContext/WhiteboxContext defined in scala-reflect.jar). + * (because they refer to macro impls, and macro impls refer to *box.Context defined in scala-reflect.jar). * More details can be found in comments to https://issues.scala-lang.org/browse/SI-5940. * * Therefore we have to avoid putting macro impls into binding pickles and come up with our own serialization format. @@ -81,9 +81,9 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { * and various accounting information necessary when composing an argument list for the reflective invocation. */ case class MacroImplBinding( - // Is this macro impl a bundle (a trait extending BlackboxMacro or WhiteboxMacro) or a vanilla def? + // Is this macro impl a bundle (a trait extending *box.Macro) or a vanilla def? val isBundle: Boolean, - // Is this macro impl blackbox (i.e. having BlackboxContext in its signature)? + // Is this macro impl blackbox (i.e. having blackbox.Context in its signature)? val isBlackbox: Boolean, // Java class name of the class that contains the macro implementation // is used to load the corresponding object with Java reflection @@ -97,8 +97,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { // * c.Expr[T] => LiftedTyped // * c.Tree => LiftedUntyped // * c.WeakTypeTag[T] => Tagged(index of the type parameter corresponding to that type tag) - // * everything else (e.g. scala.reflect.macros.BlackboxContext/WhiteboxContext) => Other - // f.ex. for: def impl[T: WeakTypeTag, U, V: WeakTypeTag](c: BlackboxContext)(x: c.Expr[T], y: c.Tree): (U, V) = ??? + // * everything else (e.g. *box.Context) => Other + // f.ex. for: def impl[T: WeakTypeTag, U, V: WeakTypeTag](c: blackbox.Context)(x: c.Expr[T], y: c.Tree): (U, V) = ??? 
// `signature` will be equal to List(List(Other), List(LiftedTyped, LiftedUntyped), List(Tagged(0), Tagged(2))) signature: List[List[Fingerprint]], // type arguments part of a macro impl ref (the right-hand side of a macro definition) @@ -116,23 +116,22 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { * with synthetic content that carries the payload described in `MacroImplBinding`. * * For example, for a pair of macro definition and macro implementation: - * def impl(c: scala.reflect.macros.BlackboxContext): c.Expr[Unit] = ??? + * def impl(c: scala.reflect.macros.blackbox.Context): c.Expr[Unit] = ??? * def foo: Unit = macro impl * * We will have the following annotation added on the macro definition `foo`: * * @scala.reflect.macros.internal.macroImpl( * `macro`( + * "macroEngine" = <current macro engine>, * "isBundle" = false, * "isBlackbox" = true, * "signature" = List(Other), * "methodName" = "impl", - * "versionFormat" = <current version format>, * "className" = "Macros$")) */ + def macroEngine = "v7.0 (implemented in Scala 2.11.0-M8)" object MacroImplBinding { - val versionFormat = 6.0 - def pickleAtom(obj: Any): Tree = obj match { case list: List[_] => Apply(Ident(ListModule), list map pickleAtom) @@ -183,12 +182,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { } val payload = List[(String, Any)]( - "versionFormat" -> versionFormat, - "isBundle" -> isBundle, - "isBlackbox" -> isBlackbox, - "className" -> className, - "methodName" -> macroImpl.name.toString, - "signature" -> signature + "macroEngine" -> macroEngine, + "isBundle" -> isBundle, + "isBlackbox" -> isBlackbox, + "className" -> className, + "methodName" -> macroImpl.name.toString, + "signature" -> signature ) // the shape of the nucleus is chosen arbitrarily. it doesn't carry any payload. 
@@ -237,8 +236,8 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { raw.asInstanceOf[T] } - val pickleVersionFormat = unpickle("versionFormat", classOf[Double]) - if (versionFormat != pickleVersionFormat) fail(s"expected version format $versionFormat, actual $pickleVersionFormat") + val macroEngine = unpickle("macroEngine", classOf[String]) + if (self.macroEngine != macroEngine) typer.TyperErrorGen.MacroIncompatibleEngineError(macroEngine) val isBundle = unpickle("isBundle", classOf[Boolean]) val isBlackbox = unpickle("isBlackbox", classOf[Boolean]) @@ -315,7 +314,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { * @return Macro impl reference for the given macro definition if everything is okay. * EmptyTree if an error occurs. */ - def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = { + def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = pluginsTypedMacroBody(typer, macroDdef) + + /** Default implementation of `typedMacroBody`. 
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsTypedMacroBody for more details) + */ + def standardTypedMacroBody(typer: Typer, macroDdef: DefDef): Tree = { val macroDef = macroDdef.symbol assert(macroDef.isMacro, macroDdef) @@ -350,7 +354,6 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { val universe: self.global.type = self.global val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer] val expandee = universe.analyzer.macroExpanderAttachment(expandeeTree).original orElse duplicateAndKeepPositions(expandeeTree) - val macroRole = universe.analyzer.macroExpanderAttachment(expandeeTree).role } with UnaffiliatedMacroContext { val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing) override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */) @@ -360,8 +363,12 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { /** Calculate the arguments to pass to a macro implementation when expanding the provided tree. */ case class MacroArgs(c: MacroContext, others: List[Any]) + def macroArgs(typer: Typer, expandee: Tree): MacroArgs = pluginsMacroArgs(typer, expandee) - private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = { + /** Default implementation of `macroArgs`. + * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroArgs for more details) + */ + def standardMacroArgs(typer: Typer, expandee: Tree): MacroArgs = { val macroDef = expandee.symbol val paramss = macroDef.paramss val treeInfo.Applied(core, targs, argss) = expandee @@ -471,20 +478,14 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { } /** Keeps track of macros in-flight. - * See more informations in comments to `openMacros` in `scala.reflect.macros.WhiteboxContext`. + * See more informations in comments to `openMacros` in `scala.reflect.macros.whitebox.Context`. 
*/ - private var _openMacros = List[MacroContext]() + var _openMacros = List[MacroContext]() def openMacros = _openMacros - private def pushMacroContext(c: MacroContext) = _openMacros ::= c - private def popMacroContext() = _openMacros = _openMacros.tail + def pushMacroContext(c: MacroContext) = _openMacros ::= c + def popMacroContext() = _openMacros = _openMacros.tail def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition - /** Describes the role that the macro expandee is performing. - */ - type MacroRole = scala.tools.nsc.typechecker.MacroRole - final def APPLY_ROLE = MacroRole.Apply - final def UNAPPLY_ROLE = MacroRole.Unapply - /** Performs macro expansion: * * ========= Expandable trees ========= @@ -527,30 +528,24 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { * the expandee with an error marker set if the expansion has been cancelled due malformed arguments or implementation * the expandee with an error marker set if there has been an error */ - private abstract class MacroExpander[Result: ClassTag](val role: MacroRole, val typer: Typer, val expandee: Tree) { - def allowExpandee(expandee: Tree): Boolean = true - def allowExpanded(expanded: Tree): Boolean = true - def allowedExpansions: String = "anything" - def allowResult(result: Result): Boolean = true - - def onSuccess(expanded: Tree): Result - def onFallback(expanded: Tree): Result - def onSuppressed(expandee: Tree): Result = expandee match { case expandee: Result => expandee } - def onDelayed(expanded: Tree): Result = expanded match { case expanded: Result => expanded } - def onSkipped(expanded: Tree): Result = expanded match { case expanded: Result => expanded } - def onFailure(expanded: Tree): Result = { typer.infer.setError(expandee); expandee match { case expandee: Result => expandee } } - - def apply(desugared: Tree): Result = { + abstract class MacroExpander(val typer: Typer, val expandee: Tree) { + def 
onSuccess(expanded: Tree): Tree + def onFallback(expanded: Tree): Tree + def onSuppressed(expandee: Tree): Tree = expandee + def onDelayed(expanded: Tree): Tree = expanded + def onSkipped(expanded: Tree): Tree = expanded + def onFailure(expanded: Tree): Tree = { typer.infer.setError(expandee); expandee } + + def apply(desugared: Tree): Tree = { if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee) else expand(desugared) } - protected def expand(desugared: Tree): Result = { + protected def expand(desugared: Tree): Tree = { def showDetailed(tree: Tree) = showRaw(tree, printIds = true, printTypes = true) def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}" if (macroDebugVerbose) println(s"macroExpand: ${summary()}") - assert(allowExpandee(expandee), summary()) - linkExpandeeAndDesugared(expandee, desugared, role) + linkExpandeeAndDesugared(expandee, desugared) val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null if (Statistics.canEnable) Statistics.incCounter(macroExpandCount) @@ -562,21 +557,17 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { onFailure(typer.infer.setError(expandee)) } else try { val expanded = { - val runtime = macroRuntime(expandee.symbol) + val runtime = macroRuntime(expandee) if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime) else macroExpandWithoutRuntime(typer, expandee) } expanded match { case Success(expanded) => - if (allowExpanded(expanded)) { - // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc - val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext() - if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1) - if (allowResult(expanded1)) expanded1 else onFailure(expanded) - } else { - 
typer.TyperErrorGen.MacroInvalidExpansionError(expandee, role.name, allowedExpansions) - onFailure(expanded) - } + // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc + val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext() + if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1) + if (settings.Ymacroexpand.value == settings.MacroExpand.Discard) expandee.setType(expanded1.tpe) + else expanded1 case Fallback(fallback) => onFallback(fallback) case Delayed(delayed) => onDelayed(delayed) case Skipped(skipped) => onSkipped(skipped) @@ -592,151 +583,136 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { } } - /** Expands a tree that carries a term, which happens to be a term macro. - * @see MacroExpander - */ - private abstract class TermMacroExpander(role: MacroRole, typer: Typer, expandee: Tree, mode: Mode, pt: Type) - extends MacroExpander[Tree](role, typer, expandee) { - override def allowedExpansions: String = "term trees" - override def allowExpandee(expandee: Tree) = expandee.isTerm - override def onSuccess(expanded: Tree) = typer.typed(expanded, mode, pt) - override def onFallback(fallback: Tree) = typer.typed(fallback, mode, pt) - } - /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`. * @param outerPt Expected type that comes from enclosing context (something that's traditionally called `pt`). * @param innerPt Expected type that comes from the signature of a macro def, possibly wildcarded to help type inference. 
- * @see MacroExpander */ - def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, outerPt: Type): Tree = { - object expander extends TermMacroExpander(APPLY_ROLE, typer, expandee, mode, outerPt) { - lazy val innerPt = { - val tp = if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe - if (isBlackbox(expandee)) tp - else { - // approximation is necessary for whitebox macros to guide type inference - // read more in the comments for onDelayed below - val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol } - deriveTypeWithWildcards(undetparams)(tp) - } + class DefMacroExpander(typer: Typer, expandee: Tree, mode: Mode, outerPt: Type) + extends MacroExpander(typer, expandee) { + lazy val innerPt = { + val tp = if (isNullaryInvocation(expandee)) expandee.tpe.finalResultType else expandee.tpe + if (isBlackbox(expandee)) tp + else { + // approximation is necessary for whitebox macros to guide type inference + // read more in the comments for onDelayed below + val undetparams = tp collect { case tp if tp.typeSymbol.isTypeParameter => tp.typeSymbol } + deriveTypeWithWildcards(undetparams)(tp) } - override def onSuccess(expanded0: Tree) = { - // prematurely annotate the tree with a macro expansion attachment - // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup - linkExpandeeAndExpanded(expandee, expanded0) - - def typecheck(label: String, tree: Tree, pt: Type): Tree = { - if (tree.isErrorTyped) tree - else { - if (macroDebugVerbose) println(s"$label (against pt = $pt): $tree") - // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled - // therefore we need to re-enable the conversions back temporarily - val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt)) - if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}") - result - } - } + } + override 
def onSuccess(expanded0: Tree) = { + // prematurely annotate the tree with a macro expansion attachment + // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup + linkExpandeeAndExpanded(expandee, expanded0) - if (isBlackbox(expandee)) { - val expanded1 = atPos(enclosingMacroPosition.makeTransparent)(Typed(expanded0, TypeTree(innerPt))) - typecheck("blackbox typecheck", expanded1, outerPt) - } else { - val expanded1 = expanded0 - val expanded2 = typecheck("whitebox typecheck #1", expanded1, outerPt) - typecheck("whitebox typecheck #2", expanded2, innerPt) + def typecheck(label: String, tree: Tree, pt: Type): Tree = { + if (tree.isErrorTyped) tree + else { + if (macroDebugVerbose) println(s"$label (against pt = $pt): $tree") + // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled + // therefore we need to re-enable the conversions back temporarily + val result = typer.context.withImplicitsEnabled(typer.typed(tree, mode, pt)) + if (result.isErrorTyped && macroDebugVerbose) println(s"$label has failed: ${typer.context.reportBuffer.errors}") + result } } - override def onDelayed(delayed: Tree) = { - // =========== THE SITUATION =========== - // - // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee), - // then there are two possible situations we're in: - // 1) We're in POLYmode, when the typer tests the waters wrt type inference - // (e.g. as in typedArgToPoly in doTypedApply). - // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type - // (e.g. if we're an argument to a function call, then this means that no previous argument lists - // can determine our type variables for us). - // - // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that - // there's nothing outrageously wrong with our undetermined type params (from what I understand!). 
- // - // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer - // the undetermined type params. Therefore we need to do something ourselves or otherwise this - // expandee will forever remaing not expanded (see SI-5692). A traditional way out of this conundrum - // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases, - // but sometimes, if the inferencer lacks information, it will be forced to approximate. - // - // =========== THE PROBLEM =========== - // - // Consider the following example (thanks, Miles!): - // - // Iso represents an isomorphism between two datatypes: - // 1) An arbitrary one (e.g. a random case class) - // 2) A uniform representation for all datatypes (e.g. an HList) - // - // trait Iso[T, U] { - // def to(t : T) : U - // def from(u : U) : T - // } - // implicit def materializeIso[T, U]: Iso[T, U] = macro ??? - // - // case class Foo(i: Int, s: String, b: Boolean) - // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c) - // foo(Foo(23, "foo", true)) - // - // In the snippet above, even though we know that there's a fundep going from T to U - // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype, - // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information - // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want. - // - // =========== THE SOLUTION (ENABLED ONLY FOR WHITEBOX MACROS) =========== - // - // To give materializers a chance to say their word before vanilla inference kicks in, - // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo) - // and then trigger macro expansion with the undetermined type parameters still there. - // Thanks to that the materializer can take a look at what's going on and react accordingly. 
- val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode - if (shouldInstantiate) { - if (isBlackbox(expandee)) typer.instantiatePossiblyExpectingUnit(delayed, mode, outerPt) - else { - forced += delayed - typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), outerPt, keepNothings = false) - macroExpandApply(typer, delayed, mode, outerPt) - } - } else delayed + + if (isBlackbox(expandee)) { + val expanded1 = atPos(enclosingMacroPosition.makeTransparent)(Typed(expanded0, TypeTree(innerPt))) + typecheck("blackbox typecheck", expanded1, outerPt) + } else { + val expanded1 = expanded0 + val expanded2 = typecheck("whitebox typecheck #1", expanded1, outerPt) + typecheck("whitebox typecheck #2", expanded2, innerPt) } } - expander(expandee) + override def onDelayed(delayed: Tree) = { + // =========== THE SITUATION =========== + // + // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee), + // then there are two possible situations we're in: + // 1) We're in POLYmode, when the typer tests the waters wrt type inference + // (e.g. as in typedArgToPoly in doTypedApply). + // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type + // (e.g. if we're an argument to a function call, then this means that no previous argument lists + // can determine our type variables for us). + // + // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that + // there's nothing outrageously wrong with our undetermined type params (from what I understand!). + // + // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer + // the undetermined type params. Therefore we need to do something ourselves or otherwise this + // expandee will forever remaing not expanded (see SI-5692). 
A traditional way out of this conundrum + // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases, + // but sometimes, if the inferencer lacks information, it will be forced to approximate. + // + // =========== THE PROBLEM =========== + // + // Consider the following example (thanks, Miles!): + // + // Iso represents an isomorphism between two datatypes: + // 1) An arbitrary one (e.g. a random case class) + // 2) A uniform representation for all datatypes (e.g. an HList) + // + // trait Iso[T, U] { + // def to(t : T) : U + // def from(u : U) : T + // } + // implicit def materializeIso[T, U]: Iso[T, U] = macro ??? + // + // case class Foo(i: Int, s: String, b: Boolean) + // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c) + // foo(Foo(23, "foo", true)) + // + // In the snippet above, even though we know that there's a fundep going from T to U + // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype, + // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information + // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want. + // + // =========== THE SOLUTION (ENABLED ONLY FOR WHITEBOX MACROS) =========== + // + // To give materializers a chance to say their word before vanilla inference kicks in, + // we infer as much as possible (e.g. in the example above even though L is hopeless, C still can be inferred to Foo) + // and then trigger macro expansion with the undetermined type parameters still there. + // Thanks to that the materializer can take a look at what's going on and react accordingly. 
+ val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode + if (shouldInstantiate) { + if (isBlackbox(expandee)) typer.instantiatePossiblyExpectingUnit(delayed, mode, outerPt) + else { + forced += delayed + typer.infer.inferExprInstance(delayed, typer.context.extractUndetparams(), outerPt, keepNothings = false) + macroExpand(typer, delayed, mode, outerPt) + } + } else delayed + } + override def onFallback(fallback: Tree) = typer.typed(fallback, mode, outerPt) } - /** Expands a term macro used in unapply role as `u.Quasiquote(StringContext("", "")).q.unapply(x)` in `case q"$x" => ...`. - * @see MacroExpander + /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`. + * @see DefMacroExpander */ - def macroExpandUnapply(typer: Typer, original: Tree, fun: Tree, unapply: Symbol, args: List[Tree], mode: Mode, pt: Type) = { - val expandee = treeCopy.Apply(original, gen.mkAttributedSelect(fun, unapply), args) - object expander extends TermMacroExpander(UNAPPLY_ROLE, typer, expandee, mode, pt) { - override def allowedExpansions: String = "unapply trees" - override def allowExpandee(expandee: Tree) = expandee.isInstanceOf[Apply] - private def unsupported(what: String) = abort("unapply macros currently don't support " + what) - override def onFallback(fallback: Tree) = unsupported("fallback") - override def onDelayed(delayed: Tree) = unsupported("advanced interaction with type inference") - } - expander(original) + def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt) + + /** Default implementation of `macroExpand`. 
+ * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details) + */ + def standardMacroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = { + val expander = new DefMacroExpander(typer, expandee, mode, pt) + expander(expandee) } - private sealed abstract class MacroStatus(val result: Tree) - private case class Success(expanded: Tree) extends MacroStatus(expanded) - private case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true } - private case class Delayed(delayed: Tree) extends MacroStatus(delayed) - private case class Skipped(skipped: Tree) extends MacroStatus(skipped) - private case class Failure(failure: Tree) extends MacroStatus(failure) - private def Delay(expanded: Tree) = Delayed(expanded) - private def Skip(expanded: Tree) = Skipped(expanded) + sealed abstract class MacroStatus(val result: Tree) + case class Success(expanded: Tree) extends MacroStatus(expanded) + case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true } + case class Delayed(delayed: Tree) extends MacroStatus(delayed) + case class Skipped(skipped: Tree) extends MacroStatus(skipped) + case class Failure(failure: Tree) extends MacroStatus(failure) + def Delay(expanded: Tree) = Delayed(expanded) + def Skip(expanded: Tree) = Skipped(expanded) /** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded * Meant for internal use within the macro infrastructure, don't use it elsewhere. 
*/ - private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = { + def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = { val wasDelayed = isDelayed(expandee) val undetparams = calculateUndetparams(expandee) val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty @@ -767,7 +743,31 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { macroLogLite("" + expanded + "\n" + showRaw(expanded)) val freeSyms = expanded.freeTerms ++ expanded.freeTypes freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym)) - Success(atPos(enclosingMacroPosition.focus)(expanded)) + // Macros might have spliced arguments with range positions into non-compliant + // locations, notably, under a tree without a range position. Or, they might + // splice a tree that `resetAttrs` has assigned NoPosition. + // + // Here, we just convert all positions in the tree to offset positions, and + // convert NoPositions to something sensible. + // + // Given that the IDE now sees the expandee (by using -Ymacro-expand:discard), + // this loss of position fidelity shouldn't cause any real problems. + // + // Alternatively, we could pursue a way to exclude macro expansions from position + // invariant checking, or find a way not to touch expansions that happen to validate. + // + // This would be useful for cases like: + // + // macro1 { macro2 { "foo" } } + // + // to allow `macro1` to see the range position of the "foo". 
+ val expandedPos = enclosingMacroPosition.focus + def fixPosition(pos: Position) = + if (pos == NoPosition) expandedPos else pos.focus + expanded.foreach(t => t.pos = fixPosition(t.pos)) + + val result = atPos(enclosingMacroPosition.focus)(expanded) + Success(result) } expanded match { case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree) @@ -793,7 +793,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { /** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded * Meant for internal use within the macro infrastructure, don't use it elsewhere. */ - private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = { + def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = { import typer.TyperErrorGen._ val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee) macroLogLite(s"falling back to: $fallbackSym") @@ -871,7 +871,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers { context.implicitsEnabled = typer.context.implicitsEnabled context.enrichmentEnabled = typer.context.enrichmentEnabled context.macrosEnabled = typer.context.macrosEnabled - macroExpandApply(newTyper(context), tree, EXPRmode, WildcardType) + macroExpand(newTyper(context), tree, EXPRmode, WildcardType) case _ => tree }) @@ -902,12 +902,3 @@ object Fingerprint { val LiftedTyped = new Fingerprint(-2) val LiftedUntyped = new Fingerprint(-3) } - -class MacroRole private[MacroRole](val name: String) extends AnyVal { - override def toString = name -} - -object MacroRole { - val Apply = new MacroRole("apply") - val Unapply = new MacroRole("unapply") -} diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 86bb99e7fa..27e8698676 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -22,7 +22,7 @@ trait Namers extends MethodSynthesis { import global._ import definitions._ - private var _lockedCount = 0 + var _lockedCount = 0 def lockedCount = this._lockedCount /** Replaces any Idents for which cond is true with fresh TypeTrees(). @@ -107,8 +107,8 @@ trait Namers extends MethodSynthesis { } protected def owner = context.owner - private def contextFile = context.unit.source.file - private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = { + def contextFile = context.unit.source.file + def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = { case ex: TypeError => // H@ need to ensure that we handle only cyclic references TypeSigError(tree, ex) @@ -122,10 +122,31 @@ trait Namers extends MethodSynthesis { || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor) || (vd.name startsWith nme.OUTER) || (context.unit.isJava) + || isEnumConstant(vd) ) + def noFinishGetterSetter(vd: ValDef) = ( (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this] - || vd.symbol.isModuleVar) + || vd.symbol.isModuleVar + || isEnumConstant(vd)) + + /** Determines whether this field holds an enum constant. + * To qualify, the following conditions must be met: + * - The field's class has the ENUM flag set + * - The field's class extends java.lang.Enum + * - The field has the ENUM flag set + * - The field is static + * - The field is stable + */ + def isEnumConstant(vd: ValDef) = { + val ownerHasEnumFlag = + // Necessary to check because scalac puts Java's static members into the companion object + // while Scala's enum constants live directly in the class. + // We don't check for clazz.superClass == JavaEnumClass, because this causes a illegal + // cyclic reference error. See the commit message for details. 
+ if (context.unit.isJava) owner.companionClass.hasEnumFlag else owner.hasEnumFlag + vd.mods.hasAllFlags(ENUM | STABLE | STATIC) && ownerHasEnumFlag + } def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T = if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym @@ -243,7 +264,12 @@ trait Namers extends MethodSynthesis { validate(sym2.companionClass) } - def enterSym(tree: Tree): Context = { + def enterSym(tree: Tree): Context = pluginsEnterSym(this, tree) + + /** Default implementation of `enterSym`. + * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnterSym for more details) + */ + def standardEnterSym(tree: Tree): Context = { def dispatch() = { var returnContext = this.context tree match { @@ -309,7 +335,7 @@ trait Namers extends MethodSynthesis { * be transferred to the symbol as they are, supply a mask containing * the flags to keep. */ - private def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = { + def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = { val pos = tree.pos val isParameter = tree.mods.isParameter val flags = tree.mods.flags & mask @@ -327,14 +353,14 @@ trait Namers extends MethodSynthesis { else owner.newValue(name.toTermName, pos, flags) } } - private def createFieldSymbol(tree: ValDef): TermSymbol = + def createFieldSymbol(tree: ValDef): TermSymbol = owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal) - private def createImportSymbol(tree: Tree) = + def createImportSymbol(tree: Tree) = NoSymbol.newImport(tree.pos) setInfo completerOf(tree) /** All PackageClassInfoTypes come from here. 
*/ - private def createPackageSymbol(pos: Position, pid: RefTree): Symbol = { + def createPackageSymbol(pos: Position, pid: RefTree): Symbol = { val pkgOwner = pid match { case Ident(_) => if (owner.isEmptyPackageClass) rootMirror.RootClass else owner case Select(qual: RefTree, _) => createPackageSymbol(pos, qual).moduleClass @@ -393,7 +419,7 @@ trait Namers extends MethodSynthesis { /** Given a ClassDef or ModuleDef, verifies there isn't a companion which * has been defined in a separate file. */ - private def validateCompanionDefs(tree: ImplDef) { + def validateCompanionDefs(tree: ImplDef) { val sym = tree.symbol orElse { return } val ctx = if (context.owner.isPackageObjectClass) context.outer else context val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name @@ -466,7 +492,13 @@ trait Namers extends MethodSynthesis { * class definition tree. * @return the companion object symbol. */ - def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = { + def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = + pluginsEnsureCompanionObject(this, cdef, creator) + + /** Default implementation of `ensureCompanionObject`. + * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsEnsureCompanionObject for more details) + */ + def standardEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = { val m = companionSymbolOf(cdef.symbol, context) // @luc: not sure why "currentRun.compiles(m)" is needed, things breaks // otherwise. documentation welcome. @@ -609,11 +641,7 @@ trait Namers extends MethodSynthesis { else enterGetterSetter(tree) - // When java enums are read from bytecode, they are known to have - // constant types by the jvm flag and assigned accordingly. When - // they are read from source, the java parser marks them with the - // STABLE flag, and now we receive that signal. 
- if (tree.symbol hasAllFlags STABLE | JAVA) + if (isEnumConstant(tree)) tree.symbol setInfo ConstantType(Constant(tree.symbol)) } @@ -828,9 +856,10 @@ trait Namers extends MethodSynthesis { * assigns the type to the tpt's node. Returns the type. */ private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = { - val rhsTpe = - if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt) - else defnTyper.computeType(tree.rhs, pt) + val rhsTpe = tree match { + case ddef: DefDef if tree.symbol.isTermMacro => defnTyper.computeMacroDefType(ddef, pt) + case _ => defnTyper.computeType(tree.rhs, pt) + } val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt) tree.tpt defineType defnTpe setPos tree.pos.focus @@ -1620,7 +1649,7 @@ trait Namers extends MethodSynthesis { val tree: Tree } - def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter { + def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter with FlagAgnosticCompleter { val tree = t def completeImpl(sym: Symbol) = c(sym) } diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 069d6d5fb2..41c656f8ce 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -78,26 +78,34 @@ trait PatternTypers { // Do some ad-hoc overloading resolution and update the tree's symbol and type // do not update the symbol if the tree's symbol's type does not define an unapply member // (e.g. 
since it's some method that returns an object with an unapply member) - val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember) - def caseClass = fun.tpe.typeSymbol.linkedClassOfClass + val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember) + val caseClass = fun.tpe.typeSymbol.linkedClassOfClass + val member = unapplyMember(fun.tpe) + def resultType = (fun.tpe memberType member).finalResultType + def isEmptyType = resultOfMatchingMethod(resultType, nme.isEmpty)() + def isOkay = ( + resultType.isErroneous + || (resultType <:< BooleanTpe) + || (isEmptyType <:< BooleanTpe) + || member.isMacro + || member.isOverloaded // the whole overloading situation is over the rails + ) // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala // A case class with 23+ params has no unapply method. - // A case class constructor be overloaded with unapply methods in the companion. - if (caseClass.isCase && !unapplyMember(fun.tpe).isOverloaded) + // A case class constructor may be overloaded with unapply methods in the companion. 
+ if (caseClass.isCase && !member.isOverloaded) logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt)) - else if (hasUnapplyMember(fun)) + else if (!reallyExists(member)) + CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member") + else if (isOkay) fun + else if (isEmptyType == NoType) + CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean") else - CaseClassConstructorError(fun) + CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean (found: def isEmpty: $isEmptyType)") } - def expectedPatternTypes(fun: Tree, args: List[Tree]): List[Type] = - newExtractorShape(fun, args).expectedPatternTypes - - def typedPatternArgs(fun: Tree, args: List[Tree], mode: Mode): List[Tree] = - typedArgsForFormals(args, newExtractorShape(fun, args).formals, mode) - def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = { def typedArgWithFormal(arg: Tree, pt: Type) = { val newMode = if (isByNameParamType(pt)) mode.onlySticky else mode.onlySticky | BYVALmode @@ -158,109 +166,6 @@ trait PatternTypers { case _ => wrapClassTagUnapply(treeTyped, extractor, tpe) } } - - def newExtractorShape(fun: Tree, args: List[Tree]): ExtractorShape = ExtractorShape(fun, args) - - case class CaseClassInfo(clazz: Symbol, classType: Type) { - def constructor = clazz.primaryConstructor - def constructorType = classType.prefix memberType clazz memberType constructor - def accessors = clazz.caseFieldAccessors - } - object NoCaseClassInfo extends CaseClassInfo(NoSymbol, NoType) { - override def toString = "NoCaseClassInfo" - } - - case class UnapplyMethodInfo(unapply: Symbol, tpe: Type) { - def name = unapply.name - def isUnapplySeq = name == nme.unapplySeq - def unapplyType = tpe memberType method - def resultType = tpe.finalResultType - def method = unapplyMember(tpe) - def paramType = 
firstParamType(unapplyType) - def rawGet = if (isBool) UnitTpe else typeOfMemberNamedGetOrSelf(resultType) - def rawTypes = if (isBool) Nil else typesOfSelectorsOrSelf(rawGet) - def isBool = resultType =:= BooleanTpe // aka "Tuple0" or "Option[Unit]" - } - - object NoUnapplyMethodInfo extends UnapplyMethodInfo(NoSymbol, NoType) { - override def toString = "NoUnapplyMethodInfo" - } - - case class ExtractorShape(fun: Tree, args: List[Tree]) { - def pos = fun.pos - private def symbol = fun.symbol - private def tpe = fun.tpe - - val ccInfo = tpe.typeSymbol.linkedClassOfClass match { - case clazz if clazz.isCase => CaseClassInfo(clazz, tpe) - case _ => NoCaseClassInfo - } - val exInfo = UnapplyMethodInfo(symbol, tpe) - import exInfo.{ rawGet, rawTypes, isUnapplySeq } - - override def toString = s"ExtractorShape($fun, $args)" - - def unapplyMethod = exInfo.method - def unapplyType = exInfo.unapplyType - def unapplyParamType = exInfo.paramType - def enclClass = symbol.enclClass - - // TODO - merge these. The difference between these two methods is that expectedPatternTypes - // expands the list of types so it is the same length as the number of patterns, whereas formals - // leaves the varargs type unexpanded. 
- def formals = ( - if (isUnapplySeq) productTypes :+ varargsType - else if (elementArity == 0) productTypes - else if (isSingle) squishIntoOne() - else wrongArity(patternFixedArity) - ) - def expectedPatternTypes = elementArity match { - case 0 => productTypes - case _ if elementArity > 0 && isUnapplySeq => productTypes ::: elementTypes - case _ if productArity > 1 && patternFixedArity == 1 => squishIntoOne() - case _ => wrongArity(patternFixedArity) - } - - def elementType = elementTypeOfLastSelectorOrSelf(rawGet) - - private def hasBogusExtractor = directUnapplyMember(tpe).exists && !unapplyMethod.exists - private def expectedArity = "" + productArity + ( if (isUnapplySeq) "+" else "") - private def wrongArityMsg(n: Int) = ( - if (hasBogusExtractor) s"$enclClass does not define a valid extractor method" - else s"wrong number of patterns for $enclClass offering $rawTypes_s: expected $expectedArity, found $n" - ) - private def rawTypes_s = rawTypes match { - case Nil => "()" - case tp :: Nil => "" + tp - case tps => tps.mkString("(", ", ", ")") - } - - private def err(msg: String) = { unit.error(pos, msg) ; throw new TypeError(msg) } - private def wrongArity(n: Int) = err(wrongArityMsg(n)) - - def squishIntoOne() = { - if (settings.lint) - unit.warning(pos, s"$enclClass expects $expectedArity patterns to hold $rawGet but crushing into $productArity-tuple to fit single pattern (SI-6675)") - - rawGet :: Nil - } - // elementArity is the number of non-sequence patterns minus the - // the number of non-sequence product elements returned by the extractor. - // If it is zero, there is a perfect match between those parts, and - // if there is a wildcard star it will match any sequence. - // If it is positive, there are more patterns than products, - // so a sequence will have to fill in the elements. If it is negative, - // there are more products than patterns, which is a compile time error. 
- def elementArity = patternFixedArity - productArity - def patternFixedArity = treeInfo effectivePatternArity args - def productArity = productTypes.size - def isSingle = !isUnapplySeq && (patternFixedArity == 1) - - def productTypes = if (isUnapplySeq) rawTypes dropRight 1 else rawTypes - def elementTypes = List.fill(elementArity)(elementType) - def varargsType = scalaRepeatedType(elementType) - } - private class VariantToSkolemMap extends TypeMap(trackVariance = true) { private val skolemBuffer = mutable.ListBuffer[TypeSymbol]() @@ -365,10 +270,12 @@ trait PatternTypers { case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType case _ => UnapplyWithSingleArgError(fun) ; ErrorType } - val shape = newExtractorShape(fun, args) - import shape.{ unapplyParamType, unapplyType, unapplyMethod } + val unapplyMethod = unapplyMember(fun.tpe) + val unapplyType = fun.tpe memberType unapplyMethod + val unapplyParamType = firstParamType(unapplyType) + def isSeq = unapplyMethod.name == nme.unapplySeq - def extractor = extractorForUncheckedType(shape.pos, unapplyParamType) + def extractor = extractorForUncheckedType(fun.pos, unapplyParamType) def canRemedy = unapplyParamType match { case RefinedType(_, decls) if !decls.isEmpty => false case RefinedType(parents, _) if parents exists isUncheckable => false @@ -400,7 +307,8 @@ trait PatternTypers { // the union of the expected type and the inferred type of the argument to unapply val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil) val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass) - val args1 = typedPatternArgs(fun1, args, mode) + val formals = patmat.alignPatterns(fun1, args).unexpandedFormals + val args1 = typedArgsForFormals(args, formals, mode) val result = UnApply(fun1, args1) setPos tree.pos setType glbType if (wrapInTypeTest) diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala 
b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 14f47a00fd..995f98cc2c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -16,7 +16,7 @@ trait StdAttachments { /** Scratchpad for the macro expander, which is used to store all intermediate data except the details about the runtime. */ - case class MacroExpanderAttachment(original: Tree, desugared: Tree, role: MacroRole) + case class MacroExpanderAttachment(original: Tree, desugared: Tree) /** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment. */ @@ -24,15 +24,15 @@ trait StdAttachments { tree.attachments.get[MacroExpanderAttachment] getOrElse { tree match { case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn) - case _ => MacroExpanderAttachment(tree, EmptyTree, APPLY_ROLE) + case _ => MacroExpanderAttachment(tree, EmptyTree) } } /** After macro expansion is completed, links the expandee and the expansion result * by annotating them both with a `MacroExpansionAttachment`. */ - def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree, role: MacroRole): Unit = { - val metadata = MacroExpanderAttachment(expandee, desugared, role) + def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree): Unit = { + val metadata = MacroExpanderAttachment(expandee, desugared) expandee updateAttachment metadata desugared updateAttachment metadata } @@ -95,7 +95,7 @@ trait StdAttachments { /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children. 
*/ def isMacroExpansionSuppressed(tree: Tree): Boolean = - ( settings.Ymacronoexpand.value // SI-6812 + ( settings.Ymacroexpand.value == settings.MacroExpand.None // SI-6812 || tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined || (tree match { // we have to account for the fact that during typechecking an expandee might become wrapped, diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index f0252251f7..9516f94135 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -380,7 +380,7 @@ trait SyntheticMethods extends ast.TreeDSL { val original = ddef.symbol val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc => newAcc.makePublic - newAcc resetFlag (ACCESSOR | PARAMACCESSOR) + newAcc resetFlag (ACCESSOR | PARAMACCESSOR | OVERRIDE) ddef.rhs.duplicate } // TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`? 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dbe85f4f5a..6b5afce993 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1102,7 +1102,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tree.isType) adaptType() else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree) && !isMacroExpansionSuppressed(tree)) - macroExpandApply(this, tree, mode, pt) + macroExpand(this, tree, mode, pt) else if (mode.typingConstructorPattern) typedConstructorPattern(tree, pt) else if (shouldInsertApply(tree)) @@ -1853,12 +1853,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } protected def enterSym(txt: Context, tree: Tree): Context = - if (txt eq context) namer.enterSym(tree) - else newNamer(txt).enterSym(tree) + if (txt eq context) namer enterSym tree + else newNamer(txt) enterSym tree /** <!-- 2 --> Check that inner classes do not inherit from Annotation */ - def typedTemplate(templ: Template, parents1: List[Tree]): Template = { + def typedTemplate(templ0: Template, parents1: List[Tree]): Template = { + val templ = templ0 + // please FIXME: uncommenting this line breaks everything + // val templ = treeCopy.Template(templ0, templ0.body, templ0.self, templ0.parents) val clazz = context.owner clazz.annotations.map(_.completeInfo()) if (templ.symbol == NoSymbol) @@ -1886,7 +1889,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ) // the following is necessary for templates generated later assert(clazz.info.decls != EmptyScope, clazz) - enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body) + val body1 = pluginsEnterStats(this, templ.body) + enterSyms(context.outer.make(templ, clazz, clazz.info.decls), body1) if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't 
validate it anymore validateParentClasses(parents1, selfType) if (clazz.isCase) @@ -1900,26 +1904,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType]) - val body = { - val body = - if (isPastTyper || reporter.hasErrors) templ.body - else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _)) - val primaryCtor = treeInfo.firstConstructor(body) + val body2 = { + val body2 = + if (isPastTyper || reporter.hasErrors) body1 + else body1 flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _)) + val primaryCtor = treeInfo.firstConstructor(body2) val primaryCtor1 = primaryCtor match { case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) => val argss = superArgs(parents1.head) getOrElse Nil - val pos = wrappingPos(parents1.head.pos, argss.flatten) + val pos = wrappingPos(parents1.head.pos, primaryCtor :: argss.flatten).makeTransparent val superCall = atPos(pos)(PrimarySuperCall(argss)) deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos case _ => primaryCtor } - body mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat } + body2 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat } } - val body1 = typedStats(body, templ.symbol) + val body3 = typedStats(body2, templ.symbol) if (clazz.info.firstParent.typeSymbol == AnyValClass) - validateDerivedValueClass(clazz, body1) + validateDerivedValueClass(clazz, body3) if (clazz.isTrait) { for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) { @@ -1927,7 +1931,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe_* + treeCopy.Template(templ, parents1, self1, body3) setType 
clazz.tpe_* } /** Remove definition annotations from modifiers (they have been saved @@ -2309,10 +2313,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - def typedBlock(block: Block, mode: Mode, pt: Type): Block = { + def typedBlock(block0: Block, mode: Mode, pt: Type): Block = { val syntheticPrivates = new ListBuffer[Symbol] try { - namer.enterSyms(block.stats) + namer.enterSyms(block0.stats) + val block = treeCopy.Block(block0, pluginsEnterStats(this, block0.stats), block0.expr) for (stat <- block.stats) enterLabelDef(stat) if (phaseId(currentPeriod) <= currentRun.typerPhase.id) { @@ -3797,7 +3802,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = { for (wc <- tree.whereClauses) - if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL } + if (wc.symbol == NoSymbol) { namer enterSym wc; wc.symbol setFlag EXISTENTIAL } else context.scope enter wc.symbol val whereClauses1 = typedStats(tree.whereClauses, context.owner) for (vd @ ValDef(_, _, _, _) <- whereClauses1) @@ -4948,7 +4953,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val sym: Symbol = tree.symbol if ((sym ne null) && (sym ne NoSymbol)) sym.initialize - def typedPackageDef(pdef: PackageDef) = { + def typedPackageDef(pdef0: PackageDef) = { + val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats)) val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree] assert(sym.moduleClass ne NoSymbol, sym) val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls)) @@ -5494,25 +5500,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tpe } - def computeMacroDefType(tree: Tree, pt: Type): Type = { + def computeMacroDefType(ddef: DefDef, pt: Type): Type = { assert(context.owner.isMacro, context.owner) - 
assert(tree.symbol.isMacro, tree.symbol) - assert(tree.isInstanceOf[DefDef], tree.getClass) - val ddef = tree.asInstanceOf[DefDef] + assert(ddef.symbol.isMacro, ddef.symbol) - val tree1 = + val rhs1 = if (transformed contains ddef.rhs) { // macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap // if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree` // here we guard against this case transformed(ddef.rhs) } else { - val tree1 = typedMacroBody(this, ddef) - transformed(ddef.rhs) = tree1 - tree1 + val rhs1 = typedMacroBody(this, ddef) + transformed(ddef.rhs) = rhs1 + rhs1 } - val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree + val isMacroBodyOkay = !ddef.symbol.isErroneous && !(rhs1 exists (_.isErroneous)) && rhs1 != EmptyTree val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty if (isMacroBodyOkay && shouldInheritMacroImplReturnType) { val commonMessage = "macro defs must have explicitly specified return types" @@ -5524,7 +5528,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12" unit.deprecationWarning(ddef.pos, s"$commonMessage ($explanation)") } - computeMacroDefTypeFromMacroImplRef(ddef, tree1) match { + computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match { case ErrorType => ErrorType case NothingTpe => NothingTpe case NoType => reportFailure(); AnyTpe diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 27f10ff00a..b55a573029 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -88,7 +88,6 @@ trait InteractiveAnalyzer extends Analyzer { 
} } - /** The main class of the presentation compiler in an interactive environment such as an IDE */ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends { @@ -105,6 +104,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") import definitions._ + if (!settings.Ymacroexpand.isSetByUser) + settings.Ymacroexpand.value = settings.MacroExpand.Discard + val debugIDE: Boolean = settings.YpresentationDebug.value val verboseIDE: Boolean = settings.YpresentationVerbose.value diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index bc490d8d45..343986a45d 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -94,7 +94,7 @@ private[tests] trait CoreTestDefs askAllSources(HyperlinkMarker) { pos => askTypeAt(pos)(NullReporter) } { (pos, tree) => - if(tree.symbol == compiler.NoSymbol) { + if(tree.symbol == compiler.NoSymbol || tree.symbol == null) { reporter.println("\nNo symbol is associated with tree: "+tree) } else { diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt index 0722d808bf..4795a47efe 100644 --- a/src/library/rootdoc.txt +++ b/src/library/rootdoc.txt @@ -2,21 +2,54 @@ This is the documentation for the Scala standard library. == Package structure == -The [[scala]] package contains core types. - -[[scala.collection `scala.collection`]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable `scala.collection.immutable`]] and [[scala.collection.mutable `scala.collection.mutable`]] data structures are available, with immutable as the default. The [[scala.collection.parallel `scala.collection.parallel`]] collections provide automatic parallel operation. 
- -Other important packages include: - - - [[scala.actors `scala.actors`]] - Concurrency framework inspired by Erlang. - - [[scala.io `scala.io`]] - Input and output. - - [[scala.math `scala.math`]] - Basic math functions and additional numeric types. - - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system. - - [[scala.util.matching `scala.util.matching`]] - Pattern matching in text using regular expressions. - - [[scala.util.parsing.combinator `scala.util.parsing.combinator`]] - Composable combinators for parsing. - - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization. - -Many other packages exist. See the complete list on the left. +The [[scala]] package contains core types like [[scala.Int `Int`]], [[scala.Float `Float`]], [[scala.Array `Array`]] +or [[scala.Option `Option`]] which are accessible in all Scala compilation units without explicit qualification or +imports. + +Notable packages include: + + - [[scala.collection `scala.collection`]] and its sub-packages contain Scala's collections framework + - [[scala.collection.immutable `scala.collection.immutable`]] - Immutable, sequential data-structures such as + [[scala.collection.immutable.Vector `Vector`]], [[scala.collection.immutable.List `List`]], + [[scala.collection.immutable.Range `Range`]], [[scala.collection.immutable.HashMap `HashMap`]] or + [[scala.collection.immutable.HashSet `HashSet`]] + - [[scala.collection.mutable `scala.collection.mutable`]] - Mutable, sequential data-structures such as + [[scala.collection.mutable.ArrayBuffer `ArrayBuffer`]], + [[scala.collection.mutable.StringBuilder `StringBuilder`]], + [[scala.collection.mutable.HashMap `HashMap`]] or [[scala.collection.mutable.HashSet `HashSet`]] + - [[scala.collection.concurrent `scala.collection.concurrent`]] - Mutable, concurrent data-structures such as + [[scala.collection.concurrent.TrieMap `TrieMap`]] + - [[scala.collection.parallel.immutable 
`scala.collection.parallel.immutable`]] - Immutable, parallel + data-structures such as [[scala.collection.parallel.immutable.ParVector `ParVector`]], + [[scala.collection.parallel.immutable.ParRange `ParRange`]], + [[scala.collection.parallel.immutable.ParHashMap `ParHashMap`]] or + [[scala.collection.parallel.immutable.ParHashSet `ParHashSet`]] + - [[scala.collection.parallel.mutable `scala.collection.parallel.mutable`]] - Mutable, parallel + data-structures such as [[scala.collection.parallel.mutable.ParArray `ParArray`]], + [[scala.collection.parallel.mutable.ParHashMap `ParHashMap`]], + [[scala.collection.parallel.mutable.ParTrieMap `ParTrieMap`]] or + [[scala.collection.parallel.mutable.ParHashSet `ParHashSet`]] + - [[scala.concurrent `scala.concurrent`]] - Primitives for concurrent programming such as + [[scala.concurrent.Future `Futures`]] and [[scala.concurrent.Promise `Promises`]] + - [[scala.io `scala.io`]] - Input and output operations + - [[scala.math `scala.math`]] - Basic math functions and additional numeric types like + [[scala.math.BigInt `BigInt`]] and [[scala.math.BigDecimal `BigDecimal`]] + - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system + - [[scala.util.matching `scala.util.matching`]] - [[scala.util.matching.Regex Regular expressions]] + +Other packages exist. See the complete list on the left. + +Additional parts of the standard library are shipped as separate libraries. 
These include: + + - [[scala.reflect `scala.reflect`]] - Scala's reflection API (scala-reflect.jar) + - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization (scala-xml.jar) + - [[scala.swing `scala.swing`]] - A convenient wrapper around Java's GUI framework called Swing (scala-swing.jar) + - [[scala.util.continuations `scala.util.continuations`]] - Delimited continuations using continuation-passing-style + (scala-continuations-library.jar, scala-continuations-plugin.jar) + - [[scala.util.parsing `scala.util.parsing`]] - [[scala.util.parsing.combinator Parser combinators]], including an + example implementation of a [[scala.util.parsing.json JSON parser]] (scala-parser-combinators.jar) + - [[scala.actors `scala.actors`]] - Actor-based concurrency (deprecated and replaced by Akka actors, + scala-actors.jar) == Automatic imports == diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala index 9def6cb054..ff62948413 100644 --- a/src/library/scala/AnyVal.scala +++ b/src/library/scala/AnyVal.scala @@ -33,7 +33,7 @@ package scala * * User-defined value classes which avoid object allocation... * - * - must have a single, public `val` parameter that is the underlying runtime representation. + * - must have a single `val` parameter that is the underlying runtime representation. * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. * - typically extend no other trait apart from `AnyVal`. * - cannot be used in type tests or pattern matching. 
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala index 90a8977e81..ef39ee2134 100644 --- a/src/library/scala/App.scala +++ b/src/library/scala/App.scala @@ -28,9 +28,8 @@ import scala.collection.mutable.ListBuffer * functionality, which means that fields of the object will not have been initialized * before the main method has been executed.''''' * - * It should also be noted that the `main` method will not normally need to be overridden: - * the purpose is to turn the whole class body into the “main method”. You should only - * chose to override it if you know what you are doing. + * It should also be noted that the `main` method should not be overridden: + * the whole class body becomes the “main method”. * * @author Martin Odersky * @version 2.1, 15/02/2011 @@ -61,11 +60,12 @@ trait App extends DelayedInit { } /** The main method. - * This stores all argument so that they can be retrieved with `args` - * and the executes all initialization code segments in the order they were - * passed to `delayedInit` + * This stores all arguments so that they can be retrieved with `args` + * and then executes all initialization code segments in the order in which + * they were passed to `delayedInit`. 
* @param args the arguments passed to the main method */ + @deprecatedOverriding("main should not be overridden", "2.11.0") def main(args: Array[String]) = { this._args = args for (proc <- initCode) proc() diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 59be0cdfa3..d4b9c17eab 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -11,7 +11,7 @@ package scala import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet } import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField } import scala.reflect.NameTransformer._ -import java.util.regex.Pattern +import scala.util.matching.Regex /** Defines a finite set of values specific to the enumeration. Typically * these values enumerate all possible forms something can take and provide @@ -64,7 +64,7 @@ abstract class Enumeration (initial: Int) extends Serializable { */ override def toString = ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split - Pattern.quote(NAME_JOIN_STRING)).last + Regex.quote(NAME_JOIN_STRING)).last /** The mapping from the integer used to identify values to the actual * values. 
*/ diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala index 27b75c0491..c3bad60072 100644 --- a/src/library/scala/collection/GenSeqLike.scala +++ b/src/library/scala/collection/GenSeqLike.scala @@ -38,8 +38,8 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * Example: * * {{{ - * scala> val x = LinkedList(1, 2, 3, 4, 5) - * x: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4, 5) + * scala> val x = List(1, 2, 3, 4, 5) + * x: List[Int] = List(1, 2, 3, 4, 5) * * scala> x(3) * res1: Int = 4 @@ -190,7 +190,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal */ def lastIndexWhere(p: A => Boolean, end: Int): Int - /** Returns new $coll wih elements in reversed order. + /** Returns new $coll with elements in reversed order. * * $willNotTerminateInf * @@ -302,14 +302,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * * Example: * {{{ - * scala> val x = LinkedList(1) - * x: scala.collection.mutable.LinkedList[Int] = LinkedList(1) + * scala> val x = List(1) + * x: List[Int] = List(1) * * scala> val y = 2 +: x - * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2, 1) + * y: List[Int] = List(2, 1) * * scala> println(x) - * LinkedList(1) + * List(1) * }}} * * @return a new $coll consisting of `elem` followed @@ -335,17 +335,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * * Example: * {{{ - * scala> import scala.collection.mutable.LinkedList - * import scala.collection.mutable.LinkedList - * - * scala> val a = LinkedList(1) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1) - * + * scala> val a = List(1) + * a: List[Int] = List(1) + * * scala> val b = a :+ 2 - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * + * b: List[Int] = List(1, 2) + * * scala> println(a) - * LinkedList(1) + * List(1) * }}} * * @return a new $coll 
consisting of diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index a0c519884c..ca098e57b9 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -267,20 +267,20 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with * * Example: * {{{ - * scala> val a = LinkedList(1) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1) - * - * scala> val b = LinkedList(2) - * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2) - * + * scala> val a = List(1) + * a: List[Int] = List(1) + * + * scala> val b = List(2) + * b: List[Int] = List(2) + * * scala> val c = a ++ b - * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2) - * - * scala> val d = LinkedList('a') - * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a) - * + * c: List[Int] = List(1, 2) + * + * scala> val d = List('a') + * d: List[Char] = List(a) + * * scala> val e = c ++ d - * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a) + * e: List[AnyVal] = List(1, 2, a) * }}} * * @return a new $coll which contains all elements of this $coll diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index a9fe279599..01d179aeb6 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -130,8 +130,8 @@ trait GenTraversableOnce[+A] extends Any { * * Note that the folding function used to compute b is equivalent to that used to compute c. 
* {{{ - * scala> val a = LinkedList(1,2,3,4) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4) + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) * * scala> val b = (5 /: a)(_+_) * b: Int = 15 @@ -167,8 +167,8 @@ trait GenTraversableOnce[+A] extends Any { * * Note that the folding function used to compute b is equivalent to that used to compute c. * {{{ - * scala> val a = LinkedList(1,2,3,4) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4) + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) * * scala> val b = (a :\ 5)(_+_) * b: Int = 15 diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 26af32046c..072fd3da44 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -320,14 +320,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * Example: * * {{{ - * scala> val a = LinkedList(1,2,3,4) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4) - * + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * * scala> val b = new StringBuilder() - * b: StringBuilder = - * - * scala> a.addString(b, "LinkedList(", ", ", ")") - * res1: StringBuilder = LinkedList(1, 2, 3, 4) + * b: StringBuilder = + * + * scala> a.addString(b , "List(" , ", " , ")") + * res5: StringBuilder = List(1, 2, 3, 4) * }}} * * @param b the string builder to which elements are appended. 
@@ -362,9 +362,9 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * Example: * * {{{ - * scala> val a = LinkedList(1,2,3,4) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4) - * + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * * scala> val b = new StringBuilder() * b: StringBuilder = * @@ -385,14 +385,14 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * Example: * * {{{ - * scala> val a = LinkedList(1,2,3,4) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4) - * + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * * scala> val b = new StringBuilder() * b: StringBuilder = * * scala> val h = a.addString(b) - * b: StringBuilder = 1234 + * h: StringBuilder = 1234 * }}} * @param b the string builder to which elements are appended. diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 56f1802509..14ae57c43a 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -102,8 +102,14 @@ private[collection] trait Wrappers { override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying)) } + // Note various overrides to avoid performance gotchas. 
class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] { self => + override def contains(o: Object): Boolean = { + try { underlying.contains(o.asInstanceOf[A]) } + catch { case cce: ClassCastException => false } + } + override def isEmpty = underlying.isEmpty def size = underlying.size def iterator = new ju.Iterator[A] { val ui = underlying.iterator diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index fb0a34e64d..0a8524c139 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -59,7 +59,6 @@ class HashMap[A, +B] extends AbstractMap[A, B] override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] = this + elem1 + elem2 ++ elems - // TODO: optimize (might be able to use mutable updates) def - (key: A): HashMap[A, B] = removed0(key, computeHash(key), 0) @@ -168,8 +167,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { } } - // TODO: add HashMap2, HashMap3, ... 
- class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] { override def size = 1 @@ -277,7 +274,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { elems(index & 0x1f).get0(key, hash, level + 5) } else if ((bitmap & mask) != 0) { val offset = Integer.bitCount(bitmap & (mask-1)) - // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) elems(offset).get0(key, hash, level + 5) } else None @@ -289,7 +285,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { val offset = Integer.bitCount(bitmap & (mask-1)) if ((bitmap & mask) != 0) { val sub = elems(offset) - // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.updated0(key, hash, level + 5, value, kv, merger) if(subNew eq sub) this else { val elemsNew = new Array[HashMap[A,B1]](elems.length) @@ -312,7 +307,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { val offset = Integer.bitCount(bitmap & (mask-1)) if ((bitmap & mask) != 0) { val sub = elems(offset) - // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.removed0(key, hash, level + 5) if (subNew eq sub) this else if (subNew.isEmpty) { diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 9eaceccd9f..115be09502 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -12,9 +12,9 @@ package scala package collection package immutable -import scala.annotation.unchecked.{ uncheckedVariance => uV } import generic._ import scala.collection.parallel.immutable.ParHashSet +import scala.collection.GenSet /** This class implements immutable sets using a hash trie. 
* @@ -54,11 +54,34 @@ class HashSet[A] extends AbstractSet[A] def contains(e: A): Boolean = get0(e, computeHash(e), 0) + override def subsetOf(that: GenSet[A]) = that match { + case that:HashSet[A] => + // call the specialized implementation with a level of 0 since both this and that are top-level hash sets + subsetOf0(that, 0) + case _ => + // call the generic implementation + super.subsetOf(that) + } + + /** + * A specialized implementation of subsetOf for when both this and that are HashSet[A] and we can take advantage + * of the tree structure of both operands and the precalculated hashcodes of the HashSet1 instances. + * @param that the other set + * @param level the level of this and that hashset + * The purpose of level is to keep track of how deep we are in the tree. + * We need this information for when we arrive at a leaf and have to call get0 on that + * The value of level is 0 for a top-level HashSet and grows in increments of 5 + * @return true if all elements of this set are contained in that set + */ + protected def subsetOf0(that: HashSet[A], level: Int) = { + // The default implementation is for the empty set and returns true because the empty set is a subset of all sets + true + } + override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0) override def + (elem1: A, elem2: A, elems: A*): HashSet[A] = this + elem1 + elem2 ++ elems - // TODO: optimize (might be able to use mutable updates) def - (e: A): HashSet[A] = removed0(e, computeHash(e), 0) @@ -128,14 +151,20 @@ object HashSet extends ImmutableSetFactory[HashSet] { } } - // TODO: add HashSet2, HashSet3, ... 
- class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] { override def size = 1 override def get0(key: A, hash: Int, level: Int): Boolean = (hash == this.hash && key == this.key) + override def subsetOf0(that: HashSet[A], level: Int) = { + // check if that contains this.key + // we use get0 with our key and hash at the correct level instead of calling contains, + // which would not work since that might not be a top-level HashSet + // and in any case would be inefficient because it would require recalculating the hash code + that.get0(key, hash, level) + } + override def updated0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash && key == this.key) this else { @@ -162,6 +191,14 @@ object HashSet extends ImmutableSetFactory[HashSet] { override def get0(key: A, hash: Int, level: Int): Boolean = if (hash == this.hash) ks.contains(key) else false + override def subsetOf0(that: HashSet[A], level: Int) = { + // we have to check each element + // we use get0 with our hash at the correct level instead of calling contains, + // which would not work since that might not be a top-level HashSet + // and in any case would be inefficient because it would require recalculating the hash code + ks.forall(key => that.get0(key, hash, level)) + } + override def updated0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash) new HashSetCollision1(hash, ks + key) else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level) @@ -197,6 +234,42 @@ object HashSet extends ImmutableSetFactory[HashSet] { } + /** + * A branch node of the HashTrieSet with at least one and up to 32 children. + * + * @param bitmap encodes which element corresponds to which child + * @param elems the up to 32 children of this node. + * the number of children must be identical to the number of 1 bits in bitmap + * @param size0 the total number of elements. This is stored just for performance reasons. 
+ * @tparam A the type of the elements contained in this hash set. + * + * How levels work: + * + * When looking up or adding elements, the part of the hashcode that is used to address the children array depends + * on how deep we are in the tree. This is accomplished by having a level parameter in all internal methods + * that starts at 0 and increases by 5 (32 = 2^5) every time we go deeper into the tree. + * + * hashcode (binary): 00000000000000000000000000000000 + * level=0 (depth=0) ^^^^^ + * level=5 (depth=1) ^^^^^ + * level=10 (depth=2) ^^^^^ + * ... + * + * Be careful: a non-toplevel HashTrieSet is not a self-contained set, so e.g. calling contains on it will not work! + * It relies on its depth in the Trie for which part of a hash to use to address the children, but this information + * (the level) is not stored due to storage efficiency reasons but has to be passed explicitly! + * + * How bitmap and elems correspond: + * + * A naive implementation of a HashTrieSet would always have an array of size 32 for children and leave the unused + * children empty (null). But that would be very wasteful regarding memory. Instead, only non-empty children are + * stored in elems, and the bitmap is used to encode which elem corresponds to which child bucket. The lowest 1 bit + * corresponds to the first element, the second-lowest to the second, etc. 
+ * + * bitmap (binary): 00010000000000000000100000000000 + * elems: [a,b] + * children: ---b----------------a----------- + */ class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int) extends HashSet[A] { assert(Integer.bitCount(bitmap) == elems.length) @@ -212,7 +285,6 @@ object HashSet extends ImmutableSetFactory[HashSet] { elems(index & 0x1f).get0(key, hash, level + 5) } else if ((bitmap & mask) != 0) { val offset = Integer.bitCount(bitmap & (mask-1)) - // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site) elems(offset).get0(key, hash, level + 5) } else false @@ -223,7 +295,6 @@ object HashSet extends ImmutableSetFactory[HashSet] { val mask = (1 << index) val offset = Integer.bitCount(bitmap & (mask-1)) if ((bitmap & mask) != 0) { - // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site) val sub = elems(offset) val subNew = sub.updated0(key, hash, level + 5) if (sub eq subNew) this @@ -249,7 +320,6 @@ object HashSet extends ImmutableSetFactory[HashSet] { val offset = Integer.bitCount(bitmap & (mask-1)) if ((bitmap & mask) != 0) { val sub = elems(offset) - // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.removed0(key, hash, level + 5) if (sub eq subNew) this else if (subNew.isEmpty) { @@ -279,6 +349,49 @@ object HashSet extends ImmutableSetFactory[HashSet] { } } + override def subsetOf0(that: HashSet[A], level: Int): Boolean = if (that eq this) true else that match { + case that: HashTrieSet[A] if this.size0 <= that.size0 => + // create local mutable copies of members + var abm = this.bitmap + val a = this.elems + var ai = 0 + val b = that.elems + var bbm = that.bitmap + var bi = 0 + if ((abm & bbm) == abm) { + // I tried rewriting this using tail recursion, but the generated java byte code was less than optimal + while(abm!=0) { + // highest remaining bit in abm + val alsb = abm ^ (abm & (abm - 1)) 
+ // highest remaining bit in bbm + val blsb = bbm ^ (bbm & (bbm - 1)) + // if both trees have a bit set at the same position, we need to check the subtrees + if (alsb == blsb) { + // we are doing a comparison of a child of this with a child of that, + // so we have to increase the level by 5 to keep track of how deep we are in the tree + if (!a(ai).subsetOf0(b(bi), level + 5)) + return false + // clear lowest remaining one bit in abm and increase the a index + abm &= ~alsb; ai += 1 + } + // clear lowermost remaining one bit in bbm and increase the b index + // we must do this in any case + bbm &= ~blsb; bi += 1 + } + true + } else { + // the bitmap of this contains more one bits than the bitmap of that, + // so this can not possibly be a subset of that + false + } + case _ => + // if the other set is a HashTrieSet but has less elements than this, it can not be a subset + // if the other set is a HashSet1, we can not be a subset of it because we are a HashTrieSet with at least two children (see assertion) + // if the other set is a HashSetCollision1, we can not be a subset of it because we are a HashTrieSet with at least two different hash codes + // if the other set is the empty set, we are not a subset of it because we are not empty + false + } + override def iterator = new TrieIterator[A](elems.asInstanceOf[Array[Iterable[A]]]) { final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key } diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 486c2b6c8f..249d76584d 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -175,9 +175,36 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { catch { case _: ClassCastException => false } final override def sum[B >: T](implicit num: Numeric[B]): B = { - if (isEmpty) this.num fromInt 0 - else if (numRangeElements == 1) head - 
else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2)) + // arithmetic series formula can be used for regular addition + if ((num eq scala.math.Numeric.IntIsIntegral)|| + (num eq scala.math.Numeric.BigIntIsIntegral)|| + (num eq scala.math.Numeric.ShortIsIntegral)|| + (num eq scala.math.Numeric.ByteIsIntegral)|| + (num eq scala.math.Numeric.CharIsIntegral)|| + (num eq scala.math.Numeric.LongIsIntegral)|| + (num eq scala.math.Numeric.FloatAsIfIntegral)|| + (num eq scala.math.Numeric.BigDecimalIsFractional)|| + (num eq scala.math.Numeric.DoubleAsIfIntegral)) { + val numAsIntegral = num.asInstanceOf[Integral[B]] + import numAsIntegral._ + if (isEmpty) num fromInt 0 + else if (numRangeElements == 1) head + else ((num fromInt numRangeElements) * (head + last) / (num fromInt 2)) + } else { + // user provided custom Numeric, we cannot rely on arithmetic series formula + if (isEmpty) num.zero + else { + var acc = num.zero + var i = head + var idx = 0 + while(idx < length) { + acc = num.plus(acc, i) + i = i + step + idx = idx + 1 + } + acc + } + } } override lazy val hashCode = super.hashCode() diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index 589661a343..3a64820be6 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -188,7 +188,10 @@ extends scala.collection.AbstractSeq[T] val s = start + _start val e = if (_end == UndeterminedEnd) _end else start + _end var f = first1 - while (f.end <= s && !f.isLast) f = f.next + while (f.end <= s && !f.isLast) { + if (f.next eq null) f.addMore(more) + f = f.next + } new PagedSeq(more, f, s, e) } diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 00f398a4b0..786b18cd21 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala 
@@ -259,9 +259,24 @@ extends scala.collection.AbstractSeq[Int] final def contains(x: Int) = isWithinBoundaries(x) && ((x - start) % step == 0) final override def sum[B >: Int](implicit num: Numeric[B]): Int = { - if (isEmpty) 0 - else if (numRangeElements == 1) head - else (numRangeElements.toLong * (head + last) / 2).toInt + if (num eq scala.math.Numeric.IntIsIntegral) { + // this is normal integer range with usual addition. arithmetic series formula can be used + if (isEmpty) 0 + else if (numRangeElements == 1) head + else (numRangeElements.toLong * (head + last) / 2).toInt + } else { + // user provided custom Numeric, we cannot rely on arithmetic series formula + if (isEmpty) num.toInt(num.zero) + else { + var acc = num.zero + var i = head + while(i != terminalElement) { + acc = num.plus(acc, i) + i = i + step + } + num.toInt(acc) + } + } } override def toIterable = this diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 5a0d24ddd2..43d46cf4d0 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -164,8 +164,8 @@ self => * @return the resulting string */ def replaceAllLiterally(literal: String, replacement: String): String = { - val arg1 = java.util.regex.Pattern.quote(literal) - val arg2 = java.util.regex.Matcher.quoteReplacement(replacement) + val arg1 = Regex.quote(literal) + val arg2 = Regex.quoteReplacement(replacement) toString.replaceAll(arg1, arg2) } diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index e1f18a7036..e342e134b4 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -53,14 +53,14 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza super.toArray[U] } - def :+[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = 
{ + def :+[B >: T: ClassTag](elem: B): Array[B] = { val result = Array.ofDim[B](repr.length + 1) Array.copy(repr, 0, result, 0, repr.length) result(repr.length) = elem result } - def +:[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = { + def +:[B >: T: ClassTag](elem: B): Array[B] = { val result = Array.ofDim[B](repr.length + 1) result(0) = elem Array.copy(repr, 0, result, 1, repr.length) @@ -107,6 +107,54 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza bb.result() } } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam T1 the type of the first half of the element pairs + * @tparam T2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. + */ + def unzip[T1: ClassTag, T2: ClassTag](implicit asPair: T => (T1, T2)): (Array[T1], Array[T2]) = { + val a1 = new Array[T1](length) + val a2 = new Array[T2](length) + var i = 0 + while (i < length) { + val e = apply(i) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. + * + * @tparam T1 the type of the first of three elements in the triple + * @tparam T2 the type of the second of three elements in the triple + * @tparam T3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. + * @return a triple of Arrays, containing, respectively, the first, second, and third + * elements from each element triple of this Array. 
+ */ + def unzip3[T1: ClassTag, T2: ClassTag, T3: ClassTag](implicit asTriple: T => (T1, T2, T3)): (Array[T1], Array[T2], Array[T3]) = { + val a1 = new Array[T1](length) + val a2 = new Array[T2](length) + val a3 = new Array[T3](length) + var i = 0 + while (i < length) { + val e = apply(i) + a1(i) = e._1 + a2(i) = e._2 + a3(i) = e._3 + i += 1 + } + (a1, a2, a3) + } + def seq = thisCollection diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala index 7129f22f60..986cd0390f 100644 --- a/src/library/scala/runtime/AbstractPartialFunction.scala +++ b/src/library/scala/runtime/AbstractPartialFunction.scala @@ -35,15 +35,3 @@ abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala // let's not make it final so as not to confuse anyone /*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty) } - -// Manual stand-ins for formerly specialized variations. -// Not comprehensive, only sufficent to run scala-check built scala 2.11.0-M5 -// TODO Scala 2.10.0.M6 Remove this once scalacheck is published against M6. 
-private[runtime] abstract class AbstractPartialFunction$mcIL$sp extends scala.runtime.AbstractPartialFunction[Any, Int] { - override def apply(x: Any): Int = apply$mcIL$sp(x) - def apply$mcIL$sp(x: Any): Int = applyOrElse(x, PartialFunction.empty) -} -private[runtime] abstract class AbstractPartialFunction$mcFL$sp extends scala.runtime.AbstractPartialFunction[Any, Float] { - override def apply(x: Any): Float = apply$mcIL$sp(x) - def apply$mcIL$sp(x: Any): Float = applyOrElse(x, PartialFunction.empty) -} diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index 13f2362d00..d597feb898 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -173,7 +173,7 @@ private[scala] trait PropertiesTrait { * isJavaAtLeast("1.6") // true * isJavaAtLeast("1.7") // true * isJavaAtLeast("1.8") // false - * }} + * }}} */ def isJavaAtLeast(version: String): Boolean = { def parts(x: String) = { diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 22dbb37789..86132bb876 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -704,6 +704,14 @@ object Regex { def replace(rs: String) = matcher.appendReplacement(sb, rs) } + /** Quotes strings to be used literally in regex patterns. + * + * All regex metacharacters in the input match themselves literally in the output. + * + * @example {{{List("US$", "CAN$").map(Regex.quote).mkString("|").r}}} + */ + def quote(text: String): String = Pattern quote text + /** Quotes replacement strings to be used in replacement methods. 
* * Replacement methods give special meaning to backslashes (`\`) and diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala index f48b99bd5a..6b3be8b77f 100644 --- a/src/manual/scala/man1/scala.scala +++ b/src/manual/scala/man1/scala.scala @@ -39,16 +39,16 @@ object scala extends Command { CmdOptionBound("howtorun:", Argument("how")), "How to execute " & Argument("torun") & ", if it is present. " & "Options for " & Argument("how") & " are " & Mono("guess") & - " (the default), " & Mono("script") & ", and " & Mono("object") & + " (the default), " & Mono("script") & ", " & Mono("jar") & ", and " & Mono("object") & "."), Definition( - CmdOption("i"), + CmdOption("i", Argument("file")), "Requests that a file be pre-loaded. It is only " & "meaningful for interactive shells."), Definition( - CmdOption("e"), + CmdOption("e", Argument("string")), "Requests that its argument be executed as Scala code."), Definition( diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index 1c0c7c4a96..52e918595c 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -109,6 +109,9 @@ object scalac extends Command { CmdOption("extdirs", Argument("dirs")), "Override location of installed extensions."), Definition( + CmdOption("feature"), + "Emit warning and location for usages of features that should be imported explicitly."), + Definition( CmdOptionBound("g:", "{none,source,line,vars,notailcalls}"), SeqPara( Mono("\"none\"") & " generates no debugging info,", @@ -128,6 +131,9 @@ object scalac extends Command { CmdOption("javaextdirs", Argument("path")), "Override Java extdirs classpath."), Definition( + CmdOptionBound("language:", Argument("feature")), + "Enable one or more language features."), + Definition( CmdOption("no-specialization"), "Ignore " & MItalic("@specialize") & " annotations."), Definition( @@ -146,6 +152,12 @@ object scalac extends Command { CmdOption("sourcepath", 
Argument("path")), "Specify location(s) of source files."), Definition( + CmdOptionBound("target:", "{jvm-1.5,jvm-1.6,jvm-1.7}"), + SeqPara( + Mono("\"jvm-1.5\"") & " target JVM 1.5 (deprecated),", + Mono("\"jvm-1.6\"") & " target JVM 1.6 (default),", + Mono("\"jvm-1.7\"") & " target JVM 1.7,")), + Definition( CmdOption("toolcp", Argument("path")), "Add to the runner classpath."), Definition( @@ -159,6 +171,12 @@ object scalac extends Command { CmdOption("uniqid"), "Uniquely tag all identifiers in debugging output."), Definition( + CmdOption("usejavacp"), + "Utilize the java.class.path in classpath resolution."), + Definition( + CmdOption("usemanifestcp"), + "Utilize the manifest in classpath resolution."), + Definition( CmdOption("verbose"), "Output messages about what the compiler is doing"), Definition( @@ -175,11 +193,11 @@ object scalac extends Command { Section("Advanced Options", DefinitionList( Definition( - CmdOption("Xcheck-null"), - "Warn upon selection of nullable reference"), - Definition( CmdOption("Xcheckinit"), "Wrap field accessors to throw an exception on uninitialized access."), + Definition( + CmdOption("Xdev"), + "Enable warnings for developers working on the Scala compiler"), Definition( CmdOption("Xdisable-assertions"), "Generate no assertions and assumptions"), @@ -193,6 +211,9 @@ object scalac extends Command { Definition( CmdOption("Xfatal-warnings"), "Fail the compilation if there are any warnings."), + Definition( + CmdOption("Xfull-lubs"), + "Retain pre 2.10 behavior of less aggressive truncation of least upper bounds."), Definition( CmdOption("Xfuture"), "Turn on future language features."), @@ -202,18 +223,39 @@ object scalac extends Command { Definition( CmdOption("Xlint"), "Enable recommended additional warnings."), + Definition( + CmdOption("Xlog-free-terms"), + "Print a message when reification creates a free term."), + Definition( + CmdOption("Xlog-free-types"), + "Print a message when reification resorts to generating a free 
type."), + Definition( + CmdOption("Xlog-implicit-conversions"), + "Print a message whenever an implicit conversion is inserted."), Definition( CmdOption("Xlog-implicits"), "Show more detail on why some implicits are not applicable."), + Definition( + CmdOption("Xlog-reflective-calls"), + "Print a message when a reflective method call is generated."), + Definition( + CmdOptionBound("Xmacro-settings:", Argument("option")), + "Custom settings for macros."), + Definition( + CmdOption("Xmain-class", Argument("path")), + "Class for manifest's Main-Class entry (only useful with -d <jar>)."), Definition( CmdOption("Xmax-classfile-name", Argument("n")), "Maximum filename length for generated classes."), Definition( - CmdOption("Xmigration"), - "Warn about constructs whose behavior may have changed between 2.7 and 2.8."), + CmdOptionBound("Xmigration:", Argument("version")), + "Warn about constructs whose behavior may have changed since" & Argument("version") & "."), Definition( CmdOption("Xno-forwarders"), "Do not generate static forwarders in mirror classes."), + Definition( + CmdOption("Xno-patmat-analysis"), + "Don't perform exhaustivity/unreachability analysis. 
Also, ignore " & MItalic("@switch") & " annotation."), Definition( CmdOption("Xno-uescape"), "Disable handling of " & BSlash & "u unicode escapes"), @@ -221,26 +263,26 @@ object scalac extends Command { CmdOption("Xnojline"), "Do not use JLine for editing."), Definition( - CmdOptionBound("Xplugin:", Argument("file")), - "Load a plugin from a file"), + CmdOptionBound("Xplugin:", Argument("paths")), + "Load a plugin from each classpath."), Definition( CmdOptionBound("Xplugin-disable:", Argument("plugin")), - "Disable a plugin"), + "Disable plugins by name."), Definition( CmdOption("Xplugin-list"), - "Print a synopsis of loaded plugins"), + "Print a synopsis of loaded plugins."), Definition( CmdOptionBound("Xplugin-require:", Argument("plugin")), - "Abort unless the given plugin(s) are available"), + "Abort if a named plugin is not loaded."), Definition( CmdOption("Xpluginsdir", Argument("path")), - "Path to search compiler plugins."), + "Path to search for plugin archives."), Definition( CmdOptionBound("Xprint:", Argument("phases")), "Print out program after " & Argument("phases") & " (see below)."), Definition( - CmdOption("Xprint-icode"), - "Log internal icode to *.icode files."), + CmdOptionBound("Xprint-icode", "[:" & Argument("phases") & "]"), + "Log internal icode to *.icode files after" & Argument("phases") & " (default: icode)."), Definition( CmdOption("Xprint-pos"), "Print tree positions, as offsets."), @@ -269,9 +311,12 @@ object scalac extends Command { Definition( CmdOption("Xsource-reader", Argument("classname")), "Specify a custom method for reading source files."), + Definition( + CmdOption("Xstrict-inference"), + "Don't infer known-unsound types."), Definition( CmdOption("Xverify"), - "Verify generic signatures in generated bytecode."), + "Verify generic signatures in generated bytecode (asm backend only)."), Definition( CmdOption("Y"), "Print a synopsis of private options.") @@ -281,65 +326,101 @@ object scalac extends Command { Section("Compilation 
Phases", DefinitionList( Definition( - MItalic("initial"), - "initializing compiler"), - Definition( - MItalic("parse"), - "parse source files"), + MItalic("parser"), + "parse source into ASTs, perform simple desugaring"), Definition( MItalic("namer"), - "create symbols"), + "resolve names, attach symbols to named trees"), + Definition( + MItalic("packageobjects"), + "load package objects"), + Definition( + MItalic("typer"), + "the meat and potatoes: type the trees"), + Definition( + MItalic("patmat"), + "translate match expressions"), + Definition( + MItalic("superaccessors"), + "add super accessors in traits and nested classes"), + Definition( + MItalic("extmethods"), + "add extension methods for inline classes"), + Definition( + MItalic("pickler"), + "serialize symbol tables"), + Definition( + MItalic("refchecks"), + "reference/override checking, translate nested objects"), + Definition( + MItalic("selectiveanf"), + "ANF pre-transform for " & MItalic("@cps") & " (CPS plugin)"), + Definition( + MItalic("selectivecps"), + MItalic("@cps") & "-driven transform of selectiveanf assignements (CPS plugin)"), + Definition( + MItalic("uncurry"), + "uncurry, translate function values to anonymous classes"), Definition( - MItalic("analyze"), - "name and type analysis"), + MItalic("tailcalls"), + "replace tail calls by jumps"), Definition( - MItalic("refcheck"), - "reference checking"), + MItalic("specialize"), + MItalic("@specialized") & "-driven class and method specialization"), Definition( - MItalic("uncurry"), - "uncurry function types and applications"), + MItalic("explicitouter"), + "this refs to outer pointers, translate patterns"), + Definition( + MItalic("erasure"), + "erase types, add interfaces for traits"), + Definition( + MItalic("posterasure"), + "clean up erased inline classes"), + Definition( + MItalic("lazyvals"), + "allocate bitmaps, translate lazy vals into lazified defs"), Definition( MItalic("lambdalift"), - "lambda lifter"), + "move nested functions to 
top level"), Definition( - MItalic("typesasvalues"), - "represent types as values"), + MItalic("constructors"), + "move field definitions into constructors"), Definition( - MItalic("addaccessors"), - "add accessors for constructor arguments"), + MItalic("flatten"), + "eliminate inner classes"), Definition( - MItalic("explicitouterclasses"), - "make links from inner classes to enclosing one explicit"), + MItalic("mixin"), + "mixin composition"), Definition( - MItalic("addconstructors"), - "add explicit constructor for each class"), + MItalic("cleanup"), + "platform-specific cleanups, generate reflective calls"), Definition( - MItalic("tailcall"), - "add tail-calls"), + MItalic("delambdafy"), + "remove lambdas"), Definition( - MItalic("wholeprog"), - "perform whole program analysis"), + MItalic("icode"), + "generate portable intermediate code"), Definition( - MItalic("addinterfaces"), - "add one interface per class"), + MItalic("inliner"), + "optimization: do inlining"), Definition( - MItalic("expandmixins"), - "expand mixins by code copying"), + MItalic("inlineHandlers"), + "optimization: inline exception handlers"), Definition( - MItalic("boxing"), - "makes boxing explicit"), + MItalic("closelim"), + "optimization: eliminate uncalled closures"), Definition( - MItalic("erasure"), - "type eraser"), + MItalic("constopt"), + "optimization: optimize null and other constants"), Definition( - MItalic("icode"), - "generate icode"), + MItalic("dce"), + "optimization: eliminate dead code"), Definition( - MItalic("codegen"), - "enable code generation"), + MItalic("jvm"), + "generate JVM bytecode"), Definition( MItalic("terminal"), - "compilation terminated"), + "the last phase in the compiler chain"), Definition( MItalic("all"), "matches all phases")))) diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala index 8214396291..60e9dbb0f9 100644 --- a/src/partest-extras/scala/tools/partest/Util.scala +++ 
b/src/partest-extras/scala/tools/partest/Util.scala @@ -16,8 +16,8 @@ object Util { */ def trace[A](a: A) = macro traceImpl[A] - import scala.reflect.macros.BlackboxContext - def traceImpl[A: c.WeakTypeTag](c: BlackboxContext)(a: c.Expr[A]): c.Expr[A] = { + import scala.reflect.macros.blackbox.Context + def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = { import c.universe._ import definitions._ diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala index 50c8aa8779..5b6ff2325c 100644 --- a/src/reflect/scala/reflect/api/Exprs.scala +++ b/src/reflect/scala/reflect/api/Exprs.scala @@ -106,7 +106,7 @@ trait Exprs { self: Universe => * * The corresponding macro implementation should have the following signature (note how the return type denotes path-dependency on x): * {{{ - * object Impls { def foo_impl(c: BlackboxContext)(x: c.Expr[X]): c.Expr[x.value.T] = ... } + * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... } * }}} */ @compileTimeOnly("cannot use value except for signatures of macro implementations") diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index 3d5a213f2f..54b65166d8 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -169,6 +169,14 @@ trait FlagSets { self: Universe => /** Flag indicating that tree was generated by the compiler */ val SYNTHETIC: FlagSet + + /** Flag indicating that tree represents an enum. 
+ * + * It can only appear at + * - the enum's class + * - enum constants + **/ + val ENUM: FlagSet } /** The empty set of flags diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala index e239b86452..5667d93e29 100644 --- a/src/reflect/scala/reflect/api/Importers.scala +++ b/src/reflect/scala/reflect/api/Importers.scala @@ -34,7 +34,7 @@ package api * {{{ * def staticEval[T](x: T) = macro staticEval[T] * - * def staticEval[T](c: scala.reflect.macros.BlackboxContext)(x: c.Expr[T]) = { + * def staticEval[T](c: scala.reflect.macros.blackbox.Context)(x: c.Expr[T]) = { * // creates a runtime reflection universe to host runtime compilation * import scala.reflect.runtime.{universe => ru} * val mirror = ru.runtimeMirror(c.libraryClassLoader) diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala index a4cd531053..0f0e2b81c9 100644 --- a/src/reflect/scala/reflect/api/Mirrors.scala +++ b/src/reflect/scala/reflect/api/Mirrors.scala @@ -29,19 +29,19 @@ package api * Compile-time `Mirror`s make use of only classloader `Mirror`s to load `Symbol`s * by name. * - * The entry point to classloader `Mirror`s is via [[scala.reflect.macros.BlackboxContext#mirror]] or [[scala.reflect.macros.WhiteboxContext#mirror]]. + * The entry point to classloader `Mirror`s is via [[scala.reflect.macros.blackbox.Context#mirror]] or [[scala.reflect.macros.whitebox.Context#mirror]]. * Typical methods which use classloader `Mirror`s include [[scala.reflect.api.Mirror#staticClass]], * [[scala.reflect.api.Mirror#staticModule]], and [[scala.reflect.api.Mirror#staticPackage]]. 
For * example: * {{{ - * import scala.reflect.macros.BlackboxContext + * import scala.reflect.macros.blackbox.Context * * case class Location(filename: String, line: Int, column: Int) * * object Macros { * def currentLocation: Location = macro impl * - * def impl(c: BlackboxContext): c.Expr[Location] = { + * def impl(c: Context): c.Expr[Location] = { * import c.universe._ * val pos = c.macroApplication.pos * val clsLocation = c.mirror.staticModule("Location") // get symbol of "Location" object diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 1e0854d171..5bc92d3893 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -201,6 +201,25 @@ trait Printers { self: Universe => */ protected def newTreePrinter(out: PrintWriter): TreePrinter + /** + * Renders the code of the passed tree, so that: + * 1) it can be later compiled by scalac retaining the same meaning, + * 2) it looks pretty. + * At the moment we have handled #1 for unattributed trees and + * later on plan to account for typical idiosyncrasies of the typechecker. + * #2 is more or less okay indentation-wise, but at the moment there's a lot of desugaring + * left in place, and that's what we also plan to improve in the future. + * + * @group Printers + */ + def showCode(tree: Tree) = render(tree, newCodePrinter) + + /** + * Hook to define what `showCode(...)` means. + * @group Printers + */ + protected def newCodePrinter(out: PrintWriter): TreePrinter + /** Renders internal structure of a reflection artifact as the * visualization of a Scala syntax tree. 
* diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 241747e6d8..83da5141b9 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -2058,8 +2058,8 @@ trait Trees { self: Universe => * @group Extractors */ abstract class ExistentialTypeTreeExtractor { - def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree - def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])] + def apply(tpt: Tree, whereClauses: List[MemberDef]): ExistentialTypeTree + def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[MemberDef])] } /** The API that all existential type trees support @@ -2069,8 +2069,12 @@ trait Trees { self: Universe => /** The underlying type of the existential type. */ def tpt: Tree - /** The clauses of the definition of the existential type. */ - def whereClauses: List[Tree] + /** The clauses of the definition of the existential type. + * Elements are one of the following: + * 1) TypeDef with TypeBoundsTree right-hand side + * 2) ValDef with empty right-hand side + */ + def whereClauses: List[MemberDef] } /** A synthetic tree holding an arbitrary type. Not to be confused with @@ -2533,7 +2537,7 @@ trait Trees { self: Universe => /** Creates a `ExistentialTypeTree` node from the given components, having a given `tree` as a prototype. * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result. 
*/ - def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree + def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]): ExistentialTypeTree } // ---------------------- traversing and transforming ------------------------------ @@ -2654,6 +2658,8 @@ trait Trees { self: Universe => def transformValDefss(treess: List[List[ValDef]]): List[List[ValDef]] = treess mapConserve (transformValDefs(_)) /** Transforms a list of `CaseDef` nodes. */ + def transformMemberDefs(trees: List[MemberDef]): List[MemberDef] = + trees mapConserve (tree => transform(tree).asInstanceOf[MemberDef]) def transformCaseDefs(trees: List[CaseDef]): List[CaseDef] = trees mapConserve (tree => transform(tree).asInstanceOf[CaseDef]) /** Transforms a list of `Ident` nodes. */ diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala index 1da2c24306..1c9b77581a 100644 --- a/src/reflect/scala/reflect/api/Universe.scala +++ b/src/reflect/scala/reflect/api/Universe.scala @@ -41,11 +41,11 @@ package api * res1: reflect.runtime.universe.Type = scala.Either[String,Int] * }}} * - * To obtain a `Universe` for use within a Scala macro, use [[scala.reflect.macros.BlackboxContext#universe]]. - * or [[scala.reflect.macros.WhiteboxContext#universe]]. For example: + * To obtain a `Universe` for use within a Scala macro, use [[scala.reflect.macros.blackbox.Context#universe]]. + * or [[scala.reflect.macros.whitebox.Context#universe]]. For example: * {{{ * def printf(format: String, params: Any*): Unit = macro impl - * def impl(c: BlackboxContext)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = { + * def impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = { * import c.universe._ * ... 
* } diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 19c67879f5..0ca8611719 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -166,9 +166,10 @@ trait BaseTypeSeqs { val index = new Array[Int](nparents) var i = 0 for (p <- parents) { + val parentBts = p.dealias.baseTypeSeq // dealias need for SI-8046. pbtss(i) = - if (p.baseTypeSeq eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq - else p.baseTypeSeq + if (parentBts eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq + else parentBts index(i) = 0 i += 1 } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 1fe6f249b8..0091f50fc6 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -363,6 +363,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable] lazy val JavaNumberClass = requiredClass[java.lang.Number] + lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]] lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote] lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException] @@ -482,12 +483,8 @@ trait Definitions extends api.StandardDefinitions { lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful - lazy val BlackboxMacroClass = getClassIfDefined("scala.reflect.macros.BlackboxMacro") // defined in scala-reflect.jar, so we need to be careful - def BlackboxMacroContextValue = BlackboxMacroClass.map(sym => getMemberValue(sym, 
nme.c)) - lazy val WhiteboxMacroClass = getClassIfDefined("scala.reflect.macros.WhiteboxMacro") // defined in scala-reflect.jar, so we need to be careful - def WhiteboxMacroContextValue = WhiteboxMacroClass.map(sym => getMemberValue(sym, nme.c)) - lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.BlackboxContext") // defined in scala-reflect.jar, so we need to be careful - lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.WhiteboxContext") // defined in scala-reflect.jar, so we need to be careful + lazy val BlackboxContextClass = getClassIfDefined("scala.reflect.macros.blackbox.Context") // defined in scala-reflect.jar, so we need to be careful + lazy val WhiteboxContextClass = getClassIfDefined("scala.reflect.macros.whitebox.Context") // defined in scala-reflect.jar, so we need to be careful def MacroContextPrefix = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.prefix)) def MacroContextPrefixType = BlackboxContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType)) def MacroContextUniverse = BlackboxContextClass.map(sym => getMemberMethod(sym, nme.universe)) @@ -603,32 +600,31 @@ trait Definitions extends api.StandardDefinitions { def isWhiteboxContextType(tp: Type) = isMacroContextType(tp) && (tp <:< WhiteboxContextClass.tpe) - def mightBeMacroBundleType(tp: Type) = - tp.baseClasses.contains(WhiteboxMacroClass) || - tp.baseClasses.contains(BlackboxMacroClass) - - def isMacroBundleType(tp: Type) = tp.baseClasses match { - case _ :: proto :: _ if isMacroBundleProtoType(proto.tpe) => true - case _ => false + private def macroBundleParamInfo(tp: Type) = { + val ctor = tp.erasure.typeSymbol.primaryConstructor + ctor.paramss match { + case List(List(c)) => + val sym = c.info.typeSymbol + val isContextCompatible = sym.isNonBottomSubClass(BlackboxContextClass) || sym.isNonBottomSubClass(WhiteboxContextClass) + if (isContextCompatible) c.info else NoType + case _ => + NoType + } } - def isBlackboxMacroBundleType(tp: 
Type) = - isMacroBundleType(tp) && (tp <:< BlackboxMacroClass.tpe) && !(tp <:< WhiteboxMacroClass.tpe) + def looksLikeMacroBundleType(tp: Type) = + macroBundleParamInfo(tp) != NoType - def isMacroBundleProtoType(tp: Type) = { - val sym = tp.typeSymbol - val isNonTrivial = tp != ErrorType && tp != NothingTpe && tp != NullTpe - def subclasses(sym: Symbol) = sym != NoSymbol && tp.baseClasses.contains(sym) - val isMacroCompatible = subclasses(BlackboxMacroClass) ^ subclasses(WhiteboxMacroClass) - val isBundlePrototype = sym != BlackboxMacroClass && sym != WhiteboxMacroClass && sym.isTrait && { - val c = sym.info.member(nme.c) - def overrides(sym: Symbol) = c.overrideChain.contains(sym) - val cIsOk = (overrides(BlackboxMacroContextValue) || overrides(WhiteboxMacroContextValue)) && c.isDeferred - cIsOk && sym.isMonomorphicType - } - isNonTrivial && isMacroCompatible && isBundlePrototype + def isMacroBundleType(tp: Type) = { + val isContextCompatible = macroBundleParamInfo(tp) != NoType + val hasSingleConstructor = !tp.declaration(nme.CONSTRUCTOR).isOverloaded + val nonAbstract = !tp.erasure.typeSymbol.isAbstractClass + isContextCompatible && hasSingleConstructor && nonAbstract } + def isBlackboxMacroBundleType(tp: Type) = + isMacroBundleType(tp) && (macroBundleParamInfo(tp) <:< BlackboxContextClass.tpe) + def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass) // These "direct" calls perform no dealiasing. 
They are most needed when @@ -812,46 +808,32 @@ trait Definitions extends api.StandardDefinitions { def byNameType(arg: Type) = appliedType(ByNameParamClass, arg) def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp) def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg) + def optionType(tp: Type) = appliedType(OptionClass, tp) def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) def seqType(arg: Type) = appliedType(SeqClass, arg) // FYI the long clunky name is because it's really hard to put "get" into the // name of a method without it sounding like the method "get"s something, whereas // this method is about a type member which just happens to be named get. - def typeOfMemberNamedGet(tp: Type) = resultOfMatchingMethod(tp, nme.get)() - def typeOfMemberNamedHead(tp: Type) = resultOfMatchingMethod(tp, nme.head)() - def typeOfMemberNamedApply(tp: Type) = resultOfMatchingMethod(tp, nme.apply)(IntTpe) - def typeOfMemberNamedDrop(tp: Type) = resultOfMatchingMethod(tp, nme.drop)(IntTpe) - def typeOfMemberNamedGetOrSelf(tp: Type) = typeOfMemberNamedGet(tp) orElse tp - def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp)) - def typesOfCaseAccessors(tp: Type) = getterMemberTypes(tp, tp.typeSymbol.caseFieldAccessors) - - /** If this is a case class, the case field accessors (which may be an empty list.) - * Otherwise, if there are any product selectors, that list. - * Otherwise, a list containing only the type itself. - */ - def typesOfSelectorsOrSelf(tp: Type): List[Type] = ( - if (tp.typeSymbol.isCase) - typesOfCaseAccessors(tp) - else typesOfSelectors(tp) match { - case Nil => tp :: Nil - case tps => tps - } - ) - - /** If the given type has one or more product selectors, the type of the last one. - * Otherwise, the type itself. 
- */ - def typeOfLastSelectorOrSelf(tp: Type) = typesOfSelectorsOrSelf(tp).last - - def elementTypeOfLastSelectorOrSelf(tp: Type) = { - val last = typeOfLastSelectorOrSelf(tp) - ( typeOfMemberNamedHead(last) - orElse typeOfMemberNamedApply(last) - orElse elementType(ArrayClass, last) - ) + def typeOfMemberNamedGet(tp: Type) = typeArgOfBaseTypeOr(tp, OptionClass)(resultOfMatchingMethod(tp, nme.get)()) + def typeOfMemberNamedHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)()) + def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe)) + def typeOfMemberNamedDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe)) + def typesOfSelectors(tp: Type) = getterMemberTypes(tp, productSelectors(tp)) + // SI-8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible) + // extractor to limit exposure to regressions like the reported problem with existentials. + // TODO fix the existential problem in the general case, see test/pending/pos/t8128.scala + private def typeArgOfBaseTypeOr(tp: Type, baseClass: Symbol)(or: => Type): Type = (tp baseType baseClass).typeArgs match { + case x :: Nil => x + case _ => or } + // Can't only check for _1 thanks to pos/t796. + def hasSelectors(tp: Type) = ( + (tp.members containsName nme._1) + && (tp.members containsName nme._2) + ) + /** Returns the method symbols for members _1, _2, ..., _N * which exist in the given type. */ @@ -861,7 +843,9 @@ trait Definitions extends api.StandardDefinitions { case m if m.paramss.nonEmpty => Nil case m => m :: loop(n + 1) } - loop(1) + // Since ErrorType always returns a symbol from a call to member, we + // had better not start looking for _1, _2, etc. expecting it to run out. 
+ if (tpe.isErroneous) Nil else loop(1) } /** If `tp` has a term member `name`, the first parameter list of which @@ -1207,15 +1191,21 @@ trait Definitions extends api.StandardDefinitions { } def getMemberMethod(owner: Symbol, name: Name): TermSymbol = { getMember(owner, name.toTermName) match { - // todo. member symbol becomes a term symbol in cleanup. is this a bug? - // case x: MethodSymbol => x case x: TermSymbol => x case _ => fatalMissingSymbol(owner, name, "method") } } + private lazy val erasurePhase = findPhaseWithName("erasure") def getMemberIfDefined(owner: Symbol, name: Name): Symbol = - owner.info.nonPrivateMember(name) + // findMember considered harmful after erasure; e.g. + // + // scala> exitingErasure(Symbol_apply).isOverloaded + // res27: Boolean = true + // + enteringPhaseNotLaterThan(erasurePhase )( + owner.info.nonPrivateMember(name) + ) /** Using getDecl rather than getMember may avoid issues with * OverloadedTypes turning up when you don't want them, if you diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala index 84825ff2da..799f85054a 100644 --- a/src/reflect/scala/reflect/internal/FlagSets.scala +++ b/src/reflect/scala/reflect/internal/FlagSets.scala @@ -43,5 +43,6 @@ trait FlagSets extends api.FlagSets { self: SymbolTable => val PRESUPER : FlagSet = Flags.PRESUPER val DEFAULTINIT : FlagSet = Flags.DEFAULTINIT val SYNTHETIC : FlagSet = Flags.SYNTHETIC + val ENUM : FlagSet = Flags.ENUM } } diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index dcdf6728ce..11c1d66190 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -63,7 +63,7 @@ import scala.collection.{ mutable, immutable } // 45: SYNCHRONIZED/M // 46: ARTIFACT // 47: DEFAULTMETHOD/M -// 48: +// 48: ENUM // 49: // 50: // 51: lateDEFERRED @@ -119,6 +119,7 @@ class ModifierFlags { final val DEFAULTINIT = 
1L << 41 // symbol is initialized to the default value: used by -Xcheckinit final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method + final val ENUM = 1L << 48 // symbol is an enum /** Symbols which are marked ARTIFACT. (Expand this list?) * @@ -142,7 +143,7 @@ class ModifierFlags { } object ModifierFlags extends ModifierFlags -/** All flags and associated operatins */ +/** All flags and associated operations */ class Flags extends ModifierFlags { final val METHOD = 1 << 6 // a method final val MODULE = 1 << 8 // symbol is module or class implementing a module @@ -446,7 +447,7 @@ class Flags extends ModifierFlags { case SYNCHRONIZED => "<synchronized>" // (1L << 45) case ARTIFACT => "<artifact>" // (1L << 46) case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47) - case 0x1000000000000L => "" // (1L << 48) + case ENUM => "<enum>" // (1L << 48) case 0x2000000000000L => "" // (1L << 49) case 0x4000000000000L => "" // (1L << 50) case `lateDEFERRED` => "<latedeferred>" // (1L << 51) diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala index ecbf839bab..1131c94da0 100644 --- a/src/reflect/scala/reflect/internal/HasFlags.scala +++ b/src/reflect/scala/reflect/internal/HasFlags.scala @@ -82,6 +82,7 @@ trait HasFlags { def hasAbstractFlag = hasFlag(ABSTRACT) def hasAccessorFlag = hasFlag(ACCESSOR) def hasDefault = hasFlag(DEFAULTPARAM) && hasFlag(METHOD | PARAM) // Second condition disambiguates with TRAIT + def hasEnumFlag = hasFlag(ENUM) def hasLocalFlag = hasFlag(LOCAL) def hasModuleFlag = hasFlag(MODULE) def hasPackageFlag = hasFlag(PACKAGE) diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index cc6e55192f..91ba552012 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ 
b/src/reflect/scala/reflect/internal/Importers.scala @@ -409,7 +409,7 @@ trait Importers extends api.Importers { to: SymbolTable => case from.TypeBoundsTree(lo, hi) => new TypeBoundsTree(importTree(lo), importTree(hi)) case from.ExistentialTypeTree(tpt, whereClauses) => - new ExistentialTypeTree(importTree(tpt), whereClauses map importTree) + new ExistentialTypeTree(importTree(tpt), whereClauses map importMemberDef) case from.EmptyTree => EmptyTree case null => @@ -475,6 +475,7 @@ trait Importers extends api.Importers { to: SymbolTable => new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos) def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef] def importTypeDef(tree: from.TypeDef): TypeDef = importTree(tree).asInstanceOf[TypeDef] + def importMemberDef(tree: from.MemberDef): MemberDef = importTree(tree).asInstanceOf[MemberDef] def importTemplate(tree: from.Template): Template = importTree(tree).asInstanceOf[Template] def importRefTree(tree: from.RefTree): RefTree = importTree(tree).asInstanceOf[RefTree] def importIdent(tree: from.Ident): Ident = importTree(tree).asInstanceOf[Ident] diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 424e73dce8..8b72f98e4d 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -17,8 +17,6 @@ trait Printers extends api.Printers { self: SymbolTable => //nsc import treeInfo.{ IsTrue, IsFalse } - final val showOuterTests = false - /** Adds backticks if the name is a scala keyword. 
*/ def quotedName(name: Name, decode: Boolean): String = { val s = if (decode) name.decode else name.toString @@ -53,8 +51,8 @@ trait Printers extends api.Printers { self: SymbolTable => */ def backquotedPath(t: Tree): String = { t match { - case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), symName(t, name)) - case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), symName(t, name)) + case Select(qual, name) if name.isTermName => s"${backquotedPath(qual)}.${symName(t, name)}" + case Select(qual, name) if name.isTypeName => s"${backquotedPath(qual)}#${symName(t, name)}" case Ident(name) => symName(t, name) case _ => t.toString } @@ -76,7 +74,7 @@ trait Printers extends api.Printers { self: SymbolTable => def printPosition(tree: Tree) = if (printPositions) print(tree.pos.show) - def println() { + def println() = { out.println() while (indentMargin > indentString.length()) indentString += indentString @@ -84,116 +82,221 @@ trait Printers extends api.Printers { self: SymbolTable => out.write(indentString, 0, indentMargin) } - def printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit) { + def printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit): Unit = ls match { case List() => case List(x) => printelem(x) case x :: rest => printelem(x); printsep; printSeq(rest)(printelem)(printsep) } - } - def printColumn(ts: List[Tree], start: String, sep: String, end: String) { + def printColumn(ts: List[Tree], start: String, sep: String, end: String) = { print(start); indent(); println() printSeq(ts){print(_)}{print(sep); println()}; undent(); println(); print(end) } - def printRow(ts: List[Tree], start: String, sep: String, end: String) { + def printRow(ts: List[Tree], start: String, sep: String, end: String): Unit = { print(start); printSeq(ts){print(_)}{print(sep)}; print(end) } - def printRow(ts: List[Tree], sep: String) { printRow(ts, "", sep, "") } + def printRow(ts: List[Tree], sep: String): 
Unit = printRow(ts, "", sep, "") - def printTypeParams(ts: List[TypeDef]) { - if (!ts.isEmpty) { + def printTypeParams(ts: List[TypeDef]): Unit = + if (ts.nonEmpty) { print("["); printSeq(ts){ t => printAnnotations(t) + if (t.mods.hasFlag(CONTRAVARIANT)) { + print("-") + } else if (t.mods.hasFlag(COVARIANT)) { + print("+") + } printParam(t) }{print(", ")}; print("]") } - } - def printLabelParams(ps: List[Ident]) { + def printLabelParams(ps: List[Ident]) = { print("(") printSeq(ps){printLabelParam}{print(", ")} print(")") } - def printLabelParam(p: Ident) { + def printLabelParam(p: Ident) = { print(symName(p, p.name)); printOpt(": ", TypeTree() setType p.tpe) } - def printValueParams(ts: List[ValDef]) { - print("(") - if (!ts.isEmpty) printFlags(ts.head.mods.flags & IMPLICIT, "") - printSeq(ts){printParam}{print(", ")} - print(")") + protected def parenthesize(condition: Boolean = true)(body: => Unit) = { + if (condition) print("(") + body + if (condition) print(")") } + + protected def printImplicitInParamsList(vds: List[ValDef]) = + if (vds.nonEmpty) printFlags(vds.head.mods.flags & IMPLICIT, "") + + def printValueParams(ts: List[ValDef], inParentheses: Boolean = true): Unit = + parenthesize(inParentheses){ + printImplicitInParamsList(ts) + printSeq(ts){printParam}{print(", ")} + } - def printParam(tree: Tree) { + def printParam(tree: Tree) = tree match { - case ValDef(mods, name, tp, rhs) => + case vd @ ValDef(mods, name, tp, rhs) => printPosition(tree) - printAnnotations(tree) + printAnnotations(vd) print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs) case TypeDef(mods, name, tparams, rhs) => printPosition(tree) print(symName(tree, name)) printTypeParams(tparams); print(rhs) } - } - def printBlock(tree: Tree) { + def printBlock(tree: Tree) = tree match { case Block(_, _) => print(tree) case _ => printColumn(List(tree), "{", ";", "}") } - } private def symFn[T](tree: Tree, f: Symbol => T, orElse: => T): T = tree.symbol match { - case null | 
NoSymbol => orElse - case sym => f(sym) + case null | NoSymbol => orElse + case sym => f(sym) } private def ifSym(tree: Tree, p: Symbol => Boolean) = symFn(tree, p, false) - def printOpt(prefix: String, tree: Tree) { - if (!tree.isEmpty) { print(prefix, tree) } - } + def printOpt(prefix: String, tree: Tree) = if (tree.nonEmpty) { print(prefix, tree) } def printModifiers(tree: Tree, mods: Modifiers): Unit = printFlags( - if (tree.symbol == NoSymbol) mods.flags else tree.symbol.flags, "" + ( - if (tree.symbol == NoSymbol) mods.privateWithin - else if (tree.symbol.hasAccessBoundary) tree.symbol.privateWithin.name - else "" + if (tree.symbol == NoSymbol) mods.flags else tree.symbol.flags, "" + ( + if (tree.symbol == NoSymbol) mods.privateWithin + else if (tree.symbol.hasAccessBoundary) tree.symbol.privateWithin.name + else "" ) ) - def printFlags(flags: Long, privateWithin: String) { + def printFlags(flags: Long, privateWithin: String) = { val mask: Long = if (settings.debug) -1L else PrintableFlags val s = flagsToString(flags & mask, privateWithin) if (s != "") print(s + " ") } - def printAnnotations(tree: Tree) { + def printAnnotations(tree: MemberDef) = { // SI-5885: by default this won't print annotations of not yet initialized symbols val annots = tree.symbol.annotations match { - case Nil => tree.asInstanceOf[MemberDef].mods.annotations + case Nil => tree.mods.annotations case anns => anns } - annots foreach (annot => print("@"+annot+" ")) + annots foreach (annot => print(s"@$annot ")) } private var currentOwner: Symbol = NoSymbol private var selectorType: Type = NoType + + protected def printPackageDef(tree: PackageDef, separator: String) = { + val PackageDef(packaged, stats) = tree + printAnnotations(tree) + print("package ", packaged); printColumn(stats, " {", separator, "}") + } + + protected def printValDef(tree: ValDef, resultName: => String)(printTypeSignature: => Unit)(printRhs: => Unit) = { + val ValDef(mods, name, tp, rhs) = tree + 
printAnnotations(tree) + printModifiers(tree, mods) + print(if (mods.isMutable) "var " else "val ", resultName) + printTypeSignature + printRhs + } + + protected def printDefDef(tree: DefDef, resultName: => String)(printTypeSignature: => Unit)(printRhs: => Unit) = { + val DefDef(mods, name, tparams, vparamss, tp, rhs) = tree + printAnnotations(tree) + printModifiers(tree, mods) + print("def " + resultName) + printTypeParams(tparams); + vparamss foreach {printValueParams(_)} + printTypeSignature + printRhs + } + + protected def printTypeDef(tree: TypeDef, resultName: => String) = { + val TypeDef(mods, name, tparams, rhs) = tree + if (mods hasFlag (PARAM | DEFERRED)) { + printAnnotations(tree) + printModifiers(tree, mods) + print("type ") + printParam(tree) + } else { + printAnnotations(tree) + printModifiers(tree, mods) + print("type " + resultName) + printTypeParams(tparams) + printOpt(" = ", rhs) + } + } + + protected def printImport(tree: Import, resSelect: => String) = { + val Import(expr, selectors) = tree + // Is this selector renaming a name (i.e, {name1 => name2}) + def isNotRename(s: ImportSelector): Boolean = + s.name == nme.WILDCARD || s.name == s.rename + + def selectorToString(s: ImportSelector): String = { + val from = quotedName(s.name) + if (isNotRename(s)) from + else from + "=>" + quotedName(s.rename) + } + print("import ", resSelect, ".") + selectors match { + case List(s) => + // If there is just one selector and it is not renaming a name, no braces are needed + if (isNotRename(s)) print(selectorToString(s)) + else print("{", selectorToString(s), "}") + // If there is more than one selector braces are always needed + case many => + print(many.map(selectorToString).mkString("{", ", ", "}")) + } + } + + protected def printCaseDef(tree: CaseDef) = { + val CaseDef(pat, guard, body) = tree + print("case ") + def patConstr(pat: Tree): Tree = pat match { + case Apply(fn, args) => patConstr(fn) + case _ => pat + } + + print(pat); printOpt(" if ", guard) 
+ print(" => ", body) + } + + protected def printFunction(tree: Function)(printValueParams: => Unit) = { + val Function(vparams, body) = tree + print("("); + printValueParams + print(" => ", body, ")") + if (printIds && tree.symbol != null) print("#" + tree.symbol.id) + } + + protected def printSuper(tree: Super, resultName: => String) = { + val Super(This(qual), mix) = tree + if (qual.nonEmpty || tree.symbol != NoSymbol) print(resultName + ".") + print("super") + if (mix.nonEmpty) print(s"[$mix]") + } + + protected def printThis(tree: This, resultName: => String) = { + val This(qual) = tree + if (qual.nonEmpty) print(resultName + ".") + print("this") + } - def printTree(tree: Tree) { + def printTree(tree: Tree) = { tree match { case EmptyTree => print("<empty>") - case ClassDef(mods, name, tparams, impl) => - printAnnotations(tree) + case cd @ ClassDef(mods, name, tparams, impl) => + printAnnotations(cd) printModifiers(tree, mods) val word = if (mods.isTrait) "trait" @@ -204,81 +307,45 @@ trait Printers extends api.Printers { self: SymbolTable => printTypeParams(tparams) print(if (mods.isDeferred) " <: " else " extends ", impl) - case PackageDef(packaged, stats) => - printAnnotations(tree) - print("package ", packaged); printColumn(stats, " {", ";", "}") + case pd @ PackageDef(packaged, stats) => + printPackageDef(pd, ";") - case ModuleDef(mods, name, impl) => - printAnnotations(tree) + case md @ ModuleDef(mods, name, impl) => + printAnnotations(md) printModifiers(tree, mods) print("object " + symName(tree, name), " extends ", impl) - case ValDef(mods, name, tp, rhs) => - printAnnotations(tree) - printModifiers(tree, mods) - print(if (mods.isMutable) "var " else "val ", symName(tree, name)) - printOpt(": ", tp) - if (!mods.isDeferred) - print(" = ", if (rhs.isEmpty) "_" else rhs) + case vd @ ValDef(mods, name, tp, rhs) => + printValDef(vd, symName(tree, name))(printOpt(": ", tp)) { + if (!mods.isDeferred) print(" = ", if (rhs.isEmpty) "_" else rhs) + } - case 
DefDef(mods, name, tparams, vparamss, tp, rhs) => - printAnnotations(tree) - printModifiers(tree, mods) - print("def " + symName(tree, name)) - printTypeParams(tparams); vparamss foreach printValueParams - printOpt(": ", tp); printOpt(" = ", rhs) + case dd @ DefDef(mods, name, tparams, vparamss, tp, rhs) => + printDefDef(dd, symName(tree, name))(printOpt(": ", tp))(printOpt(" = ", rhs)) - case TypeDef(mods, name, tparams, rhs) => - if (mods hasFlag (PARAM | DEFERRED)) { - printAnnotations(tree) - printModifiers(tree, mods); print("type "); printParam(tree) - } else { - printAnnotations(tree) - printModifiers(tree, mods); print("type " + symName(tree, name)) - printTypeParams(tparams); printOpt(" = ", rhs) - } + case td @ TypeDef(mods, name, tparams, rhs) => + printTypeDef(td, symName(tree, name)) case LabelDef(name, params, rhs) => print(symName(tree, name)); printLabelParams(params); printBlock(rhs) - case Import(expr, selectors) => - // Is this selector remapping a name (i.e, {name1 => name2}) - def isNotRemap(s: ImportSelector) : Boolean = (s.name == nme.WILDCARD || s.name == s.rename) - def selectorToString(s: ImportSelector): String = { - val from = quotedName(s.name) - if (isNotRemap(s)) from - else from + "=>" + quotedName(s.rename) - } - print("import ", backquotedPath(expr), ".") - selectors match { - case List(s) => - // If there is just one selector and it is not remapping a name, no braces are needed - if (isNotRemap(s)) print(selectorToString(s)) - else print("{", selectorToString(s), "}") - // If there is more than one selector braces are always needed - case many => - print(many.map(selectorToString).mkString("{", ", ", "}")) - } + case imp @ Import(expr, _) => + printImport(imp, backquotedPath(expr)) - case Template(parents, self, body) => + case Template(parents, self, body) => val currentOwner1 = currentOwner if (tree.symbol != NoSymbol) currentOwner = tree.symbol.owner -// if (parents exists isReferenceToAnyVal) { -// print("AnyVal") -// } -// 
else { printRow(parents, " with ") - if (!body.isEmpty) { + if (body.nonEmpty) { if (self.name != nme.WILDCARD) { print(" { ", self.name); printOpt(": ", self.tpt); print(" => ") - } else if (!self.tpt.isEmpty) { + } else if (self.tpt.nonEmpty) { print(" { _ : ", self.tpt, " => ") } else { print(" {") } printColumn(body, "", ";", "}") } -// } currentOwner = currentOwner1 case Block(stats, expr) => @@ -290,18 +357,8 @@ trait Printers extends api.Printers { self: SymbolTable => print(selector); printColumn(cases, " match {", "", "}") selectorType = selectorType1 - case CaseDef(pat, guard, body) => - print("case ") - def patConstr(pat: Tree): Tree = pat match { - case Apply(fn, args) => patConstr(fn) - case _ => pat - } - if (showOuterTests && - needsOuterTest( - patConstr(pat).tpe.finalResultType, selectorType, currentOwner)) - print("???") - print(pat); printOpt(" if ", guard) - print(" => ", body) + case cd @ CaseDef(pat, guard, body) => + printCaseDef(cd) case Alternative(trees) => printRow(trees, "(", "| ", ")") @@ -318,9 +375,8 @@ trait Printers extends api.Printers { self: SymbolTable => case ArrayValue(elemtpt, trees) => print("Array[", elemtpt); printRow(trees, "]{", ", ", "}") - case Function(vparams, body) => - print("("); printValueParams(vparams); print(" => ", body, ")") - if (printIds && tree.symbol != null) print("#"+tree.symbol.id) + case f @ Function(vparams, body) => + printFunction(f)(printValueParams(vparams)) case Assign(lhs, rhs) => print(lhs, " = ", rhs) @@ -331,7 +387,7 @@ trait Printers extends api.Printers { self: SymbolTable => case If(cond, thenp, elsep) => print("if (", cond, ")"); indent(); println() print(thenp); undent() - if (!elsep.isEmpty) { + if (elsep.nonEmpty) { println(); print("else"); indent(); println(); print(elsep); undent() } @@ -340,7 +396,7 @@ trait Printers extends api.Printers { self: SymbolTable => case Try(block, catches, finalizer) => print("try "); printBlock(block) - if (!catches.isEmpty) printColumn(catches, " 
catch {", "", "}") + if (catches.nonEmpty) printColumn(catches, " catch {", "", "}") printOpt(" finally ", finalizer) case Throw(expr) => @@ -362,22 +418,18 @@ trait Printers extends api.Printers { self: SymbolTable => print("<apply-dynamic>(", qual, "#", tree.symbol.nameString) printRow(vargs, ", (", ", ", "))") - case Super(This(qual), mix) => - if (!qual.isEmpty || tree.symbol != NoSymbol) print(symName(tree, qual) + ".") - print("super") - if (!mix.isEmpty) - print("[" + mix + "]") + case st @ Super(This(qual), mix) => + printSuper(st, symName(tree, qual)) case Super(qual, mix) => print(qual, ".super") - if (!mix.isEmpty) + if (mix.nonEmpty) print("[" + mix + "]") - case This(qual) => - if (!qual.isEmpty) print(symName(tree, qual) + ".") - print("this") + case th @ This(qual) => + printThis(th, symName(tree, qual)) - case Select(qual @ New(tpe), name) if !settings.debug => + case Select(qual: New, name) if !settings.debug => print(qual) case Select(qualifier, name) => @@ -400,10 +452,10 @@ trait Printers extends api.Printers { self: SymbolTable => print(tree.tpe.toString) } - case Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) => + case an @ Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) => def printAnnot() { print("@", tpt) - if (!args.isEmpty) + if (args.nonEmpty) printRow(args, "(", ",", ")") } print(tree, if (tree.isType) " " else ": ") @@ -435,11 +487,11 @@ trait Printers extends api.Printers { self: SymbolTable => print(tpt) printColumn(whereClauses, " forSome { ", ";", "}") -// SelectFromArray is no longer visible in scala.reflect.internal. -// eliminated until we figure out what we will do with both Printers and -// SelectFromArray. -// case SelectFromArray(qualifier, name, _) => -// print(qualifier); print(".<arr>"); print(symName(tree, name)) + // SelectFromArray is no longer visible in scala.reflect.internal. + // eliminated until we figure out what we will do with both Printers and + // SelectFromArray. 
+ // case SelectFromArray(qualifier, name, _) => + // print(qualifier); print(".<arr>"); print(symName(tree, name)) case tree => xprintTree(this, tree) @@ -459,11 +511,496 @@ trait Printers extends api.Printers { self: SymbolTable => out.print(if (arg == null) "null" else arg.toString) } } + + // it's the printer for trees after parser and before typer phases + class ParsedTreePrinter(out: PrintWriter) extends TreePrinter(out) { + override def withTypes = this + override def withIds = this + override def withKinds = this + override def withMirrors = this + override def withPositions = this + + // TODO: add print parameters to typed trees printer + printTypes = false + printIds = false + printKinds = false + printMirrors = false + printPositions = false + + protected val parentsStack = scala.collection.mutable.Stack[Tree]() + + protected def currentTree = if (parentsStack.nonEmpty) Some(parentsStack.top) else None + + protected def currentParent = if (parentsStack.length > 1) Some(parentsStack(1)) else None + + protected def printedName(name: Name, decoded: Boolean = true) = { + import Chars._ + val decName = name.decoded + val bslash = '\\' + val brackets = List('[',']','(',')','{','}') + + def addBackquotes(s: String) = + if (decoded && (decName.exists(ch => brackets.contains(ch) || isWhitespace(ch)) || + (name.isOperatorName && decName.exists(isOperatorPart) && decName.exists(isScalaLetter) && !decName.contains(bslash)))) + s"`$s`" else s + + if (name == nme.CONSTRUCTOR) "this" + else addBackquotes(quotedName(name, decoded)) + } + + protected def isIntLitWithDecodedOp(qual: Tree, name: Name) = { + val qualIsIntLit = qual match { + case Literal(Constant(x: Int)) => true + case _ => false + } + qualIsIntLit && name.isOperatorName + } + + protected def needsParentheses(parent: Tree)(insideIf: Boolean = true, insideMatch: Boolean = true, + insideTry: Boolean = true, insideAnnotated: Boolean = true, insideBlock: Boolean = true, insideLabelDef: Boolean = true) = { + 
parent match { + case _: If => insideIf + case _: Match => insideMatch + case _: Try => insideTry + case _: Annotated => insideAnnotated + case _: Block => insideBlock + case _: LabelDef => insideLabelDef + case _ => false + } + } + + protected def checkForBlank(cond: Boolean) = if (cond) " " else "" + protected def blankForOperatorName(name: Name) = checkForBlank(name.isOperatorName) + protected def blankForName(name: Name) = checkForBlank(name.isOperatorName || name.endsWith("_")) + + protected def resolveSelect(t: Tree): String = { + t match { + // case for: 1) (if (a) b else c).meth1.meth2 or 2) 1 + 5 should be represented as (1).+(5) + case Select(qual, name) if (name.isTermName && needsParentheses(qual)(insideLabelDef = false)) || isIntLitWithDecodedOp(qual, name) => s"(${resolveSelect(qual)}).${printedName(name)}" + case Select(qual, name) if name.isTermName => s"${resolveSelect(qual)}.${printedName(name)}" + case Select(qual, name) if name.isTypeName => s"${resolveSelect(qual)}#${blankForOperatorName(name)}%${printedName(name)}" + case Ident(name) => printedName(name) + case _ => showCode(t) + } + } + + val defaultClasses = List(tpnme.AnyRef) + val defaultTraitsForCase = List(tpnme.Product, tpnme.Serializable) + protected def removeDefaultTypesFromList(trees: List[Tree])(classesToRemove: List[Name] = defaultClasses)(traitsToRemove: List[Name]) = { + def removeDefaultTraitsFromList(trees: List[Tree], traitsToRemove: List[Name]): List[Tree] = + trees match { + case Nil => trees + case init :+ last => last match { + case Select(Ident(sc), name) if traitsToRemove.contains(name) && sc == nme.scala_ => + removeDefaultTraitsFromList(init, traitsToRemove) + case _ => trees + } + } + + removeDefaultTraitsFromList(removeDefaultClassesFromList(trees, classesToRemove), traitsToRemove) + } + + protected def removeDefaultClassesFromList(trees: List[Tree], classesToRemove: List[Name] = defaultClasses) = trees filter { + case Select(Ident(sc), name) => 
!(classesToRemove.contains(name) && sc == nme.scala_) + case _ => true + } + + def printFlags(mods: Modifiers, primaryCtorParam: Boolean = false): Unit = { + val base = AccessFlags | OVERRIDE | ABSTRACT | FINAL | SEALED | LAZY + val mask = if (primaryCtorParam) base else base | IMPLICIT + + val s = mods.flagString(mask) + if (s != "") print(s"$s ") + // case flag should be the last + if (mods.isCase) print(mods.flagBitsToString(CASE) + " ") + if (mods.isAbstractOverride) print("abstract override ") + } + + override def printModifiers(tree: Tree, mods: Modifiers): Unit = printModifiers(mods, primaryCtorParam = false) + + def printModifiers(mods: Modifiers, primaryCtorParam: Boolean): Unit = { + def modsAccepted = List(currentTree, currentParent) exists (_ map { + case _: ClassDef | _: ModuleDef | _: Template | _: PackageDef => true + case _ => false + } getOrElse false) + + if (currentParent.isEmpty || modsAccepted) + printFlags(mods, primaryCtorParam) + else + List(IMPLICIT, CASE, LAZY, SEALED).foreach{flag => if (mods.hasFlag(flag)) print(s"${mods.flagBitsToString(flag)} ")} + } + + def printParam(tree: Tree, primaryCtorParam: Boolean): Unit = + tree match { + case vd @ ValDef(mods, name, tp, rhs) => + printAnnotations(vd) + val mutableOrOverride = mods.isOverride || mods.isMutable + val hideCtorMods = mods.isParamAccessor && mods.isPrivateLocal && !mutableOrOverride + val hideCaseCtorMods = mods.isCaseAccessor && mods.isPublic && !mutableOrOverride + + if (primaryCtorParam && !(hideCtorMods || hideCaseCtorMods)) { + printModifiers(mods, primaryCtorParam) + print(if (mods.isMutable) "var " else "val "); + } + print(printedName(name), blankForName(name)); + printOpt(": ", tp); + printOpt(" = ", rhs) + case TypeDef(_, name, tparams, rhs) => + print(printedName(name)) + printTypeParams(tparams); + print(rhs) + case _ => + super.printParam(tree) + } + + override def printParam(tree: Tree): Unit = { + printParam(tree, primaryCtorParam = false) + } + + protected def 
printArgss(argss: List[List[Tree]]) = + argss foreach {x: List[Tree] => if (!(x.isEmpty && argss.size == 1)) printRow(x, "(", ", ", ")")} + + override def printAnnotations(tree: MemberDef) = { + val annots = tree.mods.annotations + annots foreach {annot => printAnnot(annot); print(" ")} + } + + protected def printAnnot(tree: Tree) = { + tree match { + case treeInfo.Applied(core, _, argss) => + print("@") + core match { + case Select(New(tree), _) => print(tree) + case _ => + } + printArgss(argss) + case _ => super.printTree(tree) + } + } + + override def printTree(tree: Tree): Unit = { + parentsStack.push(tree) + tree match { + case cl @ ClassDef(mods, name, tparams, impl) => + if (mods.isJavaDefined) super.printTree(cl) + printAnnotations(cl) + // traits + val clParents: List[Tree] = if (mods.isTrait) { + // avoid abstract modifier for traits + printModifiers(tree, mods &~ ABSTRACT) + print("trait ", printedName(name)) + printTypeParams(tparams) + + val build.SyntacticTraitDef(_, _, _, _, parents, _, _) = tree + parents + // classes + } else { + printModifiers(tree, mods) + print("class ", printedName(name)) + printTypeParams(tparams) + + val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl + + // constructor's modifier + if (ctorMods.hasFlag(AccessFlags)) { + print(" ") + printModifiers(ctorMods, primaryCtorParam = false) + } + + def printConstrParams(ts: List[ValDef]): Unit = { + parenthesize() { + printImplicitInParamsList(ts) + printSeq(ts)(printParam(_, primaryCtorParam = true))(print(", ")) + } + } + // constructor's params processing (don't print single empty constructor param list) + vparamss match { + case Nil | List(Nil) if (!mods.isCase && !ctorMods.hasFlag(AccessFlags)) => + case _ => vparamss foreach printConstrParams + } + parents + } + + // get trees without default classes and traits (when they are last) + val printedParents = removeDefaultTypesFromList(clParents)()(if (mods.hasFlag(CASE)) 
defaultTraitsForCase else Nil) + print(if (mods.isDeferred) "<: " else if (printedParents.nonEmpty) " extends " else "", impl) + + case pd @ PackageDef(packaged, stats) => + packaged match { + case Ident(name) if name == nme.EMPTY_PACKAGE_NAME => + printSeq(stats) { + print(_) + } { + print(";"); + println() + }; + case _ => + printPackageDef(pd, "\n") + } + + case md @ ModuleDef(mods, name, impl) => + printAnnotations(md) + printModifiers(tree, mods) + val Template(parents, self, methods) = impl + val parWithoutAnyRef = removeDefaultClassesFromList(parents) + print("object " + printedName(name), if (parWithoutAnyRef.nonEmpty) " extends " else "", impl) + + case vd @ ValDef(mods, name, tp, rhs) => + printValDef(vd, printedName(name)) { + // place space after symbolic def name (val *: Unit does not compile) + printOpt(s"${blankForName(name)}: ", tp) + } { + if (!mods.isDeferred) print(" = ", if (rhs.isEmpty) "_" else rhs) + } + + case dd @ DefDef(mods, name, tparams, vparamss, tp, rhs) => + printDefDef(dd, printedName(name)) { + if (tparams.isEmpty && (vparamss.isEmpty || vparamss(0).isEmpty)) print(blankForName(name)) + printOpt(": ", tp) + } { + printOpt(" = " + (if (mods.isMacro) "macro " else ""), rhs) + } + + case td @ TypeDef(mods, name, tparams, rhs) => + printTypeDef(td, printedName(name)) + + case LabelDef(name, params, rhs) => + if (name.startsWith(nme.WHILE_PREFIX)) { + val If(cond, thenp, elsep) = rhs + print("while (", cond, ") ") + val Block(list, wh) = thenp + printColumn(list, "", ";", "") + } else if (name.startsWith(nme.DO_WHILE_PREFIX)) { + val Block(bodyList, ifCond @ If(cond, thenp, elsep)) = rhs + print("do ") + printColumn(bodyList, "", ";", "") + print(" while (", cond, ") ") + } else { + print(printedName(name)); printLabelParams(params); + printBlock(rhs) + } + + case imp @ Import(expr, _) => + printImport(imp, resolveSelect(expr)) + + case Template(parents, self, body) => + val printedParents = + currentParent map { + case _: 
CompoundTypeTree => parents + case ClassDef(mods, name, _, _) if mods.isCase => removeDefaultTypesFromList(parents)()(List(tpnme.Product, tpnme.Serializable)) + case _ => removeDefaultClassesFromList(parents) + } getOrElse (parents) + + val primaryCtr = treeInfo.firstConstructor(body) + val ap: Option[Apply] = primaryCtr match { + case DefDef(_, _, _, _, _, Block(ctBody, _)) => + val earlyDefs = treeInfo.preSuperFields(ctBody) ::: body.filter { + case td: TypeDef => treeInfo.isEarlyDef(td) + case _ => false + } + if (earlyDefs.nonEmpty) { + print("{") + printColumn(earlyDefs, "", ";", "") + print("} " + (if (printedParents.nonEmpty) "with " else "")) + } + ctBody collectFirst { + case apply: Apply => apply + } + case _ => None + } + + if (printedParents.nonEmpty) { + val (clParent :: traits) = printedParents + print(clParent) + + val constrArgss = ap match { + case Some(treeInfo.Applied(_, _, argss)) => argss + case _ => Nil + } + printArgss(constrArgss) + if (traits.nonEmpty) { + printRow(traits, " with ", " with ", "") + } + } + /* Remove primary constr def and constr val and var defs + * right contains all constructors + */ + val (left, right) = body.filter { + // remove valdefs defined in constructor and presuper vals + case vd: ValDef => !vd.mods.isParamAccessor && !treeInfo.isEarlyValDef(vd) + // remove $this$ from traits + case dd: DefDef => dd.name != nme.MIXIN_CONSTRUCTOR + case td: TypeDef => !treeInfo.isEarlyDef(td) + case EmptyTree => false + case _ => true + } span { + case dd: DefDef => dd.name != nme.CONSTRUCTOR + case _ => true + } + val modBody = left ::: right.drop(1) + val showBody = !(modBody.isEmpty && (self == noSelfType || self.isEmpty)) + if (showBody) { + if (self.name != nme.WILDCARD) { + print(" { ", self.name); + printOpt(": ", self.tpt); + print(" =>") + } else if (self.tpt.nonEmpty) { + print(" { _ : ", self.tpt, " =>") + } else { + print(" {") + } + printColumn(modBody, "", ";", "}") + } + + case Block(stats, expr) => 
super.printTree(tree) + + case Match(selector, cases) => + /* Insert braces if match is inner + * make this function available for other cases + * passing required type for checking + */ + def insertBraces(body: => Unit): Unit = + if (parentsStack.nonEmpty && parentsStack.tail.exists(_.isInstanceOf[Match])) { + print("(") + body + print(")") + } else body + + val printParentheses = needsParentheses(selector)(insideLabelDef = false) + tree match { + case Match(EmptyTree, cs) => + printColumn(cases, "{", "", "}") + case _ => + insertBraces { + parenthesize(printParentheses)(print(selector)) + printColumn(cases, " match {", "", "}") + } + } + + case cd @ CaseDef(pat, guard, body) => + printCaseDef(cd) + + case Star(elem) => + print(elem, "*") + + case Bind(name, t) => + if (t == EmptyTree) print("(", printedName(name), ")") + else if (t.exists(_.isInstanceOf[Star])) print(printedName(name), " @ ", t) + else print("(", printedName(name), " @ ", t, ")") + + case f @ Function(vparams, body) => + // parentheses are not allowed for val a: Int => Int = implicit x => x + val printParentheses = vparams match { + case head :: _ => !head.mods.isImplicit + case _ => true + } + printFunction(f)(printValueParams(vparams, inParentheses = printParentheses)) + + case Typed(expr, tp) => + tp match { + case Function(List(), EmptyTree) => print("(", expr, " _)") //func _ + // parentheses required when (a match {}) : Type + case _ => print("((", expr, "): ", tp, ")") + } + + case Apply(fun, vargs) => + tree match { + // processing methods ending on colons (x \: list) + case Apply(Block(l1 @ List(sVD: ValDef), a1 @ Apply(Select(_, methodName), l2 @ List(Ident(iVDName)))), l3) + if sVD.mods.isSynthetic && treeInfo.isLeftAssoc(methodName) && sVD.name == iVDName => + val printBlock = Block(l1, Apply(a1, l3)) + print(printBlock) + case Apply(tree1, _) if (needsParentheses(tree1)(insideAnnotated = false)) => + parenthesize()(print(fun)); printRow(vargs, "(", ", ", ")") + case _ => 
super.printTree(tree) + } + + case st @ Super(This(qual), mix) => + printSuper(st, printedName(qual)) + + case th @ This(qual) => + printThis(th, printedName(qual)) + + case Select(qual: New, name) => + print(qual) + + case Select(qualifier, name) => { + val printParentheses = needsParentheses(qualifier)(insideAnnotated = false) || isIntLitWithDecodedOp(qualifier, name) + if (printParentheses) print("(", resolveSelect(qualifier), ").", printedName(name)) + else print(resolveSelect(qualifier), ".", printedName(name)) + } + + case id @ Ident(name) => + if (name.nonEmpty) { + if (name == nme.dollarScope) { + print(s"scala.xml.${nme.TopScope}") + } else { + val str = printedName(name) + val strIsBackquoted = str.startsWith("`") && str.endsWith("`") + print(if (id.isBackquoted && !strIsBackquoted) "`" + str + "`" else str) + } + } else { + print("") + } + + case l @ Literal(x) => + x match { + case Constant(v: String) if { + val strValue = x.stringValue + strValue.contains("\n") && strValue.contains("\"\"\"") && strValue.size > 1 + } => + val splitValue = x.stringValue.split('\n'.toString).toList + val multilineStringValue = if (x.stringValue.endsWith("\n")) splitValue :+ "" else splitValue + val trQuotes = "\"\"\"" + print(trQuotes); printSeq(multilineStringValue) { print(_) } { print("\n") }; print(trQuotes) + case _ => + // processing Float constants + val printValue = x.escapedStringValue + (if (x.value.isInstanceOf[Float]) "F" else "") + print(printValue) + } + + case an @ Annotated(ap, tree) => + val printParentheses = needsParentheses(tree)() + parenthesize(printParentheses) { print(tree) }; print(if (tree.isType) " " else ": ") + printAnnot(ap) + + case SelectFromTypeTree(qualifier, selector) => + print("(", qualifier, ")#", blankForOperatorName(selector), printedName(selector)) + + case AppliedTypeTree(tp, args) => + // it's possible to have (=> String) => String type but Function1[=> String, String] is not correct + val containsByNameTypeParam = args exists 
treeInfo.isByNameParamType + + if (containsByNameTypeParam) { + print("(") + printRow(args.init, "(", ", ", ")") + print(" => ", args.last, ")") + } else { + if (treeInfo.isRepeatedParamType(tree) && args.nonEmpty) { + print(args(0), "*") + } else if (treeInfo.isByNameParamType(tree)) { + print("=> ", if (args.isEmpty) "()" else args(0)) + } else + super.printTree(tree) + } + + case ExistentialTypeTree(tpt, whereClauses) => + print("(", tpt); + printColumn(whereClauses, " forSome { ", ";", "})") + + case EmptyTree => + + case tree => super.printTree(tree) + } + parentsStack.pop() + } + } /** Hook for extensions */ def xprintTree(treePrinter: TreePrinter, tree: Tree) = treePrinter.print(tree.productPrefix+tree.productIterator.mkString("(", ", ", ")")) + def newCodePrinter(writer: PrintWriter): TreePrinter = new ParsedTreePrinter(writer) def newTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream)) def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter)) diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index ea6afa7349..a54aa1f6e8 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -686,6 +686,7 @@ trait StdNames { val inlinedEquals: NameType = "inlinedEquals" val isArray: NameType = "isArray" val isDefinedAt: NameType = "isDefinedAt" + val isEmpty: NameType = "isEmpty" val isInstanceOf_ : NameType = "isInstanceOf" val isInstanceOf_Ob : NameType = "$isInstanceOf" val java: NameType = "java" diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 0ce5a0fbea..bed8310767 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -244,6 +244,18 @@ abstract class 
SymbolTable extends macros.Universe finally popPhase(saved) } + final def findPhaseWithName(phaseName: String): Phase = { + var ph = phase + while (ph != NoPhase && ph.name != phaseName) { + ph = ph.prev + } + if (ph eq NoPhase) phase else ph + } + final def enteringPhaseWithName[T](phaseName: String)(body: => T): T = { + val phase = findPhaseWithName(phaseName) + enteringPhase(phase)(body) + } + def slowButSafeEnteringPhase[T](ph: Phase)(op: => T): T = { if (isCompilerUniverse) enteringPhase(ph)(op) else op @@ -344,16 +356,18 @@ abstract class SymbolTable extends macros.Universe // Weak references so the garbage collector will take care of // letting us know when a cache is really out of commission. - private val caches = WeakHashSet[Clearable]() + import java.lang.ref.WeakReference + private var caches = List[WeakReference[Clearable]]() def recordCache[T <: Clearable](cache: T): T = { - caches += cache + caches ::= new WeakReference(cache) cache } def clearAll() = { debuglog("Clearing " + caches.size + " caches.") - caches foreach (_.clear) + caches foreach (ref => Option(ref.get).foreach(_.clear)) + caches = caches.filterNot(_.get == null) } def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]()) @@ -364,9 +378,9 @@ abstract class SymbolTable extends macros.Universe val NoCached: T = null.asInstanceOf[T] var cached: T = NoCached var cachedRunId = NoRunId - caches += new Clearable { + recordCache(new Clearable { def clear(): Unit = cached = NoCached - } + }) () => { if (currentRunId != cachedRunId || cached == NoCached) { cached = f diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6269004298..f6d21ec9bd 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -340,11 +340,13 @@ abstract class TreeGen extends macros.TreeBuilder { // create parameters for <init> as synthetic trees. 
var vparamss1 = mmap(vparamss) { vd => - atPos(vd.pos.focus) { + val param = atPos(vd.pos.makeTransparent) { val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR) - ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate) + ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, duplicateAndKeepPositions(vd.rhs)) } + param } + val (edefs, rest) = body span treeInfo.isEarlyDef val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef val gvdefs = evdefs map { @@ -377,15 +379,21 @@ abstract class TreeGen extends macros.TreeBuilder { // this means that we don't know what will be the arguments of the super call // therefore here we emit a dummy which gets populated when the template is named and typechecked Some( - // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)` - // is it going to be a problem that we can no longer include the `argss`? - atPos(wrappingPos(superPos, lvdefs)) ( + atPos(wrappingPos(superPos, lvdefs ::: vparamss1.flatten).makeTransparent) ( DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant()))))) } } constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus = false)) // Field definitions for the class - remove defaults. - val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree)) + + val fieldDefs = vparamss.flatten map (vd => { + val field = copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree) + // Prevent overlapping of `field` end's position with default argument's start position. + // This is needed for `Positions.Locator(pos).traverse` to return the correct tree when + // the `pos` is a point position with all its values equal to `vd.rhs.pos.start`. 
+ if(field.pos.isRange && vd.rhs.pos.isRange) field.pos = field.pos.withEnd(vd.rhs.pos.start - 1) + field + }) global.Template(parents, self, gvdefs ::: fieldDefs ::: constr ++: etdefs ::: rest) } diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 8fdf4dc27a..497a7c91b1 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -612,8 +612,8 @@ abstract class TreeInfo { def effectivePatternArity(args: List[Tree]): Int = flattenedPatternArgs(args).length def flattenedPatternArgs(args: List[Tree]): List[Tree] = args map unbind match { - case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs - case xs => xs + case build.SyntacticTuple(xs) :: Nil => xs + case xs => xs } // used in the symbols for labeldefs and valdefs emitted by the pattern matcher diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index d191fbd38f..4a518f6c56 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -541,7 +541,7 @@ trait Trees extends api.Trees { extends TypTree with TypeBoundsTreeApi object TypeBoundsTree extends TypeBoundsTreeExtractor - case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree]) + case class ExistentialTypeTree(tpt: Tree, whereClauses: List[MemberDef]) extends TypTree with ExistentialTypeTreeApi object ExistentialTypeTree extends ExistentialTypeTreeExtractor @@ -694,7 +694,7 @@ trait Trees extends api.Trees { new AppliedTypeTree(tpt, args).copyAttrs(tree) def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) = new TypeBoundsTree(lo, hi).copyAttrs(tree) - def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) = + def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]) = new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree) } @@ -910,7 +910,7 @@ trait Trees extends 
api.Trees { if (lo0 == lo) && (hi0 == hi) => t case _ => treeCopy.TypeBoundsTree(tree, lo, hi) } - def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) = tree match { + def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[MemberDef]) = tree match { case t @ ExistentialTypeTree(tpt0, whereClauses0) if (tpt0 == tpt) && (whereClauses0 == whereClauses) => t case _ => treeCopy.ExistentialTypeTree(tree, tpt, whereClauses) @@ -1421,7 +1421,7 @@ trait Trees extends api.Trees { case CompoundTypeTree(templ) => treeCopy.CompoundTypeTree(tree, transformTemplate(templ)) case ExistentialTypeTree(tpt, whereClauses) => - treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses)) + treeCopy.ExistentialTypeTree(tree, transform(tpt), transformMemberDefs(whereClauses)) case Return(expr) => treeCopy.Return(tree, transform(expr)) case Alternative(trees) => diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 99e6ae633f..e9230aceee 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -8,7 +8,6 @@ package reflect package internal import scala.collection.{ mutable, immutable, generic } -import generic.Clearable import scala.ref.WeakReference import mutable.ListBuffer import Flags._ @@ -1999,7 +1998,9 @@ trait Types if (sym.typeParams.size != args.size) devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args") - asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args) + val GenPolyType(tparams, result) = asSeenFromOwner(tp) + assert((tparams eq Nil) || tparams == sym.typeParams, (tparams, sym.typeParams)) + result.instantiateTypeParams(sym.typeParams, args) } // note: does not go through typeRef. 
There's no need to because @@ -2309,7 +2310,14 @@ trait Types } thisInfo.decls } - protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform + protected[Types] def baseTypeSeqImpl: BaseTypeSeq = + if (sym.info.baseTypeSeq exists (_.typeSymbolDirect.isAbstractType)) + // SI-8046 base type sequence might have more elements in a subclass, we can't map it element wise. + transform(sym.info).baseTypeSeq + else + // Optimization: no abstract types, we can compute the BTS of this TypeRef as an element-wise map + // of the BTS of the referenced symbol. + sym.info.baseTypeSeq map transform override def baseTypeSeq: BaseTypeSeq = { val cache = baseTypeSeqCache @@ -3660,7 +3668,11 @@ trait Types if (Statistics.canEnable) Statistics.incCounter(rawTypeCount) if (uniqueRunId != currentRunId) { uniques = util.WeakHashSet[Type](initialUniquesCapacity) - perRunCaches.recordCache(uniques) + // JZ: We used to register this as a perRunCache so it would be cleared eagerly at + // the end of the compilation run. But, that facility didn't actually clear this map (SI-8129)! + // When i fixed that bug, run/tpeCache-tyconCache.scala started failing. Why was that? + // I've removed the registration for now. I don't think its particularly harmful anymore + // as a) this is now a weak set, and b) it is discarded completely before the next run. 
uniqueRunId = currentRunId } (uniques findEntryOrUpdate tp).asInstanceOf[T] diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index a6c34935ad..3d222fce10 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -487,6 +487,7 @@ abstract class UnPickler { def nameRef() = readNameRef() def tparamRef() = readTypeDefRef() def vparamRef() = readValDefRef() + def memberRef() = readMemberDefRef() def constRef() = readConstantRef() def idRef() = readIdentRef() def termNameRef() = readNameRef().toTermName @@ -520,7 +521,7 @@ abstract class UnPickler { case CLASStree => ClassDef(modsRef, typeNameRef, rep(tparamRef), implRef) case COMPOUNDTYPEtree => CompoundTypeTree(implRef) case DEFDEFtree => DefDef(modsRef, termNameRef, rep(tparamRef), rep(rep(vparamRef)), ref, ref) - case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(ref)) + case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(memberRef)) case FUNCTIONtree => Function(rep(vparamRef), ref) case IMPORTtree => Import(ref, selectorsRef) case LABELtree => LabelDef(termNameRef, rep(idRef), ref) @@ -634,6 +635,12 @@ abstract class UnPickler { case other => errorBadSignature("expected an TypeDef (" + other + ")") } + protected def readMemberDefRef(): MemberDef = + readTreeRef() match { + case tree:MemberDef => tree + case other => + errorBadSignature("expected an MemberDef (" + other + ")") + } protected def errorBadSignature(msg: String) = throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index b60fecd66e..1620d8156b 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -17,20 
+17,15 @@ trait TypeComparers { private val _pendingSubTypes = new mutable.HashSet[SubTypePair] def pendingSubTypes = _pendingSubTypes - class SubTypePair(val tp1: Type, val tp2: Type) { - override def hashCode = tp1.hashCode * 41 + tp2.hashCode - override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match { - // suspend TypeVars in types compared by =:=, - // since we don't want to mutate them simply to check whether a subtype test is pending - // in addition to making subtyping "more correct" for type vars, - // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion) - // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold) - case stp: SubTypePair => - val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t)) - suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2) - case _ => - false - }) + final case class SubTypePair(tp1: Type, tp2: Type) { + // SI-8146 we used to implement equality here in terms of pairwise =:=. + // But, this was inconsistent with hashCode, which was based on the + // Type#hashCode, based on the structure of types, not the meaning. + // Now, we use `Type#{equals,hashCode}` as the (consistent) basis for + // detecting cycles (aka keeping subtyping decidable.) + // + // I added a tests to show that we detect the cycle: neg/t8146-no-finitary* + override def toString = tp1+" <:<? 
"+tp2 } @@ -170,11 +165,20 @@ trait TypeComparers { // corresponds does not check length of two sequences before checking the predicate, // but SubstMap assumes it has been checked (SI-2956) ( sameLength(tparams1, tparams2) - && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info)) + && (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= subst(p2.info)) && (res1 =:= subst(res2)) ) } + // SI-2066 This prevents overrides with incompatible variance in higher order type parameters. + private def methodHigherOrderTypeParamsSameVariance(sym1: Symbol, sym2: Symbol) = { + def ignoreVariance(sym: Symbol) = !(sym.isHigherOrderTypeParameter && sym.logicallyEnclosingMember.isMethod) + ignoreVariance(sym1) || ignoreVariance(sym2) || sym1.variance == sym2.variance + } + + private def methodHigherOrderTypeParamsSubVariance(low: Symbol, high: Symbol) = + methodHigherOrderTypeParamsSameVariance(low, high) || low.variance.isInvariant + def isSameType2(tp1: Type, tp2: Type): Boolean = { def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs) @@ -262,7 +266,7 @@ trait TypeComparers { if (subsametypeRecursions >= LogPendingSubTypesThreshold) { val p = new SubTypePair(tp1, tp2) if (pendingSubTypes(p)) - false + false // see neg/t8146-no-finitary* else try { pendingSubTypes += p @@ -327,7 +331,10 @@ trait TypeComparers { val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes) def sub2(tp: Type) = tp.substSym(tparams2, substitutes) - def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) + def cmp(p1: Symbol, p2: Symbol) = ( + methodHigherOrderTypeParamsSubVariance(p2, p1) + && sub2(p2.info) <:< sub1(p1.info) + ) (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) } diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala 
b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index f61c1f3c50..e4a6503184 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -41,7 +41,8 @@ trait TraceSymbolActivity { } } - private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString) + private lazy val erasurePhase = findPhaseWithName("erasure") + private def signature(id: Int) = enteringPhase(erasurePhase)(allSymbols(id).defString) private def dashes(s: Any): String = ("" + s) map (_ => '-') private def show(s1: Any, ss: Any*) { @@ -87,14 +88,6 @@ trait TraceSymbolActivity { private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = { showMapFreq(xs.toList groupBy groupFn)(showFn) } - private lazy val findErasurePhase: Phase = { - var ph = phase - while (ph != NoPhase && ph.name != "erasure") { - ph = ph.prev - } - if (ph eq NoPhase) phase else ph - } - private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body) def showAllSymbols() { if (!enabled) return diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala index ca599dbd49..cc92cd10c7 100644 --- a/src/reflect/scala/reflect/macros/Aliases.scala +++ b/src/reflect/scala/reflect/macros/Aliases.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that defines shorthands for the + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that defines shorthands for the * most frequently used types and functions of the underlying compiler universe. */ trait Aliases { - self: BlackboxContext => + self: blackbox.Context => /** The type of symbols representing declarations. 
*/ type Symbol = universe.Symbol diff --git a/src/reflect/scala/reflect/macros/BlackboxMacro.scala b/src/reflect/scala/reflect/macros/BlackboxMacro.scala deleted file mode 100644 index df142e9238..0000000000 --- a/src/reflect/scala/reflect/macros/BlackboxMacro.scala +++ /dev/null @@ -1,36 +0,0 @@ -package scala.reflect -package macros - -/** - * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> - * - * Traditionally macro implementations are defined as methods, - * but this trait provides an alternative way of encoding macro impls as - * bundles, traits which extend `scala.reflect.macros.BlackboxMacro` or`scala.reflect.macros.WhiteboxMacro` . - * - * Instead of: - * - * def impl[T: c.WeakTypeTag](c: BlackboxContext)(x: c.Expr[Int]) = ... - * - * One can write: - * - * trait Impl extends BlackboxMacro { - * def apply[T: c.WeakTypeTag](x: c.Expr[Int]) = ... - * } - * - * Without changing anything else at all. - * - * This language feature is useful in itself in cases when macro implementations - * are complex and need to be modularized. State of the art technique of addressing this need is quite heavyweight: - * http://docs.scala-lang.org/overviews/macros/overview.html#writing_bigger_macros. - * - * @see `scala.reflect.macros.WhiteboxMacro` - */ -trait BlackboxMacro { - /** The context to be used by the macro implementation. - * - * Vanilla macro implementations have to carry it in their signatures, however when a macro is a full-fledged module, - * it can define the context next to the implementation, makes implementation signature more lightweight. 
- */ - val c: BlackboxContext -} diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala index 31905c4739..5f248d25d3 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -7,32 +7,19 @@ import scala.language.existentials // SI-6541 /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that exposes + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that exposes * enclosing trees (method, class, compilation unit and currently compiled application), * the enclosing position of the macro expansion, as well as macros and implicits * that are currently in-flight. */ trait Enclosures { - self: BlackboxContext => + self: blackbox.Context => /** The tree that undergoes macro expansion. * Can be useful to get an offset or a range position of the entire tree being processed. */ def macroApplication: Tree - /** The semantic role that `macroApplication` plays in the code. - */ - type MacroRole - - /** The role that represents an application of a term macro, - * e.g. `M(2)(3)` in `val x = M(2)(3)` or `M(a, b)` in `x match { case x @ M(a, b) => }`. - */ - def APPLY_ROLE: MacroRole - - /** The semantic role that `macroApplication` plays in the code. - */ - def macroRole: MacroRole - /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only. * Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion. * @@ -43,7 +30,7 @@ trait Enclosures { * Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created * and always stays the same regardless of whatever happens during macro expansion. 
*/ - def enclosingMacros: List[BlackboxContext] + def enclosingMacros: List[blackbox.Context] /** Tries to guess a position for the enclosing application. * But that is simple, right? Just dereference `pos` of `macroApplication`? Not really. diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala index eb37e83cad..222ae43d79 100644 --- a/src/reflect/scala/reflect/macros/Evals.scala +++ b/src/reflect/scala/reflect/macros/Evals.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that provides + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that provides * a facility to evaluate trees. */ trait Evals { - self: BlackboxContext => + self: blackbox.Context => /** Takes a typed wrapper for a tree of type `T` and evaluates it to a value of type `T`. * @@ -21,12 +21,12 @@ trait Evals { * mutates the tree in place, therefore the conventional approach is to `duplicate` the tree first. 
* * {{{ - * scala> def impl(c: BlackboxContext)(x: c.Expr[String]) = { + * scala> def impl(c: Context)(x: c.Expr[String]) = { * | val x1 = c.Expr[String](c.resetAllAttrs(x.tree.duplicate)) * | println(s"compile-time value is: \${c.eval(x1)}") * | x * | } - * impl: (c: BlackboxContext)(x: c.Expr[String])c.Expr[String] + * impl: (c: Context)(x: c.Expr[String])c.Expr[String] * * scala> def test(x: String) = macro impl * test: (x: String)String diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala index 58b61e446a..c438653c92 100644 --- a/src/reflect/scala/reflect/macros/ExprUtils.scala +++ b/src/reflect/scala/reflect/macros/ExprUtils.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that defines shorthands for the + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that defines shorthands for the * most common `Expr`-creating functions. */ trait ExprUtils { - self: BlackboxContext => + self: blackbox.Context => /** Shorthand for `Literal(Constant(null))` in the underlying `universe`. */ @deprecated("Use quasiquotes instead", "2.11.0") diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala index 3a910d89ad..a770f325b2 100644 --- a/src/reflect/scala/reflect/macros/FrontEnds.scala +++ b/src/reflect/scala/reflect/macros/FrontEnds.scala @@ -5,12 +5,12 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that * provides facilities to communicate with the compiler's front end * (emit warnings, errors and other sorts of messages). 
*/ trait FrontEnds { - self: BlackboxContext => + self: blackbox.Context => /** For sending a message which should not be labeled as a warning/error, * but also shouldn't require -verbose to be visible. diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala index b6585f94d2..0f2d9ce4cf 100644 --- a/src/reflect/scala/reflect/macros/Infrastructure.scala +++ b/src/reflect/scala/reflect/macros/Infrastructure.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that * provides facilities to communicate with the compiler's infrastructure. */ trait Infrastructure { - self: BlackboxContext => + self: blackbox.Context => /** Exposes macro-specific settings as a list of strings. * These settings are passed to the compiler via the "-Xmacro-settings:setting1,setting2...,settingN" command-line option. diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala index 6bd3e1a199..af60dffbfc 100644 --- a/src/reflect/scala/reflect/macros/Names.scala +++ b/src/reflect/scala/reflect/macros/Names.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that * provides functions that generate unique names. */ trait Names { - self: BlackboxContext => + self: blackbox.Context => /** Creates a unique string. 
*/ @deprecated("Use freshName instead", "2.11.0") diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala index 9d4a7e2953..720b754649 100644 --- a/src/reflect/scala/reflect/macros/Parsers.scala +++ b/src/reflect/scala/reflect/macros/Parsers.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that * exposes functions to parse strings with Scala code into trees. */ trait Parsers { - self: BlackboxContext => + self: blackbox.Context => /** Parses a string with a Scala expression into an abstract syntax tree. * Only works for expressions, i.e. parsing a package declaration will fail. diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala index 67d10dc10a..ff1f7a3b28 100644 --- a/src/reflect/scala/reflect/macros/Reifiers.scala +++ b/src/reflect/scala/reflect/macros/Reifiers.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that * exposes functions to save reflection artifacts for runtime. */ trait Reifiers { - self: BlackboxContext => + self: blackbox.Context => /** Given a tree, generate a tree that when compiled and executed produces the original tree. * For more information and examples see the documentation for `Universe.reify`. 
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 5a25801335..54336900f8 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -5,11 +5,11 @@ package macros /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> * - * A slice of [[scala.reflect.macros.BlackboxContext the Scala macros context]] that + * A slice of [[scala.reflect.macros.blackbox.Context the Scala macros context]] that * partially exposes the type checker to macro writers. */ trait Typers { - self: BlackboxContext => + self: blackbox.Context => /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only. * Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion. @@ -21,7 +21,7 @@ trait Typers { * Unlike `enclosingMacros`, this is a def, which means that it gets recalculated on every invocation, * so it might change depending on what is going on during macro expansion. */ - def openMacros: List[BlackboxContext] + def openMacros: List[blackbox.Context] /** @see `Typers.typecheck` */ diff --git a/src/reflect/scala/reflect/macros/WhiteboxMacro.scala b/src/reflect/scala/reflect/macros/WhiteboxMacro.scala deleted file mode 100644 index 1c581313eb..0000000000 --- a/src/reflect/scala/reflect/macros/WhiteboxMacro.scala +++ /dev/null @@ -1,36 +0,0 @@ -package scala.reflect -package macros - -/** - * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> - * - * Traditionally macro implementations are defined as methods, - * but this trait provides an alternative way of encoding macro impls as - * bundles, traits which extend `scala.reflect.macros.BlackboxMacro` or`scala.reflect.macros.WhiteboxMacro` . - * - * Instead of: - * - * def impl[T: c.WeakTypeTag](c: WhiteboxContext)(x: c.Expr[Int]) = ... 
- * - * One can write: - * - * trait Impl extends WhiteboxMacro { - * def apply[T: c.WeakTypeTag](x: c.Expr[Int]) = ... - * } - * - * Without changing anything else at all. - * - * This language feature is useful in itself in cases when macro implementations - * are complex and need to be modularized. State of the art technique of addressing this need is quite heavyweight: - * http://docs.scala-lang.org/overviews/macros/overview.html#writing_bigger_macros. - * - * @see `scala.reflect.macros.BlackboxMacro` - */ -trait WhiteboxMacro { - /** The context to be used by the macro implementation. - * - * Vanilla macro implementations have to carry it in their signatures, however when a macro is a full-fledged module, - * it can define the context next to the implementation, makes implementation signature more lightweight. - */ - val c: WhiteboxContext -} diff --git a/src/reflect/scala/reflect/macros/BlackboxContext.scala b/src/reflect/scala/reflect/macros/blackbox/Context.scala index 2c77289866..05d9595c3a 100644 --- a/src/reflect/scala/reflect/macros/BlackboxContext.scala +++ b/src/reflect/scala/reflect/macros/blackbox/Context.scala @@ -1,6 +1,7 @@ package scala package reflect package macros +package blackbox /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> @@ -24,24 +25,24 @@ package macros * enclosing trees and compilation units, evaluating trees, logging warnings/errors and much more. * Refer to the documentation of top-level traits in this package to learn the details. * - * If a macro def refers to a macro impl that uses `BlackboxContext`, then this macro def becomes a blackbox macro, + * If a macro def refers to a macro impl that uses `blackbox.Context`, then this macro def becomes a blackbox macro, * which means that its expansion will be upcast to its return type, enforcing faithfullness of that macro to its - * type signature. Whitebox macros, i.e. 
the ones defined with `WhiteboxContext`, aren't bound by this restriction, + * type signature. Whitebox macros, i.e. the ones defined with `whitebox.Context`, aren't bound by this restriction, * which enables a number of important use cases, but they are also going to enjoy less support than blackbox macros, * so choose wisely. See the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]] for more information. * - * @see `scala.reflect.macros.WhiteboxContext` + * @see `scala.reflect.macros.whitebox.Context` */ -trait BlackboxContext extends Aliases - with Enclosures - with Names - with Reifiers - with FrontEnds - with Infrastructure - with Typers - with Parsers - with Evals - with ExprUtils { +trait Context extends Aliases + with Enclosures + with Names + with Reifiers + with FrontEnds + with Infrastructure + with Typers + with Parsers + with Evals + with ExprUtils { /** The compile-time universe. */ val universe: Universe @@ -63,7 +64,7 @@ trait BlackboxContext extends Aliases * scala> class Coll[T] { * | def filter(p: T => Boolean): Coll[T] = macro M.filter[T] * | }; object M { - * | def filter[T](c: BlackboxContext { type PrefixType = Coll[T] }) + * | def filter[T](c: Context { type PrefixType = Coll[T] }) * | (p: c.Expr[T => Boolean]): c.Expr[Coll[T]] = * | { * | println(c.prefix.tree) diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala index 6a8434a163..cc7111d794 100644 --- a/src/reflect/scala/reflect/macros/package.scala +++ b/src/reflect/scala/reflect/macros/package.scala @@ -19,10 +19,10 @@ package object macros { * with the former being better supported and the latter being more powerful. You can read about * the details of the split and the associated trade-offs in the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]]. 
* - * `scala.reflect.macros.Context` follows this tendency and turns into `scala.reflect.macros.BlackboxContext` - * and `scala.reflect.macros.WhiteboxContext`. The original `Context` is left in place for compatibility reasons, + * `scala.reflect.macros.Context` follows this tendency and turns into `scala.reflect.macros.blackbox.Context` + * and `scala.reflect.macros.whitebox.Context`. The original `Context` is left in place for compatibility reasons, * but it is now deprecated, nudging the users to choose between blackbox and whitebox macros. */ - @deprecated("Use BlackboxContext or WhiteboxContext instead", "2.11.0") - type Context = WhiteboxContext + @deprecated("Use blackbox.Context or whitebox.Context instead", "2.11.0") + type Context = whitebox.Context }
\ No newline at end of file diff --git a/src/reflect/scala/reflect/macros/WhiteboxContext.scala b/src/reflect/scala/reflect/macros/whitebox/Context.scala index 9d65a5c16e..bd48df46cc 100644 --- a/src/reflect/scala/reflect/macros/WhiteboxContext.scala +++ b/src/reflect/scala/reflect/macros/whitebox/Context.scala @@ -1,6 +1,7 @@ package scala package reflect package macros +package whitebox /** * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span> @@ -24,22 +25,22 @@ package macros * enclosing trees and compilation units, evaluating trees, logging warnings/errors and much more. * Refer to the documentation of top-level traits in this package to learn the details. * - * If a macro def refers to a macro impl that uses `WhiteboxContext`, then this macro def becomes a whitebox macro, + * If a macro def refers to a macro impl that uses `whitebox.Context`, then this macro def becomes a whitebox macro, * gaining the ability to refine the type of its expansion beyond its official return type, which enables a number of important use cases. - * Blackbox macros, i.e. the ones defined with `BlackboxContext`, can't do that, so they are less powerful. + * Blackbox macros, i.e. the ones defined with `blackbox.Context`, can't do that, so they are less powerful. * However blackbox macros are also going to enjoy better support than whitebox macros, so choose wisely. * See the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]] for more information. * - * @see `scala.reflect.macros.BlackboxContext` + * @see `scala.reflect.macros.blackbox.Context` */ -trait WhiteboxContext extends BlackboxContext { +trait Context extends blackbox.Context { /** @inheritdoc */ - def openMacros: List[WhiteboxContext] + def openMacros: List[Context] /** @inheritdoc */ - def enclosingMacros: List[WhiteboxContext] + def enclosingMacros: List[Context] /** Information about one of the currently considered implicit candidates. 
* Candidates are used in plural form, because implicit parameters may themselves have implicit parameters, diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 9c4a3a5fe1..6b3985d434 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -198,6 +198,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.ErroneousCollector this.adaptToNewRunMap // inaccessible: this.commonOwnerMapObj + this.SubTypePair this.SymbolKind this.NoSymbol this.CyclicReference @@ -273,6 +274,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.ComparableClass definitions.JavaCloneableClass definitions.JavaNumberClass + definitions.JavaEnumClass definitions.RemoteInterfaceClass definitions.RemoteExceptionClass definitions.ByNameParamClass @@ -319,8 +321,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.MirrorClass definitions.TypeCreatorClass definitions.TreeCreatorClass - definitions.BlackboxMacroClass - definitions.WhiteboxMacroClass definitions.BlackboxContextClass definitions.WhiteboxContextClass definitions.MacroImplAnnotation @@ -425,6 +425,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.languageFeatureModule definitions.metaAnnotations definitions.AnnotationDefaultAttr + // inaccessible: definitions.erasurePhase definitions.isPhantomClass definitions.syntheticCoreClasses definitions.syntheticCoreMethods diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index 3a7688aa2c..3c9bbccba3 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -26,7 +26,7 @@ package object runtime { package runtime { private[scala] object Macros { - def currentMirror(c: scala.reflect.macros.BlackboxContext): c.Expr[universe.Mirror] = { + def 
currentMirror(c: scala.reflect.macros.blackbox.Context): c.Expr[universe.Mirror] = { import c.universe._ val runtimeClass = c.reifyEnclosingRuntimeClass if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class") diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala index 496d5face1..915fd57bf8 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala @@ -11,7 +11,6 @@ import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable } import scala.tools.nsc.util.ScalaClassLoader import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer } import java.util.{ Locale } -import java.util.regex.Pattern import java.util.concurrent.ConcurrentLinkedQueue import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener, ForwardingJavaFileManager, JavaFileManager, JavaFileObject, @@ -20,6 +19,7 @@ import scala.reflect.io.{ AbstractFile, Directory, File, Path } import scala.io.Source import scala.util.{ Try, Success, Failure } import scala.util.Properties.lineSeparator +import scala.util.matching.Regex import scala.collection.JavaConverters import scala.collection.generic.Clearable import java.net.URL @@ -608,12 +608,12 @@ object JavapClass { // class k, candidate f without prefix def isFunOfClass(k: String, f: String) = { - val p = (s"${Pattern quote k}\\$$+anonfun").r + val p = (s"${Regex quote k}\\$$+anonfun").r (p findPrefixOf f).nonEmpty } // class k, candidate f without prefix, method m def isFunOfMethod(k: String, m: String, f: String) = { - val p = (s"${Pattern quote k}\\$$+anonfun\\$$${Pattern quote m}\\$$").r + val p = (s"${Regex quote k}\\$$+anonfun\\$$${Regex quote m}\\$$").r (p findPrefixOf f).nonEmpty } def isFunOfTarget(k: String, m: Option[String], f: String) = diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala 
b/src/repl/scala/tools/nsc/interpreter/Naming.scala index cf38a2ae3a..e09c6f315e 100644 --- a/src/repl/scala/tools/nsc/interpreter/Naming.scala +++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala @@ -8,6 +8,7 @@ package tools.nsc package interpreter import scala.util.Properties.lineSeparator +import scala.util.matching.Regex /** This is for name logic which is independent of the compiler (notice there's no Global.) * That includes at least generating, metaquoting, mangling, and unmangling. @@ -38,12 +39,10 @@ trait Naming { // // $line3.$read.$iw.$iw.Bippy = // $line3.$read$$iw$$iw$Bippy@4a6a00ca - - private def noMeta(s: String) = "\\Q" + s + "\\E" lazy val lineRegex = { val sn = sessionNames - val members = List(sn.read, sn.eval, sn.print) map noMeta mkString ("(?:", "|", ")") - debugging("lineRegex")(noMeta(sn.line) + """\d+[./]""" + members + """[$.]""") + val members = List(sn.read, sn.eval, sn.print) map Regex.quote mkString ("(?:", "|", ")") + debugging("lineRegex")(Regex.quote(sn.line) + """\d+[./]""" + members + """[$.]""") } private def removeLineWrapper(s: String) = s.replaceAll(lineRegex, "") diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala deleted file mode 100644 index 578ef71e09..0000000000 --- a/src/swing/scala/swing/Publisher.scala +++ /dev/null @@ -1,174 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.swing - -import scala.collection.mutable -import mutable.Buffer -import event.Event - -/** <p> - * Notifies registered reactions when an event is published. Publishers are - * also reactors and listen to themselves per default as a convenience. - * </p> - * <p> - * In order to reduce memory leaks, reactions are weakly referenced by default, - * unless they implement <code>Reactions.StronglyReferenced</code>. 
That way, - * the lifetime of reactions are more easily bound to the registering object, - * which are reactors in common client code and hold strong references to their - * reactions. As a result, reactors can be garbage collected even though they - * still have reactions registered at some publisher, but not vice versa - * since reactors (strongly) reference publishers they are interested in. - * </p> - */ -trait Publisher extends Reactor { - import Reactions._ - - protected val listeners = new RefSet[Reaction] { - import scala.ref._ - val underlying = new mutable.HashSet[Reference[Reaction]] - protected def Ref(a: Reaction) = a match { - case a: StronglyReferenced => new StrongReference[Reaction](a) with super.Ref[Reaction] - case _ => new WeakReference[Reaction](a, referenceQueue) with super.Ref[Reaction] - } - } - - private[swing] def subscribe(listener: Reaction) { listeners += listener } - private[swing] def unsubscribe(listener: Reaction) { listeners -= listener } - - /** - * Notify all registered reactions. - */ - def publish(e: Event) { for (l <- listeners) if (l.isDefinedAt(e)) l(e) } - - listenTo(this) -} - -/** - * A publisher that subscribes itself to an underlying event source not before the first - * reaction is installed. Can unsubscribe itself when the last reaction is uninstalled. 
- */ -private[swing] trait LazyPublisher extends Publisher { - import Reactions._ - - protected def onFirstSubscribe() - protected def onLastUnsubscribe() - - override def subscribe(listener: Reaction) { - if(listeners.size == 1) onFirstSubscribe() - super.subscribe(listener) - } - override def unsubscribe(listener: Reaction) { - super.unsubscribe(listener) - if(listeners.size == 1) onLastUnsubscribe() - } -} - - - -import scala.ref._ - -private[swing] trait SingleRefCollection[+A <: AnyRef] extends Iterable[A] { self => - - trait Ref[+A <: AnyRef] extends Reference[A] { - override def hashCode() = get match { - case Some(x) => x.## - case _ => 0 - } - override def equals(that: Any) = that match { - case that: ReferenceWrapper[_] => - val v1 = this.get - val v2 = that.get - v1 == v2 - case _ => false - } - } - - //type Ref <: Reference[A] // TODO: could use higher kinded types, but currently crashes - protected[this] def Ref(a: A): Ref[A] - protected[this] val referenceQueue = new ReferenceQueue[A] - - protected val underlying: Iterable[Reference[A]] - - def purgeReferences() { - var ref = referenceQueue.poll - while (ref != None) { - removeReference(ref.get.asInstanceOf[Reference[A]]) - ref = referenceQueue.poll - } - } - - protected[this] def removeReference(ref: Reference[A]) - - def iterator = new Iterator[A] { - private val elems = self.underlying.iterator - private var hd: A = _ - private var ahead: Boolean = false - private def skip(): Unit = - while (!ahead && elems.hasNext) { - // make sure we have a reference to the next element, - // otherwise it might be garbage collected - val next = elems.next.get - ahead = next != None - if (ahead) hd = next.get - } - def hasNext: Boolean = { skip; ahead } - def next(): A = - if (hasNext) { ahead = false; hd } - else throw new NoSuchElementException("next on empty iterator") - } -} - -private[swing] class StrongReference[+T <: AnyRef](value: T) extends Reference[T] { - private[this] var ref: Option[T] = Some(value) - 
def isValid: Boolean = ref != None - def apply(): T = ref.get - def get : Option[T] = ref - override def toString = get.map(_.toString).getOrElse("<deleted>") - def clear() { ref = None } - def enqueue(): Boolean = false - def isEnqueued(): Boolean = false - } - -abstract class RefBuffer[A <: AnyRef] extends Buffer[A] with SingleRefCollection[A] { self => - protected val underlying: Buffer[Reference[A]] - - def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this } - def +=:(el: A) = { purgeReferences(); Ref(el) +=: underlying; this } - def remove(el: A) { underlying -= Ref(el); purgeReferences(); } - def remove(n: Int) = { val el = apply(n); remove(el); el } - def insertAll(n: Int, iter: Iterable[A]) { - purgeReferences() - underlying.insertAll(n, iter.view.map(Ref(_))) - } - def update(n: Int, el: A) { purgeReferences(); underlying(n) = Ref(el) } - def apply(n: Int) = { - purgeReferences() - var el = underlying(n).get - while (el == None) { - purgeReferences(); el = underlying(n).get - } - el.get - } - - def length = { purgeReferences(); underlying.length } - def clear() { underlying.clear(); purgeReferences() } - - protected[this] def removeReference(ref: Reference[A]) { underlying -= ref } -} - -private[swing] abstract class RefSet[A <: AnyRef] extends mutable.Set[A] with SingleRefCollection[A] { self => - protected val underlying: mutable.Set[Reference[A]] - - def -=(el: A): this.type = { underlying -= Ref(el); purgeReferences(); this } - def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this } - def contains(el: A): Boolean = { purgeReferences(); underlying.contains(Ref(el)) } - override def size = { purgeReferences(); underlying.size } - - protected[this] def removeReference(ref: Reference[A]) { underlying -= ref } -} |