author     Paul Phillips <paulp@improving.org>  2012-03-18 10:42:20 -0700
committer  Paul Phillips <paulp@improving.org>  2012-03-18 10:42:20 -0700
commit     513232f251f1e1ce89adbde53fa1ccec67bda24d (patch)
tree       7792247e953f37dc6c05fe64baeb0841811f31cd
parent     f90efea9600cd9bd39537b256e371a093aa994cd (diff)
parent     b3efb3d493605d1c7e106e5f0a697b52ebb3d97c (diff)
download   scala-513232f251f1e1ce89adbde53fa1ccec67bda24d.tar.gz
           scala-513232f251f1e1ce89adbde53fa1ccec67bda24d.tar.bz2
           scala-513232f251f1e1ce89adbde53fa1ccec67bda24d.zip
Merge remote-tracking branches 'axel22/feature/checkinit-transient' and 'jsuereth/mirrored-seq-extractors' into develop
-rw-r--r--  build.number | 9
-rw-r--r--  build.xml | 89
-rw-r--r--  src/build/maven/maven-deploy.xml | 38
-rw-r--r--  src/build/pack.xml | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Definitions.scala | 15
-rw-r--r--  src/compiler/scala/reflect/internal/Flags.scala | 51
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 17
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 122
-rwxr-xr-x[-rw-r--r--]  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 33
-rwxr-xr-x[-rw-r--r--]  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 88
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 3
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 87
-rw-r--r--  src/library/scala/collection/SeqExtractors.scala | 21
-rw-r--r--  src/library/scala/collection/mutable/ConcurrentTrieMap.scala (renamed from src/library/scala/collection/mutable/Ctrie.scala) | 100
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParConcurrentTrieMap.scala (renamed from src/library/scala/collection/parallel/mutable/ParCtrie.scala) | 54
-rw-r--r--  src/library/scala/package.scala | 3
-rw-r--r--  src/library/scala/reflect/api/Modifier.scala | 2
-rw-r--r--  src/library/scala/util/Properties.scala | 24
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/Elem.scala | 35
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/Node.scala | 2
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/PrettyPrinter.scala | 2
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/Utility.scala | 63
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/XML.scala | 19
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/factory/Binder.scala | 10
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/parsing/ConstructingHandler.scala | 4
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/parsing/DefaultMarkupHandler.scala | 2
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/parsing/MarkupHandler.scala | 3
-rwxr-xr-x[-rw-r--r--]  src/library/scala/xml/parsing/MarkupParser.scala | 2
-rwxr-xr-x  src/library/scala/xml/pull/XMLEventReader.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/CompilerTest.scala | 35
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 2
-rwxr-xr-x[-rw-r--r--]  test/files/jvm/interpreter.check | 2
-rw-r--r--  test/files/jvm/serialization.check | 8
-rw-r--r--  test/files/jvm/serialization.scala | 14
-rwxr-xr-x[-rw-r--r--]  test/files/jvm/t0632.check | 24
-rwxr-xr-x  test/files/jvm/t1118.check | 11
-rwxr-xr-x  test/files/jvm/t1118.scala | 21
-rw-r--r--  test/files/jvm/unittest_xml.scala | 2
-rwxr-xr-x[-rw-r--r--]  test/files/jvm/xml01.check | 4
-rwxr-xr-x[-rw-r--r--]  test/files/jvm/xml03syntax.check | 4
-rw-r--r--  test/files/jvm/xml05.check | 2
-rw-r--r--  test/files/jvm/xmlattr.check | 4
-rw-r--r--  test/files/neg/t5572.check | 11
-rw-r--r--  test/files/neg/t5572.scala | 23
-rw-r--r--  test/files/pos/anyval-rangepos.scala | 1
-rw-r--r--  test/files/pos/dotless-targs.scala | 12
-rw-r--r--  test/files/pos/rangepos.flags (renamed from test/files/pos/anyval-rangepos.flags) | 0
-rw-r--r--  test/files/pos/rangepos.scala | 5
-rw-r--r--  test/files/pos/t5545/S_1.scala | 4
-rw-r--r--  test/files/pos/t5545/S_2.scala | 4
-rw-r--r--  test/files/run/compiler-asSeenFrom.check | 323
-rw-r--r--  test/files/run/compiler-asSeenFrom.scala | 122
-rw-r--r--  test/files/run/ctries/concmap.scala | 24
-rw-r--r--  test/files/run/ctries/iterator.scala | 20
-rw-r--r--  test/files/run/ctries/lnode.scala | 14
-rw-r--r--  test/files/run/ctries/snapshot.scala | 26
-rw-r--r--  test/files/run/existentials-in-compiler.scala | 2
-rw-r--r--  test/files/run/matchonseq.check | 2
-rw-r--r--  test/files/run/matchonseq.scala | 8
-rwxr-xr-x[-rw-r--r--]  test/files/run/t0663.check | 2
-rwxr-xr-x[-rw-r--r--]  test/files/run/t1620.check | 4
-rwxr-xr-x[-rw-r--r--]  test/files/run/t2124.check | 2
-rwxr-xr-x[-rw-r--r--]  test/files/run/t2125.check | 2
-rw-r--r--  test/files/run/xml-attribute.check | 24
-rw-r--r--  test/files/scalacheck/Ctrie.scala | 14
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala | 12
-rw-r--r--  test/files/scalacheck/parallel-collections/pc.scala | 2
-rw-r--r--  test/scaladoc/resources/explicit-inheritance-override.scala | 48
-rw-r--r--  test/scaladoc/resources/explicit-inheritance-usecase.scala | 47
-rw-r--r--  test/scaladoc/resources/implicit-inheritance-override.scala | 8
-rw-r--r--  test/scaladoc/resources/inheritdoc-corner-cases.scala | 78
-rw-r--r--  test/scaladoc/scala/html/HtmlFactoryTest.scala | 196
-rwxr-xr-x  tools/get-scala-commit-date | 16
-rw-r--r--  tools/get-scala-commit-date.bat | 24
-rwxr-xr-x  tools/get-scala-commit-drift | 40
-rw-r--r--  tools/get-scala-commit-drift.bat (renamed from tools/get-scala-revision.bat) | 9
-rwxr-xr-x  tools/get-scala-commit-sha (renamed from tools/get-scala-revision) | 14
-rw-r--r--  tools/get-scala-commit-sha.bat | 21
-rwxr-xr-x  tools/verify-jar-cache | 10
84 files changed, 1718 insertions(+), 533 deletions(-)
diff --git a/build.number b/build.number
new file mode 100644
index 0000000000..b5c4e61d13
--- /dev/null
+++ b/build.number
@@ -0,0 +1,9 @@
+#Tue Sep 11 19:21:09 CEST 2007
+version.major=2
+version.minor=10
+version.patch=0
+# This is the -N part of a version. if it's 0, it's dropped from maven versions.
+version.bnum=0
+
+# Note: To build a release run ant with -Dbuild.release=true
+# To build an RC, run ant with -Dmaven.version.suffix=-RCN
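As a worked example of how these fields combine with the suffix logic added in build.xml below: with version.bnum=0 and no -Dbuild.release flag the Maven version comes out as 2.10.0-SNAPSHOT, with -Dbuild.release=true it becomes 2.10.0, and an RC built with -Dmaven.version.suffix=-RC1 yields 2.10.0-RC1.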
diff --git a/build.xml b/build.xml
index f0fd36da5a..53bd9c3011 100644
--- a/build.xml
+++ b/build.xml
@@ -166,6 +166,8 @@ PROPERTIES
<!-- Loads custom properties definitions -->
<property file="${basedir}/build.properties"/>
+ <!-- Generating version number -->
+ <property file="${basedir}/build.number"/>
<!-- Additional command line arguments for scalac. They are added to all build targets -->
<property name="scalac.args" value=""/>
@@ -236,7 +238,30 @@ INITIALISATION
</touch>
</target>
- <target name="init" depends="init.jars">
+ <!-- Determines OSGi string + maven extension. -->
+ <target name="init.hasbuildnum">
+ <condition property="version.hasbuildnum">
+ <not><equals arg1="${version.bnum}" arg2="0"/></not>
+ </condition>
+ </target>
+ <target name="init.build.snapshot" unless="build.release">
+ <property name="maven.version.suffix" value="-SNAPSHOT"/>
+ </target>
+ <target name="init.build.release" if="build.release" depends="init.hasbuildnum, init.build.snapshot">
+ <property name="maven.version.suffix" value=""/>
+ </target>
+ <target name="init.build.nopatch.release" unless="version.hasbuildnum" depends="init.hasbuildnum">
+ <property name="version.suffix" value=""/>
+ </target>
+ <!-- funny thing, ant is. Can only specify *one* property in if check. Guaranteed that both are true here,
+ since properties are immutable. -->
+ <target name="init.build.patch.release" if="version.hasbuildnum" depends="init.build.nopatch.release">
+ <property name="version.suffix" value="-${version.bnum}"/>
+ </target>
+
+ <target name="init.build.suffix.done" depends="init.build.release, init.build.patch.release"/>
+
+ <target name="init" depends="init.jars, init.build.suffix.done">
<!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.optimise" value=""/>
<!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
@@ -255,22 +280,30 @@ INITIALISATION
<condition property="os.win">
<os family="windows"/>
</condition>
-
- <!-- Generating version string -->
- <exec osfamily="unix" executable="tools/get-scala-revision" outputproperty="version.number" failifexecutionfails="false" />
- <exec osfamily="windows" executable="tools/get-scala-revision.bat" outputproperty="version.number" failifexecutionfails="false" />
+
+ <exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
+ <exec osfamily="windows" executable="tools/get-scala-commit-sha.bat" outputproperty="git.commit.sha" failifexecutionfails="false" />
+ <exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
+ <exec osfamily="windows" executable="tools/get-scala-commit-date.bat" outputproperty="git.commit.date" failifexecutionfails="false" />
+ <exec osfamily="unix" executable="tools/get-scala-commit-drift" outputproperty="git.commit.drift" failifexecutionfails="false" />
+ <exec osfamily="windows" executable="tools/get-scala-commit-drift.bat" outputproperty="git.commit.drift" failifexecutionfails="false" />
+ <!-- some default in case something went wrong getting the revision -->
+ <property name="git.describe" value="-unknown-"/>
+
+ <!-- We use the git describe to determine the OSGi modifier for our build. -->
+ <property
+ name="maven.version.number"
+ value="${version.major}.${version.minor}.${version.patch}${version.suffix}${maven.version.suffix}"/>
+ <property
+ name="version.number"
+ value="${maven.version.number}-${git.commit.date}-${git.commit.drift}-${git.commit.sha}"/>
+ <property
+ name="osgi.version.number"
+ value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${version.suffix}-${git.commit.sha}"/>
<!-- some default in case something went wrong getting the revision -->
<property name="version.number" value="-unknown-"/>
<property name="init.avail" value="yes"/>
- <!-- And print-out what we are building -->
- <echo message=" build time: ${time.human}" />
- <echo message=" java version: ${java.vm.name} ${java.version}" />
- <echo message=" java args: ${env.ANT_OPTS} ${jvm.opts}" />
- <echo message=" javac args: ${javac.args}" />
- <echo message=" scalac args: ${scalac.args}" />
- <echo message=" build number: ${version.number}" />
-
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
@@ -293,7 +326,17 @@ INITIALISATION
<path refid="lib.extra"/>
</path>
- <!-- Define tasks that can be run with Starr -->
+ <!-- And print-out what we are building -->
+ <echo message=" build time: ${time.human}" />
+ <echo message=" java version: ${java.vm.name} ${java.version}" />
+ <echo message=" java args: ${env.ANT_OPTS} ${jvm.opts}" />
+ <echo message=" javac args: ${javac.args}" />
+ <echo message=" scalac args: ${scalac.args}" />
+ <echo message=" maven version: ${maven.version.number}"/>
+ <echo message=" OSGi version: ${osgi.version.number}" />
+ <echo message="canonical version: ${version.number}" />
+
+ <!-- Define tasks that can be run with Starr -->
<path id="starr.classpath">
<pathelement location="${lib.starr.jar}"/>
<pathelement location="${comp.starr.jar}"/>
@@ -358,6 +401,8 @@ LOCAL REFERENCE BUILD (LOCKER)
</scalacfork>
<propertyfile file="${build-locker.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
+ <entry key="maven.version.number" value="${maven.version.number}"/>
+ <entry key="osgi.version.number" value="${osgi.version.number}"/>
<entry key="copyright.string" value="${copyright.string}"/>
</propertyfile>
<copy todir="${build-locker.dir}/classes/library">
@@ -397,6 +442,8 @@ LOCAL REFERENCE BUILD (LOCKER)
</scalacfork>
<propertyfile file="${build-locker.dir}/classes/compiler/compiler.properties">
<entry key="version.number" value="${version.number}"/>
+ <entry key="maven.version.number" value="${maven.version.number}"/>
+ <entry key="osgi.version.number" value="${osgi.version.number}"/>
<entry key="copyright.string" value="${copyright.string}"/>
</propertyfile>
<copy todir="${build-locker.dir}/classes/compiler">
@@ -628,6 +675,8 @@ QUICK BUILD (QUICK)
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
+ <entry key="maven.version.number" value="${maven.version.number}"/>
+ <entry key="osgi.version.number" value="${osgi.version.number}"/>
<entry key="copyright.string" value="${copyright.string}"/>
</propertyfile>
<copy todir="${build-quick.dir}/classes/library">
@@ -687,6 +736,8 @@ QUICK BUILD (QUICK)
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/compiler/compiler.properties">
<entry key="version.number" value="${version.number}"/>
+ <entry key="maven.version.number" value="${maven.version.number}"/>
+ <entry key="osgi.version.number" value="${osgi.version.number}"/>
<entry key="copyright.string" value="${copyright.string}"/>
</propertyfile>
<copy todir="${build-quick.dir}/classes/compiler">
@@ -1169,6 +1220,8 @@ BOOTSTRAPPING BUILD (STRAP)
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/library/library.properties">
<entry key="version.number" value="${version.number}"/>
+ <entry key="maven.version.number" value="${maven.version.number}"/>
+ <entry key="osgi.version.number" value="${osgi.version.number}"/>
<entry key="copyright.string" value="${copyright.string}"/>
</propertyfile>
<copy todir="${build-strap.dir}/classes/library">
@@ -1208,6 +1261,8 @@ BOOTSTRAPPING BUILD (STRAP)
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/compiler/compiler.properties">
<entry key="version.number" value="${version.number}"/>
+ <entry key="maven.version.number" value="${maven.version.number}"/>
+ <entry key="osgi.version.number" value="${osgi.version.number}"/>
<entry key="copyright.string" value="${copyright.string}"/>
</propertyfile>
<copy todir="${build-strap.dir}/classes/compiler">
@@ -2077,12 +2132,6 @@ FORWARDED TARGETS FOR NIGHTLY BUILDS
<ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
</target>
- <target name="nightly.checkinit">
- <antcall target="nightly-nopt">
- <param name="scalac.args.optimise" value="-Xcheckinit"/>
- </antcall>
- </target>
-
<target name="nightly.checkall">
<antcall target="nightly-nopt">
<param name="partest.scalacopts" value="-Ycheck:all"/>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 2e490163e0..e0f31a5db2 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -16,10 +16,15 @@
<property name="local.release.repository" value="${user.home}/.m2/repository" />
<property name="repository.credentials.id" value="sonatype-nexus" />
<property name="settings.file" value="${user.home}/.m2/settings.xml" />
-
+ <condition property="version.is.snapshot">
+ <contains string="${maven.version.number}" substring="-SNAPSHOT"/>
+ </condition>
+
<echo>Using server[${repository.credentials.id}] for maven repository credentials.
Please make sure that your ~/.m2/settings.xml has the needed username/password for this server id
</echo>
+
+
</target>
<target name="init.maven" depends="init.properties">
@@ -241,28 +246,31 @@
</target>
<!-- Local Targets -->
- <target name="deploy.snapshot.local" depends="deploy.local.init" description="Deploys the bundled snapshot of the Scala Lanaguage to a local maven repository">
- <deploy-local-all version="${maven.snapshot.version.number}" repository="${local.snapshot.repository}" />
+ <target name="deploy.snapshot.local" depends="deploy.local.init" if="version.is.snapshot" description="Deploys the bundled snapshot of the Scala Lanaguage to a local maven repository">
+ <deploy-local-all version="${maven.version.number}" repository="${local.snapshot.repository}" />
</target>
- <target name="deploy.release.local" depends="deploy.local.init" description="Deploys the bundled files as a release into the local Maven repository">
- <deploy-local-all version="${version.number}" repository="${local.release.repository}" />
+ <target name="deploy.release.local" depends="deploy.local.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the local Maven repository">
+ <deploy-local-all version="${maven.version.number}" repository="${local.release.repository}" />
</target>
+ <target name="deploy.local" depends="deploy.snapshot.local, deploy.release.local" description="Deploys the bundle files to the local maven repo."/>
- <!-- Remote Targets -->
- <target name="deploy.signed.snapshot" depends="deploy.remote.init" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
- <deploy-remote-signed-all version="${maven.snapshot.version.number}" repository="${remote.snapshot.repository}" />
+ <!-- Remote Signed Targets -->
+ <target name="deploy.signed.snapshot" depends="deploy.remote.init" if="version.is.snapshot" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
+ <deploy-remote-signed-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
</target>
- <target name="deploy.signed.release" depends="deploy.remote.init" description="Deploys the bundled files as a release into the desired remote Maven repository">
- <deploy-remote-signed-all version="${version.number}" repository="${remote.release.repository}" />
+ <target name="deploy.signed.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
+ <deploy-remote-signed-all version="${maven.version.number}" repository="${remote.release.repository}" />
</target>
-
- <target name="deploy.snapshot" depends="deploy.remote.init" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
- <deploy-remote-all version="${maven.snapshot.version.number}" repository="${remote.snapshot.repository}" />
+ <target name="deploy.signed" depends="deploy.signed.release, deploy.signed.snapshot" description="Deploys signed bundles to remote repo"/>
+ <!-- Remote unsigned targets -->
+ <target name="deploy.snapshot" depends="deploy.remote.init" if="version.is.snapshot" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
+ <deploy-remote-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
</target>
- <target name="deploy.release" depends="deploy.remote.init" description="Deploys the bundled files as a release into the desired remote Maven repository">
- <deploy-remote-all version="${version.number}" repository="${remote.release.repository}" />
+ <target name="deploy.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
+ <deploy-remote-all version="${maven.version.number}" repository="${remote.release.repository}" />
</target>
+ <target name="deploy" depends="deploy.snapshot, deploy.release" description="Deploys unsigned artifacts to the maven repo."/>
</project>
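The new aggregate targets (deploy.local, deploy.signed, deploy) rely entirely on the version.is.snapshot condition to enable exactly one of their dependencies; a rough sketch of that dispatch, illustrative only:
  // Snapshot versions go to the snapshot repository; everything else is treated as a release.
  def deployKind(mavenVersionNumber: String): String =
    if (mavenVersionNumber contains "-SNAPSHOT") "snapshot" else "release"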
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 1b0cf19151..e79895e3a8 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -315,8 +315,6 @@ MAIN DISTRIBUTION SBAZ
<copy tofile="${dists.dir}/maven/${version.number}/build.xml"
file="${src.dir}/build/maven/maven-deploy.xml"/>
<!-- export properties for use when deploying -->
- <property name="maven.snapshot.version.number"
- value="${version.major}.${version.minor}.${version.patch}-SNAPSHOT"/>
<echoproperties destfile="${dists.dir}/maven/${version.number}/build.properties"/>
</target>
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index 1d53b83b75..bd823c3128 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -904,6 +904,20 @@ trait Definitions extends reflect.api.StandardDefinitions {
def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
+ def findMemberFromRoot(fullName: Name): Symbol = {
+ val segs = nme.segments(fullName.toString, fullName.isTermName)
+ if (segs.isEmpty) NoSymbol
+ else findNamedMember(segs.tail, definitions.RootClass.info member segs.head)
+ }
+ def findNamedMember(fullName: Name, root: Symbol): Symbol = {
+ val segs = nme.segments(fullName.toString, fullName.isTermName)
+ if (segs.isEmpty || segs.head != root.simpleName) NoSymbol
+ else findNamedMember(segs.tail, root)
+ }
+ def findNamedMember(segs: List[Name], root: Symbol): Symbol =
+ if (segs.isEmpty) root
+ else findNamedMember(segs.tail, root.info member segs.head)
+
def getMember(owner: Symbol, name: Name): Symbol = {
if (owner == NoSymbol) NoSymbol
else owner.info.nonPrivateMember(name) match {
@@ -911,6 +925,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
case result => result
}
}
+
def packageExists(packageName: String): Boolean =
getModuleIfDefined(packageName).isPackage
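A rough sketch of how the relocated lookup helpers are used; the chosen class is only an example.
  // Illustrative only: resolve a fully qualified name segment by segment from the root package.
  val listModule = findMemberFromRoot(newTermName("scala.collection.immutable.List"))  // the List object
  val listClass  = findMemberFromRoot(newTypeName("scala.collection.immutable.List"))  // the List class
  // findNamedMember performs the same walk, but relative to a given root symbol.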
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index 270491d078..8aae80eed4 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -468,33 +468,34 @@ class Flags extends ModifierFlags {
protected final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
def flagOfModifier(mod: Modifier): Long = mod match {
- case Modifier.`protected` => PROTECTED
- case Modifier.`private` => PRIVATE
- case Modifier.`override` => OVERRIDE
- case Modifier.`abstract` => ABSTRACT
- case Modifier.`final`=> FINAL
- case Modifier.`sealed`=> SEALED
- case Modifier.`implicit`=> IMPLICIT
- case Modifier.`lazy`=> LAZY
- case Modifier.`case`=> CASE
- case Modifier.`trait`=> TRAIT
- case Modifier.deferred => DEFERRED
- case Modifier.interface => INTERFACE
- case Modifier.mutable => MUTABLE
- case Modifier.parameter => PARAM
- case Modifier.`macro` => MACRO
- case Modifier.covariant => COVARIANT
- case Modifier.contravariant => CONTRAVARIANT
- case Modifier.preSuper => PRESUPER
+ case Modifier.`protected` => PROTECTED
+ case Modifier.`private` => PRIVATE
+ case Modifier.`override` => OVERRIDE
+ case Modifier.`abstract` => ABSTRACT
+ case Modifier.`final` => FINAL
+ case Modifier.`sealed` => SEALED
+ case Modifier.`implicit` => IMPLICIT
+ case Modifier.`lazy` => LAZY
+ case Modifier.`case` => CASE
+ case Modifier.`trait` => TRAIT
+ case Modifier.deferred => DEFERRED
+ case Modifier.interface => INTERFACE
+ case Modifier.mutable => MUTABLE
+ case Modifier.parameter => PARAM
+ case Modifier.`macro` => MACRO
+ case Modifier.covariant => COVARIANT
+ case Modifier.contravariant => CONTRAVARIANT
+ case Modifier.preSuper => PRESUPER
case Modifier.abstractOverride => ABSOVERRIDE
- case Modifier.local => LOCAL
- case Modifier.java => JAVA
- case Modifier.static => STATIC
- case Modifier.caseAccessor => CASEACCESSOR
+ case Modifier.local => LOCAL
+ case Modifier.java => JAVA
+ case Modifier.static => STATIC
+ case Modifier.caseAccessor => CASEACCESSOR
case Modifier.defaultParameter => DEFAULTPARAM
- case Modifier.defaultInit => DEFAULTINIT
- case Modifier.paramAccessor => PARAMACCESSOR
- case Modifier.bynameParameter => BYNAMEPARAM
+ case Modifier.defaultInit => DEFAULTINIT
+ case Modifier.paramAccessor => PARAMACCESSOR
+ case Modifier.bynameParameter => BYNAMEPARAM
+ case _ => 0
}
def flagsOfModifiers(mods: List[Modifier]): Long =
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index bc2cc8191c..9ccd0c28db 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -44,6 +44,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
with symtab.Positions {
override def settings = currentSettings
+
+ import definitions.{ findNamedMember, findMemberFromRoot }
// alternate constructors ------------------------------------------
@@ -1494,21 +1496,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb
afterPhase(phase) { currentRun.units foreach (treePrinter.print(_)) }
}
- private def findMemberFromRoot(fullName: Name): Symbol = {
- val segs = nme.segments(fullName.toString, fullName.isTermName)
- if (segs.isEmpty) NoSymbol
- else findNamedMember(segs.tail, definitions.RootClass.info member segs.head)
- }
-
- private def findNamedMember(fullName: Name, root: Symbol): Symbol = {
- val segs = nme.segments(fullName.toString, fullName.isTermName)
- if (segs.isEmpty || segs.head != root.simpleName) NoSymbol
- else findNamedMember(segs.tail, root)
- }
- private def findNamedMember(segs: List[Name], root: Symbol): Symbol =
- if (segs.isEmpty) root
- else findNamedMember(segs.tail, root.info member segs.head)
-
/** We resolve the class/object ambiguity by passing a type/term name.
*/
def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6a6379cca2..678f7b3028 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -19,6 +19,8 @@ import scala.collection.mutable
*/
trait DocComments { self: Global =>
+ var cookedDocComments = Map[Symbol, String]()
+
def reporter: Reporter
/** The raw doc comment map */
@@ -50,21 +52,29 @@ trait DocComments { self: Global =>
else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
}
- /** The raw doc comment of symbol `sym`, minus @usecase and @define sections, augmented by
+ /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
* missing sections of an inherited doc comment.
* If a symbol does not have a doc comment but some overridden version of it does,
* the doc comment of the overridden version is copied instead.
*/
- def cookedDocComment(sym: Symbol, docStr: String = ""): String = {
- val ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse ""
- else DocComment(docStr).template
- superComment(sym) match {
- case None =>
- ownComment
- case Some(sc) =>
- if (ownComment == "") sc
- else merge(sc, ownComment, sym)
- }
+ def cookedDocComment(sym: Symbol, docStr: String = ""): String = cookedDocComments.get(sym) match {
+ case Some(comment) =>
+ comment
+ case None =>
+ val ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse ""
+ else DocComment(docStr).template
+ val comment = superComment(sym) match {
+ case None =>
+ if (ownComment.indexOf("@inheritdoc") != -1)
+ reporter.warning(sym.pos, "The comment for " + sym +
+ " contains @inheritdoc, but no parent comment is available to inherit from.")
+ ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
+ case Some(sc) =>
+ if (ownComment == "") sc
+ else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
+ }
+ cookedDocComments += (sym -> comment)
+ comment
}
/** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
@@ -99,10 +109,18 @@ trait DocComments { self: Global =>
*/
def useCases(sym: Symbol, site: Symbol): List[(Symbol, String, Position)] = {
def getUseCases(dc: DocComment) = {
- for (uc <- dc.useCases; defn <- uc.expandedDefs(sym, site)) yield
- (defn,
- expandVariables(merge(cookedDocComment(sym), uc.comment.raw, defn), sym, site),
- uc.pos)
+ val fullSigComment = cookedDocComment(sym)
+ for (uc <- dc.useCases; defn <- uc.expandedDefs(sym, site)) yield {
+ // use cases comments go through a series of transformations:
+ // 1 - filling in missing sections from the full signature
+ // 2 - expanding explicit inheritance @inheritdoc tags
+ // 3 - expanding variables like $COLL
+ val useCaseCommentRaw = uc.comment.raw
+ val useCaseCommentMerged = merge(fullSigComment, useCaseCommentRaw, defn)
+ val useCaseCommentInheritdoc = expandInheritdoc(fullSigComment, useCaseCommentMerged, sym)
+ val useCaseCommentVariables = expandVariables(useCaseCommentInheritdoc, sym, site)
+ (defn, useCaseCommentVariables, uc.pos)
+ }
}
getDocComment(sym) map getUseCases getOrElse List()
}
@@ -201,6 +219,80 @@ trait DocComments { self: Global =>
}
}
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ */
+ def expandInheritdoc(src: String, dst: String, sym: Symbol): String =
+ if (dst.indexOf("@inheritdoc") == -1)
+ dst
+ else {
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcTagMap = sectionTagMap(src, srcSections)
+ val srcNamedParams = Map() +
+ ("@param" -> paramDocs(src, "@param", srcSections)) +
+ ("@tparam" -> paramDocs(src, "@tparam", srcSections)) +
+ ("@throws" -> paramDocs(src, "@throws", srcSections))
+
+ val out = new StringBuilder
+
+ def replaceInheritdoc(src: String, dst: String) =
+ if (dst.indexOf("@inheritdoc") == -1)
+ dst
+ else
+ dst.replaceAllLiterally("@inheritdoc", src)
+
+ def getSourceSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(dst, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(dst, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(src, section)
+ cleanupSectionText(src.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+ "<invalid inheritdoc annotation>"
+ }
+
+ dst.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") => sectionString(extractSectionParam(dst, section), srcNamedParams(param.trim))
+ case _ => sectionString(extractSectionTag(dst, section), srcTagMap)
+ }
+ }
+
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(src, srcSections), mainComment(dst, dstSections)))
+
+ // Append sections
+ for (section <- dstSections)
+ out.append(replaceInheritdoc(getSourceSection(section), dst.substring(section._1, section._2)))
+
+ out.append("*/")
+ out.toString
+ }
+
/** Maps symbols to the variable -> replacement maps that are defined
* in their doc comments
*/
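For readers unfamiliar with the tag, a minimal illustration of the @inheritdoc expansion implemented above; the classes and wording are made up for the example.
  class Base {
    /** Processes the item.
     *  @param x the item to process
     */
    def process(x: Int): Unit = ()
  }
  class Derived extends Base {
    /** @inheritdoc
     *  Additionally logs every call.
     *  @param x @inheritdoc
     */
    override def process(x: Int): Unit = ()
  }
The main comment and the @param section of Derived.process are filled in from Base.process; an @inheritdoc with no parent comment to draw from now triggers the warning added above.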
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 46ade7d889..93fa9a60f6 100644..100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -267,7 +267,7 @@ trait MarkupParsers {
val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
- handle.element(r2p(start, start, curOffset), qname, attrMap, new ListBuffer[Tree])
+ handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
}
else { // handle content
xToken('>')
@@ -281,7 +281,7 @@ trait MarkupParsers {
val pos = r2p(start, start, curOffset)
qname match {
case "xml:group" => handle.group(pos, ts)
- case _ => handle.element(pos, qname, attrMap, ts)
+ case _ => handle.element(pos, qname, attrMap, false, ts)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index cd64c49b47..ccebcfa54d 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -126,7 +126,7 @@ self =>
val global: Global
import global._
- case class OpInfo(operand: Tree, operator: Name, targs: List[Tree], offset: Offset)
+ case class OpInfo(operand: Tree, operator: Name, offset: Offset)
class SourceFileParser(val source: SourceFile) extends Parser {
@@ -789,7 +789,7 @@ self =>
val rPos = top.pos
val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint
top = atPos(start, opinfo.offset, end) {
- makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos, opinfo.targs)
+ makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos)
}
}
top
@@ -1440,17 +1440,6 @@ self =>
}
}
- def advanceStack(base: List[OpInfo], top: Tree): Tree = {
- val newTop = reduceStack(true, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
- val op = in.name
- val pos = in.offset
- ident()
- val targs = if (in.token == LBRACKET) exprTypeArgs() else Nil
- opstack ::= OpInfo(newTop, op, targs, pos)
-
- newTop
- }
-
/** {{{
* PostfixExpr ::= InfixExpr [Id [nl]]
* InfixExpr ::= PrefixExpr
@@ -1462,21 +1451,22 @@ self =>
var top = prefixExpr()
while (isIdent) {
- top = advanceStack(base, top)
+ top = reduceStack(true, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
+ val op = in.name
+ opstack = OpInfo(top, op, in.offset) :: opstack
+ ident()
newLineOptWhenFollowing(isExprIntroToken)
-
if (isExprIntro) {
val next = prefixExpr()
if (next == EmptyTree)
return reduceStack(true, base, top, 0, true)
top = next
- }
- else {
+ } else {
val topinfo = opstack.head
opstack = opstack.tail
val od = stripParens(reduceStack(true, base, topinfo.operand, 0, true))
return atPos(od.pos.startOrPoint, topinfo.offset) {
- applyTypeArgs(Select(od, topinfo.operator.encode), topinfo.targs)
+ Select(od, topinfo.operator.encode)
}
}
}
@@ -1816,7 +1806,7 @@ self =>
top = reduceStack(
false, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
val op = in.name
- opstack = OpInfo(top, op, Nil, in.offset) :: opstack
+ opstack = OpInfo(top, op, in.offset) :: opstack
ident()
top = simplePattern()
}
@@ -1906,11 +1896,6 @@ self =>
def exprTypeArgs() = outPattern.typeArgs()
def exprSimpleType() = outPattern.simpleType()
- def applyTypeArgs(sel: Tree, targs: List[Tree]): Tree = (
- if (targs.isEmpty) sel
- else atPos(sel.pos)(TypeApply(sel, targs))
- )
-
/** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
def patterns(): List[Tree] = noSeq.patterns()
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index ffe65aec63..849437e4ff 100644..100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -101,7 +101,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
pre: Tree,
label: Tree,
attrs: Tree,
- scope:Tree,
+ scope: Tree,
+ empty: Boolean,
children: Seq[Tree]): Tree =
{
def starArgs =
@@ -109,7 +110,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
else List(Typed(makeXMLseq(pos, children), wildStar))
def pat = Apply(_scala_xml__Elem, List(pre, label, wild, wild) ::: convertToTextPat(children))
- def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope) ::: starArgs))
+ def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope, if (empty) Literal(Constant(true)) else Literal(Constant(false))) ::: starArgs))
atPos(pos) { if (isPattern) pat else nonpat }
}
@@ -140,7 +141,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(pos, true, prepat, labpat, null, null, args)
+ mkXML(pos, true, prepat, labpat, null, null, false, args)
}
protected def convertToTextPat(t: Tree): Tree = t match {
@@ -188,7 +189,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
def unparsed(pos: Position, str: String): Tree =
atPos(pos)( New(_scala_xml_Unparsed, LL(const(str))) )
- def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], args: Seq[Tree]): Tree = {
+ def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = {
def handleNamespaceBinding(pre: String, z: String): Tree = {
def mkAssign(t: Tree): Tree = Assign(
Ident(_tmpscope),
@@ -259,6 +260,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
const(newlabel),
makeSymbolicAttrs,
Ident(_scope),
+ empty,
args
)
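Together with the MarkupParsers change above, this threads a Boolean through to the constructed scala.xml.Elem recording whether an XML literal was written as a minimized empty element. Roughly, with the runtime effect inferred from the Elem.scala entry in the diffstat and shown here only as an illustration:
  val minimized = <foo/>        // parser calls element(..., empty = true, ...)
  val expanded  = <foo></foo>   // parser calls element(..., empty = false, ...)
  // The flag becomes an extra constructor argument of scala.xml.Elem, so the two
  // forms can be told apart later, for example when the element is serialized back to text.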
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 87251b4cc9..0d2fbc5372 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -174,14 +174,7 @@ abstract class TreeBuilder {
}
/** Create tree representing (unencoded) binary operation expression or pattern. */
- def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
- require(isExpr || targs.isEmpty, ((left, op, targs, right)))
-
- def mkSel(t: Tree) = {
- val sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
- if (targs.isEmpty) sel else atPos(left.pos)(TypeApply(sel, targs))
- }
-
+ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
def mkNamed(args: List[Tree]) =
if (isExpr) args map {
case a @ Assign(id @ Ident(name), rhs) =>
@@ -194,17 +187,14 @@ abstract class TreeBuilder {
}
if (isExpr) {
if (treeInfo.isLeftAssoc(op)) {
- Apply(mkSel(left), arguments)
- }
- else {
+ Apply(atPos(opPos union left.pos) { Select(stripParens(left), op.encode) }, arguments)
+ } else {
val x = freshTermName()
Block(
List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))),
- Apply(mkSel(right), List(Ident(x)))
- )
+ Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x))))
}
- }
- else {
+ } else {
Apply(Ident(op.encode), stripParens(left) :: arguments)
}
}
@@ -216,7 +206,7 @@ abstract class TreeBuilder {
def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
npos: Position, cpos: Position): Tree =
if (parents.isEmpty)
- makeNew(List(atPos(npos union cpos)(scalaAnyRefConstr)), self, stats, argss, npos, cpos)
+ makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
else if (parents.tail.isEmpty && stats.isEmpty)
atPos(npos union cpos) { New(parents.head, argss) }
else {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 5e5320ca9a..e35286b281 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -438,7 +438,9 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
if(!comment.throws.isEmpty) {
<dt>Exceptions thrown</dt>
<dd>{
- val exceptionsXml: Iterable[scala.xml.NodeSeq] = (for(exception <- comment.throws ) yield <span class="cmt">{Text(exception._1) ++ bodyToHtml(exception._2)}</span> )
+ val exceptionsXml: Iterable[scala.xml.NodeSeq] =
+ for(exception <- comment.throws.toList.sortBy(_._1) ) yield
+ <span class="cmt">{Text(exception._1) ++ bodyToHtml(exception._2)}</span>
exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
}</dd>
} else NodeSeq.Empty
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index efa524503c..b088c643cb 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -38,7 +38,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
val key = (sym, inTpl)
if (commentCache isDefinedAt key)
Some(commentCache(key))
- else { // not reached for use-case comments
+ else {
val c = defineComment(sym, inTpl)
if (c isDefined) commentCache += (sym, inTpl) -> c.get
c
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 8ed44b5a31..b21fa4bc83 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -165,24 +165,19 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
varSym
}
- def addStaticMethodToClass(forName: String, forArgsTypes: List[Type], forResultType: Type)
- (forBody: Pair[Symbol, List[Symbol]] => Tree): Symbol = {
+ def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
+ val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC)
+ val params = methSym.newSyntheticValueParams(List(ClassClass.tpe))
+ methSym setInfoAndEnter MethodType(params, MethodClass.tpe)
- val methSym = currentClass.newMethod(mkTerm(forName), ad.pos, STATIC | SYNTHETIC)
- val params = methSym.newSyntheticValueParams(forArgsTypes)
- methSym setInfoAndEnter MethodType(params, forResultType)
-
- val methDef = typedPos( DefDef(methSym, forBody(methSym -> params)) )
+ val methDef = typedPos(DefDef(methSym, forBody(methSym, params.head)))
newStaticMembers append transform(methDef)
-
methSym
}
def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
- def theTypeClassArray = arrayType(ClassClass.tpe)
-
/* ... */
def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match {
case NO_CACHE =>
@@ -197,12 +192,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
- addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe) {
- case Pair(reflMethodSym, List(forReceiverSym)) =>
- (REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym))
- }
+ addStaticMethodToClass((_, forReceiverSym) =>
+ gen.mkMethodCall(REF(forReceiverSym), Class_getMethod, Nil, List(LIT(method), safeREF(reflParamsCacheSym)))
+ )
case MONO_CACHE =>
@@ -226,7 +220,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
val reflMethodCacheSym: Symbol =
addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
@@ -237,17 +231,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def isCacheEmpty(receiver: Symbol): Tree =
reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
- addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe) {
- case Pair(reflMethodSym, List(forReceiverSym)) =>
- BLOCK(
- IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
- safeREF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym))) ,
- safeREF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
- UNIT
- ) ENDIF,
- safeREF(reflMethodCacheSym)
- )
- }
+ addStaticMethodToClass((_, forReceiverSym) =>
+ BLOCK(
+ IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
+ safeREF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym))) ,
+ safeREF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
+ UNIT
+ ) ENDIF,
+ safeREF(reflMethodCacheSym)
+ )
+ )
case POLY_CACHE =>
@@ -273,7 +266,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
val reflPolyCacheSym: Symbol = (
@@ -281,26 +274,25 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
)
def getPolyCache = gen.mkCast(fn(safeREF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
- addStaticMethodToClass(nme.reflMethodName, List(ClassClass.tpe), MethodClass.tpe)
- { case Pair(reflMethodSym, List(forReceiverSym)) =>
- val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
-
- BLOCK(
- IF (getPolyCache OBJ_EQ NULL) THEN (safeREF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
- VAL(methodSym) === ((getPolyCache DOT methodCache_find)(REF(forReceiverSym))) ,
- IF (REF(methodSym) OBJ_!= NULL) .
- THEN (Return(REF(methodSym)))
- ELSE {
- def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym)))
- def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
- BLOCK(
- REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
- safeREF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
- Return(REF(methodSym))
- )
- }
- )
- }
+ addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+
+ BLOCK(
+ IF (getPolyCache OBJ_EQ NULL) THEN (safeREF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
+ VAL(methodSym) === ((getPolyCache DOT methodCache_find)(REF(forReceiverSym))) ,
+ IF (REF(methodSym) OBJ_!= NULL) .
+ THEN (Return(REF(methodSym)))
+ ELSE {
+ def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym)))
+ def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
+ BLOCK(
+ REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
+ safeREF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+ Return(REF(methodSym))
+ )
+ }
+ )
+ })
}
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 973e26af41..25f3e7af5c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1295,7 +1295,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def parentTypes(templ: Template): List[Tree] =
- if (templ.parents.isEmpty) List(atPos(templ.pos.focus)(TypeTree(AnyRefClass.tpe)))
+ if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
else try {
val clazz = context.owner
// Normalize supertype and mixins so that supertype is always a class, not a trait.
@@ -3630,7 +3630,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val Select(qual, name) = fun
tryTypedArgs(args, forArgMode(fun, mode)) match {
case Some(args1) =>
- assert((args1.length == 0) || !args1.head.tpe.isErroneous, "try typed args is ok")
val qual1 =
if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
else qual
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index fbe92e5d84..f4ce6d6ef1 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -26,6 +26,14 @@ object DocStrings {
if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
else start
+ /** Returns the index in `str` following `start`, skipping a tag:
+ * an `@` character and the identifier characters after it.
+ */
+ def skipTag(str: String, start: Int): Int =
+ if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
+ else start
+
+
/** Returns index of string `str` after `start` skipping longest
* sequence of space and tab characters, possibly also containing
* a single `*` character or the `/``**` sequence.
@@ -68,38 +76,46 @@ object DocStrings {
/** Produces a string index, which is a list of ``sections'', i.e
* pairs of start/end positions of all tagged sections in the string.
- * Every section starts with a `@` and extends to the next `@`, or
- * to the end of the comment string, but excluding the final two
+ * Every section starts with an at sign and extends to the next at sign,
+ * or to the end of the comment string, but excluding the final two
* characters which terminate the comment.
*
* Also take usecases into account - they need to expand until the next
* usecase or the end of the string, as they might include other sections
* of their own
*/
- def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
- findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
+ var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
+ indices = mergeUsecaseSections(str, indices)
+ indices = mergeInheritdocSections(str, indices)
+
+ indices match {
case List() => List()
- case idxs => {
- val idxs2 = mergeUsecaseSections(str, idxs)
- idxs2 zip (idxs2.tail ::: List(str.length - 2))
- }
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
}
+ }
/**
* Merge sections following an usecase into the usecase comment, so they
* can override the parent symbol's sections
*/
def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
- idxs.find(str.substring(_).startsWith("@usecase")) match {
- case Some(firstUC) =>
- val commentSections = idxs.take(idxs.indexOf(firstUC))
- val usecaseSections = idxs.drop(idxs.indexOf(firstUC)).filter(str.substring(_).startsWith("@usecase"))
+ idxs.indexWhere(str.substring(_).startsWith("@usecase")) match {
+ case firstUCIndex if firstUCIndex != -1 =>
+ val commentSections = idxs.take(firstUCIndex)
+ val usecaseSections = idxs.drop(firstUCIndex).filter(str.substring(_).startsWith("@usecase"))
commentSections ::: usecaseSections
- case None =>
+ case _ =>
idxs
}
}
+ /**
+ * Merge the inheritdoc sections, as they never make sense on their own
+ */
+ def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
+ idxs.filterNot(str.substring(_).startsWith("@inheritdoc"))
+
/** Does interval `iv` start with given `tag`?
*/
def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
@@ -108,12 +124,11 @@ object DocStrings {
def startsWithTag(str: String, start: Int, tag: String): Boolean =
str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
-
/** The first start tag of a list of tag intervals,
* or the end of the whole comment string - 2 if list is empty
*/
def startTag(str: String, sections: List[(Int, Int)]) = sections match {
- case List() => str.length - 2
+ case Nil => str.length - 2
case (start, _) :: _ => start
}
@@ -155,4 +170,46 @@ object DocStrings {
idx
}
}
+
+ /** A map from the section tag to section parameters */
+ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections) yield
+ extractSectionTag(str, section) -> section
+ }
+
+ /** Extract the section tag, treating the section tag as an identifier */
+ def extractSectionTag(str: String, section: (Int, Int)): String =
+ str.substring(section._1, skipTag(str, section._1))
+
+ /** Extract the section parameter */
+ def extractSectionParam(str: String, section: (Int, Int)): String = {
+ assert(str.substring(section._1).startsWith("@param") ||
+ str.substring(section._1).startsWith("@tparam") ||
+ str.substring(section._1).startsWith("@throws"))
+
+ val start = skipWhitespace(str, skipTag(str, section._1))
+ val finish = skipIdent(str, start)
+
+ str.substring(start, finish)
+ }
+
+ /** Extract the section text, except for the tag and comment newlines */
+ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
+ if (str.substring(section._1).startsWith("@param") ||
+ str.substring(section._1).startsWith("@tparam") ||
+ str.substring(section._1).startsWith("@throws"))
+ (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, section._1)))), section._2)
+ else
+ (skipWhitespace(str, skipTag(str, section._1)), section._2)
+ }
+
+ /** Cleanup section text */
+ def cleanupSectionText(str: String) = {
+ var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
+ while (result.endsWith("\n"))
+ result = result.substring(0, result.length - 1)
+ result
+ }
+
}
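A small illustration of the new section helpers on a made-up comment string:
  val cmt = "/** Adds the arguments.\n *  @param x the left addend\n *  @return the sum\n */"
  val sections = tagIndex(cmt)              // start/end offsets of the "@param" and "@return" sections
  sections map (extractSectionTag(cmt, _))  // List("@param", "@return")
  extractSectionParam(cmt, sections.head)   // "x"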
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
new file mode 100644
index 0000000000..cb3cb27f18
--- /dev/null
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -0,0 +1,21 @@
+package scala.collection
+
+/** An extractor used to head/tail deconstruct sequences. */
+object +: {
+ def unapply[T,Coll <: SeqLike[T, Coll]](
+ t: Coll with SeqLike[T, Coll]): Option[(T, Coll)] =
+ if(t.isEmpty) None
+ else Some(t.head -> t.tail)
+}
+
+/** An extractor used to init/last deconstruct sequences. */
+object :+ {
+ /** Splits a sequence into init :+ last.
+ * @return Some(init, last) if the sequence is non-empty,
+ * None otherwise.
+ */
+ def unapply[T,Coll <: SeqLike[T, Coll]](
+ t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] =
+ if(t.isEmpty) None
+ else Some(t.init -> t.last)
+}
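In the spirit of the new test/files/run/matchonseq.scala test, the extractors are used in pattern matches like the following:
  List(1, 2, 3) match {
    case x +: rest => println(x + " +: " + rest)         // prints: 1 +: List(2, 3)
  }
  Vector(1, 2, 3) match {
    case init :+ last => println(init + " :+ " + last)   // prints: Vector(1, 2) :+ 3
  }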
diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/ConcurrentTrieMap.scala
index cbec118aa9..1a44c8e423 100644
--- a/src/library/scala/collection/mutable/Ctrie.scala
+++ b/src/library/scala/collection/mutable/ConcurrentTrieMap.scala
@@ -13,7 +13,7 @@ package mutable
import java.util.concurrent.atomic._
import collection.immutable.{ ListMap => ImmutableListMap }
-import collection.parallel.mutable.ParCtrie
+import collection.parallel.mutable.ParConcurrentTrieMap
import generic._
import annotation.tailrec
import annotation.switch
@@ -31,16 +31,16 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
@inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
- final def gcasRead(ct: Ctrie[K, V]): MainNode[K, V] = GCAS_READ(ct)
+ final def gcasRead(ct: ConcurrentTrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct)
- @inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = {
+ @inline final def GCAS_READ(ct: ConcurrentTrieMap[K, V]): MainNode[K, V] = {
val m = /*READ*/mainnode
val prevval = /*READ*/m.prev
if (prevval eq null) m
else GCAS_Complete(m, ct)
}
- @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else {
+ @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: ConcurrentTrieMap[K, V]): MainNode[K, V] = if (m eq null) null else {
// complete the GCAS
val prev = /*READ*/m.prev
val ctr = ct.readRoot(true)
@@ -72,7 +72,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = {
+ @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: ConcurrentTrieMap[K, V]): Boolean = {
n.WRITE_PREV(old)
if (CAS(old, n)) {
GCAS_Complete(n, ct)
@@ -86,7 +86,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
nin
}
- final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = {
+ final def copyToGen(ngen: Gen, ct: ConcurrentTrieMap[K, V]) = {
val nin = new INode[K, V](ngen)
val main = GCAS_READ(ct)
nin.WRITE(main)
@@ -97,7 +97,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*
* @return true if successful, false otherwise
*/
- @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = {
+ @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: ConcurrentTrieMap[K, V]): Boolean = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -143,7 +143,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
* @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
* @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key)
*/
- @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
+ @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: ConcurrentTrieMap[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -233,7 +233,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
*
* @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
*/
- @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = {
+ @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: ConcurrentTrieMap[K, V]): AnyRef = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -276,7 +276,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
* @param v if null, will remove the key regardless of the value; otherwise removes only if binding contains that exact key and value
* @return null if not successful, an Option[V] indicating the previous value otherwise
*/
- final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = {
+ final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: ConcurrentTrieMap[K, V]): Option[V] = {
val m = GCAS_READ(ct) // use -Yinline!
m match {
@@ -352,7 +352,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- private def clean(nd: INode[K, V], ct: Ctrie[K, V], lev: Int) {
+ private def clean(nd: INode[K, V], ct: ConcurrentTrieMap[K, V], lev: Int) {
val m = nd.GCAS_READ(ct)
m match {
case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct)
@@ -360,9 +360,9 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
}
}
- final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null
+ final def isNullInode(ct: ConcurrentTrieMap[K, V]) = GCAS_READ(ct) eq null
- final def cachedSize(ct: Ctrie[K, V]): Int = {
+ final def cachedSize(ct: ConcurrentTrieMap[K, V]): Int = {
val m = GCAS_READ(ct)
m.cachedSize(ct)
}
@@ -438,7 +438,7 @@ extends MainNode[K, V] {
if (updmap.size > 1) new LNode(updmap)
else {
val (k, v) = updmap.iterator.next
- new TNode(k, v, Ctrie.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
+ new TNode(k, v, ConcurrentTrieMap.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
}
}
def get(k: K) = listmap.get(k)
@@ -455,7 +455,7 @@ extends CNodeBase[K, V] {
val currsz = READ_SIZE()
if (currsz != -1) currsz
else {
- val sz = computeSize(ct.asInstanceOf[Ctrie[K, V]])
+ val sz = computeSize(ct.asInstanceOf[ConcurrentTrieMap[K, V]])
while (READ_SIZE() == -1) CAS_SIZE(-1, sz)
READ_SIZE()
}
@@ -466,7 +466,7 @@ extends CNodeBase[K, V] {
// => if there are concurrent size computations, they start
// at different positions, so they are more likely to
// be independent
- private def computeSize(ct: Ctrie[K, V]): Int = {
+ private def computeSize(ct: ConcurrentTrieMap[K, V]): Int = {
var i = 0
var sz = 0
val offset = math.abs(util.Random.nextInt()) % array.length
@@ -511,7 +511,7 @@ extends CNodeBase[K, V] {
/** Returns a copy of this cnode such that all the i-nodes below it are copied
* to the specified generation `ngen`.
*/
- final def renewed(ngen: Gen, ct: Ctrie[K, V]) = {
+ final def renewed(ngen: Gen, ct: ConcurrentTrieMap[K, V]) = {
var i = 0
val arr = array
val len = arr.length
@@ -542,7 +542,7 @@ extends CNodeBase[K, V] {
// returns the version of this node with at least some null-inodes
// removed (those existing when the op began)
// - if there are only null-i-nodes below, returns null
- final def toCompressed(ct: Ctrie[K, V], lev: Int, gen: Gen) = {
+ final def toCompressed(ct: ConcurrentTrieMap[K, V], lev: Int, gen: Gen) = {
var bmp = bitmap
var i = 0
val arr = array
@@ -594,7 +594,7 @@ private[mutable] object CNode {
val yidx = (yhc >>> lev) & 0x1f
val bmp = (1 << xidx) | (1 << yidx)
if (xidx == yidx) {
- val subinode = new INode[K, V](gen)//(Ctrie.inodeupdater)
+ val subinode = new INode[K, V](gen)//(ConcurrentTrieMap.inodeupdater)
subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen)
new CNode(bmp, Array(subinode), gen)
} else {
@@ -613,7 +613,7 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai
}
-/** A concurrent hash-trie or Ctrie is a concurrent thread-safe lock-free
+/** A concurrent hash-trie or ConcurrentTrieMap is a thread-safe lock-free
* implementation of a hash array mapped trie. It is used to implement the
* concurrent map abstraction. It has particularly scalable concurrent insert
* and remove operations and is memory-efficient. It supports O(1), atomic,
@@ -627,20 +627,20 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai
* @since 2.10
*/
@SerialVersionUID(0L - 6402774413839597105L)
-final class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef])
+final class ConcurrentTrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[ConcurrentTrieMap[K, V], AnyRef])
extends ConcurrentMap[K, V]
- with MapLike[K, V, Ctrie[K, V]]
- with CustomParallelizable[(K, V), ParCtrie[K, V]]
+ with MapLike[K, V, ConcurrentTrieMap[K, V]]
+ with CustomParallelizable[(K, V), ParConcurrentTrieMap[K, V]]
with Serializable
{
- import Ctrie.computeHash
+ import ConcurrentTrieMap.computeHash
private var rootupdater = rtupd
@volatile var root = r
def this() = this(
INode.newRootNode,
- AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
+ AtomicReferenceFieldUpdater.newUpdater(classOf[ConcurrentTrieMap[K, V]], classOf[AnyRef], "root")
)
/* internal methods */
@@ -652,22 +652,22 @@ extends ConcurrentMap[K, V]
out.writeObject(k)
out.writeObject(v)
}
- out.writeObject(CtrieSerializationEnd)
+ out.writeObject(ConcurrentTrieMapSerializationEnd)
}
private def readObject(in: java.io.ObjectInputStream) {
root = INode.newRootNode
- rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root")
+ rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[ConcurrentTrieMap[K, V]], classOf[AnyRef], "root")
var obj: AnyRef = null
do {
obj = in.readObject()
- if (obj != CtrieSerializationEnd) {
+ if (obj != ConcurrentTrieMapSerializationEnd) {
val k = obj.asInstanceOf[K]
val v = in.readObject().asInstanceOf[V]
update(k, v)
}
- } while (obj != CtrieSerializationEnd)
+ } while (obj != ConcurrentTrieMapSerializationEnd)
}
@inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
@@ -760,37 +760,37 @@ extends ConcurrentMap[K, V]
override def seq = this
- override def par = new ParCtrie(this)
+ override def par = new ParConcurrentTrieMap(this)
- override def empty: Ctrie[K, V] = new Ctrie[K, V]
+ override def empty: ConcurrentTrieMap[K, V] = new ConcurrentTrieMap[K, V]
final def isReadOnly = rootupdater eq null
final def nonReadOnly = rootupdater ne null
- /** Returns a snapshot of this Ctrie.
+ /** Returns a snapshot of this ConcurrentTrieMap.
* This operation is lock-free and linearizable.
*
* The snapshot is lazily updated - the first time some branch
- * in the snapshot or this Ctrie are accessed, they are rewritten.
+ * in the snapshot or this ConcurrentTrieMap are accessed, they are rewritten.
* This means that the work of rebuilding both the snapshot and this
- * Ctrie is distributed across all the threads doing updates or accesses
+ * ConcurrentTrieMap is distributed across all the threads doing updates or accesses
* subsequent to the snapshot creation.
*/
- @tailrec final def snapshot(): Ctrie[K, V] = {
+ @tailrec final def snapshot(): ConcurrentTrieMap[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
- if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new ConcurrentTrieMap(r.copyToGen(new Gen, this), rootupdater)
else snapshot()
}
- /** Returns a read-only snapshot of this Ctrie.
+ /** Returns a read-only snapshot of this ConcurrentTrieMap.
* This operation is lock-free and linearizable.
*
* The snapshot is lazily updated - the first time some branch
- * of this Ctrie are accessed, it is rewritten. The work of creating
+ * of this ConcurrentTrieMap are accessed, it is rewritten. The work of creating
* the snapshot is thus distributed across subsequent updates
- * and accesses on this Ctrie by all threads.
+ * and accesses on this ConcurrentTrieMap by all threads.
* Note that the snapshot itself is never rewritten unlike when calling
* the `snapshot` method, but the obtained snapshot cannot be modified.
*
@@ -799,7 +799,7 @@ extends ConcurrentMap[K, V]
@tailrec final def readOnlySnapshot(): collection.Map[K, V] = {
val r = RDCSS_READ_ROOT()
val expmain = r.gcasRead(this)
- if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new ConcurrentTrieMap(r, null)
else readOnlySnapshot()
}
@@ -872,7 +872,7 @@ extends ConcurrentMap[K, V]
def iterator: Iterator[(K, V)] =
if (nonReadOnly) readOnlySnapshot().iterator
- else new CtrieIterator(0, this)
+ else new ConcurrentTrieMapIterator(0, this)
private def cachedSize() = {
val r = RDCSS_READ_ROOT()
@@ -883,17 +883,17 @@ extends ConcurrentMap[K, V]
if (nonReadOnly) readOnlySnapshot().size
else cachedSize()
- override def stringPrefix = "Ctrie"
+ override def stringPrefix = "ConcurrentTrieMap"
}
-object Ctrie extends MutableMapFactory[Ctrie] {
+object ConcurrentTrieMap extends MutableMapFactory[ConcurrentTrieMap] {
val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
- implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V]
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), ConcurrentTrieMap[K, V]] = new MapCanBuildFrom[K, V]
- def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V]
+ def empty[K, V]: ConcurrentTrieMap[K, V] = new ConcurrentTrieMap[K, V]
@inline final def computeHash[K](k: K): Int = {
var hcode = k.hashCode
@@ -905,7 +905,7 @@ object Ctrie extends MutableMapFactory[Ctrie] {
}
-private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
+private[collection] class ConcurrentTrieMapIterator[K, V](var level: Int, private var ct: ConcurrentTrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
var stack = new Array[Array[BasicNode]](7)
var stackpos = new Array[Int](7)
var depth = -1
@@ -971,9 +971,9 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
}
} else current = null
- protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit)
+ protected def newIterator(_lev: Int, _ct: ConcurrentTrieMap[K, V], _mustInit: Boolean) = new ConcurrentTrieMapIterator[K, V](_lev, _ct, _mustInit)
- protected def dupTo(it: CtrieIterator[K, V]) = {
+ protected def dupTo(it: ConcurrentTrieMapIterator[K, V]) = {
it.level = this.level
it.ct = this.ct
it.depth = this.depth
@@ -993,7 +993,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct
}
/** Returns a sequence of iterators over subsets of this iterator.
- * It's used to ease the implementation of splitters for a parallel version of the Ctrie.
+ * It's used to ease the implementation of splitters for a parallel version of the ConcurrentTrieMap.
*/
protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
// the case where an LNode is being iterated
@@ -1043,7 +1043,7 @@ private[mutable] object RestartException extends util.control.ControlThrowable
/** Only used for ctrie serialization. */
@SerialVersionUID(0L - 7237891413820527142L)
-private[mutable] case object CtrieSerializationEnd
+private[mutable] case object ConcurrentTrieMapSerializationEnd
private[mutable] object Debug {
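
Usage note (a hedged sketch, not part of the patch): apart from the rename, the class keeps the Ctrie API, including the lock-free snapshots described in the comments above.

    // Sketch only: the renamed ConcurrentTrieMap and its snapshot operations.
    import scala.collection.mutable.ConcurrentTrieMap

    object TrieMapDemo extends App {
      val m = ConcurrentTrieMap(1 -> "one", 2 -> "two")

      val snap = m.snapshot()              // lock-free, linearizable copy
      m += (3 -> "three")                  // later updates do not affect the snapshot
      println(snap.size)                   // 2
      println(m.size)                      // 3

      val ro: collection.Map[Int, String] = m.readOnlySnapshot()
      println(ro.get(3))                   // Some(three)
    }
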
diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParConcurrentTrieMap.scala
index 470972adad..a6495161ea 100644
--- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala
+++ b/src/library/scala/collection/parallel/mutable/ParConcurrentTrieMap.scala
@@ -20,12 +20,12 @@ import scala.collection.mutable.LNode
import scala.collection.mutable.CNode
import scala.collection.mutable.SNode
import scala.collection.mutable.INode
-import scala.collection.mutable.Ctrie
-import scala.collection.mutable.CtrieIterator
+import scala.collection.mutable.ConcurrentTrieMap
+import scala.collection.mutable.ConcurrentTrieMapIterator
-/** Parallel Ctrie collection.
+/** Parallel ConcurrentTrieMap collection.
*
* It has its bulk operations parallelized, but uses the snapshot operation
* to create the splitter. This means that parallel bulk operations can be
@@ -34,24 +34,24 @@ import scala.collection.mutable.CtrieIterator
* @author Aleksandar Prokopec
* @since 2.10
*/
-final class ParCtrie[K, V] private[collection] (private val ctrie: Ctrie[K, V])
+final class ParConcurrentTrieMap[K, V] private[collection] (private val ctrie: ConcurrentTrieMap[K, V])
extends ParMap[K, V]
- with GenericParMapTemplate[K, V, ParCtrie]
- with ParMapLike[K, V, ParCtrie[K, V], Ctrie[K, V]]
- with ParCtrieCombiner[K, V]
+ with GenericParMapTemplate[K, V, ParConcurrentTrieMap]
+ with ParMapLike[K, V, ParConcurrentTrieMap[K, V], ConcurrentTrieMap[K, V]]
+ with ParConcurrentTrieMapCombiner[K, V]
with Serializable
{
- def this() = this(new Ctrie)
+ def this() = this(new ConcurrentTrieMap)
- override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie
+ override def mapCompanion: GenericParMapCompanion[ParConcurrentTrieMap] = ParConcurrentTrieMap
- override def empty: ParCtrie[K, V] = ParCtrie.empty
+ override def empty: ParConcurrentTrieMap[K, V] = ParConcurrentTrieMap.empty
- protected[this] override def newCombiner = ParCtrie.newCombiner
+ protected[this] override def newCombiner = ParConcurrentTrieMap.newCombiner
override def seq = ctrie
- def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true)
+ def splitter = new ParConcurrentTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[ConcurrentTrieMap[K, V]], true)
override def clear() = ctrie.clear()
@@ -87,11 +87,11 @@ extends ParMap[K, V]
}
}
- override def stringPrefix = "ParCtrie"
+ override def stringPrefix = "ParConcurrentTrieMap"
/* tasks */
- /** Computes Ctrie size in parallel. */
+ /** Computes ConcurrentTrieMap size in parallel. */
class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] {
var result = -1
def leaf(prev: Option[Int]) = {
@@ -118,15 +118,15 @@ extends ParMap[K, V]
}
-private[collection] class ParCtrieSplitter[K, V](lev: Int, ct: Ctrie[K, V], mustInit: Boolean)
-extends CtrieIterator[K, V](lev, ct, mustInit)
+private[collection] class ParConcurrentTrieMapSplitter[K, V](lev: Int, ct: ConcurrentTrieMap[K, V], mustInit: Boolean)
+extends ConcurrentTrieMapIterator[K, V](lev, ct, mustInit)
with IterableSplitter[(K, V)]
{
// only evaluated if `remaining` is invoked (which is not used by most tasks)
lazy val totalsize = ct.par.size
var iterated = 0
- protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit)
+ protected override def newIterator(_lev: Int, _ct: ConcurrentTrieMap[K, V], _mustInit: Boolean) = new ParConcurrentTrieMapSplitter[K, V](_lev, _ct, _mustInit)
override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = {
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
@@ -153,15 +153,15 @@ extends CtrieIterator[K, V](lev, ct, mustInit)
}
-/** Only used within the `ParCtrie`. */
-private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] {
+/** Only used within the `ParConcurrentTrieMap`. */
+private[mutable] trait ParConcurrentTrieMapCombiner[K, V] extends Combiner[(K, V), ParConcurrentTrieMap[K, V]] {
- def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
+ def combine[N <: (K, V), NewTo >: ParConcurrentTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
- val thiz = this.asInstanceOf[ParCtrie[K, V]]
- val that = other.asInstanceOf[ParCtrie[K, V]]
- val result = new ParCtrie[K, V]
+ val thiz = this.asInstanceOf[ParConcurrentTrieMap[K, V]]
+ val that = other.asInstanceOf[ParConcurrentTrieMap[K, V]]
+ val result = new ParConcurrentTrieMap[K, V]
result ++= thiz.iterator
result ++= that.iterator
@@ -174,13 +174,13 @@ private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[
}
-object ParCtrie extends ParMapFactory[ParCtrie] {
+object ParConcurrentTrieMap extends ParMapFactory[ParConcurrentTrieMap] {
- def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V]
+ def empty[K, V]: ParConcurrentTrieMap[K, V] = new ParConcurrentTrieMap[K, V]
- def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V]
+ def newCombiner[K, V]: Combiner[(K, V), ParConcurrentTrieMap[K, V]] = new ParConcurrentTrieMap[K, V]
- implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V]
+ implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParConcurrentTrieMap[K, V]] = new CanCombineFromMap[K, V]
}
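
Usage note (sketch, not part of the patch): the parallel counterpart is reached through par, and its splitter is built from a read-only snapshot, so bulk operations see a consistent view.

    // Sketch only: bulk operations on the parallel trie map.
    import scala.collection.mutable.ConcurrentTrieMap

    object ParTrieDemo extends App {
      val m  = ConcurrentTrieMap((1 to 1000).map(i => i -> i * 2): _*)
      val pm = m.par                          // ParConcurrentTrieMap over a snapshot-backed splitter
      println(pm.count(_._2 % 4 == 0))        // 500, computed in parallel
    }
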
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 366af34ee9..1ef1911fd3 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -64,6 +64,9 @@ package object scala {
type ::[A] = scala.collection.immutable.::[A]
val :: = scala.collection.immutable.::
+ val +: = scala.collection.+:
+ val :+ = scala.collection.:+
+
type Stream[+A] = scala.collection.immutable.Stream[A]
val Stream = scala.collection.immutable.Stream
val #:: = scala.collection.immutable.Stream.#::
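
With these aliases in the scala package object, the extractors above need no import; a one-line sketch (assuming a build with this change):

    // Sketch only: +: resolves via the scala package object, no import required.
    object AliasDemo extends App {
      Vector("a", "b", "c") match {
        case first +: rest => println(first + " then " + rest) // a then Vector(b, c)
        case _             => ()
      }
    }
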
diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala
index cbfe91e59b..1b67929e15 100644
--- a/src/library/scala/reflect/api/Modifier.scala
+++ b/src/library/scala/reflect/api/Modifier.scala
@@ -2,7 +2,7 @@ package scala.reflect.api
import collection.{ immutable, mutable }
-sealed abstract class Modifier {
+abstract class Modifier private[api] () {
def name: String
def isKeyword: Boolean
def sourceString: String = if (isKeyword) "`" + name + "`" else name
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 0c7772cd07..38ca89b98b 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -72,10 +72,11 @@ private[scala] trait PropertiesTrait {
* it is an RC, Beta, etc. or was built from source, or if the version
* cannot be read.
*/
- val releaseVersion = scalaPropOrNone("version.number") flatMap { s =>
- val segments = s split '.'
- if (segments.size == 4 && segments.last == "final") Some(segments take 3 mkString ".") else None
- }
+ val releaseVersion =
+ for {
+ v <- scalaPropOrNone("maven.version.number")
+ if !(v endsWith "-SNAPSHOT")
+ } yield v
/** The development Scala version, if this is not a final release.
* The precise contents are not guaranteed, but it aims to provide a
@@ -85,15 +86,12 @@ private[scala] trait PropertiesTrait {
* @return Some(version) if this is a non-final version, None if this
* is a final release or the version cannot be read.
*/
- val developmentVersion = scalaPropOrNone("version.number") flatMap { s =>
- val segments = s split '.'
- if (segments.isEmpty || segments.last == "final")
- None
- else if (segments.last startsWith "r")
- Some(s takeWhile (ch => ch != '-')) // Cutting e.g. 2.10.0.r24774-b20110417125606 to 2.10.0.r24774
- else
- Some(s)
- }
+ val developmentVersion =
+ for {
+ v <- scalaPropOrNone("maven.version.number")
+ if v endsWith "-SNAPSHOT"
+ ov <- scalaPropOrNone("version.number")
+ } yield ov
/** Either the development or release version if known, otherwise
* the empty string.
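
The new logic reads maven.version.number first; a hedged sketch with stand-in property values (the lookups below are assumptions for illustration only):

    // Sketch only: how releaseVersion/developmentVersion now derive from the maven version.
    object VersionLogicDemo extends App {
      def scalaPropOrNone(name: String): Option[String] = name match {
        case "maven.version.number" => Some("2.10.0-SNAPSHOT")                      // assumed value
        case "version.number"       => Some("2.10.0.rdev-4217-2012-01-24-g9fe0f17") // assumed value
        case _                      => None
      }

      val releaseVersion =
        for (v <- scalaPropOrNone("maven.version.number") if !(v endsWith "-SNAPSHOT")) yield v

      val developmentVersion =
        for {
          v  <- scalaPropOrNone("maven.version.number")
          if v endsWith "-SNAPSHOT"
          ov <- scalaPropOrNone("version.number")
        } yield ov

      println(releaseVersion)     // None: the maven version is a snapshot
      println(developmentVersion) // Some(2.10.0.rdev-...): falls back to version.number
    }
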
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index df52b34f87..cc244a5b88 100644..100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -17,8 +17,18 @@ package scala.xml
* @author Burak Emir <bqe@google.com>
*/
object Elem {
- def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) =
- new Elem(prefix, label, attributes, scope, child:_*)
+ /** Build an Elem, setting its minimizeEmpty property to <code>true</code> if it has no children. Note that this
+ * default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized.
+ *
+ * @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
+ * can specify your own preference for minimizeEmpty.
+ */
+ @deprecated("This factory method is retained for backward compatibility; please use the overload that takes a minimizeEmpty parameter.", "2.10")
+ def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
+ apply(prefix, label, attributes, scope, child.isEmpty, child: _*)
+
+ def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
+ new Elem(prefix,label,attributes,scope, minimizeEmpty, child:_*)
def unapplySeq(n: Node) = n match {
case _: SpecialNode | _: Group => None
@@ -29,11 +39,13 @@ object Elem {
/** The case class `Elem` extends the `Node` class,
* providing an immutable data object representing an XML element.
*
- * @param prefix namespace prefix (may be null, but not the empty string)
- * @param label the element name
- * @param attribute the attribute map
- * @param scope the scope containing the namespace bindings
- * @param child the children of this node
+ * @param prefix namespace prefix (may be null, but not the empty string)
+ * @param label the element name
+ * @param attributes1 the attribute map
+ * @param scope the scope containing the namespace bindings
+ * @param minimizeEmpty `true` if this element should be serialized as minimized (i.e. "&lt;el/&gt;") when
+ * empty; `false` if it should be written out in long form.
+ * @param child the children of this node
*
* Copyright 2008 Google Inc. All Rights Reserved.
* @author Burak Emir <bqe@google.com>
@@ -43,9 +55,15 @@ class Elem(
val label: String,
attributes1: MetaData,
override val scope: NamespaceBinding,
+ val minimizeEmpty: Boolean,
val child: Node*)
extends Node with Serializable
{
+ @deprecated("This constructor is retained for backward compatibility. Please use the primary constructor, which lets you specify your own preference for `minimizeEmpty`.", "2.10")
+ def this(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) = {
+ this(prefix, label, attributes, scope, child.isEmpty, child: _*)
+ }
+
final override def doCollectNamespaces = true
final override def doTransform = true
@@ -83,8 +101,9 @@ extends Node with Serializable
label: String = this.label,
attributes: MetaData = this.attributes,
scope: NamespaceBinding = this.scope,
+ minimizeEmpty: Boolean = this.minimizeEmpty,
child: Seq[Node] = this.child.toSeq
- ): Elem = Elem(prefix, label, attributes, scope, child: _*)
+ ): Elem = Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
/** Returns concatenation of `text(n)` for each child `n`.
*/
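
A short sketch of the new flag in action (not part of the patch; the expected output mirrors the t1118 checkfile further down):

    // Sketch only: minimizeEmpty controls whether a childless element prints in short form.
    import scala.xml.{ Elem, Null, TopScope }

    object ElemDemo extends App {
      println(Elem(null, "vide", Null, TopScope, true))        // <vide/>
      println(Elem(null, "emptiness", Null, TopScope, false))  // <emptiness></emptiness>

      val e = Elem(null, "elem", Null, TopScope, true)
      println(e.copy(minimizeEmpty = false))                   // <elem></elem>, copy can override the flag
    }
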
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 2ead18fe08..02e34e1bdc 100644..100755
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -159,7 +159,7 @@ abstract class Node extends NodeSeq {
* @return ...
*/
def buildString(stripComments: Boolean): String =
- Utility.toXML(this, stripComments = stripComments).toString
+ Utility.serialize(this, stripComments = stripComments).toString
/**
* Same as `toString('''false''')`.
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index ea39b51352..64dbd00f2f 100644..100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -161,7 +161,7 @@ class PrettyPrinter(width: Int, step: Int) {
case _ =>
val test = {
val sb = new StringBuilder()
- Utility.toXML(node, pscope, sb, false)
+ Utility.serialize(node, pscope, sb, false)
if (doPreserve(node)) sb.toString
else TextBuffer.fromString(sb.toString).toText(0).data
}
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index fc20b892b9..9f944c0e92 100644..100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -181,6 +181,13 @@ object Utility extends AnyRef with parsing.TokenTests {
// sb.toString()
// }
+ /**
+ * Serialize the provided Node to the provided StringBuilder.
+ * <p/>
+ * Note that this deprecated, source-compatible entry point keeps the old boolean minimizeTags
+ * semantics, under which empty tags are never minimized by default; that is rarely what is wanted.
+ */
+ @deprecated("Please use `serialize` instead and specify a `minimizeTags` parameter", "2.10")
def toXML(
x: Node,
pscope: NamespaceBinding = TopScope,
@@ -190,29 +197,51 @@ object Utility extends AnyRef with parsing.TokenTests {
preserveWhitespace: Boolean = false,
minimizeTags: Boolean = false): StringBuilder =
{
+ serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, if (minimizeTags) MinimizeMode.Always else MinimizeMode.Never)
+ }
+
+ /**
+ * Serialize an XML Node to a StringBuilder.
+ *
+ * This is essentially a minor rework of `toXML` that can't have the same name due to an unfortunate
+ * combination of named/default arguments and overloading.
+ *
+ * @todo use a Writer instead
+ */
+ def serialize(
+ x: Node,
+ pscope: NamespaceBinding = TopScope,
+ sb: StringBuilder = new StringBuilder,
+ stripComments: Boolean = false,
+ decodeEntities: Boolean = true,
+ preserveWhitespace: Boolean = false,
+ minimizeTags: MinimizeMode.Value = MinimizeMode.Default): StringBuilder =
+ {
x match {
- case c: Comment => if (!stripComments) c buildString sb else sb
- case x: SpecialNode => x buildString sb
- case g: Group =>
- g.nodes foreach {toXML(_, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)}
- sb
- case _ =>
+ case c: Comment if !stripComments => c buildString sb
+ case s: SpecialNode => s buildString sb
+ case g: Group => for (c <- g.nodes) serialize(c, g.scope, sb, minimizeTags = minimizeTags) ; sb
+ case el: Elem =>
// print tag with namespace declarations
sb.append('<')
- x.nameToString(sb)
- if (x.attributes ne null) x.attributes.buildString(sb)
- x.scope.buildString(sb, pscope)
- if (x.child.isEmpty && minimizeTags) {
+ el.nameToString(sb)
+ if (el.attributes ne null) el.attributes.buildString(sb)
+ el.scope.buildString(sb, pscope)
+ if (el.child.isEmpty &&
+ (minimizeTags == MinimizeMode.Always ||
+ (minimizeTags == MinimizeMode.Default && el.minimizeEmpty)))
+ {
// no children, so use short form: <xyz .../>
- sb.append(" />")
+ sb.append("/>")
} else {
// children, so use long form: <xyz ...>...</xyz>
sb.append('>')
- sequenceToXML(x.child, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
+ sequenceToXML(el.child, el.scope, sb, stripComments)
sb.append("</")
- x.nameToString(sb)
+ el.nameToString(sb)
sb.append('>')
}
+ case _ => throw new IllegalArgumentException("Don't know how to serialize a " + x.getClass.getName)
}
}
@@ -223,20 +252,20 @@ object Utility extends AnyRef with parsing.TokenTests {
stripComments: Boolean = false,
decodeEntities: Boolean = true,
preserveWhitespace: Boolean = false,
- minimizeTags: Boolean = false): Unit =
+ minimizeTags: MinimizeMode.Value = MinimizeMode.Default): Unit =
{
if (children.isEmpty) return
else if (children forall isAtomAndNotText) { // add space
val it = children.iterator
val f = it.next
- toXML(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
+ serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
while (it.hasNext) {
val x = it.next
sb.append(' ')
- toXML(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
+ serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
}
}
- else children foreach { toXML(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
+ else children foreach { serialize(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
}
/**
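
A hedged sketch of serialize against the three MinimizeMode values introduced in the XML.scala hunk below (not part of the patch):

    // Sketch only: Utility.serialize with explicit minimizeTags modes.
    import scala.xml.{ Elem, MinimizeMode, Null, TopScope, Utility }

    object SerializeDemo extends App {
      val node = Elem(null, "guys", Null, TopScope, false)    // childless, prefers long form

      println(Utility.serialize(node, minimizeTags = MinimizeMode.Never))   // <guys></guys>
      println(Utility.serialize(node, minimizeTags = MinimizeMode.Always))  // <guys/>
      println(Utility.serialize(node))                                      // <guys></guys>: Default honours minimizeEmpty
    }
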
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index bc3033d081..4beba91899 100644..100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -26,6 +26,21 @@ object Source
def fromSysId(sysID: String) = new InputSource(sysID)
def fromString(string: String) = fromReader(new StringReader(string))
}
+
+/**
+ * Governs how empty elements (i.e. those without child elements) should be serialized.
+ */
+object MinimizeMode extends Enumeration {
+ /** Minimize empty tags if they were originally empty when parsed, or if they were constructed with [[scala.xml.Elem]]`#minimizeEmpty` == true */
+ val Default = Value
+
+ /** Always minimize empty tags. Note that this may be problematic for XHTML, in which case [[scala.xml.Xhtml]]`#toXhtml` should be used instead. */
+ val Always = Value
+
+ /** Never minimize empty tags. */
+ val Never = Value
+}
+
import Source._
/** The object `XML` provides constants, and functions to load
@@ -83,10 +98,10 @@ object XML extends XMLLoader[Elem]
* @param xmlDecl if true, write xml declaration
* @param doctype if not null, write doctype declaration
*/
- final def write(w: java.io.Writer, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType) {
+ final def write(w: java.io.Writer, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType, minimizeTags: MinimizeMode.Value = MinimizeMode.Default) {
/* TODO: optimize by giving writer parameter to toXML*/
if (xmlDecl) w.write("<?xml version='1.0' encoding='" + enc + "'?>\n")
if (doctype ne null) w.write( doctype.toString() + "\n")
- w.write(Utility.toXML(node).toString)
+ w.write(Utility.serialize(node, minimizeTags = minimizeTags).toString)
}
}
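
And a sketch of the new minimizeTags parameter on XML.write; the default MinimizeMode.Default keeps existing call sites source-compatible (not part of the patch):

    // Sketch only: writing a document with empty tags always minimized.
    import java.io.StringWriter
    import scala.xml.{ MinimizeMode, XML }

    object WriteDemo extends App {
      val doc = <root><empty></empty></root>
      val sw  = new StringWriter
      XML.write(sw, doc, "UTF-8", xmlDecl = false, doctype = null, minimizeTags = MinimizeMode.Always)
      println(sw.toString)   // <root><empty/></root>
    }
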
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index a8b0ed585b..b4fe153bd8 100644..100755
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -43,13 +43,13 @@ abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
result &+ text(0, x.data)
case x:EntityRef =>
result &+ entityRef(0, x.entityName)
- case _ =>
- elemStart(0, n.prefix, n.label, n.attributes, n.scope)
+ case x:Elem =>
+ elemStart(0, x.prefix, x.label, x.attributes, x.scope)
val old = result
result = new NodeBuffer()
- for (m <- n.child) traverse(m)
- result = old &+ elem(0, n.prefix, n.label, n.attributes, n.scope, NodeSeq.fromSeq(result)).toList;
- elemEnd(0, n.prefix, n.label)
+ for (m <- x.child) traverse(m)
+ result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList;
+ elemEnd(0, x.prefix, x.label)
}
final def validate(n: Node): Node = {
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/library/scala/xml/parsing/ConstructingHandler.scala
index 60c19138c3..7e61674682 100644..100755
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ b/src/library/scala/xml/parsing/ConstructingHandler.scala
@@ -21,8 +21,8 @@ abstract class ConstructingHandler extends MarkupHandler
val preserveWS: Boolean
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
- pscope: NamespaceBinding, nodes: NodeSeq): NodeSeq =
- Elem(pre, label, attrs, pscope, nodes:_*)
+ pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
+ Elem(pre, label, attrs, pscope, empty, nodes:_*)
def procInstr(pos: Int, target: String, txt: String) =
ProcInstr(target, txt)
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
index 699c5b2b5f..e0258ba781 100644..100755
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
@@ -16,7 +16,7 @@ package parsing
abstract class DefaultMarkupHandler extends MarkupHandler {
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
- scope:NamespaceBinding, args: NodeSeq) = NodeSeq.Empty
+ scope:NamespaceBinding, empty: Boolean, args: NodeSeq) = NodeSeq.Empty
def procInstr(pos: Int, target: String, txt: String) = NodeSeq.Empty
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
index 87e785a98f..83db2f177d 100644..100755
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -75,10 +75,11 @@ abstract class MarkupHandler extends Logged
* @param pre the prefix
* @param label the local name
* @param attrs the attributes (metadata)
+ * @param empty `true` if the element had no children when it was parsed (so it may be minimized on output); `false` otherwise.
* @param args the children of this element
* @return ...
*/
- def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, args: NodeSeq): NodeSeq
+ def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, empty: Boolean, args: NodeSeq): NodeSeq
/** callback method invoked by MarkupParser after parsing PI.
*/
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 1de08b3025..32feaa2209 100644..100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -569,7 +569,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
tmp
}
}
- val res = handle.elem(pos, pre, local, aMap, scope, ts)
+ val res = handle.elem(pos, pre, local, aMap, scope, ts == NodeSeq.Empty, ts)
handle.elemEnd(pos, pre, local)
res
}
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index f84d91d138..c764d042c8 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -81,7 +81,7 @@ extends collection.AbstractIterator[XMLEvent]
// memory usage optimization return one <ignore/> for top level to satisfy
// MarkupParser.document() otherwise NodeSeq.Empty
private var ignoreWritten = false
- final def elem(pos: Int, pre: String, label: String, attrs: MetaData, pscope: NamespaceBinding, nodes: NodeSeq): NodeSeq =
+ final def elem(pos: Int, pre: String, label: String, attrs: MetaData, pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
if (level == 1 && !ignoreWritten) {ignoreWritten = true; <ignore/> } else NodeSeq.Empty
def procInstr(pos: Int, target: String, txt: String) = setEvent(EvProcInstr(target, txt))
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index 1cb09b433a..994928c0f6 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -19,9 +19,42 @@ abstract class CompilerTest extends DirectTest {
lazy val global: Global = newCompiler()
lazy val units = compilationUnits(global)(sources: _ *)
+ import global._
+ import definitions._
override def extraSettings = "-usejavacp -d " + testOutput.path
- def sources: List[String] = List(code)
def show() = (sources, units).zipped foreach check
+
+ // Override at least one of these...
+ def code = ""
+ def sources: List[String] = List(code)
+
+ // Utility functions
+
+ class MkType(sym: Symbol) {
+ def apply[M](implicit m1: Manifest[M]): Type =
+ if (sym eq NoSymbol) NoType
+ else appliedType(sym.typeConstructor, List(m1) map (x => manifestToType(x)))
+ }
+ implicit def mkMkType(sym: Symbol) = new MkType(sym)
+
+ def allMembers(root: Symbol): List[Symbol] = {
+ def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
+ val latest = roots flatMap (_.info.members) filterNot (seen contains _)
+ if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
+ else loop(seen ++ latest, latest)
+ }
+ loop(Set(), List(root))
+ }
+
+ class SymsInPackage(pkgName: String) {
+ def pkg = getRequiredModule(pkgName)
+ def classes = allMembers(pkg) filter (_.isClass)
+ def modules = allMembers(pkg) filter (_.isModule)
+ def symbols = classes ++ terms filterNot (_ eq NoSymbol)
+ def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
+ def tparams = classes flatMap (_.info.typeParams)
+ def tpes = symbols map (_.tpe) distinct
+ }
}
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index 07444f8d4b..4e7f36bdc9 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -69,7 +69,7 @@ abstract class DirectTest extends App {
/** Constructor/main body **/
try show()
- catch { case t => println(t) ; sys.exit(1) }
+ catch { case t => println(t) ; t.printStackTrace ; sys.exit(1) }
/** Debugger interest only below this line **/
protected def isDebug = (sys.props contains "partest.debug") || (sys.env contains "PARTEST_DEBUG")
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 196a769a17..243c9aa3be 100644..100755
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -301,7 +301,7 @@ scala> <a>
/></a>
res8: scala.xml.Elem =
<a>
-<b c="c" d="dd"></b></a>
+<b c="c" d="dd"/></a>
scala>
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 81b68f0f5d..0b8055a6b9 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -192,8 +192,8 @@ x = TreeSet(1, 2, 3)
y = TreeSet(1, 2, 3)
x equals y: true, y equals x: true
-x = Ctrie(1 -> one, 2 -> two, 3 -> three)
-y = Ctrie(1 -> one, 2 -> two, 3 -> three)
+x = ConcurrentTrieMap(1 -> one, 2 -> two, 3 -> three)
+y = ConcurrentTrieMap(1 -> one, 2 -> two, 3 -> three)
x equals y: true, y equals x: true
x = xml:src="hello"
@@ -287,8 +287,8 @@ x = ParHashMap(2 -> 4, 1 -> 2)
y = ParHashMap(2 -> 4, 1 -> 2)
x equals y: true, y equals x: true
-x = ParCtrie(1 -> 2, 2 -> 4)
-y = ParCtrie(1 -> 2, 2 -> 4)
+x = ParConcurrentTrieMap(1 -> 2, 2 -> 4)
+y = ParConcurrentTrieMap(1 -> 2, 2 -> 4)
x equals y: true, y equals x: true
x = ParHashSet(1, 2, 3)
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 75daa8903d..1e89036f37 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -286,7 +286,7 @@ object Test3_mutable {
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray, TreeSet, Ctrie}
+ Stack, StringBuilder, WrappedArray, TreeSet, ConcurrentTrieMap}
// in alphabetic order
try {
@@ -386,9 +386,9 @@ object Test3_mutable {
val _ts1: TreeSet[Int] = read(write(ts1))
check(ts1, _ts1)
- // Ctrie
- val ct1 = Ctrie[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three")
- val _ct1: Ctrie[Int, String] = read(write(ct1))
+ // ConcurrentTrieMap
+ val ct1 = ConcurrentTrieMap[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three")
+ val _ct1: ConcurrentTrieMap[Int, String] = read(write(ct1))
check(ct1, _ct1)
}
catch {
@@ -613,9 +613,9 @@ object Test9_parallel {
val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm))
check(mpm, _mpm)
- // mutable.ParCtrie
- val mpc = mutable.ParCtrie(1 -> 2, 2 -> 4)
- val _mpc: mutable.ParCtrie[Int, Int] = read(write(mpc))
+ // mutable.ParConcurrentTrieMap
+ val mpc = mutable.ParConcurrentTrieMap(1 -> 2, 2 -> 4)
+ val _mpc: mutable.ParConcurrentTrieMap[Int, Int] = read(write(mpc))
check(mpc, _mpc)
// mutable.ParHashSet
diff --git a/test/files/jvm/t0632.check b/test/files/jvm/t0632.check
index 3185410a75..681bc9da92 100644..100755
--- a/test/files/jvm/t0632.check
+++ b/test/files/jvm/t0632.check
@@ -1,12 +1,12 @@
-<foo x="&amp;"></foo>
-<foo x="&amp;"></foo>
-<foo x="&amp;"></foo>
-<foo x="&amp;"></foo>
-<foo x="&amp;amp;"></foo>
-<foo x="&amp;amp;"></foo>
-<foo x="&amp;amp;"></foo>
-<foo x="&amp;amp;"></foo>
-<foo x="&amp;&amp;"></foo>
-<foo x="&amp;&amp;"></foo>
-<foo x="&amp;&amp;"></foo>
-<foo x="&amp;&amp;"></foo>
+<foo x="&amp;"/>
+<foo x="&amp;"/>
+<foo x="&amp;"/>
+<foo x="&amp;"/>
+<foo x="&amp;amp;"/>
+<foo x="&amp;amp;"/>
+<foo x="&amp;amp;"/>
+<foo x="&amp;amp;"/>
+<foo x="&amp;&amp;"/>
+<foo x="&amp;&amp;"/>
+<foo x="&amp;&amp;"/>
+<foo x="&amp;&amp;"/>
diff --git a/test/files/jvm/t1118.check b/test/files/jvm/t1118.check
new file mode 100755
index 0000000000..d676b413c9
--- /dev/null
+++ b/test/files/jvm/t1118.check
@@ -0,0 +1,11 @@
+
+<hi/> <!-- literal short -->
+<there></there> <!-- literal long -->
+<guys who="you all"></guys> <!-- literal long with attribute-->
+<hows it="going"/> <!-- literal short with attribute -->
+<this>is pretty cool</this> <!-- literal not empty -->
+
+<emptiness></emptiness> <!--programmatic long-->
+<vide/> <!--programmatic short-->
+<elem attr="value"/> <!--programmatic short with attribute-->
+<elem2 attr2="value2"></elem2> <!--programmatic long with attribute-->
diff --git a/test/files/jvm/t1118.scala b/test/files/jvm/t1118.scala
new file mode 100755
index 0000000000..3c86547241
--- /dev/null
+++ b/test/files/jvm/t1118.scala
@@ -0,0 +1,21 @@
+import scala.xml._
+
+object Test {
+ def main(args: Array[String]) {
+ println(<xml:group>
+<hi/> <!-- literal short -->
+<there></there> <!-- literal long -->
+<guys who="you all"></guys> <!-- literal long with attribute-->
+<hows it="going"/> <!-- literal short with attribute -->
+<this>is pretty cool</this> <!-- literal not empty -->
+</xml:group>)
+
+ println(Elem(null, "emptiness", Null, TopScope, false) ++ Text(" ") ++ Comment("programmatic long"))
+
+ println(Elem(null, "vide", Null, TopScope, true) ++ Text(" ") ++ Comment("programmatic short"))
+
+ println(Elem(null, "elem", Attribute("attr", Text("value"), Null), TopScope, true) ++ Text(" ") ++ Comment ("programmatic short with attribute"))
+
+ println(Elem(null, "elem2", Attribute("attr2", Text("value2"), Null), TopScope, false) ++ Text(" ") ++ Comment ("programmatic long with attribute"))
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/unittest_xml.scala b/test/files/jvm/unittest_xml.scala
index c03695f5c6..106334e625 100644
--- a/test/files/jvm/unittest_xml.scala
+++ b/test/files/jvm/unittest_xml.scala
@@ -89,7 +89,7 @@ object Test {
assert(" a=\"2\" g=\"3\" j=\"2\" oo=\"2\"" == xml.Utility.sort(q.attributes).toString)
val pp = new xml.PrettyPrinter(80,5)
- assert("<a a=\"2\" g=\"3\" j=\"2\" oo=\"2\"></a>" == pp.format(q))
+ assert("<a a=\"2\" g=\"3\" j=\"2\" oo=\"2\"/>" == pp.format(q))
<hi>
<there/>
diff --git a/test/files/jvm/xml01.check b/test/files/jvm/xml01.check
index 5e82e9a729..d78e6df410 100644..100755
--- a/test/files/jvm/xml01.check
+++ b/test/files/jvm/xml01.check
@@ -3,6 +3,6 @@ xpath \
xpath \\ DESCENDANTS
<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>
-- group nodes
-<f><a></a><b></b><c></c></f>
-<a></a><f><a></a><b></b><c></c></f><a></a><b></b><c></c>
+<f><a/><b/><c/></f>
+<a/><f><a/><b/><c/></f><a/><b/><c/>
attribute value normalization
diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check
index 9fbedc2ae6..edcdbdd2ba 100644..100755
--- a/test/files/jvm/xml03syntax.check
+++ b/test/files/jvm/xml03syntax.check
@@ -22,5 +22,5 @@ true
2
4
-node=<elem key="<b>hello</b>"></elem>, key=Some(<b>hello</b>)
-node=<elem></elem>, key=None
+node=<elem key="<b>hello</b>"/>, key=Some(<b>hello</b>)
+node=<elem/>, key=None
diff --git a/test/files/jvm/xml05.check b/test/files/jvm/xml05.check
index 00e617c578..8d3e803bc8 100644
--- a/test/files/jvm/xml05.check
+++ b/test/files/jvm/xml05.check
@@ -4,7 +4,7 @@ Type :help for more information.
scala>
scala> <city name="San Jos&eacute;"/>
-res0: scala.xml.Elem = <city name="San Jos&eacute;"></city>
+res0: scala.xml.Elem = <city name="San Jos&eacute;"/>
scala>
diff --git a/test/files/jvm/xmlattr.check b/test/files/jvm/xmlattr.check
index af80b60fb2..a87420d86c 100644
--- a/test/files/jvm/xmlattr.check
+++ b/test/files/jvm/xmlattr.check
@@ -14,5 +14,5 @@ true
true
true
true
-<b x="&amp;"></b>
-<b x="&amp;"></b>
+<b x="&amp;"/>
+<b x="&amp;"/>
diff --git a/test/files/neg/t5572.check b/test/files/neg/t5572.check
new file mode 100644
index 0000000000..7b1e290861
--- /dev/null
+++ b/test/files/neg/t5572.check
@@ -0,0 +1,11 @@
+t5572.scala:16: error: type mismatch;
+ found : B
+ required: A
+ Z.transf(a, b) match {
+ ^
+t5572.scala:18: error: type mismatch;
+ found : A
+ required: B
+ run(sth, b)
+ ^
+two errors found
diff --git a/test/files/neg/t5572.scala b/test/files/neg/t5572.scala
new file mode 100644
index 0000000000..2da1209c61
--- /dev/null
+++ b/test/files/neg/t5572.scala
@@ -0,0 +1,23 @@
+class A
+class B
+
+trait X
+
+object Z {
+ def transf(a: A, b: B): X = null
+}
+
+class Test {
+
+ def bar(): (A, B)
+
+ def foo {
+ val (b, a) = bar()
+ Z.transf(a, b) match {
+ case sth =>
+ run(sth, b)
+ }
+ }
+
+ def run(x: X, z: B): Unit = ()
+}
diff --git a/test/files/pos/anyval-rangepos.scala b/test/files/pos/anyval-rangepos.scala
deleted file mode 100644
index 8d79793c0d..0000000000
--- a/test/files/pos/anyval-rangepos.scala
+++ /dev/null
@@ -1 +0,0 @@
-class Foo(val x: Double) extends AnyVal { }
diff --git a/test/files/pos/dotless-targs.scala b/test/files/pos/dotless-targs.scala
deleted file mode 100644
index 8337352d18..0000000000
--- a/test/files/pos/dotless-targs.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-class A {
- def fn1 = List apply 1
- def fn2 = List apply[Int] 2
-
- def f1 = "f1" isInstanceOf[String]
-
- def g1 = "g1" toList
- def g2 = "g2" toList 2
- def g3 = "g3" apply 3
-
- def h1 = List apply[List[Int]] (List(1), List(2)) mapConserve[List[Any]] (x => x)
-}
diff --git a/test/files/pos/anyval-rangepos.flags b/test/files/pos/rangepos.flags
index fcf951d907..fcf951d907 100644
--- a/test/files/pos/anyval-rangepos.flags
+++ b/test/files/pos/rangepos.flags
diff --git a/test/files/pos/rangepos.scala b/test/files/pos/rangepos.scala
new file mode 100644
index 0000000000..623b096acb
--- /dev/null
+++ b/test/files/pos/rangepos.scala
@@ -0,0 +1,5 @@
+class Foo(val x: Double) extends AnyVal { }
+
+object Pretty {
+ def f(s1: String) = new { def bar = 5 }
+}
diff --git a/test/files/pos/t5545/S_1.scala b/test/files/pos/t5545/S_1.scala
new file mode 100644
index 0000000000..59ec1fd851
--- /dev/null
+++ b/test/files/pos/t5545/S_1.scala
@@ -0,0 +1,4 @@
+trait F[@specialized(Int) T1, R] {
+ def f(v1: T1): R
+ def g = v1 => f(v1)
+}
diff --git a/test/files/pos/t5545/S_2.scala b/test/files/pos/t5545/S_2.scala
new file mode 100644
index 0000000000..59ec1fd851
--- /dev/null
+++ b/test/files/pos/t5545/S_2.scala
@@ -0,0 +1,4 @@
+trait F[@specialized(Int) T1, R] {
+ def f(v1: T1): R
+ def g = v1 => f(v1)
+}
diff --git a/test/files/run/compiler-asSeenFrom.check b/test/files/run/compiler-asSeenFrom.check
new file mode 100644
index 0000000000..f198e61072
--- /dev/null
+++ b/test/files/run/compiler-asSeenFrom.check
@@ -0,0 +1,323 @@
+class C {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[T1] D[A1] C[List[T3]]#I[A1]
+ C[List[T3]]#I[T1] D[T3] C[List[T3]]#I[T3]
+ C[List[T3]]#J[T1] D[A1] C[List[T3]]#J[A1]
+ C[List[T3]]#J[T1] D[T3] C[List[T3]]#J[T3]
+ C[T1]#I[Int] C[List[T3]] C[List[T3]]#I[Int]
+ C[T1]#I[Int] D[A1] C[A1]#I[Int]
+ C[T1]#I[Int] D[T3] C[T3]#I[Int]
+ C[T1]#I[List[Int]] C[List[T3]] C[List[T3]]#I[List[Int]]
+ C[T1]#I[List[Int]] D[A1] C[A1]#I[List[Int]]
+ C[T1]#I[List[Int]] D[T3] C[T3]#I[List[Int]]
+ C[T1]#I[T1] C[List[T3]] C[List[T3]]#I[List[T3]]
+ C[T1]#I[T1] D[A1] C[A1]#I[A1]
+ C[T1]#I[T1] D[T3] C[T3]#I[T3]
+ C[T1]#I[T2] C[List[T3]] C[List[T3]]#I[T2]
+ C[T1]#I[T2] D[A1] C[A1]#I[T2]
+ C[T1]#I[T2] D[T3] C[T3]#I[T2]
+ C[T1]#I[T3] C[List[T3]] C[List[T3]]#I[T3]
+ C[T1]#I[T3] D[A1] C[A1]#I[T3]
+ C[T1]#I[T3] D[T3] C[T3]#I[T3]
+ C[T1]#I[T4] C[List[T3]] C[List[T3]]#I[T4]
+ C[T1]#I[T4] D[A1] C[A1]#I[T4]
+ C[T1]#I[T4] D[T3] C[T3]#I[T4]
+ C[T1]#J[Int] C[List[T3]] C[List[T3]]#J[Int]
+ C[T1]#J[Int] D[A1] C[A1]#J[Int]
+ C[T1]#J[Int] D[T3] C[T3]#J[Int]
+ C[T1]#J[List[Int]] C[List[T3]] C[List[T3]]#J[List[Int]]
+ C[T1]#J[List[Int]] D[A1] C[A1]#J[List[Int]]
+ C[T1]#J[List[Int]] D[T3] C[T3]#J[List[Int]]
+ C[T1]#J[T1] C[List[T3]] C[List[T3]]#J[List[T3]]
+ C[T1]#J[T1] D[A1] C[A1]#J[A1]
+ C[T1]#J[T1] D[T3] C[T3]#J[T3]
+ C[T1]#J[T2] C[List[T3]] C[List[T3]]#J[T2]
+ C[T1]#J[T2] D[A1] C[A1]#J[T2]
+ C[T1]#J[T2] D[T3] C[T3]#J[T2]
+ C[T1]#J[T3] C[List[T3]] C[List[T3]]#J[T3]
+ C[T1]#J[T3] D[A1] C[A1]#J[T3]
+ C[T1]#J[T3] D[T3] C[T3]#J[T3]
+ C[T1]#J[T4] C[List[T3]] C[List[T3]]#J[T4]
+ C[T1]#J[T4] D[A1] C[A1]#J[T4]
+ C[T1]#J[T4] D[T3] C[T3]#J[T4]
+ D[T3]#J[T1] C[List[T3]] D[T3]#J[List[T3]]
+ D[T3]#J[T1] D[A1] D[T3]#J[A1]
+ D[A1]#J[T1] C[List[T3]] D[A1]#J[List[T3]]
+ D[A1]#J[T1] D[T3] D[A1]#J[T3]
+}
+class D {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[Int] D[A1] C[List[A1]]#I[Int]
+ C[List[T3]]#I[List[Int]] D[A1] C[List[A1]]#I[List[Int]]
+ C[List[T3]]#I[T1] D[A1] C[List[A1]]#I[T1]
+ C[List[T3]]#I[T2] D[A1] C[List[A1]]#I[T2]
+ C[List[T3]]#I[T3] D[A1] C[List[A1]]#I[A1]
+ C[List[T3]]#I[T4] D[A1] C[List[A1]]#I[T4]
+ C[List[T3]]#J[Int] D[A1] C[List[A1]]#J[Int]
+ C[List[T3]]#J[List[Int]] D[A1] C[List[A1]]#J[List[Int]]
+ C[List[T3]]#J[T1] D[A1] C[List[A1]]#J[T1]
+ C[List[T3]]#J[T2] D[A1] C[List[A1]]#J[T2]
+ C[List[T3]]#J[T3] D[A1] C[List[A1]]#J[A1]
+ C[List[T3]]#J[T4] D[A1] C[List[A1]]#J[T4]
+ C[T1]#I[T3] D[A1] C[T1]#I[A1]
+ C[T1]#J[T3] D[A1] C[T1]#J[A1]
+ D[T3]#J[Int] D[A1] D[A1]#J[Int]
+ D[T3]#J[List[Int]] D[A1] D[A1]#J[List[Int]]
+ D[T3]#J[T1] D[A1] D[A1]#J[T1]
+ D[T3]#J[T2] D[A1] D[A1]#J[T2]
+ D[T3]#J[T3] D[A1] D[A1]#J[A1]
+ D[T3]#J[T4] D[A1] D[A1]#J[T4]
+}
+class I {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[T1] D.this.J[T4] C[List[T3]]#I[List[T3]]
+ C[List[T3]]#I[T1] Z.dZ.J[A2] C[List[T3]]#I[List[A1]]
+ C[List[T3]]#I[T1] Z.dZ.J[P] C[List[T3]]#I[List[A1]]
+ C[List[T3]]#I[T2] D.this.J[T4] C[List[T3]]#I[T4]
+ C[List[T3]]#I[T2] Z.dZ.J[A2] C[List[T3]]#I[A2]
+ C[List[T3]]#I[T2] Z.dZ.J[P] C[List[T3]]#I[P]
+ C[List[T3]]#J[T1] D.this.J[T4] C[List[T3]]#J[List[T3]]
+ C[List[T3]]#J[T1] Z.dZ.J[A2] C[List[T3]]#J[List[A1]]
+ C[List[T3]]#J[T1] Z.dZ.J[P] C[List[T3]]#J[List[A1]]
+ C[List[T3]]#J[T2] D.this.J[T4] C[List[T3]]#J[T4]
+ C[List[T3]]#J[T2] Z.dZ.J[A2] C[List[T3]]#J[A2]
+ C[List[T3]]#J[T2] Z.dZ.J[P] C[List[T3]]#J[P]
+ C[T1]#I[Int] D.this.J[T4] C[List[T3]]#I[Int]
+ C[T1]#I[Int] Z.dZ.J[A2] C[List[A1]]#I[Int]
+ C[T1]#I[Int] Z.dZ.J[P] C[List[A1]]#I[Int]
+ C[T1]#I[List[Int]] D.this.J[T4] C[List[T3]]#I[List[Int]]
+ C[T1]#I[List[Int]] Z.dZ.J[A2] C[List[A1]]#I[List[Int]]
+ C[T1]#I[List[Int]] Z.dZ.J[P] C[List[A1]]#I[List[Int]]
+ C[T1]#I[T1] D.this.J[T4] C[List[T3]]#I[List[T3]]
+ C[T1]#I[T1] Z.dZ.J[A2] C[List[A1]]#I[List[A1]]
+ C[T1]#I[T1] Z.dZ.J[P] C[List[A1]]#I[List[A1]]
+ C[T1]#I[T2] D.this.J[T4] C[List[T3]]#I[T4]
+ C[T1]#I[T2] Z.dZ.J[A2] C[List[A1]]#I[A2]
+ C[T1]#I[T2] Z.dZ.J[P] C[List[A1]]#I[P]
+ C[T1]#I[T3] D.this.J[T4] C[List[T3]]#I[T3]
+ C[T1]#I[T3] Z.dZ.J[A2] C[List[A1]]#I[T3]
+ C[T1]#I[T3] Z.dZ.J[P] C[List[A1]]#I[T3]
+ C[T1]#I[T4] D.this.J[T4] C[List[T3]]#I[T4]
+ C[T1]#I[T4] Z.dZ.J[A2] C[List[A1]]#I[T4]
+ C[T1]#I[T4] Z.dZ.J[P] C[List[A1]]#I[T4]
+ C[T1]#J[Int] D.this.J[T4] C[List[T3]]#J[Int]
+ C[T1]#J[Int] Z.dZ.J[A2] C[List[A1]]#J[Int]
+ C[T1]#J[Int] Z.dZ.J[P] C[List[A1]]#J[Int]
+ C[T1]#J[List[Int]] D.this.J[T4] C[List[T3]]#J[List[Int]]
+ C[T1]#J[List[Int]] Z.dZ.J[A2] C[List[A1]]#J[List[Int]]
+ C[T1]#J[List[Int]] Z.dZ.J[P] C[List[A1]]#J[List[Int]]
+ C[T1]#J[T1] D.this.J[T4] C[List[T3]]#J[List[T3]]
+ C[T1]#J[T1] Z.dZ.J[A2] C[List[A1]]#J[List[A1]]
+ C[T1]#J[T1] Z.dZ.J[P] C[List[A1]]#J[List[A1]]
+ C[T1]#J[T2] D.this.J[T4] C[List[T3]]#J[T4]
+ C[T1]#J[T2] Z.dZ.J[A2] C[List[A1]]#J[A2]
+ C[T1]#J[T2] Z.dZ.J[P] C[List[A1]]#J[P]
+ C[T1]#J[T3] D.this.J[T4] C[List[T3]]#J[T3]
+ C[T1]#J[T3] Z.dZ.J[A2] C[List[A1]]#J[T3]
+ C[T1]#J[T3] Z.dZ.J[P] C[List[A1]]#J[T3]
+ C[T1]#J[T4] D.this.J[T4] C[List[T3]]#J[T4]
+ C[T1]#J[T4] Z.dZ.J[A2] C[List[A1]]#J[T4]
+ C[T1]#J[T4] Z.dZ.J[P] C[List[A1]]#J[T4]
+ D[T3]#J[T1] D.this.J[T4] D[T3]#J[List[T3]]
+ D[T3]#J[T1] Z.dZ.J[A2] D[T3]#J[List[A1]]
+ D[T3]#J[T1] Z.dZ.J[P] D[T3]#J[List[A1]]
+ D[T3]#J[T2] D.this.J[T4] D[T3]#J[T4]
+ D[T3]#J[T2] Z.dZ.J[A2] D[T3]#J[A2]
+ D[T3]#J[T2] Z.dZ.J[P] D[T3]#J[P]
+ D[A1]#J[T1] D.this.J[T4] D[A1]#J[List[T3]]
+ D[A1]#J[T1] Z.dZ.J[A2] D[A1]#J[List[A1]]
+ D[A1]#J[T1] Z.dZ.J[P] D[A1]#J[List[A1]]
+ D[A1]#J[T2] D.this.J[T4] D[A1]#J[T4]
+ D[A1]#J[T2] Z.dZ.J[A2] D[A1]#J[A2]
+ D[A1]#J[T2] Z.dZ.J[P] D[A1]#J[P]
+}
+class J {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[Int] Z.dZ.J[A2] C[List[A1]]#I[Int]
+ C[List[T3]]#I[Int] Z.dZ.J[P] C[List[A1]]#I[Int]
+ C[List[T3]]#I[List[Int]] Z.dZ.J[A2] C[List[A1]]#I[List[Int]]
+ C[List[T3]]#I[List[Int]] Z.dZ.J[P] C[List[A1]]#I[List[Int]]
+ C[List[T3]]#I[T1] Z.dZ.J[A2] C[List[A1]]#I[T1]
+ C[List[T3]]#I[T1] Z.dZ.J[P] C[List[A1]]#I[T1]
+ C[List[T3]]#I[T2] Z.dZ.J[A2] C[List[A1]]#I[T2]
+ C[List[T3]]#I[T2] Z.dZ.J[P] C[List[A1]]#I[T2]
+ C[List[T3]]#I[T3] Z.dZ.J[A2] C[List[A1]]#I[A1]
+ C[List[T3]]#I[T3] Z.dZ.J[P] C[List[A1]]#I[A1]
+ C[List[T3]]#I[T4] Z.dZ.J[A2] C[List[A1]]#I[A2]
+ C[List[T3]]#I[T4] Z.dZ.J[P] C[List[A1]]#I[P]
+ C[List[T3]]#J[Int] Z.dZ.J[A2] C[List[A1]]#J[Int]
+ C[List[T3]]#J[Int] Z.dZ.J[P] C[List[A1]]#J[Int]
+ C[List[T3]]#J[List[Int]] Z.dZ.J[A2] C[List[A1]]#J[List[Int]]
+ C[List[T3]]#J[List[Int]] Z.dZ.J[P] C[List[A1]]#J[List[Int]]
+ C[List[T3]]#J[T1] Z.dZ.J[A2] C[List[A1]]#J[T1]
+ C[List[T3]]#J[T1] Z.dZ.J[P] C[List[A1]]#J[T1]
+ C[List[T3]]#J[T2] Z.dZ.J[A2] C[List[A1]]#J[T2]
+ C[List[T3]]#J[T2] Z.dZ.J[P] C[List[A1]]#J[T2]
+ C[List[T3]]#J[T3] Z.dZ.J[A2] C[List[A1]]#J[A1]
+ C[List[T3]]#J[T3] Z.dZ.J[P] C[List[A1]]#J[A1]
+ C[List[T3]]#J[T4] Z.dZ.J[A2] C[List[A1]]#J[A2]
+ C[List[T3]]#J[T4] Z.dZ.J[P] C[List[A1]]#J[P]
+ C[T1]#I[T3] Z.dZ.J[A2] C[T1]#I[A1]
+ C[T1]#I[T3] Z.dZ.J[P] C[T1]#I[A1]
+ C[T1]#I[T4] Z.dZ.J[A2] C[T1]#I[A2]
+ C[T1]#I[T4] Z.dZ.J[P] C[T1]#I[P]
+ C[T1]#J[T3] Z.dZ.J[A2] C[T1]#J[A1]
+ C[T1]#J[T3] Z.dZ.J[P] C[T1]#J[A1]
+ C[T1]#J[T4] Z.dZ.J[A2] C[T1]#J[A2]
+ C[T1]#J[T4] Z.dZ.J[P] C[T1]#J[P]
+ D[T3]#J[Int] Z.dZ.J[A2] D[A1]#J[Int]
+ D[T3]#J[Int] Z.dZ.J[P] D[A1]#J[Int]
+ D[T3]#J[List[Int]] Z.dZ.J[A2] D[A1]#J[List[Int]]
+ D[T3]#J[List[Int]] Z.dZ.J[P] D[A1]#J[List[Int]]
+ D[T3]#J[T1] Z.dZ.J[A2] D[A1]#J[T1]
+ D[T3]#J[T1] Z.dZ.J[P] D[A1]#J[T1]
+ D[T3]#J[T2] Z.dZ.J[A2] D[A1]#J[T2]
+ D[T3]#J[T2] Z.dZ.J[P] D[A1]#J[T2]
+ D[T3]#J[T3] Z.dZ.J[A2] D[A1]#J[A1]
+ D[T3]#J[T3] Z.dZ.J[P] D[A1]#J[A1]
+ D[T3]#J[T4] Z.dZ.J[A2] D[A1]#J[A2]
+ D[T3]#J[T4] Z.dZ.J[P] D[A1]#J[P]
+ D[A1]#J[T3] Z.dZ.J[A2] D[A1]#J[A1]
+ D[A1]#J[T3] Z.dZ.J[P] D[A1]#J[A1]
+ D[A1]#J[T4] Z.dZ.J[A2] D[A1]#J[A2]
+ D[A1]#J[T4] Z.dZ.J[P] D[A1]#J[P]
+}
+class D { // after parser
+ private val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+class D { // after uncurry
+ private val cD: ll.C[List[T3]]
+ val cD(): ll.C[List[T3]]
+}
+
+class D { // after erasure
+ private val cD: ll.C
+ val cD(): ll.C
+}
+
+object Z { // after parser
+ def kz[P <: ll.Z.dZ.J[ll.A2]]: ll.Z.dZ.J[P]
+ private val jZ: ll.Z.dZ.J[ll.A2]
+ val jZ: ll.Z.dZ.J[ll.A2]
+ private val dZ: ll.D[ll.A1]
+ val dZ: ll.D[ll.A1]
+}
+
+object Z { // after uncurry
+ def kz[P <: ll.Z.dZ.J[ll.A2]](): ll.Z.dZ.J[P]
+ private val jZ: ll.Z.dZ.J[ll.A2]
+ val jZ(): ll.Z.dZ.J[ll.A2]
+ private val dZ: ll.D[ll.A1]
+ val dZ(): ll.D[ll.A1]
+}
+
+object Z { // after erasure
+ def kz(): ll.D#J
+ private val jZ: ll.D#J
+ val jZ(): ll.D#J
+ private val dZ: ll.D
+ val dZ(): ll.D
+}
+
+object Z { // after flatten
+ def kz(): ll.D#D$J
+ private val jZ: ll.D#D$J
+ val jZ(): ll.D#D$J
+ private val dZ: ll.D
+ val dZ(): ll.D
+}
+
+value dZ { // after parser
+ private val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+value dZ { // after parser
+ private val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+value dZ { // after uncurry
+ private val cD: ll.C[List[T3]]
+ val cD(): ll.C[List[T3]]
+}
+
+value dZ { // after erasure
+ private val cD: ll.C
+ val cD(): ll.C
+}
+
+value jZ { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value jZ { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value jZ { // after explicitouter
+ protected val $outer: D.this.type
+ val ll$D$J$$$outer(): D.this.type
+ val ll$C$I$$$outer(): C.this.type
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value jZ { // after erasure
+ protected val $outer: ll.D
+ val ll$D$J$$$outer(): ll.D
+ protected val $outer: ll.C
+ val ll$C$I$$$outer(): ll.C
+ def thisI(): ll.C#I
+ def thisC(): ll.C
+ def t2(): Object
+ def t1(): Object
+}
+
+value jZ { // after flatten
+ protected val $outer: ll.D
+ val ll$D$J$$$outer(): ll.D
+ protected val $outer: ll.C
+ val ll$C$I$$$outer(): ll.C
+ def thisI(): ll.C#C$I
+ def thisC(): ll.C
+ def t2(): Object
+ def t1(): Object
+}
+
+method kz { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value $outer { // after parser
+ private val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+value $outer { // after uncurry
+ private val cD: ll.C[List[T3]]
+ val cD(): ll.C[List[T3]]
+}
+
+value $outer { // after erasure
+ private val cD: ll.C
+ val cD(): ll.C
+}
+
diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala
new file mode 100644
index 0000000000..1fc3a5ee71
--- /dev/null
+++ b/test/files/run/compiler-asSeenFrom.scala
@@ -0,0 +1,122 @@
+import scala.tools.nsc._
+import scala.tools.partest.CompilerTest
+import scala.collection.{ mutable, immutable, generic }
+
+/** It's too messy but it's better than not having it.
+ */
+object Test extends CompilerTest {
+ import global._
+ import definitions._
+
+ override def sources = List(lambdaLift)
+ def lambdaLift = """
+package ll {
+ class A1
+ class A2
+ class X
+ class C[T1]() {
+ class I[T2]() {
+ def t1(): T1 = ???
+ def t2(): T2 = ???
+ def thisC(): C.this.type = ???
+ def thisI(): I.this.type = ???
+ }
+ }
+ class D[T3]() extends C[T3]() {
+ val cD: C[List[T3]] = ???
+ class J[T4]() extends cD.I[T4]()
+ }
+ object Z {
+ val dZ: D[A1] = ???
+ val jZ: dZ.J[A2] = ???
+
+ def kz[P <: dZ.J[A2]]: dZ.J[P] = ???
+ }
+}
+"""
+
+ object syms extends SymsInPackage("ll") {
+ def isPossibleEnclosure(encl: Symbol, sym: Symbol) = sym.enclClassChain drop 1 exists (_ isSubClass encl)
+ def isInterestingPrefix(pre: Type) = pre.typeConstructor.typeParams.nonEmpty && pre.members.exists(_.isType)
+
+ def asSeenPrefixes = tpes map (_.finalResultType) distinct
+ def typeRefPrefixes = asSeenPrefixes filter isInterestingPrefix
+
+ def nestsIn(outer: Symbol) = classes filter (c => c.enclClassChain drop 1 exists(_ isSubClass outer))
+ def typeRefs(targs: List[Type]) = (
+ for (p <- typeRefPrefixes ; c <- classes filter (isPossibleEnclosure(p.typeSymbol, _)) ; a <- targs) yield
+ typeRef(p, c, List(a))
+ )
+
+ val wfmt = "%-" + 25 + "s"
+ def to_s(x: Any): String = wfmt.format(x.toString.replaceAll("""\bll\.""", ""))
+
+ def fmt(args: Any*): String = {
+ (args map to_s mkString " ").replaceAll("""\s+$""", "")
+ }
+ def fname(sym: Symbol) = {
+ val p = "" + sym.owner.name
+ val x = if (sym.owner.isPackageClass || sym.owner.isModuleClass || sym.owner.isTerm) "." else "#"
+ sym.kindString + " " + p + x + sym.name
+ }
+
+ def permuteAsSeenFrom(targs: List[Type]) = (
+ for {
+ tp <- typeRefs(targs filterNot (_ eq NoType))
+ prefix <- asSeenPrefixes
+ if tp.prefix != prefix
+ site <- classes
+ seen = tp.asSeenFrom(prefix, site)
+ if tp != seen
+ if !seen.isInstanceOf[ExistentialType]
+ }
+ yield ((site, tp, prefix, seen))
+ )
+
+ def block(label: Any)(lines: List[String]): List[String] = {
+ val first = "" + label + " {"
+ val last = "}"
+
+ first +: lines.map(" " + _) :+ last
+ }
+
+ def permute(targs: List[Type]): List[String] = {
+ permuteAsSeenFrom(targs).groupBy(_._1).toList.sortBy(_._1.toString) flatMap {
+ case (site, xs) =>
+ block(fmt(site)) {
+ fmt("type", "seen from prefix", "is") ::
+ fmt("----", "----------------", "--") :: {
+ xs.groupBy(_._2).toList.sortBy(_._1.toString) flatMap {
+ case (tp, ys) =>
+ (ys map { case (_, _, prefix, seen) => fmt(tp, prefix, seen) }).sorted.distinct
+ }
+ }
+ }
+ }
+ }
+ }
+
+ def pretty(xs: List[_]) = if (xs.isEmpty) "" else xs.mkString("\n ", "\n ", "\n")
+
+ def signaturesIn(info: Type): List[String] = (
+ info.members
+ filterNot (s => s.isType || s.owner == ObjectClass || s.owner == AnyClass || s.isConstructor)
+ map (_.defString)
+ )
+
+ def check(source: String, unit: global.CompilationUnit) = {
+ import syms._
+
+ afterTyper {
+ val typeArgs = List[Type](IntClass.tpe, ListClass[Int]) ++ tparams.map(_.tpe)
+ permute(typeArgs) foreach println
+ }
+ for (x <- classes ++ terms) {
+ afterEachPhase(signaturesIn(x.tpe)) collect {
+ case (ph, sigs) if sigs.nonEmpty =>
+ println(sigs.mkString(x + " { // after " + ph + "\n ", "\n ", "\n}\n"))
+ }
+ }
+ true
+ }
+}
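A minimal sketch of the substitution this test exercises, expressed through the 2.10 runtime-reflection API rather than the compiler internals used above (newTermName, typeSignature and typeSignatureIn are assumptions about that API, not taken from the test itself):

    import scala.reflect.runtime.universe._

    class C[T1] { def t1(): T1 = ??? }
    class D extends C[String]

    object AsSeenFromSketch extends App {
      val t1Sym = typeOf[D].member(newTermName("t1"))
      // As declared in C, the signature still mentions C's type parameter T1.
      println(t1Sym.typeSignature)              // expected: ()T1
      // Viewed from the prefix D, T1 is rewritten to String -- the same kind of
      // prefix-sensitive rewriting the check output above records per site.
      println(t1Sym.typeSignatureIn(typeOf[D])) // expected: ()String
    }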
diff --git a/test/files/run/ctries/concmap.scala b/test/files/run/ctries/concmap.scala
index d73e33182a..bf8cc9d12f 100644
--- a/test/files/run/ctries/concmap.scala
+++ b/test/files/run/ctries/concmap.scala
@@ -1,7 +1,7 @@
-import collection.mutable.Ctrie
+import collection.mutable.ConcurrentTrieMap
object ConcurrentMapSpec extends Spec {
@@ -11,13 +11,13 @@ object ConcurrentMapSpec extends Spec {
def test() {
"support put" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None)
for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i))
}
"support put if absent" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until initsz) ct.update(new Wrap(i), i)
for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
@@ -26,7 +26,7 @@ object ConcurrentMapSpec extends Spec {
}
"support remove if mapped to a specific value" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until initsz) ct.update(new Wrap(i), i)
for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false)
for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true)
@@ -34,7 +34,7 @@ object ConcurrentMapSpec extends Spec {
}
"support replace if mapped to a specific value" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until initsz) ct.update(new Wrap(i), i)
for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false)
for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true)
@@ -43,7 +43,7 @@ object ConcurrentMapSpec extends Spec {
}
"support replace if present" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until initsz) ct.update(new Wrap(i), i)
for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i))
for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i))
@@ -56,7 +56,7 @@ object ConcurrentMapSpec extends Spec {
}
"support replace if mapped to a specific value, using several threads" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val sz = 55000
for (i <- 0 until sz) ct.update(new Wrap(i), i)
@@ -89,7 +89,7 @@ object ConcurrentMapSpec extends Spec {
}
"support put if absent, several threads" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val sz = 110000
class Updater(offs: Int) extends Thread {
@@ -110,7 +110,7 @@ object ConcurrentMapSpec extends Spec {
}
"support remove if mapped to a specific value, several threads" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val sz = 55000
for (i <- 0 until sz) ct.update(new Wrap(i), i)
@@ -132,7 +132,7 @@ object ConcurrentMapSpec extends Spec {
}
"have all or none of the elements depending on the oddity" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val sz = 65000
for (i <- 0 until sz) ct(new Wrap(i)) = i
@@ -165,7 +165,7 @@ object ConcurrentMapSpec extends Spec {
}
"compute size correctly" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val sz = 36450
for (i <- 0 until sz) ct(new Wrap(i)) = i
@@ -174,7 +174,7 @@ object ConcurrentMapSpec extends Spec {
}
"compute size correctly in parallel" in {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val sz = 36450
for (i <- 0 until sz) ct(new Wrap(i)) = i
val pct = ct.par
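The renamed ConcurrentTrieMap keeps the ConcurrentMap-style atomic operations these specs exercise; a minimal usage sketch built only from the methods that appear in the tests above (printed values are expectations, not captured output):

    import scala.collection.mutable.ConcurrentTrieMap

    object ConcurrentTrieMapSketch extends App {
      val ct = new ConcurrentTrieMap[String, Int]
      println(ct.put("a", 1))         // expected: None -- the key was absent
      println(ct.putIfAbsent("a", 2)) // expected: Some(1) -- existing value wins
      println(ct.replace("a", 1, 3))  // expected: true -- compare-and-set replace
      println(ct.remove("a", 99))     // expected: false -- value does not match
      println(ct.get("a"))            // expected: Some(3)
    }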
diff --git a/test/files/run/ctries/iterator.scala b/test/files/run/ctries/iterator.scala
index 85a6ab7623..dbfab6b8a9 100644
--- a/test/files/run/ctries/iterator.scala
+++ b/test/files/run/ctries/iterator.scala
@@ -3,7 +3,7 @@
import collection._
-import collection.mutable.Ctrie
+import collection.mutable.ConcurrentTrieMap
@@ -11,7 +11,7 @@ object IteratorSpec extends Spec {
def test() {
"work for an empty trie" in {
- val ct = new Ctrie
+ val ct = new ConcurrentTrieMap
val it = ct.iterator
it.hasNext shouldEqual (false)
@@ -19,7 +19,7 @@ object IteratorSpec extends Spec {
}
def nonEmptyIteratorCheck(sz: Int) {
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
val it = ct.iterator
@@ -84,7 +84,7 @@ object IteratorSpec extends Spec {
}
def nonEmptyCollideCheck(sz: Int) {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until sz) ct.put(new DumbHash(i), i)
val it = ct.iterator
@@ -144,7 +144,7 @@ object IteratorSpec extends Spec {
val W = 15
val S = 5
val checks = 5
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
class Modifier extends Thread {
@@ -156,7 +156,7 @@ object IteratorSpec extends Spec {
}
}
- def consistentIteration(ct: Ctrie[Wrap, Int], checks: Int) {
+ def consistentIteration(ct: ConcurrentTrieMap[Wrap, Int], checks: Int) {
class Iter extends Thread {
override def run() {
val snap = ct.readOnlySnapshot()
@@ -185,7 +185,7 @@ object IteratorSpec extends Spec {
val sgroupsize = 10
val sgroupnum = 5
val removerslowdown = 50
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
class Remover extends Thread {
@@ -227,7 +227,7 @@ object IteratorSpec extends Spec {
val sgroupsize = 10
val sgroupnum = 10
val inserterslowdown = 50
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
class Inserter extends Thread {
override def run() {
@@ -265,7 +265,7 @@ object IteratorSpec extends Spec {
"work on a yet unevaluated snapshot" in {
val sz = 50000
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct.update(new Wrap(i), i)
val snap = ct.snapshot()
@@ -276,7 +276,7 @@ object IteratorSpec extends Spec {
"be duplicated" in {
val sz = 50
- val ct = collection.parallel.mutable.ParCtrie((0 until sz) zip (0 until sz): _*)
+ val ct = collection.parallel.mutable.ParConcurrentTrieMap((0 until sz) zip (0 until sz): _*)
val it = ct.splitter
for (_ <- 0 until (sz / 2)) it.next()
val dupit = it.dup
diff --git a/test/files/run/ctries/lnode.scala b/test/files/run/ctries/lnode.scala
index 88cbeed1f6..e480795956 100644
--- a/test/files/run/ctries/lnode.scala
+++ b/test/files/run/ctries/lnode.scala
@@ -1,7 +1,7 @@
-import collection.mutable.Ctrie
+import collection.mutable.ConcurrentTrieMap
object LNodeSpec extends Spec {
@@ -11,19 +11,19 @@ object LNodeSpec extends Spec {
def test() {
"accept elements with the same hash codes" in {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
}
"lookup elements with the same hash codes" in {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i))
for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None)
}
"remove elements with the same hash codes" in {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
for (i <- 0 until initsz) {
val remelem = ct.remove(new DumbHash(i))
@@ -33,7 +33,7 @@ object LNodeSpec extends Spec {
}
"put elements with the same hash codes if absent" in {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i))
@@ -42,7 +42,7 @@ object LNodeSpec extends Spec {
}
"replace elements with the same hash codes" in {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i))
@@ -51,7 +51,7 @@ object LNodeSpec extends Spec {
}
"remove elements with the same hash codes if mapped to a specific value" in {
- val ct = new Ctrie[DumbHash, Int]
+ val ct = new ConcurrentTrieMap[DumbHash, Int]
for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true)
}
diff --git a/test/files/run/ctries/snapshot.scala b/test/files/run/ctries/snapshot.scala
index 69073d3f06..3c816130b3 100644
--- a/test/files/run/ctries/snapshot.scala
+++ b/test/files/run/ctries/snapshot.scala
@@ -3,7 +3,7 @@
import collection._
-import collection.mutable.Ctrie
+import collection.mutable.ConcurrentTrieMap
@@ -11,11 +11,11 @@ object SnapshotSpec extends Spec {
def test() {
"support snapshots" in {
- val ctn = new Ctrie
+ val ctn = new ConcurrentTrieMap
ctn.snapshot()
ctn.readOnlySnapshot()
- val ct = new Ctrie[Int, Int]
+ val ct = new ConcurrentTrieMap[Int, Int]
for (i <- 0 until 100) ct.put(i, i)
ct.snapshot()
ct.readOnlySnapshot()
@@ -24,7 +24,7 @@ object SnapshotSpec extends Spec {
"empty 2 quiescent snapshots in isolation" in {
val sz = 4000
- class Worker(trie: Ctrie[Wrap, Int]) extends Thread {
+ class Worker(trie: ConcurrentTrieMap[Wrap, Int]) extends Thread {
override def run() {
for (i <- 0 until sz) {
assert(trie.remove(new Wrap(i)) == Some(i))
@@ -35,7 +35,7 @@ object SnapshotSpec extends Spec {
}
}
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
val snapt = ct.snapshot()
@@ -96,7 +96,7 @@ object SnapshotSpec extends Spec {
}
// traverses the trie `rep` times and modifies each entry
- class Modifier(trie: Ctrie[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
+ class Modifier(trie: ConcurrentTrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
setName("Modifier %d".format(index))
override def run() {
@@ -110,7 +110,7 @@ object SnapshotSpec extends Spec {
}
// removes all the elements from the trie
- class Remover(trie: Ctrie[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
+ class Remover(trie: ConcurrentTrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
setName("Remover %d".format(index))
override def run() {
@@ -123,7 +123,7 @@ object SnapshotSpec extends Spec {
val N = 100
val W = 10
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct(new Wrap(i)) = i
val readonly = ct.readOnlySnapshot()
val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
@@ -141,7 +141,7 @@ object SnapshotSpec extends Spec {
val W = 100
val S = 5000
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct(new Wrap(i)) = i
val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz)
@@ -156,7 +156,7 @@ object SnapshotSpec extends Spec {
val W = 10
val S = 7000
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct(new Wrap(i)) = i
val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
@@ -165,7 +165,7 @@ object SnapshotSpec extends Spec {
threads.foreach(_.join())
}
- def consistentNonReadOnly(name: String, trie: Ctrie[Wrap, Int], sz: Int, N: Int) {
+ def consistentNonReadOnly(name: String, trie: ConcurrentTrieMap[Wrap, Int], sz: Int, N: Int) {
@volatile var e: Exception = null
// reads possible entries once and stores them
@@ -223,7 +223,7 @@ object SnapshotSpec extends Spec {
val W = 10
val S = 400
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct(new Wrap(i)) = i
val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
@@ -241,7 +241,7 @@ object SnapshotSpec extends Spec {
val S = 10
val modifytimes = 1200
val snaptimes = 600
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct(new Wrap(i)) = i
class Snapshooter extends Thread {
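The spec above leans on ConcurrentTrieMap's cheap snapshot operations; a small sketch of the isolation property being checked (assuming only the snapshot() / readOnlySnapshot() calls used in this file):

    import scala.collection.mutable.ConcurrentTrieMap

    object SnapshotSketch extends App {
      val ct = new ConcurrentTrieMap[Int, Int]
      for (i <- 0 until 100) ct(i) = i
      val ro = ct.readOnlySnapshot()   // constant-time; later writes copy lazily
      ct(0) = -1
      println(ct(0))                   // expected: -1
      println(ro(0))                   // expected: 0 -- the snapshot is isolated
    }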
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
index e4f6920145..8c04e4782c 100644
--- a/test/files/run/existentials-in-compiler.scala
+++ b/test/files/run/existentials-in-compiler.scala
@@ -6,7 +6,7 @@ object Test extends CompilerTest {
import global._
import definitions._
- def code = """
+ override def code = """
package extest {
trait Bippy[A <: AnyRef, B] { } // wildcards
trait BippyLike[A <: AnyRef, B <: List[A], This <: BippyLike[A, B, This] with Bippy[A, B]] // no wildcards
diff --git a/test/files/run/matchonseq.check b/test/files/run/matchonseq.check
new file mode 100644
index 0000000000..3fe554095a
--- /dev/null
+++ b/test/files/run/matchonseq.check
@@ -0,0 +1,2 @@
+It worked! head=1
+It worked! last=3
diff --git a/test/files/run/matchonseq.scala b/test/files/run/matchonseq.scala
new file mode 100644
index 0000000000..49b406a6ec
--- /dev/null
+++ b/test/files/run/matchonseq.scala
@@ -0,0 +1,8 @@
+object Test extends App{
+ Vector(1,2,3) match {
+ case head +: tail => println("It worked! head=" + head)
+ }
+ Vector(1,2,3) match {
+ case init :+ last => println("It worked! last=" + last)
+ }
+}
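The patterns above rely on the new +: / :+ sequence extractors; the sketch below defines local stand-ins with the same shape to show how such extractors work (illustrative definitions only, not the library's exact ones):

    object +: {
      def unapply[A](xs: Seq[A]): Option[(A, Seq[A])] =
        if (xs.isEmpty) None else Some((xs.head, xs.tail))
    }

    object :+ {
      def unapply[A](xs: Seq[A]): Option[(Seq[A], A)] =
        if (xs.isEmpty) None else Some((xs.init, xs.last))
    }

    object MatchOnSeqSketch extends App {
      Vector(1, 2, 3) match { case head +: _ => println("head=" + head) } // head=1
      Vector(1, 2, 3) match { case _ :+ last => println("last=" + last) } // last=3
    }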
diff --git a/test/files/run/t0663.check b/test/files/run/t0663.check
index 22b68b7f57..dd9be2af70 100644..100755
--- a/test/files/run/t0663.check
+++ b/test/files/run/t0663.check
@@ -1 +1 @@
-<feed></feed>
+<feed/>
diff --git a/test/files/run/t1620.check b/test/files/run/t1620.check
index 979efc8227..afa1e6acd5 100644..100755
--- a/test/files/run/t1620.check
+++ b/test/files/run/t1620.check
@@ -1,6 +1,6 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN" "foo.dtd">
-<foo></foo>
+<foo/>
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN">
-<foo></foo>
+<foo/>
diff --git a/test/files/run/t2124.check b/test/files/run/t2124.check
index 2b8840209f..51b40469aa 100644..100755
--- a/test/files/run/t2124.check
+++ b/test/files/run/t2124.check
@@ -1 +1 @@
-<p><lost></lost><q></q></p>
+<p><lost/><q/></p>
diff --git a/test/files/run/t2125.check b/test/files/run/t2125.check
index 2b8840209f..51b40469aa 100644..100755
--- a/test/files/run/t2125.check
+++ b/test/files/run/t2125.check
@@ -1 +1 @@
-<p><lost></lost><q></q></p>
+<p><lost/><q/></p>
diff --git a/test/files/run/xml-attribute.check b/test/files/run/xml-attribute.check
index 3ae2034684..3cfe3779fc 100644
--- a/test/files/run/xml-attribute.check
+++ b/test/files/run/xml-attribute.check
@@ -1,12 +1,12 @@
-<t></t>
-<t></t>
-<t></t>
-<t></t>
-<t></t>
-<t b="1" d="2"></t>
-<t b="1" d="2"></t>
-<t b="1" d="2"></t>
-<t a="1" d="2"></t>
-<t b="1" d="2"></t>
-<t a="1" b="2" c="3"></t>
-<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"></t> \ No newline at end of file
+<t/>
+<t/>
+<t/>
+<t/>
+<t/>
+<t b="1" d="2"/>
+<t b="1" d="2"/>
+<t b="1" d="2"/>
+<t a="1" d="2"/>
+<t b="1" d="2"/>
+<t a="1" b="2" c="3"/>
+<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>
diff --git a/test/files/scalacheck/Ctrie.scala b/test/files/scalacheck/Ctrie.scala
index 2950937278..b9d71b88a3 100644
--- a/test/files/scalacheck/Ctrie.scala
+++ b/test/files/scalacheck/Ctrie.scala
@@ -5,7 +5,7 @@ import org.scalacheck._
import Prop._
import org.scalacheck.Gen._
import collection._
-import collection.mutable.Ctrie
+import collection.mutable.ConcurrentTrieMap
@@ -16,7 +16,7 @@ case class Wrap(i: Int) {
/** A check mainly oriented towards checking snapshot correctness.
*/
-object Test extends Properties("Ctrie") {
+object Test extends Properties("ConcurrentTrieMap") {
/* generators */
@@ -102,7 +102,7 @@ object Test extends Properties("Ctrie") {
(numThreads, numElems) =>
val p = 3 //numThreads
val sz = 102 //numElems
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
// checker
val checker = spawn {
@@ -134,7 +134,7 @@ object Test extends Properties("Ctrie") {
property("update") = forAll(sizes) {
(n: Int) =>
- val ct = new Ctrie[Int, Int]
+ val ct = new ConcurrentTrieMap[Int, Int]
for (i <- 0 until n) ct(i) = i
(0 until n) forall {
case i => ct(i) == i
@@ -143,7 +143,7 @@ object Test extends Properties("Ctrie") {
property("concurrent update") = forAll(threadCountsAndSizes) {
case (p, sz) =>
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
inParallel(p) {
idx =>
@@ -158,7 +158,7 @@ object Test extends Properties("Ctrie") {
property("concurrent remove") = forAll(threadCounts, sizes) {
(p, sz) =>
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
for (i <- 0 until sz) ct(Wrap(i)) = i
inParallel(p) {
@@ -174,7 +174,7 @@ object Test extends Properties("Ctrie") {
property("concurrent putIfAbsent") = forAll(threadCounts, sizes) {
(p, sz) =>
- val ct = new Ctrie[Wrap, Int]
+ val ct = new ConcurrentTrieMap[Wrap, Int]
val results = inParallel(p) {
idx =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
index d1924f0ada..a04c0ff8d4 100644
--- a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -15,25 +15,25 @@ import scala.collection.parallel.ops._
-abstract class ParallelCtrieCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParCtrie[" + tp + "]") {
+abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") {
// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
- type CollType = ParCtrie[K, V]
+ type CollType = ParConcurrentTrieMap[K, V]
def isCheckingViews = false
def hasStrictOrder = false
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
- val ct = new mutable.Ctrie[K, V]
+ val ct = new mutable.ConcurrentTrieMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
for (i <- 0 until sz) ct += sample(gen)
ct
}
def fromTraversable(t: Traversable[(K, V)]) = {
- val pct = new ParCtrie[K, V]
+ val pct = new ParConcurrentTrieMap[K, V]
var i = 0
for (kv <- t.toList) {
pct += kv
@@ -45,7 +45,7 @@ abstract class ParallelCtrieCheck[K, V](tp: String) extends ParallelMapCheck[K,
}
-object IntIntParallelCtrieCheck extends ParallelCtrieCheck[Int, Int]("Int, Int")
+object IntIntParallelConcurrentTrieMapCheck extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
@@ -58,7 +58,7 @@ with PairValues[Int, Int]
def koperators = intoperators
override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
- case pm: ParCtrie[k, v] =>
+ case pm: ParConcurrentTrieMap[k, v] =>
println("Mutable parallel ctrie")
case _ =>
println("could not match data structure type: " + ds.getClass)
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index 8a0dba3c25..0a91977da0 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -26,7 +26,7 @@ class ParCollProperties extends Properties("Parallel collections") {
include(mutable.IntIntParallelHashMapCheck)
// parallel ctrie
- include(mutable.IntIntParallelCtrieCheck)
+ include(mutable.IntIntParallelConcurrentTrieMapCheck)
// parallel mutable hash sets (tables)
include(mutable.IntParallelHashSetCheck)
diff --git a/test/scaladoc/resources/explicit-inheritance-override.scala b/test/scaladoc/resources/explicit-inheritance-override.scala
new file mode 100644
index 0000000000..62ce653aea
--- /dev/null
+++ b/test/scaladoc/resources/explicit-inheritance-override.scala
@@ -0,0 +1,48 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for class inheritance (no $super, no @inheritdoc)
+class InheritDocBase {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T the type of the first argument
+ * @throws SomeException if the function is not called with correct parameters
+ * @return The return comment
+ * @see The Manual
+ * @note Be careful!
+ * @example function[Int](3, "something")
+ * @author a Scala developer
+ * @version 0.0.2
+ * @since 0.0.1
+ * @todo Call mom.
+ */
+ def function[T](arg1: T, arg2: String): Double = 0.0d
+}
+
+class InheritDocDerived extends InheritDocBase {
+ /**
+ * Starting line
+ *
+ * @inheritdoc
+ * @inheritdoc
+ *
+ * Ending line
+ *
+ * @param arg1 Start1 @inheritdoc End1
+ * @param arg2 Start2 @inheritdoc End2
+ * @param arg3 Start3 ShouldWarn @inheritdoc End3
+ * @tparam T StartT @inheritdoc EndT
+ * @tparam ShouldWarn StartSW @inheritdoc EndSW
+ * @throws SomeException StartEx @inheritdoc EndEx
+ * @throws SomeOtherException StartSOE Should Warn @inheritdoc EndSOE
+ * @return StartRet @inheritdoc EndRet
+ * @see StartSee @inheritdoc EndSee
+ * @note StartNote @inheritdoc EndNote
+ * @example StartExample @inheritdoc EndExample
+ * @author StartAuthor @inheritdoc EndAuthor
+ * @version StartVer @inheritdoc EndVer
+ * @since StartSince @inheritdoc EndSince
+ * @todo StartTodo @inheritdoc And dad! EndTodo
+ */
+ override def function[T](arg1: T, arg2: String): Double = 1.0d
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/explicit-inheritance-usecase.scala b/test/scaladoc/resources/explicit-inheritance-usecase.scala
new file mode 100644
index 0000000000..e10cec437a
--- /dev/null
+++ b/test/scaladoc/resources/explicit-inheritance-usecase.scala
@@ -0,0 +1,47 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for usecases (no $super, no @inheritdoc)
+/** Testing use case inheritance */
+class UseCaseInheritDoc {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T the type of the first argument
+ * @throws SomeException if the function is not called with correct parameters
+ * @return The return comment
+ * @see The Manual
+ * @note Be careful!
+ * @example function[Int](3, "something")
+ * @author a Scala developer
+ * @version 0.0.2
+ * @since 0.0.1
+ * @todo Call mom.
+ *
+ * @usecase def function[T](arg1: T, arg2: String): Double
+ *
+ * Starting line
+ *
+ * @inheritdoc
+ * @inheritdoc
+ *
+ * Ending line
+ *
+ * @param arg1 Start1 @inheritdoc End1
+ * @param arg2 Start2 @inheritdoc End2
+ * @param arg3 Start3 ShouldWarn @inheritdoc End3
+ * @tparam T StartT @inheritdoc EndT
+ * @tparam ShouldWarn StartSW @inheritdoc EndSW
+ * @throws SomeException StartEx @inheritdoc EndEx
+ * @throws SomeOtherException StartSOE Should Warn @inheritdoc EndSOE
+ * @return StartRet @inheritdoc EndRet
+ * @see StartSee @inheritdoc EndSee
+ * @note StartNote @inheritdoc EndNote
+ * @example StartExample @inheritdoc EndExample
+ * @author StartAuthor @inheritdoc EndAuthor
+ * @version StartVer @inheritdoc EndVer
+ * @since StartSince @inheritdoc EndSince
+ * @todo StartTodo @inheritdoc And dad! EndTodo
+ */
+ def function[T](implicit arg1: T, arg2: String): Double = 0.0d
+}
+
diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala
index 85b8e8d543..5d692f59ad 100644
--- a/test/scaladoc/resources/implicit-inheritance-override.scala
+++ b/test/scaladoc/resources/implicit-inheritance-override.scala
@@ -2,12 +2,12 @@
class Base {
/**
* The base comment. And another sentence...
- *
- * @param arg1 The T term comment
- * @param arg2 The string comment
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
* @tparam T the type of the first argument
* @return The return comment
- */
+ */
def function[T](arg1: T, arg2: String): Double = 0.0d
}
diff --git a/test/scaladoc/resources/inheritdoc-corner-cases.scala b/test/scaladoc/resources/inheritdoc-corner-cases.scala
new file mode 100644
index 0000000000..8cd995e605
--- /dev/null
+++ b/test/scaladoc/resources/inheritdoc-corner-cases.scala
@@ -0,0 +1,78 @@
+// TEST1: Inherit from multiple classes
+trait A {
+ /**
+ * Hello 1 comment
+ */
+ def hello1 = 0
+}
+
+trait B {
+ /**
+ * Hello 2 comment
+ */
+ def hello2 = 1
+}
+
+trait C extends B
+
+class D extends A with C {
+ /**
+ * Inherited: @inheritdoc
+ */
+ override def hello1 = super.hello2
+
+ /**
+ * Inherited: @inheritdoc
+ */
+ override def hello2 = super.hello1
+}
+
+// TEST2: Invalid inherit: no parents
+trait E {
+ /**
+ * @inheritdoc
+ */
+ def whereDidThisComeFrom
+}
+
+// TEST3: Invalid inherit, but other parents present
+trait F extends E {
+ /**
+ * @inheritdoc
+ */
+ def howAboutThis
+}
+
+
+// TEST4: Inherit from something that inherits: inherit should propagate
+trait G extends D {
+ /**
+ * @inheritdoc
+ */
+ override def hello1 = 13
+
+ /**
+ * @inheritdoc
+ */
+ override def hello2 = 14
+}
+
+// TEST5: Inherit missing parameters
+trait H extends G {
+ /**
+ * Missing params
+ * @throws HelloException @inheritdoc
+ * @todo @inheritdoc
+ */
+ override def hello1 = 15
+}
+
+// TEST6: Inherit from something that inherits in the usecase
+trait I extends G {
+ /**
+ * @inheritdoc
+ * @usecase def hello1(i: Int)
+ * @inheritdoc
+ */
+ override def hello1 = 13
+}
\ No newline at end of file
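Boiled down, the @inheritdoc mechanism exercised by these resource files works as in the following sketch (class and member names here are illustrative, not from the resources):

    class Base {
      /** The base comment.
       *  @param x the input value
       *  @return the result
       */
      def f(x: Int): Int = x
    }

    class Derived extends Base {
      /** Derived-specific intro, then: @inheritdoc
       *  @param x Before @inheritdoc After
       *  @return @inheritdoc
       */
      override def f(x: Int): Int = x + 1
    }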
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala
index d46a9581b9..67358e6e70 100644
--- a/test/scaladoc/scala/html/HtmlFactoryTest.scala
+++ b/test/scaladoc/scala/html/HtmlFactoryTest.scala
@@ -21,9 +21,9 @@ object XMLUtil {
}
object Test extends Properties("HtmlFactory") {
-
- final val RESOURCES = "test/scaladoc/resources/"
-
+
+ final val RESOURCES = "test/scaladoc/resources/"
+
import scala.tools.nsc.doc.{DocFactory, Settings}
import scala.tools.nsc.doc.model.IndexModelFactory
import scala.tools.nsc.doc.html.HtmlFactory
@@ -87,7 +87,7 @@ object Test extends Properties("HtmlFactory") {
/**
* This tests the text without the markup - ex:
- *
+ *
* <h4 class="signature">
* <span class="modifier_kind">
* <span class="modifier">implicit</span>
@@ -97,24 +97,24 @@ object Test extends Properties("HtmlFactory") {
* <span class="name">test</span><span class="params">()</span><span class="result">: <span name="scala.Int" class="extype">Int</span></span>
* </span>
* </h4>
- *
+ *
* becomes:
- *
+ *
* implicit def test(): Int
- *
+ *
* and is required to contain the text in the given checks
- *
+ *
* NOTE: Comparison is done ignoring all whitespace
*/
def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = {
- val htmlFile = scalaFile.stripSuffix(".scala") + ".html"
+ val htmlFile = scalaFile.stripSuffix(".scala") + ".html"
val htmlAllFiles = createTemplates(scalaFile)
var result = true
-
+
for ((fileHint, check, expected) <- checks) {
// resolve the file to be checked
val fileName = fileHint match {
- case Some(file) =>
+ case Some(file) =>
if (file endsWith ".html")
file
else
@@ -122,20 +122,27 @@ object Test extends Properties("HtmlFactory") {
case None =>
htmlFile
}
- val fileText = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+","")
- val checkText = check.replace('→',' ').replaceAll("\\s+","")
+ val fileTextPretty = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+"," ")
+ val fileText = fileTextPretty.replaceAll(" ", "")
+
+ val checkTextPretty = check.replace('→',' ').replaceAll("\\s+"," ")
+ val checkText = checkTextPretty.replaceAll(" ", "")
+
val checkValue = fileText.contains(checkText) == expected
if (debug && (!checkValue)) {
- Console.err.println("Check failed: ")
- Console.err.println("HTML: " + fileText)
- Console.err.println("Check: " + checkText)
+ Console.err.println("")
+ Console.err.println("HTML Check failed for resource file " + scalaFile + ":")
+ Console.err.println("Could not match: \n" + checkTextPretty)
+ Console.err.println("In the extracted HTML text: \n" + fileTextPretty)
+ Console.err.println("NOTE: The whitespaces are eliminated before matching!")
+ Console.err.println("")
}
- result &&= checkValue
+ result &&= checkValue
}
-
+
result
}
-
+
def shortComments(root: scala.xml.Node) =
XMLUtil.stripGroup(root).descendant.flatMap {
@@ -284,7 +291,7 @@ object Test extends Properties("HtmlFactory") {
case _ => false
}
}
-
+
property("Trac #4420 - no whitespace at end of line") = {
val files = createTemplates("Trac4420.scala")
@@ -432,47 +439,46 @@ object Test extends Properties("HtmlFactory") {
createTemplate("SI_4898.scala")
true
}
-
+
property("Use cases should override their original members") =
checkText("SI_5054_q1.scala")(
(None,"""def test(): Int""", true),
(None,"""def test(implicit lost: Int): Int""", false)
)
- property("Use cases should keep their flags - final should not be lost") =
+ property("Use cases should keep their flags - final should not be lost") =
checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true))
-
- property("Use cases should keep their flags - implicit should not be lost") =
+
+ property("Use cases should keep their flags - implicit should not be lost") =
checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true))
-
- property("Use cases should keep their flags - real abstract should not be lost") =
+
+ property("Use cases should keep their flags - real abstract should not be lost") =
checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true))
- property("Use cases should keep their flags - traits should not be affected") =
+ property("Use cases should keep their flags - traits should not be affected") =
checkText("SI_5054_q5.scala")((None, """def test(): Int""", true))
- property("Use cases should keep their flags - traits should not be affected") =
+ property("Use cases should keep their flags - traits should not be affected") =
checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true))
-
- property("Use case individual signature test") =
+
+ property("Use case individual signature test") =
checkText("SI_5054_q7.scala")(
(None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true),
(None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true)
)
- property("Display correct \"Definition classes\"") =
- checkText("SI_5287.scala")(
+ property("Display correct \"Definition classes\"") =
+ checkText("SI_5287.scala")(
(None,
"""def method(): Int
[use case] The usecase explanation
[use case] The usecase explanation
Definition Classes SI_5287 SI_5287_B SI_5287_A""", true)
- ) // the explanation appears twice, as small comment and full comment
-
-
- property("Correct comment inheritance for overriding") =
+ ) // the explanation appears twice, as small comment and full comment
+
+ property("Correct comment inheritance for overriding") =
checkText("implicit-inheritance-override.scala")(
- (Some("Base"),
+ (Some("Base"),
"""def function[T](arg1: T, arg2: String): Double
The base comment.
The base comment. And another sentence...
@@ -481,7 +487,7 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The return comment
""", true),
- (Some("DerivedA"),
+ (Some("DerivedA"),
"""def function[T](arg1: T, arg2: String): Double
Overriding the comment, the params and returns comments should stay the same.
Overriding the comment, the params and returns comments should stay the same.
@@ -490,21 +496,21 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The return comment
""", true),
- (Some("DerivedB"),
+ (Some("DerivedB"),
"""def function[T](arg1: T, arg2: String): Double
T the type of the first argument
arg1 The overridden T term comment
arg2 The overridden string comment
returns The return comment
""", true),
- (Some("DerivedC"),
+ (Some("DerivedC"),
"""def function[T](arg1: T, arg2: String): Double
T the type of the first argument
arg1 The T term comment
arg2 The string comment
returns The overridden return comment
""", true),
- (Some("DerivedD"),
+ (Some("DerivedD"),
"""def function[T](arg1: T, arg2: String): Double
T The overriden type parameter comment
arg1 The T term comment
@@ -512,11 +518,11 @@ object Test extends Properties("HtmlFactory") {
returns The return comment
""", true)
)
-
+
for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) {
- property("Correct comment inheritance for usecases") =
+ property("Correct comment inheritance for usecases") =
checkText("implicit-inheritance-usecase.scala")(
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def missing_arg[T](arg1: T): Double
[use case]
[use case]
@@ -524,7 +530,7 @@ object Test extends Properties("HtmlFactory") {
arg1 The T term comment
returns The return comment
""", true),
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def missing_targ(arg1: Int, arg2: String): Double
[use case]
[use case]
@@ -532,7 +538,7 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The return comment
""", true),
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def overridden_arg1[T](implicit arg1: T, arg2: String): Double
[use case]
[use case]
@@ -541,7 +547,7 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The return comment
""", true),
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def overridden_targ[T](implicit arg1: T, arg2: String): Double
[use case]
[use case]
@@ -550,7 +556,7 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The return comment
""", true),
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def overridden_return[T](implicit arg1: T, arg2: String): Double
[use case]
[use case]
@@ -559,7 +565,7 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The overridden return comment
""", true),
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
[use case]
[use case]
@@ -569,7 +575,7 @@ object Test extends Properties("HtmlFactory") {
arg3 The added float comment
returns The return comment
""", true),
- (Some(useCaseFile),
+ (Some(useCaseFile),
"""def overridden_comment[T](implicit arg1: T, arg2: String): Double
[use case] The overridden comment.
[use case] The overridden comment.
@@ -578,9 +584,93 @@ object Test extends Properties("HtmlFactory") {
arg2 The string comment
returns The return comment
""", true)
- )
- }
-
+ )
+ }
+
+ property("Correct explicit inheritance for override") =
+ checkText("explicit-inheritance-override.scala")(
+ (Some("InheritDocDerived"),
+ """def function[T](arg1: T, arg2: String): Double
+ Starting line
+ Starting line
+ The base comment. And another sentence...
+ The base comment. And another sentence...
+ Ending line
+ T StartT the type of the first argument EndT
+ arg1 Start1 The T term comment End1
+ arg2 Start2 The string comment End2
+ returns StartRet The return comment EndRet
+ Definition Classes InheritDocDerived → InheritDocBase
+ Example: StartExample function[Int](3, "something") EndExample
+ Version StartVer 0.0.2 EndVer
+ Since StartSince 0.0.1 EndSince
+ Exceptions thrown
+ SomeException StartEx if the function is not called with correct parameters EndEx
+ SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE
+ To do StartTodo Call mom. And dad! EndTodo
+ Note StartNote Be careful! EndNote
+ See also StartSee The Manual EndSee
+ """, true))
+
+ property("Correct explicit inheritance for usecase") =
+ checkText("explicit-inheritance-usecase.scala")(
+ (Some("UseCaseInheritDoc"),
+ """def function[T](arg1: T, arg2: String): Double
+ [use case] Starting line
+ [use case] Starting line
+ The base comment. And another sentence...
+ The base comment. And another sentence...
+ Ending line
+ T StartT the type of the first argument EndT
+ arg1 Start1 The T term comment End1
+ arg2 Start2 The string comment End2
+ returns StartRet The return comment EndRet
+ Example: StartExample function[Int](3,"something") EndExample
+ Version StartVer 0.0.2 EndVer
+ Since StartSince 0.0.1 EndSince
+ Exceptions thrown
+ SomeException StartEx if the function is not called with correct parameters EndEx
+ SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE
+ To do StartTodo Call mom. And dad! EndTodo
+ Note StartNote Be careful! EndNote
+ See also StartSee The Manual EndSee
+ """, true))
+
+ property("Correct explicit inheritance in corner cases") =
+ checkText("inheritdoc-corner-cases.scala")(
+ (Some("D"),
+ """def hello1: Int
+ Inherited: Hello 1 comment
+ Inherited: Hello 1 comment
+ Definition Classes D → A
+ """, true),
+ (Some("D"),
+ """def hello2: Int
+ Inherited: Hello 2 comment
+ Inherited: Hello 2 comment
+ Definition Classes D → B
+ """, true),
+ (Some("G"),
+ """def hello1: Int
+ Inherited: Hello 1 comment
+ Inherited: Hello 1 comment
+ Definition Classes G → D → A
+ """, true),
+ (Some("G"),
+ """def hello2: Int
+ Inherited: Hello 2 comment
+ Inherited: Hello 2 comment
+ Definition Classes G → D → B
+ """, true),
+ (Some("I"),
+ """def hello1(i: Int): Unit
+ [use case] Inherited: Hello 1 comment
+ [use case] Inherited: Hello 1 comment
+ Definition Classes I → G → D → A
+ """, true)
+ // traits E, F and H shouldn't crash scaladoc but we don't need to check the output
+ )
+
{
val files = createTemplates("basic.scala")
//println(files)
diff --git a/tools/get-scala-commit-date b/tools/get-scala-commit-date
new file mode 100755
index 0000000000..ef5b0f540d
--- /dev/null
+++ b/tools/get-scala-commit-date
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+#
+# Usage: get-scala-commit-date [dir]
+# Figures out current commit date of a git clone.
+# If no dir is given, current working dir is used.
+#
+# Example build version string:
+# 20120312
+#
+
+[[ $# -eq 0 ]] || cd "$1"
+
+lastcommitdate=$(git log --format="%ci" HEAD | head -n 1 | cut -d ' ' -f 1)
+
+# 20120324
+echo "${lastcommitdate//-/}"
diff --git a/tools/get-scala-commit-date.bat b/tools/get-scala-commit-date.bat
new file mode 100644
index 0000000000..a07155533f
--- /dev/null
+++ b/tools/get-scala-commit-date.bat
@@ -0,0 +1,24 @@
+@echo off
+rem
+rem Usage: get-scala-commit-date.bat [dir]
+rem Figures out current scala commit date of a git clone.
+rem
+rem If no dir is given, current working dir is used.
+
+@setlocal
+set _DIR=
+if "%*"=="" (
+ for /f "delims=;" %%i in ('cd') do set "_DIR=%%i"
+) else (
+ set "_DIR=%~1"
+)
+cd %_DIR%
+
+rem TODO - Check with a real windows user that this works!
+if exist .git\NUL (
+ for /f "tokens=1delims= " in ('git log --format="%ci" -1') do set commitdate=%%a
+ echo %commitdate%
+)
+
+:end
+@endlocal
diff --git a/tools/get-scala-commit-drift b/tools/get-scala-commit-drift
new file mode 100755
index 0000000000..4959826ec1
--- /dev/null
+++ b/tools/get-scala-commit-drift
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+#
+# Usage: get-scala-commit-drift [dir]
+# Figures out current commit drift of a git clone.
+# If no dir is given, current working dir is used.
+#
+# Example output string:
+# 123
+#
+# Build drift = # of commits since last tag.
+
+[[ $# -eq 0 ]] || cd "$1"
+
+ensure_tag () {
+ sha=$1
+ rev=$2
+
+ [[ -n $(git tag -l $rev) ]] || {
+ git tag -a -m "generated by get-scala-revision" $rev $sha
+ }
+}
+
+# Ensure some baseline tags are present so if this repository's
+# tags are screwed up or stale, we should still have a reference
+# point for a build string.
+ensure_tag 58cb15c40d v2.10.0-M1
+ensure_tag 29f3eace1e v2.9.1
+ensure_tag b0d78f6b9c v2.8.2
+
+# the closest tag, obtained separately because we have to
+# reconstruct the string around the padded distance.
+tag=$(git describe --tags --match 'v2*' --abbrev=0)
+
+# printf %016s is not portable for 0-padding, has to be a digit.
+# so we're stuck disassembling it.
+described=$(git describe --tags --match 'v2*' --abbrev=10)
+suffix="${described##${tag}-}"
+counter=$(echo $suffix | cut -d - -f 1)
+
+echo "$counter"
diff --git a/tools/get-scala-revision.bat b/tools/get-scala-commit-drift.bat
index 48c7cbd94f..ac289d3481 100644
--- a/tools/get-scala-revision.bat
+++ b/tools/get-scala-commit-drift.bat
@@ -1,7 +1,7 @@
@echo off
rem
-rem Usage: get-scala-revison.bat [dir]
-rem Figures out current scala revision of a git clone.
+rem Usage: get-scala-commit-drift.bat [dir]
+rem Figures out current scala commit drift of a git clone.
rem
rem If no dir is given, current working dir is used.
@@ -14,9 +14,8 @@ if "%*"=="" (
)
cd %_DIR%
-if exist .git\NUL (
- git describe --abbrev=10 --always --tags
-)
+rem TODO - WRITE THIS
+echo "TODO"
:end
@endlocal
diff --git a/tools/get-scala-revision b/tools/get-scala-commit-sha
index 4d97ec58ad..0abe31a53c 100755
--- a/tools/get-scala-revision
+++ b/tools/get-scala-commit-sha
@@ -1,11 +1,11 @@
#!/usr/bin/env bash
#
-# Usage: get-scala-revision [dir]
-# Figures out current scala revision of a git clone.
+# Usage: get-scala-commit-sha [dir]
+# Figures out current commit sha of a git clone.
# If no dir is given, current working dir is used.
#
# Example build version string:
-# v2.10.0-M1-0098-g6f1c486d0b-2012-02-01
+# 6f1c486d0ba
#
[[ $# -eq 0 ]] || cd "$1"
@@ -34,11 +34,7 @@ tag=$(git describe --tags --match 'v2*' --abbrev=0)
# so we're stuck disassembling it.
described=$(git describe --tags --match 'v2*' --abbrev=10)
suffix="${described##${tag}-}"
-counter=$(echo $suffix | cut -d - -f 1)
hash=$(echo $suffix | cut -d - -f 2)
+hash=${hash#g}
-# remove any alphabetic characters before the version number
-tag=$(echo $tag | sed "s/\([a-z_A-Z]*\)\(.*\)/\2/")
-
-# 2.10.0-M1-0098-g6f1c486d0b-2012-02-01
-printf "%s-%04d-%s-%s\n" "$tag" "$counter" "$hash" $(date "+%Y-%m-%d")
+echo "$hash"
diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat
new file mode 100644
index 0000000000..80d9aa34b1
--- /dev/null
+++ b/tools/get-scala-commit-sha.bat
@@ -0,0 +1,21 @@
+@echo off
+rem
+rem Usage: get-scala-commit-sha.bat [dir]
+rem Figures out current scala commit sha of a git clone.
+rem
+rem If no dir is given, current working dir is used.
+
+@setlocal
+set _DIR=
+if "%*"=="" (
+ for /f "delims=;" %%i in ('cd') do set "_DIR=%%i"
+) else (
+ set "_DIR=%~1"
+)
+cd %_DIR%
+
+rem TODO - truncate chars.
+git log -1 --format="%T"
+
+:end
+@endlocal
diff --git a/tools/verify-jar-cache b/tools/verify-jar-cache
index 1e86264ecb..8a376a6987 100755
--- a/tools/verify-jar-cache
+++ b/tools/verify-jar-cache
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
#
# Discovers files whose sha sum does not match the
# sha embedded in their directory name from ~/.sbt/cache/scala.
@@ -9,14 +9,14 @@ cd ~/.sbt/cache/scala
unset failed
unset removal
-[[ $1 == "-f" ]] && removal=true
+[[ "$1" == "-f" ]] && removal=true
for file in $(find . -type f); do
sha=$(echo "${file:2}" | sed 's/\/.*$//')
sum=$(shasum "$file" | sed 's/ .*$//')
if [[ $sum != $sha ]]; then
failed=true
- if [[ -n $removal ]]; then
+ if [[ -n "$removal" ]]; then
echo "Removing corrupt file $file, shasum=$sum"
rm -rf $sha
else
@@ -25,9 +25,9 @@ for file in $(find . -type f); do
fi
done
-if [[ -z $failed ]]; then
+if [[ -z "$failed" ]]; then
echo "All cached files match their shas."
-elif [[ -z $removal ]]; then
+elif [[ -z "$removal" ]]; then
echo ""
echo "Run again with -f to remove the corrupt files."
fi