author     Martin Odersky <odersky@gmail.com>   2012-04-13 16:27:46 -0700
committer  Martin Odersky <odersky@gmail.com>   2012-04-13 16:27:46 -0700
commit     2744be417e1f2c461068bb2daef9fa8f11822031 (patch)
tree       06580aa466401d86d63176b2af429e9daaf047f9
parent     b5757577c520e6339e4278fa91f725e66561f73e (diff)
parent     1d0610840bb7409f0da084d3cc94e4110dd2e2c4 (diff)
Merge branch 'master' into topic/sip18
-rw-r--r--  build.xml | 145
-rw-r--r--  gitignore.SAMPLE | 5
-rw-r--r--  src/compiler/scala/reflect/internal/StdNames.scala | 7
-rw-r--r--  src/compiler/scala/reflect/internal/SymbolTable.scala | 7
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala | 82
-rw-r--r--  src/compiler/scala/reflect/makro/runtime/AbortMacroException.scala (renamed from src/compiler/scala/reflect/makro/runtime/Errors.scala) | 0
-rw-r--r--  src/compiler/scala/reflect/reify/Reifier.scala (renamed from src/compiler/scala/reflect/reify/Reifiers.scala) | 0
-rw-r--r--  src/compiler/scala/tools/ant/Scaladoc.scala | 70
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocFactory.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Settings.scala | 127
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Uncompilable.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 156
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif | bin 0 -> 167 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png | bin 0 -> 1150 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png | bin 0 -> 646 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css | 101
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js | 104
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/Entity.scala | 107
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 99
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 501
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 47
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 299
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 55
-rw-r--r--  src/library/scala/Array.scala | 13
-rw-r--r--  src/library/scala/Option.scala | 11
-rw-r--r--  src/library/scala/Tuple2.scala | 12
-rw-r--r--  src/library/scala/Tuple3.scala | 13
-rw-r--r--  src/library/scala/concurrent/ConcurrentPackageObject.scala | 60
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 21
-rw-r--r--  src/library/scala/concurrent/Future.scala | 9
-rw-r--r--  src/library/scala/concurrent/Promise.scala | 18
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 38
-rw-r--r--  src/library/scala/concurrent/impl/Future.scala | 13
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 8
-rw-r--r--  src/library/scala/concurrent/util/Duration.scala | 174
-rw-r--r--  src/library/scala/concurrent/util/duration/Classifier.scala | 9
-rw-r--r--  src/library/scala/concurrent/util/duration/IntMult.scala | 18
-rw-r--r--  src/library/scala/concurrent/util/duration/package.scala | 30
-rw-r--r--  src/library/scala/reflect/ArrayTag.scala (renamed from src/library/scala/reflect/ArrayTags.scala) | 0
-rw-r--r--  src/library/scala/reflect/ClassTag.scala (renamed from src/library/scala/reflect/ClassTags.scala) | 0
-rw-r--r--  src/library/scala/reflect/api/Attachment.scala (renamed from src/library/scala/reflect/api/Attachments.scala) | 0
-rw-r--r--  src/library/scala/reflect/api/StandardNames.scala | 15
-rw-r--r--  src/library/scala/reflect/makro/internal/Utils.scala (renamed from src/library/scala/reflect/makro/internal/typeTagImpl.scala) | 0
-rw-r--r--  src/partest/scala/tools/partest/ScaladocModelTest.scala | 78
-rw-r--r--  test/disabled/jvm/concurrent-future.check | 2
-rw-r--r--  test/disabled/jvm/concurrent-future.scala | 40
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents.check | 22
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents.flags | 1
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents.scala | 56
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Bind_12.scala | 6
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_4.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_5.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Def_13.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_6.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_7.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_10.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_11.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_8.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_9.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Type_3.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Val_1.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Var_2.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-dont-touch-backquotedidents/Main.scala | 2
-rw-r--r--  test/files/neg/macro-deprecate-idents.check | 78
-rw-r--r--  test/files/neg/macro-deprecate-idents.flags | 1
-rw-r--r--  test/files/neg/macro-deprecate-idents.scala | 56
-rw-r--r--  test/files/neg/macro-deprecate-idents/Macros_Def_13.scala | 3
-rw-r--r--  test/files/neg/macro-deprecate-idents/Main.scala | 2
-rw-r--r--  test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala | 2
-rw-r--r--  test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala | 2
-rw-r--r--  test/files/neg/macro-keyword-bind.check | 7
-rw-r--r--  test/files/neg/macro-keyword-bind.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-bind.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Bind_12.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-class1.check | 4
-rw-r--r--  test/files/neg/macro-keyword-class1.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-class1.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Class_4.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-class2.check | 4
-rw-r--r--  test/files/neg/macro-keyword-class2.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-class2.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Class_5.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-object1.check | 4
-rw-r--r--  test/files/neg/macro-keyword-object1.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-object1.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Object_6.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-object2.check | 4
-rw-r--r--  test/files/neg/macro-keyword-object2.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-object2.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Object_7.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-package1.check | 4
-rw-r--r--  test/files/neg/macro-keyword-package1.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-package1.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Package_10.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-package2.check | 4
-rw-r--r--  test/files/neg/macro-keyword-package2.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-package2.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Package_11.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-trait1.check | 4
-rw-r--r--  test/files/neg/macro-keyword-trait1.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-trait1.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Trait_8.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-trait2.check | 4
-rw-r--r--  test/files/neg/macro-keyword-trait2.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-trait2.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Trait_9.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-type.check | 4
-rw-r--r--  test/files/neg/macro-keyword-type.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-type.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Type_3.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-val.check | 7
-rw-r--r--  test/files/neg/macro-keyword-val.flags | 1
-rw-r--r--  test/files/neg/macro-keyword-val.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Val_1.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword-var.check | 7
-rw-r--r--  test/files/neg/macro-keyword-var.flags (renamed from test/files/neg/macro-keyword.flags) | 0
-rw-r--r--  test/files/neg/macro-keyword-var.scala (renamed from test/files/neg/macro-deprecate-idents/Macros_Var_2.scala) | 0
-rw-r--r--  test/files/neg/macro-keyword.check | 49
-rw-r--r--  test/files/neg/macro-keyword/Macros_Bind_12.scala | 6
-rw-r--r--  test/files/neg/macro-keyword/Macros_Class_4.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Class_5.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Def_13.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Object_6.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Object_7.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Package_10.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Package_11.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Trait_8.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Trait_9.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Type_3.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Val_1.scala | 3
-rw-r--r--  test/files/neg/macro-keyword/Macros_Var_2.scala | 3
-rw-r--r--  test/files/neg/t5510.check | 19
-rw-r--r--  test/files/neg/t5510.scala | 7
-rw-r--r--  test/files/run/t5535.check | 20
-rw-r--r--  test/files/run/t5535.scala | 10
-rw-r--r--  test/files/run/t5583.check | 20
-rw-r--r--  test/files/run/t5583.scala | 11
-rw-r--r--  test/scaladoc/resources/implicits-base-res.scala | 143
-rw-r--r--  test/scaladoc/resources/implicits-chaining-res.scala | 48
-rw-r--r--  test/scaladoc/resources/implicits-elimination-res.scala | 9
-rw-r--r--  test/scaladoc/resources/implicits-scopes-res.scala | 51
-rw-r--r--  test/scaladoc/run/SI-5373.check | 2
-rw-r--r--  test/scaladoc/run/SI-5373.scala | 6
-rw-r--r--  test/scaladoc/run/implicits-base.check | 1
-rw-r--r--  test/scaladoc/run/implicits-base.scala | 179
-rw-r--r--  test/scaladoc/run/implicits-chaining.check | 1
-rw-r--r--  test/scaladoc/run/implicits-chaining.scala | 64
-rw-r--r--  test/scaladoc/run/implicits-elimination.check | 1
-rw-r--r--  test/scaladoc/run/implicits-elimination.scala | 22
-rw-r--r--  test/scaladoc/run/implicits-scopes.check | 1
-rw-r--r--  test/scaladoc/run/implicits-scopes.scala | 76
-rw-r--r--  test/scaladoc/scalacheck/CommentFactoryTest.scala | 5
-rwxr-xr-x  tools/binary-repo-lib.sh | 59
-rwxr-xr-x  tools/cleanup-commit | 130
154 files changed, 3028 insertions, 866 deletions
diff --git a/build.xml b/build.xml
index de58ed8909..1a0e85a6f0 100644
--- a/build.xml
+++ b/build.xml
@@ -8,10 +8,10 @@ SuperSabbus for Scala core, builds the scala library and compiler. It can also p
<!-- ===========================================================================
END-USER TARGETS
============================================================================ -->
-
+
<target name="build" depends="pack.done"
description="Builds the Scala compiler and library. Executables are in 'build/pack/bin'."/>
-
+
<target name="build-opt"
description="Builds the optimised Scala compiler and library. Executables are in 'build/pack/bin'.">
<antcall target="build">
@@ -34,20 +34,20 @@ END-USER TARGETS
<target name="docs" depends="docs.done"
description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
-
+
<target name="docscomp" depends="docs.comp"
description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
-
+
<target name="docsclean" depends="docs.clean"
description="Removes generated documentation. Distributions are untouched."/>
-
+
<target name="dist"
description="Makes a new distribution and tests it. Will remove existing binaries and documentation.">
<antcall target="locker.clean"/>
<antcall target="docs.clean"/>
<antcall target="all.done"/>
</target>
-
+
<target name="dist-opt"
description="Makes a new optimised distribution and tests it. Will remove existing binaries and documentation.">
<antcall target="dist">
@@ -67,7 +67,7 @@ END-USER TARGETS
<target name="distclean" depends="dist.clean"
description="Removes all distributions. Binaries and documentation are untouched."/>
-
+
<target name="replacestarr"
description="Replaces the Starr compiler and library by fresh ones built from current sources and tests them.">
<fail message="This target is not available on Windows. Use 'ant replacestarrwin' instead.">
@@ -81,7 +81,7 @@ END-USER TARGETS
<antcall target="locker.clean"/>
<antcall target="test.done"/>
</target>
-
+
<target name="replacestarr-opt"
description="Replaces the Starr compiler and library by fresh, optimised ones built from current sources and tests them.">
<antcall target="replacestarr">
@@ -103,7 +103,7 @@ END-USER TARGETS
<antcall target="locker.clean"/>
<antcall target="test.done"/>
</target>
-
+
<target name="replacelocker"
description="Replaces the Locker compiler and library by fresh ones built from current sources.">
<antcall target="palo.clean"/>
@@ -144,7 +144,7 @@ END-USER TARGETS
description="Requires forkjoin library to be rebuilt. Add this target before any other if class file format is incompatible.">
<property name="forkjoin.outdated" value="yes"/>
</target>
-
+
<!-- ===========================================================================
PROPERTIES
============================================================================ -->
@@ -159,7 +159,7 @@ PROPERTIES
<property name="lib-ant.dir" value="${lib.dir}/ant"/>
<property name="src.dir" value="${basedir}/src"/>
<property name="partest.dir" value="${basedir}/test"/>
-
+
<!-- For developers: any jars placed in this dir will be added to the classpath
of all targets and copied into quick/pack/etc builds. -->
<property name="lib-extra.dir" value="${lib.dir}/extra"/>
@@ -170,7 +170,7 @@ PROPERTIES
<property file="${basedir}/build.number"/>
<!-- Additional command line arguments for scalac. They are added to all build targets -->
- <property name="scalac.args" value="-Xmacros"/>
+ <property name="scalac.args" value=""/>
<property name="javac.args" value=""/>
<!-- Sets location of pre-compiled libraries -->
@@ -281,7 +281,7 @@ INITIALISATION
<condition property="os.win">
<os family="windows"/>
</condition>
-
+
<exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
<exec osfamily="windows" executable="tools/get-scala-commit-sha.bat" outputproperty="git.commit.sha" failifexecutionfails="false" />
<exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
@@ -314,12 +314,13 @@ INITIALISATION
<target name="init.version.done" depends="init.version.release, init.version.snapshot"/>
<target name="init" depends="init.jars, init.maven.jars, init.version.done">
+ <property name="scalac.args.always" value="-Xmacros" />
<!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
<property name="scalac.args.optimise" value=""/>
<!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
This is to facilitate testing new command line options which do not yet exist in starr. -->
<property name="scalac.args.quickonly" value=""/>
- <property name="scalac.args.all" value="${scalac.args} ${scalac.args.optimise}"/>
+ <property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
<property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
<!-- Setting-up Ant contrib tasks -->
<taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
@@ -336,7 +337,7 @@ INITIALISATION
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
-
+
<path id="lib.extra">
<!-- needs ant 1.7.1 -->
<!-- <fileset dir="${lib-extra.dir}" erroronmissingdir="false"> -->
@@ -386,7 +387,7 @@ INITIALISATION
<pathelement location="${lib.dir}/forkjoin.jar"/>
<path refid="lib.extra"/>
</path>
- <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
+ <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
</target>
<!-- ===========================================================================
@@ -449,7 +450,7 @@ LOCAL REFERENCE BUILD (LOCKER)
<touch file="${build-locker.dir}/library.complete" verbose="no"/>
<stopwatch name="locker.lib.timer" action="total"/>
</target>
-
+
<target name="locker.pre-comp" depends="locker.lib" unless="locker.available">
<condition property="locker.comp.needed">
<not><available file="${build-locker.dir}/compiler.complete"/></not>
@@ -509,11 +510,11 @@ LOCAL REFERENCE BUILD (LOCKER)
<path refid="aux.libs"/>
</path>
</target>
-
+
<target name="locker.clean" depends="palo.clean">
<delete dir="${build-locker.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
</target>
-
+
<target name="locker.unlock.pre-lib">
<uptodate property="locker.lib.available" targetfile="${build-locker.dir}/library.complete">
<srcfiles dir="${src.dir}">
@@ -635,7 +636,7 @@ PACKED LOCKER BUILD (PALO)
<!-- ===========================================================================
QUICK BUILD (QUICK)
============================================================================ -->
-
+
<target name="quick.start" depends="locker.done"/>
<target name="quick.pre-lib" depends="quick.start">
@@ -659,7 +660,7 @@ QUICK BUILD (QUICK)
classpath="${build-quick.dir}/classes/library"
includes="**/*.java"
target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
+ <compilerarg line="${javac.args}"/>
</javac>
<javac
srcdir="${src.dir}/actors"
@@ -723,13 +724,13 @@ QUICK BUILD (QUICK)
<touch file="${build-quick.dir}/library.complete" verbose="no"/>
<stopwatch name="quick.lib.timer" action="total"/>
</target>
-
+
<target name="quick.newlibs" depends="quick.lib" if="libs.outdated">
<antcall target="libs.done" inheritRefs="true"/>
<property name="fjbg.jar" value="${build-libs.dir}/fjbg.jar"/>
<property name="msil.jar" value="${build-libs.dir}/msil.jar"/>
</target>
-
+
<target name="quick.libs" depends="quick.newlibs" unless="libs.outdated">
<property name="fjbg.jar" value="${lib.dir}/fjbg.jar"/>
<property name="msil.jar" value="${lib.dir}/msil.jar"/>
@@ -837,7 +838,7 @@ QUICK BUILD (QUICK)
<touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
<stopwatch name="quick.plugins.timer" action="total"/>
</target>
-
+
<target name="quick.pre-scalacheck" depends="quick.plugins">
<uptodate property="quick.scalacheck.available" targetfile="${build-quick.dir}/scalacheck.complete">
<srcfiles dir="${src.dir}/scalacheck"/>
@@ -862,7 +863,7 @@ QUICK BUILD (QUICK)
<touch file="${build-quick.dir}/scalacheck.complete" verbose="no"/>
<stopwatch name="quick.scalacheck.timer" action="total"/>
</target>
-
+
<target name="quick.pre-scalap" depends="quick.scalacheck">
<uptodate property="quick.scalap.available" targetfile="${build-quick.dir}/scalap.complete">
<srcfiles dir="${src.dir}/scalap"/>
@@ -897,7 +898,7 @@ QUICK BUILD (QUICK)
<srcfiles dir="${src.dir}/partest"/>
</uptodate>
</target>
-
+
<target name="quick.partest" depends="quick.pre-partest" unless="quick.partest.available">
<stopwatch name="quick.partest.timer"/>
<mkdir dir="${build-quick.dir}/classes/partest"/>
@@ -992,7 +993,7 @@ QUICK BUILD (QUICK)
<chmod perm="ugo+rx" file="${build-quick.dir}/bin/scalap"/>
<touch file="${build-quick.dir}/bin.complete" verbose="no"/>
</target>
-
+
<target name="quick.done" depends="quick.bin">
<path id="quick.classpath">
<pathelement location="${build-quick.dir}/classes/library"/>
@@ -1008,9 +1009,9 @@ QUICK BUILD (QUICK)
<!-- ===========================================================================
PACKED QUICK BUILD (PACK)
============================================================================ -->
-
+
<target name="pack.start" depends="quick.done"/>
-
+
<target name="pack.pre-lib" depends="pack.start">
<uptodate
property="pack.lib.available"
@@ -1049,7 +1050,7 @@ PACKED QUICK BUILD (PACK)
</fileset>
</jar>
</target>
-
+
<target name="pack.pre-comp" depends="pack.lib">
<uptodate
property="pack.comp.available"
@@ -1077,7 +1078,7 @@ PACKED QUICK BUILD (PACK)
</fileset>
</copy>
</target>
-
+
<target name="pack.pre-plugins" depends="pack.comp">
<uptodate
property="pack.plugins.available"
@@ -1091,13 +1092,13 @@ PACKED QUICK BUILD (PACK)
<fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
</jar>
</target>
-
+
<target name="pack.scalacheck" depends="pack.plugins">
<jar destfile="${build-pack.dir}/lib/scalacheck.jar">
<fileset dir="${build-quick.dir}/classes/scalacheck"/>
</jar>
</target>
-
+
<target name="pack.pre-partest" depends="pack.scalacheck">
<uptodate
property="pack.partest.available"
@@ -1111,7 +1112,7 @@ PACKED QUICK BUILD (PACK)
<fileset dir="${build-quick.dir}/classes/partest"/>
</jar>
</target>
-
+
<target name="pack.pre-scalap" depends="pack.partest">
<uptodate
property="pack.scalap.available"
@@ -1123,10 +1124,10 @@ PACKED QUICK BUILD (PACK)
<mkdir dir="${build-pack.dir}/lib"/>
<jar destfile="${build-pack.dir}/lib/scalap.jar">
<fileset dir="${build-quick.dir}/classes/scalap"/>
- <fileset file="${src.dir}/scalap/decoder.properties"/>
+ <fileset file="${src.dir}/scalap/decoder.properties"/>
</jar>
</target>
-
+
<target name="pack.pre-bin" depends="pack.scalap">
<uptodate
property="pack.bin.available"
@@ -1170,7 +1171,7 @@ PACKED QUICK BUILD (PACK)
<chmod perm="ugo+rx" file="${build-pack.dir}/bin/scalap"/>
<touch file="${build-pack.dir}/bin.complete" verbose="no"/>
</target>
-
+
<target name="pack.done" depends="pack.bin">
<path id="pack.classpath">
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
@@ -1193,9 +1194,9 @@ PACKED QUICK BUILD (PACK)
<!-- ===========================================================================
BOOTSTRAPPING BUILD (STRAP)
============================================================================ -->
-
+
<target name="strap.start" depends="pack.done"/>
-
+
<target name="strap.pre-lib" depends="strap.start">
<uptodate property="strap.lib.available" targetfile="${build-strap.dir}/library.complete">
<srcfiles dir="${src.dir}">
@@ -1280,7 +1281,7 @@ BOOTSTRAPPING BUILD (STRAP)
<touch file="${build-strap.dir}/library.complete" verbose="no"/>
<stopwatch name="strap.lib.timer" action="total"/>
</target>
-
+
<target name="strap.pre-comp" depends="strap.lib">
<uptodate property="strap.comp.available" targetfile="${build-strap.dir}/compiler.complete">
<srcfiles dir="${src.dir}/compiler"/>
@@ -1374,7 +1375,7 @@ BOOTSTRAPPING BUILD (STRAP)
<touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
<stopwatch name="strap.plugins.timer" action="total"/>
</target>
-
+
<target name="strap.scalacheck" depends="strap.plugins">
<mkdir dir="${build-strap.dir}/classes/scalacheck"/>
<scalacfork
@@ -1418,13 +1419,13 @@ BOOTSTRAPPING BUILD (STRAP)
<touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
<stopwatch name="strap.scalap.timer" action="total"/>
</target>
-
+
<target name="strap.pre-partest" depends="strap.scalap">
<uptodate property="strap.partest.available" targetfile="${build-strap.dir}/partest.complete">
<srcfiles dir="${src.dir}/partest"/>
</uptodate>
</target>
-
+
<target name="strap.partest" depends="strap.pre-partest" unless="strap.partest.available">
<stopwatch name="strap.partest.timer"/>
<mkdir dir="${build-strap.dir}/classes/partest"/>
@@ -1476,9 +1477,9 @@ BOOTSTRAPPING BUILD (STRAP)
<!-- ===========================================================================
LIBRARIES (MSIL, FJBG maybe later)
============================================================================ -->
-
+
<target name="libs.start"/>
-
+
<target name="libs.pre-forkjoin" depends="libs.start">
<property name="java6.home" value="/home/linuxsoft/apps/java-1.6"/>
<fail message="Compiling forkjoin.jar requires java 1.6. Please set the property `java6.home` in build.properties or using `-Djava6.home=/path/to/java6`">
@@ -1494,7 +1495,7 @@ LIBRARIES (MSIL, FJBG maybe later)
</srcfiles>
</uptodate>
</target>
-
+
<target name="libs.forkjoin" depends="libs.pre-forkjoin" unless="libs.forkjoin.available">
<mkdir dir="${build-libs.dir}/classes/forkjoin"/>
<javac
@@ -1511,16 +1512,16 @@ LIBRARIES (MSIL, FJBG maybe later)
</javac>
<touch file="${build-libs.dir}/forkjoin.complete" verbose="no"/>
</target>
-
+
<target name="libs.pre-forkjoinpack" depends="libs.forkjoin">
</target>
-
+
<target name="libs.forkjoinpack" depends="libs.pre-forkjoinpack" unless="libs.forkjoinpack.available">
<jar destfile="${build-libs.dir}/forkjoin.jar">
<fileset dir="${build-libs.dir}/classes/forkjoin"/>
</jar>
</target>
-
+
<target name="libs.pre-msil" depends="libs.start">
<uptodate property="libs.msil.available" targetfile="${build-libs.dir}/msil.complete">
<srcfiles dir="${src.dir}/msil">
@@ -1529,7 +1530,7 @@ LIBRARIES (MSIL, FJBG maybe later)
</srcfiles>
</uptodate>
</target>
-
+
<target name="libs.msil" depends="libs.pre-msil" unless="libs.msil.available">
<mkdir dir="${build-libs.dir}/classes/msil"/>
<javac
@@ -1556,16 +1557,16 @@ LIBRARIES (MSIL, FJBG maybe later)
</scalacfork>
<touch file="${build-libs.dir}/msil.complete" verbose="no"/>
</target>
-
+
<target name="libs.pre-msilpack" depends="libs.msil">
</target>
-
+
<target name="libs.msilpack" depends="libs.pre-msilpack" unless="libs.msilpack.available">
<jar destfile="${build-libs.dir}/msil.jar">
<fileset dir="${build-libs.dir}/classes/msil"/>
</jar>
</target>
-
+
<target name="libs.pre-fjbg" depends="libs.start">
<uptodate property="libs.fjbg.available" targetfile="${build-libs.dir}/fjbg.complete">
<srcfiles dir="${src.dir}/fjbg">
@@ -1574,7 +1575,7 @@ LIBRARIES (MSIL, FJBG maybe later)
</srcfiles>
</uptodate>
</target>
-
+
<target name="libs.fjbg" depends="libs.pre-fjbg" unless="libs.fjbg.available">
<mkdir dir="${build-libs.dir}/classes/fjbg"/>
<javac
@@ -1588,10 +1589,10 @@ LIBRARIES (MSIL, FJBG maybe later)
</javac>
<touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
</target>
-
+
<target name="libs.pre-fjbgpack" depends="libs.fjbg">
</target>
-
+
<target name="libs.fjbgpack" depends="libs.pre-fjbgpack" unless="libs.fjbgpack.available">
<jar destfile="${build-libs.dir}/fjbg.jar">
<fileset dir="${build-libs.dir}/classes/fjbg"/>
@@ -1601,7 +1602,7 @@ LIBRARIES (MSIL, FJBG maybe later)
<target name="libs.done" depends="libs.msilpack, libs.fjbgpack"/>
<target name="forkjoin.done" depends="libs.forkjoinpack"/>
-
+
<target name="libs.clean" depends="pack.clean">
<delete dir="${build-libs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
</target>
@@ -1609,7 +1610,7 @@ LIBRARIES (MSIL, FJBG maybe later)
<!-- ===========================================================================
DOCUMENTATION
============================================================================ -->
-
+
<target name="docs.start" depends="pack.done">
<macrodef name="doc-uptodate-check">
<attribute name="name" />
@@ -1639,7 +1640,7 @@ DOCUMENTATION
<property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
<echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
</target>
-
+
<target name="docs.pre-lib" depends="docs.start">
<doc-uptodate-check name="library" srcdir="${src.dir}">
<source-includes>
@@ -1658,13 +1659,14 @@ DOCUMENTATION
destdir="${build-docs.dir}/library"
doctitle="Scala Standard Library API (Scaladoc)"
docversion="${version.number}"
- docfooter="epfl"
+ docfooter="epfl"
docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
docUncompilable="${src.dir}/library-aux"
sourcepath="${src.dir}"
classpathref="pack.classpath"
addparams="${scalac.args.all}"
- docRootContent="${src.dir}/library/rootdoc.txt">
+ docRootContent="${src.dir}/library/rootdoc.txt"
+ implicits="on" diagrams="on">
<src>
<files includes="${src.dir}/actors"/>
<files includes="${src.dir}/library/scala"/>
@@ -1745,7 +1747,8 @@ DOCUMENTATION
classpathref="pack.classpath"
srcdir="${src.dir}/compiler"
docRootContent="${src.dir}/compiler/rootdoc.txt"
- addparams="${scalac.args.all}">
+ addparams="${scalac.args.all}"
+ implicits="on" diagrams="on">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/compiler.complete" verbose="no"/>
@@ -1766,7 +1769,8 @@ DOCUMENTATION
sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/jline/src/main/java"
- addparams="${scalac.args.all}">
+ addparams="${scalac.args.all}"
+ implicits="on" diagrams="on">
<include name="**/*.scala"/>
<include name="**/*.java"/>
</scaladoc>
@@ -1789,7 +1793,8 @@ DOCUMENTATION
sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/scalap"
- addparams="${scalac.args.all}">
+ addparams="${scalac.args.all}"
+ implicits="on" diagrams="on">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/scalap.complete" verbose="no"/>
@@ -1810,7 +1815,8 @@ DOCUMENTATION
sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/partest"
- addparams="${scalac.args.all}">
+ addparams="${scalac.args.all}"
+ implicits="on" diagrams="on">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/partest.complete" verbose="no"/>
@@ -1831,7 +1837,8 @@ DOCUMENTATION
sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/continuations/plugin"
- addparams="${scalac.args.all}">
+ addparams="${scalac.args.all}"
+ implicits="on" diagrams="on">
<include name="**/*.scala"/>
</scaladoc>
<touch file="${build-docs.dir}/continuations-plugin.complete" verbose="no"/>
@@ -1863,7 +1870,7 @@ BOOTRAPING TEST AND TEST SUITE
<target name="test.classload" depends="pack.done">
<classloadVerify classpath="${build-pack.dir}/lib/scala-library.jar" />
</target>
-
+
<!-- this target will run only those tests found in test/debug -->
<target name="test.debug">
<antcall target="test.suite">
@@ -2154,7 +2161,7 @@ STABLE REFERENCE (STARR)
<!-- ===========================================================================
FORWARDED TARGETS FOR PACKAGING
============================================================================ -->
-
+
<target name="distpack" depends="dist.done, docs.all">
<ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
</target>
@@ -2169,7 +2176,7 @@ FORWARDED TARGETS FOR PACKAGING
<param name="scalac.args.optimise" value="-optimise"/>
</antcall>
</target>
-
+
<target name="distpack-maven-opt"
description="Builds an optimised maven distribution.">
<antcall target="distpack-maven">
diff --git a/gitignore.SAMPLE b/gitignore.SAMPLE
index 3c15a5de9e..483ad4caca 100644
--- a/gitignore.SAMPLE
+++ b/gitignore.SAMPLE
@@ -27,4 +27,9 @@
/src/intellij/*.iml
/src/intellij/*.ipr
/src/intellij/*.iws
+/.cache
+/.idea
+/.settings
+# bak files produced by ./cleanup-commit
+*.bak
diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala
index 3679daa9a2..6f6fc8e95c 100644
--- a/src/compiler/scala/reflect/internal/StdNames.scala
+++ b/src/compiler/scala/reflect/internal/StdNames.scala
@@ -270,8 +270,6 @@ trait StdNames extends NameManglers { self: SymbolTable =>
case _ => newTermName("x$" + i)
}
- // [Eugene to Paul] see comments in StandardNames.scala to find out why's this here
- val QQQ = ???
val ??? = encode("???")
val wrapRefArray: NameType = "wrapRefArray"
@@ -643,14 +641,9 @@ trait StdNames extends NameManglers { self: SymbolTable =>
val ZOR = encode("||")
// unary operators
- // [Eugene to Paul] see comments in StandardNames.scala to find out why's this here
- val UNARY_TILDE = UNARY_~
val UNARY_~ = encode("unary_~")
- val UNARY_PLUS = UNARY_+
val UNARY_+ = encode("unary_+")
- val UNARY_MINUS = UNARY_-
val UNARY_- = encode("unary_-")
- val UNARY_NOT = UNARY_!
val UNARY_! = encode("unary_!")
// Grouped here so Cleanup knows what tests to perform.
diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala
index 9f67c5aa01..0268339ed0 100644
--- a/src/compiler/scala/reflect/internal/SymbolTable.scala
+++ b/src/compiler/scala/reflect/internal/SymbolTable.scala
@@ -52,6 +52,13 @@ abstract class SymbolTable extends api.Universe
/** Overridden when we know more about what was happening during a failure. */
def supplementErrorMessage(msg: String): String = msg
+
+ private[scala] def printCaller[T](msg: String)(result: T) = {
+ Console.err.println(msg + ": " + result)
+ Console.err.println("Called from:")
+ (new Throwable).getStackTrace.drop(2).take(15).foreach(Console.err.println)
+ result
+ }
private[scala] def printResult[T](msg: String)(result: T) = {
Console.err.println(msg + ": " + result)
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index b3d425f0c5..3efbe4b4df 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -917,14 +917,10 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def directObjectString = safeToString
- /** A test whether a type contains any unification type variables. */
+ /** A test whether a type contains any unification type variables.
+ * Overridden with custom logic except where trivially true.
+ */
def isGround: Boolean = this match {
- case TypeVar(_, constr) =>
- constr.instValid && constr.inst.isGround
- case TypeRef(pre, sym, args) =>
- sym.isPackageClass || pre.isGround && (args forall (_.isGround))
- case SingleType(pre, sym) =>
- sym.isPackageClass || pre.isGround
case ThisType(_) | NoPrefix | WildcardType | NoType | ErrorType | ConstantType(_) =>
true
case _ =>
@@ -1260,6 +1256,8 @@ trait Types extends api.Types { self: SymbolTable =>
*/
abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType {
override val isTrivial: Boolean = pre.isTrivial
+ override def isGround = sym.isPackageClass || pre.isGround
+
// override def isNullable = underlying.isNullable
override def isNotNull = underlying.isNotNull
private[reflect] var underlyingCache: Type = NoType
@@ -2145,6 +2143,11 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ override def isGround = (
+ sym.isPackageClass
+ || pre.isGround && args.forall(_.isGround)
+ )
+
def etaExpand: Type = {
// must initialise symbol, see test/files/pos/ticket0137.scala
val tpars = initializedTypeParams
@@ -2237,10 +2240,10 @@ trait Types extends api.Types { self: SymbolTable =>
parentsString(thisInfo.parents) + refinementString
else rest
)
- private def customToString = this match {
- case TypeRef(_, RepeatedParamClass, arg :: _) => arg + "*"
- case TypeRef(_, ByNameParamClass, arg :: _) => "=> " + arg
- case _ =>
+ private def customToString = sym match {
+ case RepeatedParamClass => args.head + "*"
+ case ByNameParamClass => "=> " + args.head
+ case _ =>
def targs = normalize.typeArgs
if (isFunctionType(this)) {
@@ -2684,21 +2687,35 @@ trait Types extends api.Types { self: SymbolTable =>
else new TypeConstraint
}
def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
+ def untouchable(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = true)
+ def apply(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = false)
def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
- def apply(tparam: Symbol): TypeVar = apply(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams)
+ def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar =
+ createTypeVar(origin, constr, args, params, untouchable = false)
/** This is the only place TypeVars should be instantiated.
*/
- def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar = {
+ private def createTypeVar(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol], untouchable: Boolean): TypeVar = {
val tv = (
- if (args.isEmpty && params.isEmpty) new TypeVar(origin, constr)
- else if (args.size == params.size) new AppliedTypeVar(origin, constr, params zip args)
- else if (args.isEmpty) new HKTypeVar(origin, constr, params)
+ if (args.isEmpty && params.isEmpty) {
+ if (untouchable) new TypeVar(origin, constr) with UntouchableTypeVar
+ else new TypeVar(origin, constr)
+ }
+ else if (args.size == params.size) {
+ if (untouchable) new AppliedTypeVar(origin, constr, params zip args) with UntouchableTypeVar
+ else new AppliedTypeVar(origin, constr, params zip args)
+ }
+ else if (args.isEmpty) {
+ if (untouchable) new HKTypeVar(origin, constr, params) with UntouchableTypeVar
+ else new HKTypeVar(origin, constr, params)
+ }
else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
)
trace("create", "In " + tv.originLocation)(tv)
}
+ private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar =
+ createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
}
/** Repack existential types, otherwise they sometimes get unpacked in the
@@ -2746,6 +2763,23 @@ trait Types extends api.Types { self: SymbolTable =>
zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
)
}
+
+ trait UntouchableTypeVar extends TypeVar {
+ override def untouchable = true
+ override def isGround = true
+ override def registerTypeEquality(tp: Type, typeVarLHS: Boolean) = tp match {
+ case t: TypeVar if !t.untouchable =>
+ t.registerTypeEquality(this, !typeVarLHS)
+ case _ =>
+ super.registerTypeEquality(tp, typeVarLHS)
+ }
+ override def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = tp match {
+ case t: TypeVar if !t.untouchable =>
+ t.registerBound(this, !isLowerBound, isNumericBound)
+ case _ =>
+ super.registerBound(tp, isLowerBound, isNumericBound)
+ }
+ }
/** A class representing a type variable: not used after phase `typer`.
*
@@ -2760,6 +2794,7 @@ trait Types extends api.Types { self: SymbolTable =>
val origin: Type,
val constr0: TypeConstraint
) extends Type {
+ def untouchable = false // by other typevars
override def params: List[Symbol] = Nil
override def typeArgs: List[Type] = Nil
override def isHigherKinded = false
@@ -2772,6 +2807,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
var constr = constr0
def instValid = constr.instValid
+ override def isGround = instValid && constr.inst.isGround
/** The variable's skolemization level */
val level = skolemizationLevel
@@ -2940,14 +2976,13 @@ trait Types extends api.Types { self: SymbolTable =>
// would be pointless. In this case, each check we perform causes us to lose specificity: in
// the end the best we'll do is the least specific type we tested against, since the typevar
// does not see these checks as "probes" but as requirements to fulfill.
- // TODO: the `suspended` flag can be used to poke around with leaving a trace
+ // TODO: can the `suspended` flag be used to poke around without leaving a trace?
//
// So the strategy used here is to test first the type, then the direct parents, and finally
// to fall back on the individual base types. This warrants eventual re-examination.
// AM: I think we could use the `suspended` flag to avoid side-effecting during unification
-
- if (suspended) // constraint accumulation is disabled
+ if (suspended) // constraint accumulation is disabled
checkSubtype(tp, origin)
else if (constr.instValid) // type var is already set
checkSubtype(tp, constr.inst)
@@ -3045,7 +3080,7 @@ trait Types extends api.Types { self: SymbolTable =>
override def safeToString = (
if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
else if (constr.inst ne NoType) "" + constr.inst
- else "?" + levelString + originName
+ else (if(untouchable) "!?" else "?") + levelString + originName
)
override def kind = "TypeVar"
@@ -4742,7 +4777,7 @@ trait Types extends api.Types { self: SymbolTable =>
val sym1 = adaptToNewRun(sym.owner.thisType, sym)
if (sym1 == sym) tp else ThisType(sym1)
} catch {
- case ex: MissingTypeControl =>
+ case ex: MissingTypeControl =>
tp
}
case SingleType(pre, sym) =>
@@ -6053,8 +6088,9 @@ trait Types extends api.Types { self: SymbolTable =>
def stripType(tp: Type) = tp match {
case ExistentialType(_, res) =>
res
- case TypeVar(_, constr) =>
- if (constr.instValid) constr.inst
+ case tv@TypeVar(_, constr) =>
+ if (tv.instValid) constr.inst
+ else if (tv.untouchable) tv
else abort("trying to do lub/glb of typevar "+tp)
case t => t
}
diff --git a/src/compiler/scala/reflect/makro/runtime/Errors.scala b/src/compiler/scala/reflect/makro/runtime/AbortMacroException.scala
index d78eae9237..d78eae9237 100644
--- a/src/compiler/scala/reflect/makro/runtime/Errors.scala
+++ b/src/compiler/scala/reflect/makro/runtime/AbortMacroException.scala
diff --git a/src/compiler/scala/reflect/reify/Reifiers.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 16c26734b2..16c26734b2 100644
--- a/src/compiler/scala/reflect/reify/Reifiers.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index c92474b33e..daa08ef8a7 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -75,6 +75,11 @@ class Scaladoc extends ScalaMatchingTask {
*/
object Flag extends PermissibleValue {
val values = List("yes", "no", "on", "off")
+ def getBooleanValue(value: String, flagName: String): Boolean =
+ if (Flag.isPermissible(value))
+ return ("yes".equals(value) || "on".equals(value))
+ else
+ buildError("Unknown " + flagName + " flag '" + value + "'")
}
/** The directories that contain source files to compile. */
@@ -127,6 +132,25 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the ant task not to fail in the event of errors */
private var nofail: Boolean = false
+ /** Instruct the scaladoc tool to document implicit conversions */
+ private var docImplicits: Boolean = false
+
+ /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */
+ private var docImplicitsShowAll: Boolean = false
+
+ /** Instruct the scaladoc tool to output implicits debugging information */
+ private var docImplicitsDebug: Boolean = false
+
+ /** Instruct the scaladoc tool to create diagrams */
+ private var docDiagrams: Boolean = false
+
+ /** Instruct the scaladoc tool to output diagram creation debugging information */
+ private var docDiagramsDebug: Boolean = false
+
+ /** Instruct the scaladoc tool to use the binary given to create diagrams */
+ private var docDiagramsDotPath: Option[String] = None
+
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
@@ -361,12 +385,39 @@ class Scaladoc extends ScalaMatchingTask {
*
* @param input One of the flags `yes/no` or `on/off`. Default if no/off.
*/
- def setNoFail(input: String) {
- if (Flag.isPermissible(input))
- nofail = "yes".equals(input) || "on".equals(input)
- else
- buildError("Unknown nofail flag '" + input + "'")
- }
+ def setNoFail(input: String) =
+ nofail = Flag.getBooleanValue(input, "nofail")
+
+ /** Set the `implicits` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicits(input: String) =
+ docImplicits = Flag.getBooleanValue(input, "implicits")
+
+ /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to
+ * convert to from the default scope
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicitsShowAll(input: String) =
+ docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll")
+
+ /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicitsDebug(input: String) =
+ docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug")
+
+ /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setDiagrams(input: String) =
+ docDiagrams = Flag.getBooleanValue(input, "diagrams")
+
+ /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setDiagramsDebug(input: String) =
+ docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug")
+
+ /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name,
+ * eg: /usr/bin/dot) */
+ def setDiagramsDotPath(input: String) =
+ docDiagramsDotPath = Some(input)
/*============================================================================*\
** Properties getters **
@@ -560,6 +611,13 @@ class Scaladoc extends ScalaMatchingTask {
docSettings.deprecation.value = deprecation
docSettings.unchecked.value = unchecked
+ docSettings.docImplicits.value = docImplicits
+ docSettings.docImplicitsDebug.value = docImplicitsDebug
+ docSettings.docImplicitsShowAll.value = docImplicitsShowAll
+ docSettings.docDiagrams.value = docDiagrams
+ docSettings.docDiagramsDebug.value = docDiagramsDebug
+ if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
+
if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath()
log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index ff4e2f3fb5..8e7eeed3cc 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -252,7 +252,7 @@ trait DocComments { self: Global =>
def replaceInheritdoc(childSection: String, parentSection: => String) =
if (childSection.indexOf("@inheritdoc") == -1)
childSection
- else
+ else
childSection.replaceAllLiterally("@inheritdoc", parentSection)
def getParentSection(section: (Int, Int)): String = {
@@ -275,9 +275,9 @@ trait DocComments { self: Global =>
}
child.substring(section._1, section._1 + 7) match {
- case param@("@param "|"@tparam"|"@throws") =>
+ case param@("@param "|"@tparam"|"@throws") =>
sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
- case _ =>
+ case _ =>
sectionString(extractSectionTag(child, section), parentTagMap)
}
}
@@ -367,7 +367,7 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym)
+ case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 583da36ead..87072f3172 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -253,6 +253,12 @@ trait Scanners extends ScannersCommon {
lastOffset -= 1
}
if (inStringInterpolation) fetchStringPart() else fetchToken()
+ if(token == ERROR) {
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
+ sepRegions = sepRegions.tail
+ }
} else {
this copyFrom next
next.token = EMPTY
@@ -350,7 +356,7 @@ trait Scanners extends ScannersCommon {
putChar(ch)
nextChar()
getIdentRest()
- if (ch == '"' && token == IDENTIFIER && settings.Xexperimental.value)
+ if (ch == '"' && token == IDENTIFIER)
token = INTERPOLATIONID
case '<' => // is XMLSTART?
val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
@@ -718,7 +724,7 @@ trait Scanners extends ScannersCommon {
do {
putChar(ch)
nextRawChar()
- } while (Character.isUnicodeIdentifierPart(ch))
+ } while (ch != SU && Character.isUnicodeIdentifierPart(ch))
next.token = IDENTIFIER
next.name = newTermName(cbuf.toString)
cbuf.clear()
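
Aside on the Scanners.scala hunk above: dropping the `settings.Xexperimental.value` guard means the scanner now emits INTERPOLATIONID for an identifier immediately followed by a double quote without any special flag. A minimal, illustrative Scala sketch of the surface syntax this enables (it assumes a 2.10-era compiler that includes this change; the object and value names are made up for the example):

object InterpolationDemo extends App {
  val user  = "scaladoc"
  val count = 3
  // The `s` prefix is tokenized as INTERPOLATIONID unconditionally after this change.
  println(s"Hello, $user: you have ${count + 1} new messages")
}
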
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index f32564f097..76a8b87ba7 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -58,7 +58,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
case Right(sourceCode) =>
new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
}
-
+
if (reporter.hasErrors)
return None
@@ -80,6 +80,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val modelFactory = (
new { override val global: compiler.type = compiler }
with model.ModelFactory(compiler, settings)
+ with model.ModelFactoryImplicitSupport
with model.comment.CommentFactory
with model.TreeFactory {
override def templateShouldDocument(sym: compiler.Symbol) =
@@ -89,7 +90,8 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
modelFactory.makeModel match {
case Some(madeModel) =>
- println("model contains " + modelFactory.templatesCount + " documentable templates")
+ if (settings.reportModel)
+ println("model contains " + modelFactory.templatesCount + " documentable templates")
Some(madeModel)
case None =>
println("no documentable class found in compilation units")
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 5d3fc6c044..17bfb7d21d 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -88,6 +88,38 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
""
)
+ val docImplicits = BooleanSetting (
+ "-implicits",
+ "Document members inherited by implicit conversions."
+ )
+
+ val docImplicitsDebug = BooleanSetting (
+ "-implicits-debug",
+ "Show debugging information for members inherited by implicit conversions."
+ )
+
+ val docImplicitsShowAll = BooleanSetting (
+ "-implicits-show-all",
+ "Show members inherited by implicit conversions that are impossible in the default scope. " +
+ "(for example conversions that require Numeric[String] to be in scope)"
+ )
+
+ val docDiagrams = BooleanSetting (
+ "-diagrams",
+ "Create inheritance diagrams for classes, traits and packages."
+ )
+
+ val docDiagramsDebug = BooleanSetting (
+ "-diagrams-debug",
+ "Show debugging information for the diagram creation process."
+ )
+
+ val docDiagramsDotPath = PathSetting (
+ "-diagrams-dot-path",
+ "The path to the dot executable used to generate the inheritance diagrams. Ex: /usr/bin/dot",
+ "dot" // by default, just pick up the system-wide dot
+ )
+
// Somewhere slightly before r18708 scaladoc stopped building unless the
// self-type check was suppressed. I hijacked the slotted-for-removal-anyway
// suppress-vt-warnings option and renamed it for this purpose.
@@ -95,9 +127,102 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
- docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator
+ docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
+ docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+ docImplicits, docImplicitsDebug, docImplicitsShowAll
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
override def isScaladoc = true
+
+ // unset by the testsuite, we don't need to count the entities in the model
+ var reportModel = true
+
+ /**
+ * This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
+ * but ultimately scaladoc has to be useful. :)
+ */
+ object hardcoded {
+
+ /** The common context bounds and some humanly explanations. Feel free to add more explanations
+ * `<root>.scala.package.Numeric` is the type class
+ * `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param)
+ * the function result should be a humanly-understandable description of the type class
+ */
+ val knownTypeClasses: Map[String, String => String] = Map() +
+ ("<root>.scala.package.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
+ ("<root>.scala.package.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
+ ("<root>.scala.package.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
+ ("<root>.scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
+ ("<root>.scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
+ ("<root>.scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available"))
+
+ /**
+ * Set of classes to exclude from index and diagrams
+ * TODO: Should be configurable
+ */
+ def isExcluded(qname: String) = {
+ ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+ qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+ ) && !(
+ qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+ qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+ qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+ qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+ qname == "scala.runtime.AbstractFunction2"
+ )
+ )
+ }
+
+ /** Common conversion targets that affect any class in Scala */
+ val commonConversionTargets = List(
+ "scala.Predef.any2stringfmt",
+ "scala.Predef.any2stringadd",
+ "scala.Predef.any2ArrowAssoc",
+ "scala.Predef.any2Ensuring")
+
+ /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
+ val arraySkipConversions = List(
+ "scala.Predef.refArrayOps",
+ "scala.Predef.intArrayOps",
+ "scala.Predef.doubleArrayOps",
+ "scala.Predef.longArrayOps",
+ "scala.Predef.floatArrayOps",
+ "scala.Predef.charArrayOps",
+ "scala.Predef.byteArrayOps",
+ "scala.Predef.shortArrayOps",
+ "scala.Predef.booleanArrayOps",
+ "scala.Predef.unitArrayOps",
+ "scala.LowPriorityImplicits.wrapRefArray",
+ "scala.LowPriorityImplicits.wrapIntArray",
+ "scala.LowPriorityImplicits.wrapDoubleArray",
+ "scala.LowPriorityImplicits.wrapLongArray",
+ "scala.LowPriorityImplicits.wrapFloatArray",
+ "scala.LowPriorityImplicits.wrapCharArray",
+ "scala.LowPriorityImplicits.wrapByteArray",
+ "scala.LowPriorityImplicits.wrapShortArray",
+ "scala.LowPriorityImplicits.wrapBooleanArray",
+ "scala.LowPriorityImplicits.wrapUnitArray",
+ "scala.LowPriorityImplicits.genericWrapArray")
+
+ // included as names as here we don't have access to a Global with Definitions :(
+ def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
+ def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
+
+ /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority
+ * but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we
+ * know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */
+ def valueClassFilter(value: String, conversionName: String): Boolean = {
+ val valueName = value.toLowerCase
+ val otherValues = valueClassList.filterNot(_ == valueName)
+
+ for (prefix <- valueClassFilterPrefixes)
+ if (conversionName.startsWith(prefix))
+ for (otherValue <- otherValues)
+ if (conversionName.startsWith(prefix + "." + otherValue))
+ return false
+
+ true
+ }
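A worked example of the filter (hypothetical calls, assuming the enclosing object is in scope): for Int, any Predef or LowPriorityImplicits conversion whose name starts with another value class's name is dropped, since it could never apply to an Int.

    valueClassFilter("Int", "scala.LowPriorityImplicits.doubleWrapper")  // false -- named after Double
    valueClassFilter("Int", "scala.LowPriorityImplicits.intWrapper")     // true  -- the Int-specific conversion
    valueClassFilter("Int", "scala.Predef.any2stringadd")                // true  -- not named after any value class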
+ }
}
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
index 573952a2e3..8f426a443d 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
@@ -16,7 +16,7 @@ trait Uncompilable {
val settings: Settings
import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
- import global.definitions.RootClass
+ import global.definitions.{ RootClass, AnyRefClass }
private implicit def translateName(name: Global#Name) =
if (name.isTypeName) newTypeName("" + name) else newTermName("" + name)
@@ -34,7 +34,7 @@ trait Uncompilable {
}
def files = settings.uncompilableFiles
def symbols = pairs map (_._1)
- def templates = symbols filter (x => x.isClass || x.isTrait) toSet
+ def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
def comments = {
if (settings.debug.value || settings.verbose.value)
inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 0116e02e0e..914824d523 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -71,6 +71,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"signaturebg.gif",
"signaturebg2.gif",
"typebg.gif",
+ "conversionbg.gif",
"valuemembersbg.gif",
"navigation-li-a.png",
@@ -80,6 +81,8 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"selected.png",
"selected2-right.png",
"selected2.png",
+ "selected-right-implicits.png",
+ "selected-implicits.png",
"unselected.png"
)
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 1544dafc69..e3da8bddea 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -23,7 +23,7 @@ abstract class HtmlPage extends Page { thisPage =>
protected def title: String
/** The page description */
- protected def description: String =
+ protected def description: String =
// unless overwritten, will display the title in a spaced format, keeping - and .
title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
@@ -164,15 +164,15 @@ abstract class HtmlPage extends Page { thisPage =>
}
/** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
- def templateToHtml(tpl: TemplateEntity) = tpl match {
+ def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match {
case dTpl: DocTemplateEntity =>
if (hasPage(dTpl)) {
- <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ dTpl.name }</a>
+ <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
} else {
- xml.Text(dTpl.name)
+ xml.Text(if (name eq null) dTpl.name else name)
}
case ndTpl: NoDocTemplate =>
- xml.Text(ndTpl.name)
+ xml.Text(if (name eq null) ndTpl.name else name)
}
/** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
@@ -192,6 +192,6 @@ abstract class HtmlPage extends Page { thisPage =>
else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
else if (ety.isObject) "object_big.png"
else if (ety.isPackage) "package_big.png"
- else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+ else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 2ebd83fd99..d3f42ffe6e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -89,21 +89,42 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<div id="mbrsel">
<div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>
- { if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
<div id="order">
<span class="filtertype">Ordering</span>
<ol><li class="alpha in"><span>Alphabetic</span></li><li class="inherit out"><span>By inheritance</span></li></ol>
</div>
}
- { if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
- <div id="ancestors">
- <span class="filtertype">Inherited</span>
- <ol><li class="hideall out"><span>Hide All</span></li>
- <li class="showall in"><span>Show all</span></li></ol>
- <ol id="linearization">{
- (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li> }
- }</ol>
- </div>
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
+ {
+ if (!tpl.linearization.isEmpty)
+ <div id="ancestors">
+ <span class="filtertype">Inherited<br/>
+ </span>
+ <ol id="linearization">
+ { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++ {
+ if (!tpl.conversions.isEmpty)
+ <div id="ancestors">
+ <span class="filtertype">Implicitly<br/>
+ </span>
+ <ol id="implicits">
+ { tpl.conversions.map(conv => <li class="in" name={ conv.conversionQualifiedName }><span>{ "by " + conv.conversionShortName }</span></li>) }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++
+ <div id="ancestors">
+ <span class="filtertype"></span>
+ <ol>
+ <li class="hideall out"><span>Hide All</span></li>
+ <li class="showall in"><span>Show all</span></li>
+ </ol>
+ <a href="docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
+ </div>
}
{
<div id="visbl">
@@ -153,23 +174,25 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<div id="inheritedMembers">
{
+ // linearization
NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
<div class="parent" name={ superTpl.qualifiedName }>
<h3>Inherited from {
- if (tpl.universe.settings.useStupidTypes.value)
- superTpl match {
- case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
- sig
- case tpl: TemplateEntity =>
- tpl.name
- }
- else
- typeToHtml(superType, true)
+ typeToHtmlWithStupidTypes(tpl, superTpl, superType)
}</h3>
</div>
)
}
+ {
+ // implicitly inherited
+ NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
+ <div class="conversion" name={ conversion.conversionQualifiedName }>
+ <h3>Inherited by implicit conversion { conversion.conversionShortName } from
+ { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
+ </h3>
+ </div>
+ )
+ }
</div>
</div>
@@ -220,11 +243,12 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case d:MemberEntity with Def => defParamsToString(d)
case _ => ""
}
+ val memberComment = memberToCommentHtml(mbr, false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString }>
+ data-isabs={ mbr.isAbstract.toString } fullComment={ if(memberComment.isEmpty) "no" else "yes" }>
<a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
{ signature(mbr, false) }
- { memberToCommentHtml(mbr, false) }
+ { memberComment }
</li>
}
@@ -276,6 +300,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
<p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
def memberToCommentBodyHtml(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+
val memberComment =
if (mbr.comment.isEmpty) NodeSeq.Empty
else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
@@ -327,6 +352,45 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
}
+ val implicitInformation = mbr.byConversion match {
+ case Some(conv) =>
+ <dt class="implicit">Implicit information</dt> ++
+ {
+ val targetType = typeToHtml(conv.targetType, true)
+ val conversionMethod = conv.convertorMethod match {
+ case Left(member) => Text(member.name)
+ case Right(name) => Text(name)
+ }
+
+ // strip off the package object endings, they make things harder to follow
+ val conversionOwnerQualifiedName = conv.convertorOwner.qualifiedName.stripSuffix(".package")
+ val conversionOwner = templateToHtml(conv.convertorOwner, conversionOwnerQualifiedName)
+
+ val constraintText = conv.constraints match {
+ case Nil =>
+ NodeSeq.Empty
+ case List(constraint) =>
+ xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ xml.Text(".")
+ case List(constraint1, constraint2) =>
+ xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
+ xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ xml.Text(".")
+ case constraints =>
+ <br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
+ var index = 0
+ constraints map { constraint => xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
+ }
+ }
+
+ <dd>
+ This member is added by an implicit conversion from { typeToHtml(mbr.inTemplate.resultType, true) } to
+ { targetType } performed by method { conversionMethod } in { conversionOwner }.
+ { constraintText }
+ </dd>
+ }
+ case _ =>
+ NodeSeq.Empty
+ }
+
// --- start attributes block vals
val attributes: Seq[scala.xml.Node] = {
val fvs: List[comment.Paragraph] = visibility(mbr).toList
@@ -355,7 +419,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</div>
case _ => NodeSeq.Empty
}
- }
+ }
val selfType: Seq[scala.xml.Node] = mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
@@ -478,7 +542,7 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
}
// end attributes block vals ---
- val attributesInfo = attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
+ val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
val attributesBlock =
if (attributesInfo.isEmpty)
NodeSeq.Empty
@@ -562,12 +626,13 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
</span>
<span class="symbol">
{
+ val nameClass = if (mbr.byConversion.isDefined) "implicit" else "name"
val nameHtml = {
val value = if (mbr.isConstructor) tpl.name else mbr.name
val span = if (mbr.deprecation.isDefined)
- <span class={"name deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
+ <span class={ nameClass + " deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
else
- <span class={"name"}>{ value }</span>
+ <span class={ nameClass }>{ value }</span>
val encoded = scala.reflect.NameTransformer.encode(value)
if (encoded != value) {
span % new UnprefixedAttribute("title",
@@ -766,4 +831,43 @@ class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage
case _ => inl.toString
}
+ private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
+ if (tpl.universe.settings.useStupidTypes.value)
+ superTpl match {
+ case dtpl: DocTemplateEntity =>
+ val sig = signature(dtpl, false, true) \ "_"
+ sig
+ case tpl: TemplateEntity =>
+ Text(tpl.name)
+ }
+ else
+ typeToHtml(superType, true)
+
+ private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
+ case ktcc: KnownTypeClassConstraint =>
+ xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
+ templateToHtml(ktcc.typeClassEntity) ++ xml.Text(")")
+ case tcc: TypeClassConstraint =>
+ xml.Text(tcc.typeParamName + " is ") ++
+ <a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
+ context-bounded</a> ++ xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
+ templateToHtml(tcc.typeClassEntity) ++ xml.Text(")")
+ case impl: ImplicitInScopeConstraint =>
+ xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ xml.Text(" is in scope")
+ case eq: EqualTypeParamConstraint =>
+ xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
+ typeToHtml(eq.rhs, true) ++ xml.Text(")")
+ case bt: BoundedTypeParamConstraint =>
+ xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
+ bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
+ typeToHtml(bt.lowerBound, true) ++ xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, true) ++ xml.Text(")")
+ case lb: LowerBoundedTypeParamConstraint =>
+ xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
+ typeToHtml(lb.lowerBound, true) ++ xml.Text(")")
+ case ub: UpperBoundedTypeParamConstraint =>
+ xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
+ typeToHtml(ub.upperBound, true) ++ xml.Text(")")
+ }
+
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
new file mode 100644
index 0000000000..4be145d0af
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
new file mode 100644
index 0000000000..bc29efb3e6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
new file mode 100644
index 0000000000..8313f4975b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 6fb83c133e..5a1779bba5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -106,7 +106,7 @@ a[href]:hover {
font-size: 24pt;
text-shadow: black 0px 2px 0px;
/* text-shadow: black 0px 0px 0px;*/
-text-decoration: none;
+text-decoration: none;
}
#definition #owner {
@@ -162,7 +162,7 @@ text-decoration: none;
padding-left: 15px;
background: url("arrow-right.png") no-repeat 0 3px transparent;
}
-
+
.toggleContainer.open .toggle {
background: url("arrow-down.png") no-repeat 0 3px transparent;
}
@@ -205,6 +205,11 @@ dl.attributes > dt {
font-style: italic;
}
+dl.attributes > dt.implicit {
+ font-weight: bold;
+ color: darkgreen;
+}
+
dl.attributes > dd {
display: block;
padding-left: 10em;
@@ -241,6 +246,17 @@ dl.attributes > dd {
color: white;
}
+#inheritedMembers > div.conversion > h3 {
+ background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */
+ height: 17px;
+ font-style: italic;
+ font-size: 12pt;
+}
+
+#inheritedMembers > div.conversion > h3 * {
+ color: white;
+}
+
/* Member cells */
div.members > ol {
@@ -310,10 +326,21 @@ div.members > ol > li:last-child {
font-weight: bold;
}
-.signature .symbol .params .implicit {
+.signature .symbol > .implicit {
+ display: inline-block;
+ font-weight: bold;
+ text-decoration: underline;
+ color: darkgreen;
+}
+
+.signature .symbol .params > .implicit {
font-style: italic;
}
+.signature .symbol .implicit.deprecated {
+ text-decoration: line-through;
+}
+
.signature .symbol .name.deprecated {
text-decoration: line-through;
}
@@ -369,15 +396,15 @@ div.members > ol > li:last-child {
.cmt {}
.cmt p {
- margin: 0.7em 0;
+ margin: 0.7em 0;
}
.cmt p:first-child {
- margin-top: 0;
+ margin-top: 0;
}
.cmt p:last-child {
- margin-bottom: 0;
+ margin-bottom: 0;
}
.cmt h3,
@@ -539,7 +566,7 @@ div.fullcommenttop .block {
margin-bottom: 5px
}
-div.fullcomment div.block ol li p,
+div.fullcomment div.block ol li p,
div.fullcomment div.block ol li {
display:inline
}
@@ -583,10 +610,10 @@ div.fullcomment dl.paramcmts > dd {
/* Members filter tool */
#textfilter {
- position: relative;
- display: block;
+ position: relative;
+ display: block;
height: 20px;
- margin-bottom: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
@@ -600,7 +627,7 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input {
- display: block;
+ display: block;
position: absolute;
top: 0;
right: 20px;
@@ -608,10 +635,10 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
+ height: 20px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
background: #ffffff url("filterboxbarbg.png") repeat-x top left;
width: 100%;
}
@@ -660,6 +687,13 @@ div.fullcomment dl.paramcmts > dd {
display: inline-block;
}
+#mbrsel > div > a {
+ position:relative;
+ top: -8px;
+ font-size: 11px;
+ text-shadow: #ffffff 0 1px 0;
+}
+
#mbrsel > div > ol#linearization {
display: table;
margin-left: 70px;
@@ -683,9 +717,32 @@ div.fullcomment dl.paramcmts > dd {
text-shadow: #ffffff 0 1px 0;
}
+#mbrsel > div > ol#implicits {
+ display: table;
+ margin-left: 70px;
+}
+
+#mbrsel > div > ol#implicits > li.in {
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
+ background: url(selected-right-implicits.png) no-repeat;
+ background-position: right 0px;
+}
+
+#mbrsel > div > ol#implicits > li.in > span{
+ color: #404040;
+ float: left;
+ padding: 1px 0 1px 10px;
+ background: url(selected-implicits.png) no-repeat;
+ background-position: 0px 0px;
+ text-shadow: #ffffff 0 1px 0;
+}
+
#mbrsel > div > ol > li {
/* padding: 3px 10px;*/
- line-height: 16pt;
+ line-height: 16pt;
display: inline-block;
cursor: pointer;
}
@@ -709,10 +766,10 @@ div.fullcomment dl.paramcmts > dd {
}
#mbrsel > div > ol > li.out {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
}
#mbrsel > div > ol > li.out > span{
@@ -739,10 +796,10 @@ div.fullcomment dl.paramcmts > dd {
#mbrsel .showall {
color: #4C4C4C;
line-height: 16px;
- font-weight: bold;
+ font-weight: bold;
}
#mbrsel .showall span {
color: #4C4C4C;
- font-weight: bold;
+ font-weight: bold;
}*/ \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 3cdd9a7f27..fd5a981cb0 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -2,21 +2,23 @@
// code by Gilles Dubochet with contributions by Pedro Furlanetto
$(document).ready(function(){
- var isHiddenClass;
- if (document.title == 'scala.AnyRef') {
- isHiddenClass = function (name) {
- return name == 'scala.Any';
- };
- } else {
- isHiddenClass = function (name) {
- return name == 'scala.Any' || name == 'scala.AnyRef';
- };
- }
+ var isHiddenClass = function (name) {
+ return name == 'scala.Any' ||
+ name == 'scala.AnyRef' ||
+ name == 'scala.Predef.any2stringfmt' ||
+ name == 'scala.Predef.any2stringadd' ||
+ name == 'scala.Predef.any2ArrowAssoc' ||
+ name == 'scala.Predef.any2Ensuring'
+ };
+
+ $("#linearization li:gt(0)").filter(function(){
+ return isHiddenClass($(this).attr("name"));
+ }).removeClass("in").addClass("out");
- $("#linearization li").filter(function(){
+ $("#implicits li").filter(function(){
return isHiddenClass($(this).attr("name"));
}).removeClass("in").addClass("out");
-
+
// Pre-filter members
filter();
@@ -54,17 +56,38 @@ $(document).ready(function(){
};
filter();
});
- $("#ancestors > ol > li.hideall").click(function() {
+
+ $("#implicits li").click(function(){
+ if ($(this).hasClass("in")) {
+ $(this).removeClass("in");
+ $(this).addClass("out");
+ }
+ else if ($(this).hasClass("out")) {
+ $(this).removeClass("out");
+ $(this).addClass("in");
+ };
+ filter();
+ });
+
+ $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
+ $("#implicits li.in").removeClass("in").addClass("out");
filter();
})
- $("#ancestors > ol > li.showall").click(function() {
- var filtered =
+ $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
+ var filteredLinearization =
$("#linearization li.out").filter(function() {
return ! isHiddenClass($(this).attr("name"));
});
- filtered.removeClass("out").addClass("in");
+ filteredLinearization.removeClass("out").addClass("in");
+
+ var filteredImplicits =
+ $("#implicits li.out").filter(function() {
+ return ! isHiddenClass($(this).attr("name"));
+ });
+ filteredImplicits.removeClass("out").addClass("in");
+
filter();
});
$("#visbl > ol > li.public").click(function() {
@@ -108,8 +131,10 @@ $(document).ready(function(){
});
/* Add toggle arrows */
- var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
-
+ //var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
+ // trying to speed things up a little bit
+ var docAllSigs = $("#template li[fullComment=yes] .signature");
+
function commentToggleFct(signature){
var parent = signature.parent();
var shortComment = $(".shortcomment", parent);
@@ -129,7 +154,7 @@ $(document).ready(function(){
docAllSigs.click(function() {
commentToggleFct($(this));
});
-
+
/* Linear super types and known subclasses */
function toggleShowContentFct(outerElement){
var content = $(".hiddenContent", outerElement);
@@ -148,20 +173,22 @@ $(document).ready(function(){
$(".toggleContainer").click(function() {
toggleShowContentFct($(this));
});
-
+
// Set parent window title
windowTitle();
});
function orderAlpha() {
$("#template > div.parent").hide();
- $("#ancestors").show();
+ $("#template > div.conversion").hide();
+ $("#mbrsel > div[id=ancestors]").show();
filter();
};
function orderInherit() {
$("#template > div.parent").show();
- $("#ancestors").hide();
+ $("#template > div.conversion").show();
+ $("#mbrsel > div[id=ancestors]").hide();
filter();
};
@@ -177,6 +204,9 @@ function initInherit() {
$("#inheritedMembers > div.parent").each(function(){
parents[$(this).attr("name")] = $(this);
});
+ $("#inheritedMembers > div.conversion").each(function(){
+ parents[$(this).attr("name")] = $(this);
+ });
$("#types > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
@@ -216,6 +246,9 @@ function initInherit() {
$("#inheritedMembers > div.parent").each(function() {
if ($("> div.members", this).length == 0) { $(this).remove(); };
});
+ $("#inheritedMembers > div.conversion").each(function() {
+ if ($("> div.members", this).length == 0) { $(this).remove(); };
+ });
};
function filter(scrollToMember) {
@@ -224,13 +257,17 @@ function filter(scrollToMember) {
var queryRegExp = new RegExp(query, "i");
var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in");
var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in");
- var hiddenSuperclassElements = orderingAlphabetic ? $("#linearization > li.out") : $("#linearization > li:gt(0)");
- var hiddenSuperclasses = hiddenSuperclassElements.map(function() {
+ var hiddenSuperclassElementsLinearization = orderingAlphabetic ? $("#linearization > li.out") : $("#linearization > li:gt(0)");
+ var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() {
+ return $(this).attr("name");
+ }).get();
+ var hiddenSuperclassElementsImplicits = orderingAlphabetic ? $("#implicits > li.out") : $("#implicits > li");
+ var hiddenSuperclassesImplicits = hiddenSuperclassElementsImplicits.map(function() {
return $(this).attr("name");
}).get();
var hideInheritedMembers;
-
+
if(orderingAlphabetic) {
$("#inheritedMembers").hide();
hideInheritedMembers = true;
@@ -242,9 +279,10 @@ function filter(scrollToMember) {
$("#allMembers > .members").each(filterFunc);
hideInheritedMembers = false;
$("#inheritedMembers > .parent > .members").each(filterFunc);
+ $("#inheritedMembers > .conversion > .members").each(filterFunc);
}
-
+
function filterFunc() {
var membersVisible = false;
var members = $(this);
@@ -262,12 +300,18 @@ function filter(scrollToMember) {
ownerIndex = name.lastIndexOf(".");
}
var owner = name.slice(0, ownerIndex);
- for (var i = 0; i < hiddenSuperclasses.length; i++) {
- if (hiddenSuperclasses[i] == owner) {
+ for (var i = 0; i < hiddenSuperclassesLinearization.length; i++) {
+ if (hiddenSuperclassesLinearization[i] == owner) {
mbr.hide();
return;
}
- }
+ };
+ for (var i = 0; i < hiddenSuperclassesImplicits.length; i++) {
+ if (hiddenSuperclassesImplicits[i] == owner) {
+ mbr.hide();
+ return;
+ }
+ };
}
if (query && !(queryRegExp.test(name) || queryRegExp.test(this.mbrText))) {
mbr.hide();
@@ -276,7 +320,7 @@ function filter(scrollToMember) {
mbr.show();
membersVisible = true;
});
-
+
if (membersVisible)
members.show();
else
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index 6eb14a4907..6488847049 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -167,6 +167,8 @@ trait MemberEntity extends Entity {
/** Whether this member is abstract. */
def isAbstract: Boolean
+ /** If this member originates from an implicit conversion, the conversion that introduced it */
+ def byConversion: Option[ImplicitConversion]
}
object MemberEntity {
// Oh contravariance, contravariance, wherefore art thou contravariance?
@@ -246,6 +248,8 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* other entity of the pair is the companion. */
def companion: Option[DocTemplateEntity]
+ /** The implicit conversions found for this template (only classes and traits are affected; objects and packages are not) */
+ def conversions: List[ImplicitConversion]
}
@@ -413,3 +417,106 @@ trait Annotation extends Entity {
def arguments: List[ValueArgument]
}
+
+/** A trait that signals that the member results from an implicit conversion */
+trait ImplicitConversion {
+
+ /** The source of the implicit conversion */
+ def source: DocTemplateEntity
+
+ /** The result type after the conversion */
+ def targetType: TypeEntity
+
+ /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
+ def convertorMethod: Either[MemberEntity, String]
+
+ /** A short name of the conversion */
+ def conversionShortName: String
+
+ /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */
+ def conversionQualifiedName: String
+
+ /** The entity that owns the conversion method */
+ def convertorOwner: TemplateEntity
+
+ /** The constraints that the conversion puts on the type parameters */
+ def constraints: List[Constraint]
+
+ /** The members inherited by this implicit conversion */
+ def members: List[MemberEntity]
+}
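To make the intended use of this interface concrete, a small consumer sketch (assumptions: a conv value obtained from DocTemplateEntity.conversions, and plain-string output instead of the HTML that Template.scala builds):

    // hypothetical helper, not part of the patch
    def describeConversion(conv: ImplicitConversion): String = {
      val via = conv.convertorMethod match {
        case Left(member) => member.name  // the conversion method is documented
        case Right(name)  => name         // only its name is known
      }
      val constraints =
        if (conv.constraints.isEmpty) ""
        else conv.constraints.mkString(" (only if ", " and ", ")")
      "members added by " + conv.conversionShortName + ", via " + via +
        " in " + conv.convertorOwner.qualifiedName + constraints
    }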
+
+/** A trait that encapsulates a constraint necessary for implicit conversion */
+trait Constraint {
+ // /** The implicit conversion during which this constraint appears */
+ // def conversion: ImplicitConversion
+}
+
+/** A constraint involving a type parameter which must be in scope */
+trait ImplicitInScopeConstraint extends Constraint {
+ /** The type of the implicit value required */
+ def implicitType: TypeEntity
+
+ /** toString for debugging */
+ override def toString = "an implicit _: " + implicitType.name + " must be in scope"
+}
+
+trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
+ /** The type class entity */
+ def typeClassEntity: TemplateEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
+ typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+trait KnownTypeClassConstraint extends TypeClassConstraint {
+ /** Type explanation, takes the type parameter name and generates the explanation */
+ def typeExplanation: (String) => String
+
+ /** toString for debugging */
+ override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+/** A constraint involving a type parameter */
+trait TypeParamConstraint extends Constraint {
+ /** The type parameter involved */
+ def typeParamName: String
+}
+
+trait EqualTypeParamConstraint extends TypeParamConstraint {
+ /** The rhs */
+ def rhs: TypeEntity
+ /** toString for debugging */
+ override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")"
+}
+
+trait BoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def lowerBound: TypeEntity
+
+ /** The upper bound */
+ def upperBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " +
+ upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")"
+}
+
+trait LowerBoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def lowerBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " +
+ lowerBound.name + ")"
+}
+
+trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The upper bound */
+ def upperBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
+ upperBound.name + ")"
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 78796231eb..9fcd43ac02 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -17,7 +17,7 @@ import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
class ModelFactory(val global: Global, val settings: doc.Settings) {
- thisFactory: ModelFactory with CommentFactory with TreeFactory =>
+ thisFactory: ModelFactory with ModelFactoryImplicitSupport with CommentFactory with TreeFactory =>
import global._
import definitions.{ ObjectClass, RootPackage, EmptyPackage, NothingClass, AnyClass, AnyValClass, AnyRefClass }
@@ -95,7 +95,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isDocTemplate = false
}
- abstract class MemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ abstract class MemberImpl(sym: Symbol, implConv: ImplicitConversionImpl = null, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
lazy val comment =
if (inTpl == null) None else thisFactory.comment(sym, inTpl)
override def inTemplate = inTpl
@@ -128,7 +128,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym.isSealed) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
- if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
+ /* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collection)
+ * {{{
+ * implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ * def isParallel = ...
+ * }
+ * }}}
+ * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
+ * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
+ if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (implConv eq null)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
fgs.toList
}
@@ -162,7 +169,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case NullaryMethodType(res) => resultTpe(res)
case _ => tpe
}
- makeTypeInTemplateContext(resultTpe(sym.tpe), inTemplate, sym)
+ val tpe = if (implConv eq null) sym.tpe else implConv.toType memberInfo sym
+ makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
}
def isDef = false
def isVal = false
@@ -173,15 +181,17 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isAliasType = false
def isAbstractType = false
def isAbstract =
- ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED))) ||
+ // for the explanation of implConv == null see comment on flags
+ ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (implConv == null)) ||
sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
def isTemplate = false
+ def byConversion = if (implConv ne null) Some(implConv) else None
}
/** The instantiation of `TemplateImpl` triggers the creation of the following entities:
* All ancestors of the template and all non-package members.
*/
- abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, null, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
//if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
if (settings.verbose.value)
inform("Creating doc template for " + sym)
@@ -245,16 +255,20 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def subClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- protected lazy val memberSyms =
+ val conversions = if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
+
+ lazy val memberSyms =
// Only this class's constructors are part of its members, inherited constructors are not.
sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym) && !isPureBridge(sym) )
- val members = memberSyms flatMap (makeMember(_, this))
- val templates = members collect { case c: DocTemplateEntity => c }
- val methods = members collect { case d: Def => d }
- val values = members collect { case v: Val => v }
- val abstractTypes = members collect { case t: AbstractType => t }
- val aliasTypes = members collect { case t: AliasType => t }
+ val members = (memberSyms.flatMap(makeMember(_, null, this))) :::
+ (conversions.flatMap((_.members))) // also take in the members from implicit conversions
+
+ val templates = members collect { case c: DocTemplateEntity => c }
+ val methods = members collect { case d: Def => d }
+ val values = members collect { case v: Val => v }
+ val abstractTypes = members collect { case t: AbstractType => t }
+ val aliasTypes = members collect { case t: AliasType => t }
override def isTemplate = true
def isDocTemplate = true
def companion = sym.companionSymbol match {
@@ -273,18 +287,22 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
- abstract class NonTemplateMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
+ abstract class NonTemplateMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends MemberImpl(sym, implConv, inTpl) with NonTemplateMemberEntity {
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
- lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
+ lazy val definitionName =
+ if (implConv == null) optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
+ else optimize(implConv.conversionQualifiedName + "#" + name)
def isUseCase = sym.isSynthetic
def isBridge = sym.isBridge
}
- abstract class NonTemplateParamMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, inTpl) {
- def valueParams =
- sym.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
+ abstract class NonTemplateParamMemberImpl(sym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, implConv, inTpl) {
+ def valueParams = {
+ val info = if (implConv eq null) sym.info else implConv.toType memberInfo sym
+ info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
}}
+ }
}
abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
@@ -356,7 +374,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
override def qualifiedName = "_root_"
override def inheritedFrom = Nil
override def isRootPackage = true
- override protected lazy val memberSyms =
+ override lazy val memberSyms =
(bSym.info.members ++ EmptyPackage.info.members) filter { s =>
s != EmptyPackage && s != RootPackage
}
@@ -454,18 +472,19 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeMember(aSym: Symbol, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+ // TODO: Should be able to override the type
+ def makeMember(aSym: Symbol, implConv: ImplicitConversionImpl, inTpl: => DocTemplateImpl): List[MemberImpl] = {
def makeMember0(bSym: Symbol, _useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
thisFactory.comment(bSym.accessed, inTpl) // This hack should be removed after analyser is fixed.
override def isLazyVal = true
override def useCaseOf = _useCaseOf
})
else if (bSym.isGetter && bSym.accessed.isMutable)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override def isVar = true
override def useCaseOf = _useCaseOf
})
@@ -481,36 +500,36 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
else bSym
}
- Some(new NonTemplateParamMemberImpl(cSym, inTpl) with HigherKindedImpl with Def {
+ Some(new NonTemplateParamMemberImpl(cSym, implConv, inTpl) with HigherKindedImpl with Def {
override def isDef = true
override def useCaseOf = _useCaseOf
})
}
- else if (bSym.isConstructor)
- Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Constructor {
+ else if (bSym.isConstructor && (implConv == null))
+ Some(new NonTemplateParamMemberImpl(bSym, implConv, inTpl) with Constructor {
override def isConstructor = true
def isPrimary = sym.isPrimaryConstructor
override def useCaseOf = _useCaseOf
})
else if (bSym.isGetter) // Scala field accessor or Java field
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with Val {
override def isVal = true
override def useCaseOf = _useCaseOf
})
else if (bSym.isAbstractType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
override def isAbstractType = true
override def useCaseOf = _useCaseOf
})
- else if (bSym.isAliasType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with HigherKindedImpl with AliasType {
+ else if (bSym.isAliasType && bSym != AnyRefClass)
+ Some(new NonTemplateMemberImpl(bSym, implConv, inTpl) with HigherKindedImpl with AliasType {
override def isAliasType = true
def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
override def useCaseOf = _useCaseOf
})
else if (bSym.isPackage)
inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
- else if ((bSym.isClass || bSym.isModule) && templateShouldDocument(bSym))
+ else if ((bSym.isClass || bSym.isModule || bSym == AnyRefClass) && templateShouldDocument(bSym))
Some(makeDocTemplate(bSym, inTpl))
else
None
@@ -520,16 +539,16 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Nil
else {
val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
- docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
+ docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
bSym
}
val member = makeMember0(aSym, None)
- if (allSyms.isEmpty)
- member.toList
- else
- // Use cases replace the original definitions - SI-5054
- allSyms flatMap { makeMember0(_, member) }
+ if (allSyms.isEmpty)
+ member.toList
+ else
+ // Use cases replace the original definitions - SI-5054
+ allSyms flatMap { makeMember0(_, member) }
}
}
@@ -639,9 +658,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// nameBuffer append stripPrefixes.foldLeft(pre.prefixString)(_ stripPrefix _)
// }
val bSym = normalizeTemplate(aSym)
- if (bSym.isNonClassType)
+ if (bSym.isNonClassType) {
nameBuffer append bSym.decodedName
- else {
+ } else {
val tpl = makeTemplate(bSym)
val pos0 = nameBuffer.length
refBuffer += pos0 -> (tpl, tpl.name.length)
@@ -692,8 +711,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def templateShouldDocument(aSym: Symbol): Boolean = {
- // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
- (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
+ // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
+ (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
( aSym.owner == NoSymbol || templateShouldDocument(aSym.owner) ) && !isEmptyJavaObject(aSym)
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
new file mode 100644
index 0000000000..23bef02bed
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -0,0 +1,501 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2012 LAMP/EPFL
+ *
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * @author Vlad Ureche
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import comment._
+
+import scala.collection._
+import scala.util.matching.Regex
+
+import symtab.Flags
+import io._
+
+import model.{ RootPackage => RootPackageEntity }
+
+/**
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * Let's take this as an example:
+ * {{{
+ * object Test {
+ * class A
+ *
+ * class B {
+ * def foo = 1
+ * }
+ *
+ * class C extends B {
+ * def bar = 2
+ * class D
+ * }
+ *
+ * implicit def conv(a: A) = new C
+ * }
+ * }}}
+ *
+ * Overview:
+ * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods
+ * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to
+ * `A` (see makeMember0 in ModelFactory, last 3 cases)
+ * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they
+ * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see
+ * `definitionName` in MemberImpl
+ *
+ * Internals:
+ * TODO: Give an overview here
+ */
+trait ModelFactoryImplicitSupport {
+ thisFactory: ModelFactory with CommentFactory with TreeFactory =>
+
+ import global._
+ import global.analyzer._
+ import global.definitions._
+ import settings.hardcoded
+
+ // debugging:
+ val DEBUG: Boolean = settings.docImplicitsDebug.value
+ val ERROR: Boolean = true // currently we show all errors
+ @inline final def debug(msg: => String) = if (DEBUG) println(msg)
+ @inline final def error(msg: => String) = if (ERROR) println(msg)
+
+ /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
+ * For example, if an implicit conversion requires that there is a Numeric[T] in scope:
+ * {{{
+ * class A[T]
+ * class B extends A[Int]
+ * class C extends A[String]
+ * implicit def pimpA[T: Numeric](a: A[T]): D
+ * }}}
+ * For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
+ * conversion from C to D, depending on -implicits-show-all, the conversion can:
+ * - not be generated at all, since there's no Numeric[String] in scope (if run without -implicits-show-all)
+ * - be generated with a *weird* constraint, Numeric[String], as the user might add it by hand (if the flag is enabled)
+ */
+ val implicitsShowAll: Boolean = settings.docImplicitsShowAll.value
+ class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
+
+ /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+ class ImplicitConversionImpl(
+ val sym: Symbol,
+ val convSym: Symbol,
+ val toType: Type,
+ val constrs: List[Constraint],
+ inTpl: => DocTemplateImpl)
+ extends ImplicitConversion {
+
+ def source: DocTemplateEntity = inTpl
+
+ def targetType: TypeEntity = makeType(toType, inTpl)
+
+ def convertorOwner: TemplateEntity =
+ if (convSym != NoSymbol)
+ makeTemplate(convSym.owner)
+ else {
+ error("Scaladoc implicits: Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym + " = NoSymbol!")
+ makeRootPackage.get // surely the root package was created :)
+ }
+
+ def convertorMethod: Either[MemberEntity, String] = {
+ var convertor: MemberEntity = null
+
+ convertorOwner match {
+ case doc: DocTemplateImpl =>
+ val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
+ if (convertors.length == 1)
+ convertor = convertors.head
+ case _ =>
+ }
+ if (convertor ne null)
+ Left(convertor)
+ else
+ Right(convSym.nameString)
+ }
+
+ def conversionShortName = convSym.nameString
+
+ def conversionQualifiedName = convertorOwner.qualifiedName + "." + convSym.nameString
+
+ lazy val constraints: List[Constraint] = constrs
+
+ val members: List[MemberEntity] = {
+ // Obtain the members inherited by the implicit conversion
+ var memberSyms = toType.members.filter(implicitShouldDocument(_))
+ val existingMembers = sym.info.members
+
+ // Debugging part :)
+ debug(sym.nameString + "\n" + "=" * sym.nameString.length())
+ debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
+
+ // Members inherited by implicit conversions cannot override actual members
+ memberSyms = memberSyms.filterNot((sym1: Symbol) =>
+ existingMembers.exists(sym2 => sym1.name == sym2.name &&
+ isSameType(toType.memberInfo(sym1), sym.info.memberInfo(sym2))))
+
+ debug(" -> full type: " + toType)
+ if (constraints.length != 0) {
+ debug(" -> constraints: ")
+ constraints foreach { constr => debug(" - " + constr) }
+ }
+ debug(" -> members:")
+ memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
+ debug("")
+
+ memberSyms.flatMap((makeMember(_, this, inTpl)))
+ }
+ }
+
+ /* ============== MAKER METHODS ============== */
+
+ /**
+ * Make the implicit conversion objects
+ *
+ * A word about the scope of the implicit conversions: currently we look at a very basic context composed of the
+ * default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
+ * future we might want to extend this to more complex scopes.
+ */
+ def makeImplicitConversions(sym: Symbol, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
+ // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
+ // But we don't want that, so we'll simply refuse to find implicit conversions for Nothing and Null
+ if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
+ else {
+ var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
+
+ val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
+ var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+ conversions = conversions.filterNot(_.members.isEmpty)
+
+ // Filter out specialized conversions from array
+ if (sym == ArrayClass)
+ conversions = conversions.filterNot((conv: ImplicitConversion) =>
+ hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
+
+ // Filter out nonsensical conversions from value types
+ if (isScalaValueType(sym.tpe))
+ conversions = conversions.filter((ic: ImplicitConversion) =>
+ hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
+
+ // Put the class-specific conversions in front
+ val (ownConversions, commonConversions) =
+ conversions.partition(conv => !hardcoded.commonConversionTargets.contains(conv.conversionQualifiedName))
+
+ ownConversions ::: commonConversions
+ }
+
+ /** makeImplicitConversion does the heavy lifting to get the implicit listing:
+ * - for each possible conversion function (also called view)
+ * * figures out the final result of the view (to what is our class transformed?)
+ * * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T])
+ * * lists all inherited members
+ *
+ * What, in detail (a worked sketch follows this method):
+ * - say we start from a class A[T1, T2, T3, T4]
+ * - we have an implicit function (view) in scope:
+ * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: Manifest[T4], ev2: Numeric[T4]): PimpedA
+ * - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
+ * * T1 must be equal to Int
+ * * T2 must be equal to Foo[Bar[X]]
+ * * T3 must be upper bounded by Long
+ * * there must be evidence of Numeric[T4] and a Manifest[T4] within scope
+ * - the final type is PimpedA and A therefore inherits a couple of members from PimpedA
+ *
+ * How?
+ * some notes:
+ * - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints
+ * to maintain generality
+ * - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints,
+ * but are never solved down to a type
+ * - these must be reverted to the type parameters and the constraints must be extracted and simplified (this is
+ * done by uniteConstraints and boundedTParamsConstraints; be sure to check them out)
+ * - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
+ * appears as a constraint
+ */
+ def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: => DocTemplateImpl): List[ImplicitConversion] =
+ if (result.tree == EmptyTree) Nil
+ else {
+ // `result` will contain the type of the view (= implicit conversion method)
+ // the search introduces untouchable type variables, but we want to get back to type parameters
+ val viewFullType = result.tree.tpe
+ // set the previously implicit parameters to being explicit
+
+ val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType)
+
+ // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite
+ if (viewSimplifiedType.params.length != 1) {
+ // This is known to be caused by the `<%<` object in Predef:
+ // {{{
+ // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
+ // object <%< {
+ // implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
+ // }
+ // }}}
+ // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters
+ return Nil
+ }
+
+ // type the view application so we get the exact type of the result (not the formal type)
+ val viewTree = result.tree.setType(viewSimplifiedType)
+ val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
+ val appliedTreeTyped: Tree = {
+ val newContext = context.makeImplicit(context.ambiguousErrors)
+ val newTyper = global.analyzer.newTyper(newContext)
+ newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
+
+ case global.analyzer.SilentResultValue(t: Tree) => t
+ case global.analyzer.SilentTypeError(err) =>
+ global.reporter.warning(sym.pos, err.toString)
+ return Nil
+ }
+ }
+
+ // now we have the final type:
+ val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType))
+
+ try {
+ // Transform bound constraints into scaladoc constraints
+ val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl)
+ val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl)
+ // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed?
+ val substConstraints = makeSubstitutionConstraints(result.subst, inTpl)
+ val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints
+
+ List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
+ } catch {
+ case i: ImplicitNotFound =>
+ //println(" Eliminating: " + toType)
+ Nil
+ }
+ }
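As a worked sketch of the pimpA example from the doc comment above (all names are the hypothetical ones used there; the constraint kinds are the traits added to Entity.scala in this patch):

    // the view from the comment, written out so it compiles on its own
    object PimpAExample {
      class X; class Foo[T]; class Bar[T]
      class A[T1, T2, T3, T4]
      class PimpedA { def pimped = "pimped!" }
      implicit def pimpA[T3 <: Long, T4: Manifest: Numeric](a: A[Int, Foo[Bar[X]], T3, T4]): PimpedA = new PimpedA
    }
    // For A[T1, T2, T3, T4], scaladoc would list `pimped` under the pimpA conversion, with constraints roughly like:
    //   T1 is Int                        (EqualTypeParamConstraint)
    //   T2 is Foo[Bar[X]]                (EqualTypeParamConstraint)
    //   T3 is a subclass of Long         (UpperBoundedTypeParamConstraint)
    //   T4 is accompanied by a Manifest  (KnownTypeClassConstraint)
    //   T4 is a numeric class            (KnownTypeClassConstraint)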
+
+ def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: => DocTemplateImpl): List[Constraint] =
+ types.flatMap((tpe:Type) => {
+ // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
+ val implType = typeVarToOriginOrWildcard(tpe)
+ val qualifiedName = implType.typeSymbol.ownerChain.reverse.map(_.nameString).mkString(".")
+
+ var available: Option[Boolean] = None
+
+ // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4
+ //
+ // println(implType + " => " + implType.isTrivial)
+ // var tpes: List[Type] = List(implType)
+ // while (!tpes.isEmpty) {
+ // val tpe = tpes.head
+ // tpes = tpes.tail
+ // tpe match {
+ // case TypeRef(pre, sym, args) =>
+ // tpes = pre :: args ::: tpes
+ // println(tpe + " => " + tpe.isTrivial)
+ // case _ =>
+ // println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial)
+ // }
+ // }
+ // println("\n")
+
+ // look for type variables in the type. If there are none, we can decide if the implicit is there or not
+ if (implType.isTrivial) {
+ try {
+ context.flushBuffer() /* any errors here should not prevent future findings */
+ // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
+ val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
+ val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
+ context.flushBuffer() /* any errors here should not prevent future findings */
+
+ available = Some(search.tree != EmptyTree)
+ } catch {
+ case _ =>
+ }
+ }
+
+ available match {
+ case Some(true) =>
+ Nil
+ case Some(false) if (!implicitsShowAll) =>
+ // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
+ throw new ImplicitNotFound(implType)
+ case _ =>
+ val typeParamNames = sym.typeParams.map(_.name)
+
+ // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I
+ // learn more about symbols, it'll have to do.
+ implType match {
+ case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) =>
+ hardcoded.knownTypeClasses.get(qualifiedName) match {
+ case Some(explanation) =>
+ List(new KnownTypeClassConstraint {
+ val typeParamName = targ.nameString
+ val typeExplanation = explanation
+ val typeClassEntity = makeTemplate(sym)
+ val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ case None =>
+ List(new TypeClassConstraint {
+ val typeParamName = targ.nameString
+ val typeClassEntity = makeTemplate(sym)
+ val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ }
+ case _ =>
+ List(new ImplicitInScopeConstraint{
+ val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ }
+ }
+ })
+
+ def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: => DocTemplateImpl): List[Constraint] =
+ (subst.from zip subst.to) map {
+ case (from, to) =>
+ new EqualTypeParamConstraint {
+ error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to)
+ val typeParamName = from.toString
+ val rhs = makeType(to, inTpl)
+ }
+ }
+
+ def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: => DocTemplateImpl): List[Constraint] =
+ (tparams zip constrs) flatMap {
+ case (tparam, constr) => {
+ uniteConstraints(constr) match {
+ case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
+ case (Nil, Nil) =>
+ Nil
+ case (List(lo), List(up)) if (lo == up) =>
+ List(new EqualTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val rhs = makeType(lo, inTpl)
+ })
+ case (List(lo), List(up)) =>
+ List(new BoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val lowerBound = makeType(lo, inTpl)
+ val upperBound = makeType(up, inTpl)
+ })
+ case (List(lo), Nil) =>
+ List(new LowerBoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val lowerBound = makeType(lo, inTpl)
+ })
+ case (Nil, List(up)) =>
+ List(new UpperBoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ val upperBound = makeType(up, inTpl)
+ })
+ case other =>
+ // this is likely an error on the lub/glb side
+ error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other)
+ Nil
+ }
+ }
+ }
+ }
+
+ /**
+ * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
+ *
+ * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an
+ * upper bound. Here are a couple of catches we need to be aware of:
+ * - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type
+ * parameters are transformed into "untouchable" type variables so that type inference does not attempt to
+ * fully solve them down to a type but rather constrains them on both sides just enough for the view to be
+ * applicable -- now, we want to transform those type variables back to the original type parameters
+ * - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb)
+ * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms
+ * into thinking there's nothing there
+ * - we don't want the wildcard types surviving the unification so we replace them back to Nothings
+ */
+ def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) =
+ try {
+ (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))),
+ List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard))))
+ } catch {
+ // does this actually ever happen? (probably when type vars occur in the bounds)
+ case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct)
+ }
+
+ /**
+ * Make implicits explicit - Not used currently
+ */
+ object implicitToExplicit extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case MethodType(params, resultType) =>
+ MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
+ case other =>
+ other
+ }
+ }
+
+ /**
+ * removeImplicitParameters transforms implicit parameters from the view result type into constraints and
+ * returns the simplified type of the view
+ *
+ * for the example view:
+ * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * the implicit view result type is:
+ * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * and the simplified type will be:
+ * MyClass[T] => PimpedMyClass[T]
+ */
+ def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
+
+ val params = viewType.paramss.flatten
+ val (normalParams, implParams) = params.partition(!_.isImplicit)
+ val simplifiedType = MethodType(normalParams, viewType.finalResultType)
+ val implicitTypes = implParams.map(_.tpe)
+
+ (simplifiedType, implicitTypes)
+ }
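To make the simplification concrete, here is a minimal standalone sketch (a hypothetical `Param` type with string-valued types, not the compiler's Symbols/Types), run in the REPL on the `pimpMyClass` example from the comment above:

  case class Param(name: String, tpe: String, isImplicit: Boolean)

  // toy model of the partition performed by removeImplicitParameters
  def removeImplicitParams(paramss: List[List[Param]], resultType: String): (String, List[String]) = {
    val (normalParams, implParams) = paramss.flatten.partition(!_.isImplicit)
    (normalParams.map(_.tpe).mkString(" => ") + " => " + resultType, implParams.map(_.tpe))
  }

  // pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
  removeImplicitParams(
    List(List(Param("a", "MyClass[T]", isImplicit = false)),
         List(Param("ev", "Numeric[T]", isImplicit = true))),
    "PimpedMyClass[T]")
  // => ("MyClass[T] => PimpedMyClass[T]", List("Numeric[T]"))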
+
+ /**
+ * typeVarToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original
+ * type parameters) or into wildcard types if nothing matches
+ */
+ object typeVarToOriginOrWildcard extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case tv: TypeVar =>
+ if (tv.constr.inst.typeSymbol == NothingClass)
+ WildcardType
+ else
+ tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this)
+ case other =>
+ if (other.typeSymbol == NothingClass)
+ WildcardType
+ else
+ other
+ }
+ }
+
+ /**
+ * wildcardToNothing transforms wildcard types back to Nothing
+ */
+ object wildcardToNothing extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case WildcardType =>
+ NothingClass.tpe
+ case other =>
+ other
+ }
+ }
+
+ /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */
+ def implicitShouldDocument(aSym: Symbol): Boolean = {
+ // We shouldn't document:
+ // - constructors
+ // - common methods (in Any, AnyRef, Object) as they are automatically removed
+ // - private and protected members (not accessible following an implicit conversion)
+ // - members starting with _ (usually reserved for internal stuff)
+ localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != ObjectClass) &&
+ (aSym.owner != AnyClass) && (aSym.owner != AnyRefClass) &&
+ (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
+ (aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
+ (aSym.nameString != "getClass")
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
index 988f2e0ba9..f948d53c8b 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -52,7 +52,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
if (asym.isSetter) asym = asym.getter(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
- val mbrs: List[MemberImpl] = makeMember(asym,docTmpl)
+ val mbrs: List[MemberImpl] = makeMember(asym, null, docTmpl)
mbrs foreach { mbr => refs += ((start, (mbr,end))) }
case _ =>
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index f9c1907696..a86462ad5f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -287,7 +287,12 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
}
// chasing down results which won't parse
+ // This used to work fine, now it reports a type error before any
+ // exception gets to us. See SI-5657. Don't have time to deal with
+ // it, so disabling everything.
def execute(line: String): Option[ExecResult] = {
+ return None // disabled
+
val parsed = Parsed(line)
def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index d617215452..40993eb916 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -121,7 +121,7 @@ trait MemberHandlers {
private def vparamss = member.vparamss
private def isMacro = member.mods.hasFlag(scala.reflect.internal.Flags.MACRO)
// true if not a macro and 0-arity
- override def definesValue = !isMacro && (vparamss.isEmpty || vparamss.head.isEmpty)
+ override def definesValue = !isMacro && (vparamss.isEmpty || vparamss.head.isEmpty && vparamss.tail.isEmpty)
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
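The tightened check above matters for curried definitions whose first parameter list is empty. A minimal standalone sketch (modelling parameter lists as plain lists of names, not the interpreter's actual types) shows the difference:

  object DefinesValueSketch {
    type ParamList = List[String]

    def oldCheck(vparamss: List[ParamList]) = vparamss.isEmpty || vparamss.head.isEmpty
    def newCheck(vparamss: List[ParamList]) = vparamss.isEmpty || (vparamss.head.isEmpty && vparamss.tail.isEmpty)

    def main(args: Array[String]): Unit = {
      val curried = List(Nil, List("x"))   // shaped like `def f()(x: Int)`
      println(oldCheck(curried))           // true  -- wrongly classified as 0-arity
      println(newCheck(curried))           // false -- curried defs no longer count as values
    }
  }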
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
index 8c589eba60..0c26aa8b28 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
@@ -27,6 +27,12 @@ trait ReplConfig {
try Console println msg
catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
+ private[nsc] def repldbgex(ex: Throwable): Unit = {
+ if (isReplDebug) {
+ echo("Caught/suppressing: " + ex)
+ ex.printStackTrace
+ }
+ }
private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 617a811a76..ff671088ac 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1328,7 +1328,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
} else super.transform(tree)
- case TypeApply(Select(qual, name), targs)
+ case TypeApply(sel @ Select(qual, name), targs)
if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
debuglog("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
val qual1 = transform(qual)
@@ -1342,14 +1342,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val residualTargs = symbol.info.typeParams zip targs collect {
case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
}
+ // See SI-5583. Don't know why it happens now if it didn't before.
+ if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
+ log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
+ localTyper.typed(sel)
+ }
+ else {
+ ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specMember.info.typeParams, env))
+ )
- ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, symbol.info.typeParams, env))
- )
-
- val tree1 = gen.mkTypeApply(Select(qual1, specMember), residualTargs)
- debuglog("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
+ val tree1 = gen.mkTypeApply(Select(qual1, specMember), residualTargs)
+ debuglog("rewrote " + tree + " to " + tree1)
+ localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
+ }
case None => super.transform(tree)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index a6d05ec48e..b4e0ad6edf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -94,6 +94,27 @@ trait Implicits {
result
}
+ /** Find all views from type `tp` (in which `tpars` are free)
+ *
+ * Note that the trees in the returned search results all share the same type variables.
+ * Ignore their constr field! The list of type constraints returned along with each tree specifies the constraints that
+ * must be met by the corresponding type parameter in `tpars` (for the returned implicit view to be valid).
+ *
+ * @param tp from-type for the implicit conversion
+ * @param context search implicits here
+ * @param tpars symbols that should be considered free type variables
+ * (implicit search should not try to solve them, just track their constraints)
+ */
+ def allViewsFrom(tp: Type, context: Context, tpars: List[Symbol]): List[(SearchResult, List[TypeConstraint])] = {
+ // my untouchable typevars are better than yours (they can't be constrained by them)
+ val tvars = tpars map (TypeVar untouchable _)
+ val tpSubsted = tp.subst(tpars, tvars)
+
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+
+ search.allImplicitsPoly(tvars)
+ }
+
private final val sizeLimit = 50000
private type Infos = List[ImplicitInfo]
private type Infoss = List[List[ImplicitInfo]]
@@ -369,7 +390,7 @@ trait Implicits {
private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
(context.openImplicits find { case (tp, tree1) => tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
- // println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
+ //println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
throw DivergentImplicit
case None =>
try {
@@ -378,7 +399,7 @@ trait Implicits {
typedImplicit0(info, ptChecked)
} catch {
case ex: DivergentImplicit =>
- // println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
+ //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
if (context.openImplicits.tail.isEmpty) {
if (!(pt.isErroneous))
DivergingImplicitExpansionError(tree, pt, info.sym)(context)
@@ -510,7 +531,7 @@ trait Implicits {
private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
incCounter(plausiblyCompatibleImplicits)
- printTyping(
+ printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
"ptChecked" -> ptChecked,
@@ -1202,6 +1223,26 @@ trait Implicits {
def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
(search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
}
+
+ // find all implicits for some type that contains type variables
+ // collect the constraints that result from typing each implicit
+ def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
+ def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
+
+ def eligibleInfos(iss: Infoss, isLocal: Boolean) = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null).eligible
+ val allEligibleInfos = (eligibleInfos(context.implicitss, true) ++ eligibleInfos(implicitsOfExpectedType, false)).toList
+
+ allEligibleInfos flatMap { ii =>
+ // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
+ // thus, start each type var off with a fresh constraint for every typedImplicit
+ resetTVars()
+ // any previous errors should not affect us now
+ context.flushBuffer()
+ val res = typedImplicit(ii, false)
+ if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
+ else Nil
+ }
+ }
}
object ImplicitNotFoundMsg {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 2efedfb717..d78fd35d25 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -83,7 +83,7 @@ trait Infer {
def apply(tp: Type): Type = tp match {
case WildcardType | BoundedWildcardType(_) | NoType =>
throw new NoInstance("undetermined type")
- case tv @ TypeVar(origin, constr) =>
+ case tv @ TypeVar(origin, constr) if !tv.untouchable =>
if (constr.inst == NoType) {
throw new DeferredNoInstance(() =>
"no unique instantiation of type variable " + origin + " could be found")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 6996dc6836..988b821d1a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -9,6 +9,7 @@ import scala.collection.mutable.ListBuffer
import scala.compat.Platform.EOL
import scala.reflect.makro.runtime.{Context => MacroContext}
import scala.reflect.runtime.Mirror
+import util.Statistics._
/**
* Code to deal with macros, namely with:
@@ -1002,159 +1003,165 @@ trait Macros { self: Analyzer =>
delayed += expandee -> (typer.context, undetparams)
Delay(expandee)
} else {
- val macroDef = expandee.symbol
- macroRuntime(macroDef) match {
- case Some(runtime) =>
- val savedInfolevel = nodePrinters.infolevel
- try {
- // InfoLevel.Verbose examines and prints out infos of symbols
- // by the means of this'es these symbols can climb up the lexical scope
- // when these symbols will be examined by a node printer
- // they will enumerate and analyze their children (ask for infos and tpes)
- // if one of those children involves macro expansion, things might get nasty
- // that's why I'm temporarily turning this behavior off
- nodePrinters.infolevel = nodePrinters.InfoLevel.Quiet
- val args = macroArgs(typer, expandee)
- args match {
- case Some(args) =>
- // adding stuff to openMacros is easy, but removing it is a nightmare
- // it needs to be sprinkled over several different code locations
- val (context: MacroContext) :: _ = args
- openMacros = context :: openMacros
- val expanded: MacroExpansionResult = try {
- val prevNumErrors = reporter.ERROR.count
- val expanded = runtime(args)
- val currNumErrors = reporter.ERROR.count
- if (currNumErrors != prevNumErrors) {
- fail(typer, expandee) // errors have been reported by the macro itself
- } else {
- expanded match {
- case expanded: Expr[_] =>
- if (macroDebug || macroCopypaste) {
- if (macroDebug) println("original:")
- println(expanded.tree)
- println(showRaw(expanded.tree))
- }
-
- freeTerms(expanded.tree) foreach (fte => typer.context.error(expandee.pos,
- ("macro expansion contains free term variable %s %s. "+
- "have you forgot to use eval when splicing this variable into a reifee? " +
- "if you have troubles tracking free term variables, consider using -Xlog-free-terms").format(fte.name, fte.origin)))
- freeTypes(expanded.tree) foreach (fty => typer.context.error(expandee.pos,
- ("macro expansion contains free type variable %s %s. "+
- "have you forgot to use c.TypeTag annotation for this type parameter? " +
- "if you have troubles tracking free type variables, consider using -Xlog-free-types").format(fty.name, fty.origin)))
-
- val currNumErrors = reporter.ERROR.count
- if (currNumErrors != prevNumErrors) {
- fail(typer, expandee)
- } else {
- // inherit the position from the first position-ful expandee in macro callstack
- // this is essential for sane error messages
- var tree = expanded.tree
- var position = openMacros.find(c => c.expandee.pos != NoPosition).map(_.expandee.pos).getOrElse(NoPosition)
- tree = atPos(position.focus)(tree)
-
- // now macro expansion gets typechecked against the macro definition return type
- // however, this happens in macroExpand, not here in macroExpand1
- Success(tree)
- }
- case expanded if expanded.isInstanceOf[Expr[_]] =>
- val msg = "macro must return a compiler-specific expr; returned value is Expr, but it doesn't belong to this compiler's universe"
- fail(typer, expandee, msg)
- case expanded =>
- val msg = "macro must return a compiler-specific expr; returned value is of class: %s".format(expanded.getClass)
- fail(typer, expandee, msg)
+ val start = startTimer(macroExpandNanos)
+ incCounter(macroExpandCount)
+ try {
+ val macroDef = expandee.symbol
+ macroRuntime(macroDef) match {
+ case Some(runtime) =>
+ val savedInfolevel = nodePrinters.infolevel
+ try {
+ // InfoLevel.Verbose examines and prints out infos of symbols
+ // by means of `this` references these symbols can climb up the lexical scope
+ // when these symbols are examined by a node printer
+ // they will enumerate and analyze their children (ask for infos and tpes)
+ // if one of those children involves macro expansion, things might get nasty
+ // that's why I'm temporarily turning this behavior off
+ nodePrinters.infolevel = nodePrinters.InfoLevel.Quiet
+ val args = macroArgs(typer, expandee)
+ args match {
+ case Some(args) =>
+ // adding stuff to openMacros is easy, but removing it is a nightmare
+ // it needs to be sprinkled over several different code locations
+ val (context: MacroContext) :: _ = args
+ openMacros = context :: openMacros
+ val expanded: MacroExpansionResult = try {
+ val prevNumErrors = reporter.ERROR.count
+ val expanded = runtime(args)
+ val currNumErrors = reporter.ERROR.count
+ if (currNumErrors != prevNumErrors) {
+ fail(typer, expandee) // errors have been reported by the macro itself
+ } else {
+ expanded match {
+ case expanded: Expr[_] =>
+ if (macroDebug || macroCopypaste) {
+ if (macroDebug) println("original:")
+ println(expanded.tree)
+ println(showRaw(expanded.tree))
+ }
+
+ freeTerms(expanded.tree) foreach (fte => typer.context.error(expandee.pos,
+ ("macro expansion contains free term variable %s %s. "+
+ "have you forgot to use eval when splicing this variable into a reifee? " +
+ "if you have troubles tracking free term variables, consider using -Xlog-free-terms").format(fte.name, fte.origin)))
+ freeTypes(expanded.tree) foreach (fty => typer.context.error(expandee.pos,
+ ("macro expansion contains free type variable %s %s. "+
+ "have you forgot to use c.TypeTag annotation for this type parameter? " +
+ "if you have troubles tracking free type variables, consider using -Xlog-free-types").format(fty.name, fty.origin)))
+
+ val currNumErrors = reporter.ERROR.count
+ if (currNumErrors != prevNumErrors) {
+ fail(typer, expandee)
+ } else {
+ // inherit the position from the first position-ful expandee in macro callstack
+ // this is essential for sane error messages
+ var tree = expanded.tree
+ var position = openMacros.find(c => c.expandee.pos != NoPosition).map(_.expandee.pos).getOrElse(NoPosition)
+ tree = atPos(position.focus)(tree)
+
+ // now macro expansion gets typechecked against the macro definition return type
+ // however, this happens in macroExpand, not here in macroExpand1
+ Success(tree)
+ }
+ case expanded if expanded.isInstanceOf[Expr[_]] =>
+ val msg = "macro must return a compiler-specific expr; returned value is Expr, but it doesn't belong to this compiler's universe"
+ fail(typer, expandee, msg)
+ case expanded =>
+ val msg = "macro must return a compiler-specific expr; returned value is of class: %s".format(expanded.getClass)
+ fail(typer, expandee, msg)
+ }
}
+ } catch {
+ case ex: Throwable =>
+ openMacros = openMacros.tail
+ throw ex
}
- } catch {
- case ex: Throwable =>
- openMacros = openMacros.tail
- throw ex
+ if (!expanded.isInstanceOf[Success]) openMacros = openMacros.tail
+ expanded
+ case None =>
+ fail(typer, expandee) // error has been reported by macroArgs
+ }
+ } catch {
+ case ex =>
+ // [Eugene] any ideas about how to improve this one?
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ realex match {
+ case realex: reflect.makro.runtime.AbortMacroException =>
+ if (macroDebug || macroCopypaste) println("macro expansion has failed: %s".format(realex.msg))
+ fail(typer, expandee) // error has been reported by abort
+ case _ =>
+ val message = {
+ try {
+ // the most reliable way of obtaining the currently executing method
+ // http://stackoverflow.com/questions/442747/getting-the-name-of-the-current-executing-method
+ val currentMethodName = new Object(){}.getClass().getEnclosingMethod().getName
+ val relevancyThreshold = realex.getStackTrace().indexWhere(este => este.getMethodName == currentMethodName)
+ if (relevancyThreshold == -1) None
+ else {
+ var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
+ var framesTillReflectiveInvocationOfMacroImpl = relevantElements.reverse.indexWhere(_.isNativeMethod) + 1
+ relevantElements = relevantElements dropRight framesTillReflectiveInvocationOfMacroImpl
+
+ realex.setStackTrace(relevantElements)
+ val message = new java.io.StringWriter()
+ realex.printStackTrace(new java.io.PrintWriter(message))
+ Some(EOL + message)
+ }
+ } catch {
+ // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
+ case ex: Throwable =>
+ None
+ }
+ } getOrElse realex.getMessage
+ fail(typer, expandee, "exception during macro expansion: " + message)
}
- if (!expanded.isInstanceOf[Success]) openMacros = openMacros.tail
- expanded
- case None =>
- fail(typer, expandee) // error has been reported by macroArgs
+ } finally {
+ nodePrinters.infolevel = savedInfolevel
+ }
+ case None =>
+ def notFound() = {
+ typer.context.error(expandee.pos, "macro implementation not found: " + macroDef.name + " " +
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)\n" +
+ "if you do need to define macro implementations along with the rest of your program, consider two-phase compilation with -Xmacro-fallback-classpath " +
+ "in the second phase pointing to the output of the first phase")
+ None
}
- } catch {
- case ex =>
- // [Eugene] any ideas about how to improve this one?
- val realex = ReflectionUtils.unwrapThrowable(ex)
- realex match {
- case realex: reflect.makro.runtime.AbortMacroException =>
- if (macroDebug || macroCopypaste) println("macro expansion has failed: %s".format(realex.msg))
- fail(typer, expandee) // error has been reported by abort
+ def fallBackToOverridden(tree: Tree): Option[Tree] = {
+ tree match {
+ case Select(qual, name) if (macroDef.isTermMacro) =>
+ macroDef.allOverriddenSymbols match {
+ case first :: _ =>
+ Some(Select(qual, name) setPos tree.pos setSymbol first)
+ case _ =>
+ macroTrace("macro is not overridden: ")(tree)
+ notFound()
+ }
+ case Apply(fn, args) =>
+ fallBackToOverridden(fn) match {
+ case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos)
+ case _ => None
+ }
+ case TypeApply(fn, args) =>
+ fallBackToOverridden(fn) match {
+ case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos)
+ case _ => None
+ }
case _ =>
- val message = {
- try {
- // the most reliable way of obtaining currently executing method
- // http://stackoverflow.com/questions/442747/getting-the-name-of-the-current-executing-method
- val currentMethodName = new Object(){}.getClass().getEnclosingMethod().getName
- val relevancyThreshold = realex.getStackTrace().indexWhere(este => este.getMethodName == currentMethodName)
- if (relevancyThreshold == -1) None
- else {
- var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
- var framesTillReflectiveInvocationOfMacroImpl = relevantElements.reverse.indexWhere(_.isNativeMethod) + 1
- relevantElements = relevantElements dropRight framesTillReflectiveInvocationOfMacroImpl
-
- realex.setStackTrace(relevantElements)
- val message = new java.io.StringWriter()
- realex.printStackTrace(new java.io.PrintWriter(message))
- Some(EOL + message)
- }
- } catch {
- // if the magic above goes boom, just fall back to uninformative, but better than nothing, getMessage
- case ex: Throwable =>
- None
- }
- } getOrElse realex.getMessage
- fail(typer, expandee, "exception during macro expansion: " + message)
+ macroTrace("unexpected tree in fallback: ")(tree)
+ notFound()
}
- } finally {
- nodePrinters.infolevel = savedInfolevel
- }
- case None =>
- def notFound() = {
- typer.context.error(expandee.pos, "macro implementation not found: " + macroDef.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)\n" +
- "if you do need to define macro implementations along with the rest of your program, consider two-phase compilation with -Xmacro-fallback-classpath " +
- "in the second phase pointing to the output of the first phase")
- None
- }
- def fallBackToOverridden(tree: Tree): Option[Tree] = {
- tree match {
- case Select(qual, name) if (macroDef.isTermMacro) =>
- macroDef.allOverriddenSymbols match {
- case first :: _ =>
- Some(Select(qual, name) setPos tree.pos setSymbol first)
- case _ =>
- macroTrace("macro is not overridden: ")(tree)
- notFound()
- }
- case Apply(fn, args) =>
- fallBackToOverridden(fn) match {
- case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos)
- case _ => None
- }
- case TypeApply(fn, args) =>
- fallBackToOverridden(fn) match {
- case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos)
- case _ => None
- }
- case _ =>
- macroTrace("unexpected tree in fallback: ")(tree)
- notFound()
}
- }
- fallBackToOverridden(expandee) match {
- case Some(tree1) =>
- macroTrace("falling back to ")(tree1)
- currentRun.macroExpansionFailed = true
- Fallback(tree1)
- case None =>
- fail(typer, expandee)
- }
+ fallBackToOverridden(expandee) match {
+ case Some(tree1) =>
+ macroTrace("falling back to ")(tree1)
+ currentRun.macroExpansionFailed = true
+ Fallback(tree1)
+ case None =>
+ fail(typer, expandee)
+ }
+ }
+ } finally {
+ stopTimer(macroExpandNanos, start)
}
}
} else {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 9431c6f5e5..fcdcc7b748 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -2787,7 +2787,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val lencmp = compareLengths(args, formals)
def checkNotMacro() = {
- if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro) != NoSymbol)
+ if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro && !sym.isErroneous) != NoSymbol)
duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
}
@@ -3525,7 +3525,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
else
tree
original setType ann.tpe
- original setPos tree.pos.focus
TypeTree(tpe) setOriginal original setPos tree.pos.focus
}
@@ -3584,7 +3583,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
ann.tpe = arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
- Typed(arg1, resultingTypeTree(atype)) setPos tree.pos.focus setType atype
+ Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
}
}
@@ -4616,7 +4615,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// A: solely for robustness reasons. this mechanism might change in the future, which might break unprotected code
val expr1 = context.withMacrosDisabled(typed1(expr, mode, pt))
expr1 match {
- case macroDef if macroDef.symbol.isTermMacro =>
+ case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
MacroEtaError(expr1)
case _ =>
typedEta(checkDead(expr1))
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index d1cdd30dd8..61c7695911 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -57,6 +57,9 @@ class Statistics extends scala.reflect.internal.util.Statistics {
val counter2: SubCounter = new SubCounter(subtypeCount)
val timer1: Timer = new Timer
val timer2: Timer = new Timer
+
+ val macroExpandCount = new Counter
+ val macroExpandNanos = new Timer
}
object Statistics extends Statistics
@@ -125,34 +128,36 @@ abstract class StatisticsInfo {
inform("ms type-flow-analysis: " + analysis.timer.millis)
if (phase.name == "typer") {
- inform("time spent typechecking : "+showRelTyper(typerNanos))
- inform("time classfilereading : "+showRelTyper(classReadNanos))
- inform("time spent in implicits : "+showRelTyper(implicitNanos))
- inform(" successful in scope : "+showRelTyper(inscopeSucceedNanos))
- inform(" failed in scope : "+showRelTyper(inscopeFailNanos))
- inform(" successful of type : "+showRelTyper(oftypeSucceedNanos))
- inform(" failed of type : "+showRelTyper(oftypeFailNanos))
- inform(" assembling parts : "+showRelTyper(subtypeETNanos))
- inform(" matchesPT : "+showRelTyper(matchesPtNanos))
- inform("implicit cache hits : "+showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
- inform("time spent in failed : "+showRelTyper(failedSilentNanos))
- inform(" failed apply : "+showRelTyper(failedApplyNanos))
- inform(" failed op= : "+showRelTyper(failedOpEqNanos))
- inform("time spent ref scanning : "+showRelTyper(isReferencedNanos))
- inform("micros by tree node : "+showCounts(microsByType))
- inform("#visits by tree node : "+showCounts(visitsByType))
+ inform("time spent typechecking : " + showRelTyper(typerNanos))
+ inform("time classfilereading : " + showRelTyper(classReadNanos))
+ inform("time spent in implicits : " + showRelTyper(implicitNanos))
+ inform(" successful in scope : " + showRelTyper(inscopeSucceedNanos))
+ inform(" failed in scope : " + showRelTyper(inscopeFailNanos))
+ inform(" successful of type : " + showRelTyper(oftypeSucceedNanos))
+ inform(" failed of type : " + showRelTyper(oftypeFailNanos))
+ inform(" assembling parts : " + showRelTyper(subtypeETNanos))
+ inform(" matchesPT : " + showRelTyper(matchesPtNanos))
+ inform("implicit cache hits : " + showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
+ inform("time spent in failed : " + showRelTyper(failedSilentNanos))
+ inform(" failed apply : " + showRelTyper(failedApplyNanos))
+ inform(" failed op= : " + showRelTyper(failedOpEqNanos))
+ inform("time spent ref scanning : " + showRelTyper(isReferencedNanos))
+ inform("micros by tree node : " + showCounts(microsByType))
+ inform("#visits by tree node : " + showCounts(visitsByType))
val average = new ClassCounts
for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
- inform("avg micros by tree node : "+showCounts(average))
- inform("time spent in <:< : "+showRelTyper(subtypeNanos))
- inform("time spent in findmember : "+showRelTyper(findMemberNanos))
- inform("time spent in asSeenFrom : "+showRelTyper(asSeenFromNanos))
- inform("#implicit searches : " + implicitSearchCount)
+ inform("avg micros by tree node : " + showCounts(average))
+ inform("time spent in <:< : " + showRelTyper(subtypeNanos))
+ inform("time spent in findmember : " + showRelTyper(findMemberNanos))
+ inform("time spent in asSeenFrom : " + showRelTyper(asSeenFromNanos))
+ inform("#implicit searches : " + implicitSearchCount)
inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
- inform("#implicit improves tests : " + improvesCount)
- inform("#implicit improves cached: " + improvesCachedCount)
- inform("#implicit inscope hits : " + inscopeImplicitHits)
- inform("#implicit oftype hits : " + oftypeImplicitHits)
+ inform("#implicit improves tests : " + improvesCount)
+ inform("#implicit improves cached : " + improvesCachedCount)
+ inform("#implicit inscope hits : " + inscopeImplicitHits)
+ inform("#implicit oftype hits : " + oftypeImplicitHits)
+ inform("#macro expansions : " + macroExpandCount)
+ inform("#time spent in macroExpand : " + showRelTyper(macroExpandNanos))
}
if (ctr1 != null) inform("#ctr1 : " + ctr1)
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 99c54ce58c..5b8ebde308 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -467,6 +467,19 @@ object Array extends FallbackArrayBuilding {
* @version 1.0
* @see [[http://www.scala-lang.org/docu/files/collections-api/collections_38.html#anchor "The Scala 2.8 Collections' API"]]
* section on `Array` by Martin Odersky for more information.
+ * @define coll array
+ * @define Coll Array
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is either `Array[B]` if a ClassManifest is available for B or `ArraySeq[B]` otherwise.
+ * @define zipthatinfo $thatinfo
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
+ * representation type `Repr` and the new element type `B`.
*/
final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable {
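For context, these `@define` entries are Scaladoc variables: inherited comments that mention `$coll`, `$thatinfo`, etc. get the array-specific wording substituted in when the docs are generated. A hedged illustration of the mechanism (not text from this commit):

  /** Builds a new $coll by applying a function to every element of this $coll.
   *  @param bf $bfinfo
   */

With `@define coll array` in scope, `$coll` renders as "array" in the generated page.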
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 79ceff328e..a58297d7d4 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -82,6 +82,17 @@ object Option {
* @define option [[scala.Option]]
* @define p `p`
* @define f `f`
+ * @define coll option
+ * @define Coll Option
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatinfo the class of the returned collection. In the standard library configuration, `That` is `Iterable[B]`
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
+ * representation type `Repr` and the new element type `B`.
*/
sealed abstract class Option[+A] extends Product with Serializable {
self =>
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index b1befca4fa..37ab564c3c 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -23,7 +23,7 @@ case class Tuple2[@specialized(Int, Long, Double, Char, Boolean, AnyRef) +T1, @s
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
-
+
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
* second element is the first element of this Tuple.
@@ -54,6 +54,16 @@ case class Tuple2[@specialized(Int, Long, Double, Char, Boolean, AnyRef) +T1, @s
def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TLike[El1, Repr1], w2: T2 => ILike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
= new Zipped[Repr1, El1, Repr2, El2](_1, _2)
+ /**
+ * @define coll zipped
+ * @define Coll Zipped
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ */
class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TLike[El1, Repr1], coll2: ILike[El2, Repr2]) { // coll2: ILike for filter
def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
val b = cbf(coll1.repr)
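As a usage reminder for the `Zipped` class documented above (the long-standing `zipped` API, not something new in this commit), e.g. in the REPL:

  val xs = List(1, 2, 3)
  val ys = List(10, 20, 30)
  (xs, ys).zipped.map(_ + _)   // List(11, 22, 33)
  (xs, ys).zipped.map(_ * _)   // List(10, 40, 90)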
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 0d5399308b..cd5ee23757 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -24,7 +24,7 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-
+
@deprecated("Use `zipped` instead.", "2.9.0")
def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
@@ -53,6 +53,17 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
w3: T3 => ILike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
= new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
+ /**
+ * @define coll zipped
+ * @define Coll Zipped
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatInfo The class of the returned collection.
+ */
class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TLike[El1, Repr1],
coll2: ILike[El2, Repr2],
coll3: ILike[El3, Repr3]) {
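The three-element counterpart documented above works the same way; a quick usage line (again the existing `zipped` API, not new here):

  (List(1, 2), List(3, 4), List(5, 6)).zipped.map(_ + _ + _)   // List(9, 12)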
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index c6da343f3a..fafd7fd238 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -36,34 +36,42 @@ abstract class ConcurrentPackageObject {
case _ => true
}
- private[concurrent] def resolve[T](source: Either[Throwable, T]): Either[Throwable, T] = source match {
- case Left(t: scala.runtime.NonLocalReturnControl[_]) => Right(t.value.asInstanceOf[T])
- case Left(t: scala.util.control.ControlThrowable) => Left(new ExecutionException("Boxed ControlThrowable", t))
- case Left(t: InterruptedException) => Left(new ExecutionException("Boxed InterruptedException", t))
- case Left(e: Error) => Left(new ExecutionException("Boxed Error", e))
- case _ => source
+ private[concurrent] def resolveEither[T](source: Either[Throwable, T]): Either[Throwable, T] = source match {
+ case Left(t) => resolver(t)
+ case _ => source
}
- private[concurrent] def resolver[T] =
- resolverFunction.asInstanceOf[PartialFunction[Throwable, Either[Throwable, T]]]
-
+ private[concurrent] def resolver[T](throwable: Throwable): Either[Throwable, T] = throwable match {
+ case t: scala.runtime.NonLocalReturnControl[_] => Right(t.value.asInstanceOf[T])
+ case t: scala.util.control.ControlThrowable => Left(new ExecutionException("Boxed ControlThrowable", t))
+ case t: InterruptedException => Left(new ExecutionException("Boxed InterruptedException", t))
+ case e: Error => Left(new ExecutionException("Boxed Error", e))
+ case t => Left(t)
+ }
+
/* concurrency constructs */
+ /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ *
+ * The result becomes available once the asynchronous computation is completed.
+ *
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
+ * @param execctx the execution context on which the future is run
+ * @return the `Future` holding the result of the computation
+ */
def future[T](body: =>T)(implicit execctx: ExecutionContext = defaultExecutionContext): Future[T] =
Future[T](body)
+ /** Creates a promise object which can be completed with a value.
+ *
+ * @tparam T the type of the value in the promise
+ * @param execctx the execution context on which the promise is created
+ * @return the newly created `Promise` object
+ */
def promise[T]()(implicit execctx: ExecutionContext = defaultExecutionContext): Promise[T] =
Promise[T]()
- /** Wraps a block of code into an awaitable object. */
- def body2awaitable[T](body: =>T) = new Awaitable[T] {
- def ready(atMost: Duration)(implicit permit: CanAwait) = {
- body
- this
- }
- def result(atMost: Duration)(implicit permit: CanAwait) = body
- }
-
/** Used to block on a piece of code which potentially blocks.
*
* @param body A piece of code which contains potentially blocking or long running calls.
@@ -74,7 +82,7 @@ abstract class ConcurrentPackageObject {
* - TimeoutException - in the case that the blockable object timed out
*/
def blocking[T](body: =>T): T =
- blocking(body2awaitable(body), Duration.fromNanos(0))
+ blocking(impl.Future.body2awaitable(body), Duration.fromNanos(0))
/** Blocks on an awaitable object.
*
@@ -100,11 +108,11 @@ private[concurrent] object ConcurrentPackageObject {
// compiling a subset of sources; it seems that the wildcard is not
// properly handled, and you get messages like "type _$1 defined twice".
// This is consistent with other package object breakdowns.
- private val resolverFunction: PartialFunction[Throwable, Either[Throwable, _]] = {
- case t: scala.runtime.NonLocalReturnControl[_] => Right(t.value)
- case t: scala.util.control.ControlThrowable => Left(new ExecutionException("Boxed ControlThrowable", t))
- case t: InterruptedException => Left(new ExecutionException("Boxed InterruptedException", t))
- case e: Error => Left(new ExecutionException("Boxed Error", e))
- case t => Left(t)
- }
+ // private val resolverFunction: PartialFunction[Throwable, Either[Throwable, _]] = {
+ // case t: scala.runtime.NonLocalReturnControl[_] => Right(t.value)
+ // case t: scala.util.control.ControlThrowable => Left(new ExecutionException("Boxed ControlThrowable", t))
+ // case t: InterruptedException => Left(new ExecutionException("Boxed InterruptedException", t))
+ // case e: Error => Left(new ExecutionException("Boxed Error", e))
+ // case t => Left(t)
+ // }
}
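For orientation, a minimal usage sketch of the package-level helpers documented above, written against the signatures shown in this hunk (this pre-2.10 SIP-14 API was still in flux, so treat it as a sketch rather than the final surface):

  import scala.concurrent._

  // relies on the defaultExecutionContext default argument shown above
  val f: Future[Int]     = future { 21 * 2 }
  val p: Promise[String] = promise[String]()

  // wrap a potentially blocking call so the execution context can compensate
  val r: String = blocking { Thread.sleep(1000); "done" }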
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index e1d4276396..3f62f58bf8 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -20,19 +20,22 @@ import collection._
trait ExecutionContext {
-
+
+ /** Runs a block of code on this execution context.
+ */
def execute(runnable: Runnable): Unit
-
- def execute[U](body: () => U): Unit
-
+
+ /** Used internally by the framework - blocks execution for at most `atMost` time while waiting
+ * for an `awaitable` object to become ready.
+ *
+ * Clients should use `scala.concurrent.blocking` instead.
+ */
def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T
+ /** Reports that an asynchronous computation failed.
+ */
def reportFailure(t: Throwable): Unit
-
- /* implementations follow */
-
- private implicit val executionContext = this
-
+
}
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 01ce902c79..0d76c23c25 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -513,6 +513,15 @@ trait Future[+T] extends Awaitable[T] {
*/
object Future {
+ /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ *
+ * The result becomes available once the asynchronous computation is completed.
+ *
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
+ * @param executor the execution context on which the future is run
+ * @return the `Future` holding the result of the computation
+ */
def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = impl.Future(body)
import scala.collection.mutable.Builder
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 8f2bce5d1a..cd22a55ce7 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -107,15 +107,27 @@ trait Promise[T] {
object Promise {
- /** Creates a new promise.
+ /** Creates a promise object which can be completed with a value.
+ *
+ * @tparam T the type of the value in the promise
+ * @param executor the execution context on which the promise is created
+ * @return the newly created `Promise` object
*/
def apply[T]()(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.DefaultPromise[T]()
- /** Creates an already completed Promise with the specified exception
+ /** Creates an already completed Promise with the specified exception.
+ *
+ * @tparam T the type of the value in the promise
+ * @param executor the execution context on which the promise is created
+ * @return the newly created `Promise` object
*/
def failed[T](exception: Throwable)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Left(exception))
- /** Creates an already completed Promise with the specified result
+ /** Creates an already completed Promise with the specified result.
+ *
+ * @tparam T the type of the value in the promise
+ * @param executor the execution context on which the promise is created
+ * @return the newly created `Promise` object
*/
def successful[T](result: T)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Right(result))
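A quick sketch of the factories documented above. The `defaultExecutionContext` used for the implicit is an assumption: it stands in for the package-level default referenced by the signatures earlier in this commit, and any ExecutionContext in scope would do:

  import scala.concurrent._

  implicit val ec: ExecutionContext = defaultExecutionContext          // assumed package-level default

  val done: Promise[Int] = Promise.successful(42)                      // already completed with Right(42)
  val oops: Promise[Int] = Promise.failed(new RuntimeException("boom")) // already completed with Left(...)
  val open: Promise[Int] = Promise[Int]()                              // to be completed later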
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index c5062267dc..c308a59297 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -12,7 +12,7 @@ package scala.concurrent.impl
import java.util.concurrent.{Callable, Executor, ExecutorService, Executors, ThreadFactory}
import scala.concurrent.forkjoin._
-import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable}
+import scala.concurrent.{ExecutionContext, resolver, Awaitable}
import scala.concurrent.util.{ Duration }
@@ -56,24 +56,40 @@ private[scala] class ExecutionContextImpl(es: AnyRef) extends ExecutionContext w
def execute(runnable: Runnable): Unit = executorService match {
case fj: ForkJoinPool =>
- if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
- val fjtask = ForkJoinTask.adapt(runnable)
- fjtask.fork
- } else {
- fj.execute(runnable)
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj =>
+ val fjtask = runnable match {
+ case fjt: ForkJoinTask[_] => fjt
+ case _ => ForkJoinTask.adapt(runnable)
+ }
+ fjtask.fork
+ case _ =>
+ fj.execute(runnable)
}
case executor: Executor =>
executor execute runnable
}
- def execute[U](body: () => U): Unit = execute(new Runnable {
- def run() = body()
- })
-
def internalBlockingCall[T](awaitable: Awaitable[T], atMost: Duration): T = {
Future.releaseStack(this)
- awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence)
+ executorService match {
+ case fj: ForkJoinPool =>
+ var result: T = null.asInstanceOf[T]
+ val managedBlocker = new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ def block() = {
+ result = awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence)
+ isdone = true
+ true
+ }
+ def isReleasable = isdone
+ }
+ ForkJoinPool.managedBlock(managedBlocker)
+ result
+ case _ =>
+ awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence)
+ }
}
def reportFailure(t: Throwable) = t match {
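The `ManagedBlocker` dance above is the standard fork/join idiom for blocking inside a pool without starving it. A standalone sketch of the same pattern, using `java.util.concurrent.ForkJoinPool` (Java 7+; the compiler sources use the bundled `scala.concurrent.forkjoin` mirror of this API):

  import java.util.concurrent.ForkJoinPool

  def blockOn[T](body: => T): T = {
    var result: T = null.asInstanceOf[T]
    val blocker = new ForkJoinPool.ManagedBlocker {
      @volatile var done = false
      def block(): Boolean = { result = body; done = true; true }  // true: no further blocking needed
      def isReleasable: Boolean = done
    }
    ForkJoinPool.managedBlock(blocker)   // lets the pool add a spare thread while this one blocks
    result
  }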
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 615ab061a5..a3c8ed3095 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -10,9 +10,11 @@ package scala.concurrent.impl
-import scala.concurrent.{Awaitable, ExecutionContext}
+import scala.concurrent.util.Duration
+import scala.concurrent.{Awaitable, ExecutionContext, CanAwait}
import scala.collection.mutable.Stack
+
private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] {
implicit def executor: ExecutionContext
@@ -54,6 +56,15 @@ object Future {
classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
)
+ /** Wraps a block of code into an awaitable object. */
+ private[concurrent] def body2awaitable[T](body: =>T) = new Awaitable[T] {
+ def ready(atMost: Duration)(implicit permit: CanAwait) = {
+ body
+ this
+ }
+ def result(atMost: Duration)(implicit permit: CanAwait) = body
+ }
+
def boxedType(c: Class[_]): Class[_] = {
if (c.isPrimitive) toBoxed(c) else c
}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index f7e073cb78..07b6d1f278 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -12,7 +12,7 @@ package scala.concurrent.impl
import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS }
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
-import scala.concurrent.{Awaitable, ExecutionContext, resolve, resolver, blocking, CanAwait, TimeoutException}
+import scala.concurrent.{Awaitable, ExecutionContext, resolveEither, resolver, blocking, CanAwait, TimeoutException}
//import scala.util.continuations._
import scala.concurrent.util.Duration
import scala.util
@@ -126,7 +126,7 @@ object Promise {
value.isDefined
}
- blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), atMost)
+ blocking(Future.body2awaitable(awaitUnsafe(dur2long(atMost))), atMost)
}
def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
@@ -166,7 +166,7 @@ object Promise {
case _ => null
}
}
- tryComplete(resolve(value))
+ tryComplete(resolveEither(value))
} finally {
synchronized { notifyAll() } // notify any blockers from `tryAwait`
}
@@ -220,7 +220,7 @@ object Promise {
*/
final class KeptPromise[T](suppliedValue: Either[Throwable, T])(implicit val executor: ExecutionContext) extends Promise[T] {
- val value = Some(resolve(suppliedValue))
+ val value = Some(resolveEither(suppliedValue))
def tryComplete(value: Either[Throwable, T]): Boolean = false
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala
index 3f7c44945b..15a546de10 100644
--- a/src/library/scala/concurrent/util/Duration.scala
+++ b/src/library/scala/concurrent/util/Duration.scala
@@ -9,51 +9,6 @@ import TimeUnit._
import java.lang.{ Double ⇒ JDouble }
import language.implicitConversions
-object DurationImplicits {
- trait Classifier[C] {
- type R
- def convert(d: FiniteDuration): R
- }
-
- object span
- implicit object spanConvert extends Classifier[span.type] {
- type R = FiniteDuration
- def convert(d: FiniteDuration) = d
- }
-
- object fromNow
- implicit object fromNowConvert extends Classifier[fromNow.type] {
- type R = Deadline
- def convert(d: FiniteDuration) = Deadline.now + d
- }
-
- implicit def intToDurationInt(n: Int) = new DurationInt(n)
- implicit def longToDurationLong(n: Long) = new DurationLong(n)
- implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
-
- implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
- implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
- implicit def durationToPair(d: Duration) = (d.length, d.unit)
-
- /*
- * Avoid reflection based invocation by using non-duck type
- */
- class IntMult(i: Int) {
- def *(d: Duration) = d * i
- }
- implicit def intMult(i: Int) = new IntMult(i)
-
- class LongMult(l: Long) {
- def *(d: Duration) = d * l
- }
- implicit def longMult(l: Long) = new LongMult(l)
-
- class DoubleMult(f: Double) {
- def *(d: Duration) = d * f
- }
- implicit def doubleMult(f: Double) = new DoubleMult(f)
-}
-
case class Deadline private (time: Duration) {
def +(other: Duration): Deadline = copy(time = time + other)
def -(other: Duration): Deadline = copy(time = time - other)
@@ -72,10 +27,7 @@ object Duration {
def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): FiniteDuration = {
- val (mult, timeUnit) = Duration.timeUnit(unit)
- new FiniteDuration(length * mult, timeUnit)
- }
+ def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
/**
* Construct a Duration by parsing a String. In case of a format error, a
@@ -118,11 +70,11 @@ object Duration {
def unapply(s: String): Option[Duration] = s match {
case RE(length, d, h, m, s, ms, mus, ns) ⇒
if (d ne null)
- Some(Duration(JDouble.parseDouble(length) * 86400, SECONDS))
+ Some(Duration(JDouble.parseDouble(length), DAYS))
else if (h ne null)
- Some(Duration(JDouble.parseDouble(length) * 3600, SECONDS))
+ Some(Duration(JDouble.parseDouble(length), HOURS))
else if (m ne null)
- Some(Duration(JDouble.parseDouble(length) * 60, SECONDS))
+ Some(Duration(JDouble.parseDouble(length), MINUTES))
else if (s ne null)
Some(Duration(JDouble.parseDouble(length), SECONDS))
else if (ms ne null)
@@ -143,11 +95,11 @@ object Duration {
def fromNanos(nanos: Long): FiniteDuration = {
if (nanos % 86400000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
+ Duration(nanos / 86400000000000L, DAYS)
+ } else if (nanos % 3600000000000L == 0) {
+ Duration(nanos / 3600000000000L, HOURS)
+ } else if (nanos % 60000000000L == 0) {
+ Duration(nanos / 60000000000L, MINUTES)
} else if (nanos % 1000000000L == 0) {
Duration(nanos / 1000000000L, SECONDS)
} else if (nanos % 1000000L == 0) {
@@ -162,14 +114,14 @@ object Duration {
/**
* Parse TimeUnit from string representation.
*/
- protected[util] def timeUnit(unit: String): (Long, TimeUnit) = unit.toLowerCase match {
- case "d" | "day" | "days" ⇒ (86400, SECONDS)
- case "h" | "hour" | "hours" ⇒ (3600, SECONDS)
- case "min" | "minute" | "minutes" ⇒ (60, SECONDS)
- case "s" | "sec" | "second" | "seconds" ⇒ (1, SECONDS)
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ (1, MILLISECONDS)
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ (1, MICROSECONDS)
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ (1, NANOSECONDS)
+ protected[util] def timeUnit(unit: String): TimeUnit = unit.toLowerCase match {
+ case "d" | "day" | "days" ⇒ DAYS
+ case "h" | "hour" | "hours" ⇒ HOURS
+ case "min" | "minute" | "minutes" ⇒ MINUTES
+ case "s" | "sec" | "second" | "seconds" ⇒ SECONDS
+ case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ MILLISECONDS
+ case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ MICROSECONDS
+ case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ NANOSECONDS
}
val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS)
@@ -329,13 +281,9 @@ object FiniteDuration {
def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
}
- def apply(length: Long, unit: TimeUnit) =
- new FiniteDuration(length, unit)
+ def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit)
- def apply(length: Long, unit: String) = {
- val (mult, timeUnit) = Duration.timeUnit(unit)
- new FiniteDuration(length * mult, timeUnit)
- }
+ def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
}
@@ -352,6 +300,12 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
override def toString = this match {
+ case Duration(1, DAYS) ⇒ "1 day"
+ case Duration(x, DAYS) ⇒ x + " days"
+ case Duration(1, HOURS) ⇒ "1 hour"
+ case Duration(x, HOURS) ⇒ x + " hours"
+ case Duration(1, MINUTES) ⇒ "1 minute"
+ case Duration(x, MINUTES) ⇒ x + " minutes"
case Duration(1, SECONDS) ⇒ "1 second"
case Duration(x, SECONDS) ⇒ x + " seconds"
case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
@@ -405,7 +359,7 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
}
class DurationInt(n: Int) {
- import DurationImplicits.Classifier
+ import duration.Classifier
def nanoseconds = Duration(n, NANOSECONDS)
def nanos = Duration(n, NANOSECONDS)
@@ -425,14 +379,14 @@ class DurationInt(n: Int) {
def seconds = Duration(n, SECONDS)
def second = Duration(n, SECONDS)
- def minutes = Duration(n * 60, SECONDS)
- def minute = Duration(n * 60, SECONDS)
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
- def hours = Duration(n * 3600, SECONDS)
- def hour = Duration(n * 3600, SECONDS)
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
- def days = Duration(n * 86400, SECONDS)
- def day = Duration(n * 86400, SECONDS)
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
@@ -452,18 +406,18 @@ class DurationInt(n: Int) {
def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
}
class DurationLong(n: Long) {
- import DurationImplicits.Classifier
+ import duration.Classifier
def nanoseconds = Duration(n, NANOSECONDS)
def nanos = Duration(n, NANOSECONDS)
@@ -483,14 +437,14 @@ class DurationLong(n: Long) {
def seconds = Duration(n, SECONDS)
def second = Duration(n, SECONDS)
- def minutes = Duration(n * 60, SECONDS)
- def minute = Duration(n * 60, SECONDS)
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
- def hours = Duration(n * 3600, SECONDS)
- def hour = Duration(n * 3600, SECONDS)
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
- def days = Duration(n * 86400, SECONDS)
- def day = Duration(n * 86400, SECONDS)
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
@@ -510,18 +464,18 @@ class DurationLong(n: Long) {
def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
}
class DurationDouble(d: Double) {
- import DurationImplicits.Classifier
+ import duration.Classifier
def nanoseconds = Duration(d, NANOSECONDS)
def nanos = Duration(d, NANOSECONDS)
@@ -541,14 +495,14 @@ class DurationDouble(d: Double) {
def seconds = Duration(d, SECONDS)
def second = Duration(d, SECONDS)
- def minutes = Duration(d * 60, SECONDS)
- def minute = Duration(d * 60, SECONDS)
+ def minutes = Duration(d, MINUTES)
+ def minute = Duration(d, MINUTES)
- def hours = Duration(d * 3600, SECONDS)
- def hour = Duration(d * 3600, SECONDS)
+ def hours = Duration(d, HOURS)
+ def hour = Duration(d, HOURS)
- def days = Duration(d * 86400, SECONDS)
- def day = Duration(d * 86400, SECONDS)
+ def days = Duration(d, DAYS)
+ def day = Duration(d, DAYS)
def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
@@ -568,12 +522,12 @@ class DurationDouble(d: Double) {
def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
- def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
- def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
- def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
- def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
- def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
- def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
}
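
The hunks above stop collapsing days, hours and minutes into SECONDS: timeUnit now returns a TimeUnit directly, fromNanos picks the coarsest unit that divides the nanosecond count evenly, and toString gained the DAYS/HOURS/MINUTES cases. A small sketch of the resulting behaviour (the values in the comments follow from the new code; this is an illustration, not a test from the patch):

  import scala.concurrent.util.Duration

  // "2 days" now becomes FiniteDuration(2, DAYS) rather than FiniteDuration(172800, SECONDS)
  val twoDays = Duration(2, "days")
  println(twoDays)                                      // prints "2 days"

  // fromNanos normalizes to the coarsest evenly dividing unit
  val anHour = Duration.fromNanos(3600L * 1000000000L)
  println(anHour)                                       // prints "1 hour" (was "3600 seconds")
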
diff --git a/src/library/scala/concurrent/util/duration/Classifier.scala b/src/library/scala/concurrent/util/duration/Classifier.scala
new file mode 100644
index 0000000000..10faf0a5ce
--- /dev/null
+++ b/src/library/scala/concurrent/util/duration/Classifier.scala
@@ -0,0 +1,9 @@
+package scala.concurrent.util.duration
+
+import scala.concurrent.util.{ FiniteDuration }
+
+trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+}
+
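
The abstract type member R is what lets one method name return different result types depending on the marker object passed in; the DurationInt/DurationLong/DurationDouble methods above resolve the implicit evidence and return CC#R (FiniteDuration for span, Deadline for fromNow, both wired up in the package object below). A self-contained toy with hypothetical names (Conv, asString, asLength), kept outside the duration code, to show the mechanism in isolation:

  trait Conv[C] { type R; def convert(s: String): R }

  object asString
  implicit object asStringConv extends Conv[asString.type] {
    type R = String
    def convert(s: String) = s
  }

  object asLength
  implicit object asLengthConv extends Conv[asLength.type] {
    type R = Int
    def convert(s: String) = s.length
  }

  class Words(s: String) {
    // the marker value only selects the implicit; the evidence fixes the result type CC#R
    def as[C, CC <: Conv[C]](c: C)(implicit ev: CC): CC#R = ev.convert(s)
  }

  val w = new Words("hello")
  val a: String = w.as(asString)   // R = String
  val b: Int    = w.as(asLength)   // R = Int
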
diff --git a/src/library/scala/concurrent/util/duration/IntMult.scala b/src/library/scala/concurrent/util/duration/IntMult.scala
new file mode 100644
index 0000000000..94c58fb8c2
--- /dev/null
+++ b/src/library/scala/concurrent/util/duration/IntMult.scala
@@ -0,0 +1,18 @@
+package scala.concurrent.util.duration
+
+import scala.concurrent.util.{ Duration }
+
+/*
+ * Avoid reflection based invocation by using non-duck type
+ */
+protected[duration] class IntMult(i: Int) {
+ def *(d: Duration) = d * i
+}
+
+protected[duration] class LongMult(i: Long) {
+ def *(d: Duration) = d * i
+}
+
+protected[duration] class DoubleMult(f: Double) {
+ def *(d: Duration) = d * f
+}
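
The comment records the design choice: writing these conversions against a structural (duck) type such as { def *(d: Duration): Duration } would route every n * duration through reflection, so the patch keeps the three concrete wrapper classes instead. With the implicits from the package object below in scope, the multiplication is an ordinary method call:

  import java.util.concurrent.TimeUnit._
  import scala.concurrent.util.Duration
  import scala.concurrent.util.duration._   // intMult / longMult / doubleMult

  val d = 3 * Duration(2, SECONDS)   // IntMult.* delegates to d * 3, i.e. 6 seconds
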
diff --git a/src/library/scala/concurrent/util/duration/package.scala b/src/library/scala/concurrent/util/duration/package.scala
new file mode 100644
index 0000000000..25625054ee
--- /dev/null
+++ b/src/library/scala/concurrent/util/duration/package.scala
@@ -0,0 +1,30 @@
+package scala.concurrent.util
+
+import java.util.concurrent.TimeUnit
+
+package object duration {
+
+ object span
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ object fromNow
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+ implicit def intToDurationInt(n: Int) = new DurationInt(n)
+ implicit def longToDurationLong(n: Long) = new DurationLong(n)
+ implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration) = (d.length, d.unit)
+
+ implicit def intMult(i: Int) = new IntMult(i)
+ implicit def longMult(l: Long) = new LongMult(l)
+ implicit def doubleMult(f: Double) = new DoubleMult(f)
+} \ No newline at end of file
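
Taken together, the package object restores the surface of the deleted DurationImplicits under scala.concurrent.util.duration. A usage sketch using only what the diff defines (the types in the annotations follow from the conversions above):

  import java.util.concurrent.TimeUnit
  import scala.concurrent.util.{ Deadline, Duration, FiniteDuration }
  import scala.concurrent.util.duration._

  val a: FiniteDuration = 5.seconds                         // intToDurationInt + DurationInt.seconds
  val b: Duration       = (500L, TimeUnit.MILLISECONDS)     // pairLongToDuration
  val d: FiniteDuration = 3 minutes span                    // spanConvert, R = FiniteDuration
  val e: Deadline       = 10 seconds fromNow                // fromNowConvert, R = Deadline
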
diff --git a/src/library/scala/reflect/ArrayTags.scala b/src/library/scala/reflect/ArrayTag.scala
index 8df7fe5f4e..8df7fe5f4e 100644
--- a/src/library/scala/reflect/ArrayTags.scala
+++ b/src/library/scala/reflect/ArrayTag.scala
diff --git a/src/library/scala/reflect/ClassTags.scala b/src/library/scala/reflect/ClassTag.scala
index 7138837f0d..7138837f0d 100644
--- a/src/library/scala/reflect/ClassTags.scala
+++ b/src/library/scala/reflect/ClassTag.scala
diff --git a/src/library/scala/reflect/api/Attachments.scala b/src/library/scala/reflect/api/Attachment.scala
index dfd362ebe0..dfd362ebe0 100644
--- a/src/library/scala/reflect/api/Attachments.scala
+++ b/src/library/scala/reflect/api/Attachment.scala
diff --git a/src/library/scala/reflect/api/StandardNames.scala b/src/library/scala/reflect/api/StandardNames.scala
index bfc165f613..d2110ede75 100644
--- a/src/library/scala/reflect/api/StandardNames.scala
+++ b/src/library/scala/reflect/api/StandardNames.scala
@@ -93,18 +93,13 @@ trait StandardNames { self: Universe =>
val ZOR: TermName
// [Eugene] this doesn't compile. why?!
-// val UNARY_~: TermName
-// val UNARY_+: TermName
-// val UNARY_-: TermName
-// val UNARY_!: TermName
- val UNARY_TILDE: TermName
- val UNARY_PLUS: TermName
- val UNARY_MINUS: TermName
- val UNARY_NOT: TermName
+ val UNARY_~ : TermName
+ val UNARY_+ : TermName
+ val UNARY_- : TermName
+ val UNARY_! : TermName
// [Eugene] this doesn't compile. why?!
-// val ???: TermName
- val QQQ: TermName
+ val ??? : TermName
val MODULE_SUFFIX_NAME: TermName
val NAME_JOIN_NAME: TermName
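
The replacement lines also answer the "this doesn't compile. why?!" comments: an identifier ending in operator characters absorbs an immediately following ':' (itself an operator character), so UNARY_~: and ???: lex as single identifiers and the declarations fail to parse; a space before the colon turns it back into a type ascription. A minimal illustration with a hypothetical name:

  // `x_!:` is lexed as one identifier, so `val x_!: Int = 1` does not parse as intended.
  // With a space the colon is an ordinary type ascription again:
  val x_! : Int = 1
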
diff --git a/src/library/scala/reflect/makro/internal/typeTagImpl.scala b/src/library/scala/reflect/makro/internal/Utils.scala
index db658fd637..db658fd637 100644
--- a/src/library/scala/reflect/makro/internal/typeTagImpl.scala
+++ b/src/library/scala/reflect/makro/internal/Utils.scala
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
index 2eb026ceee..142f2baea5 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala
@@ -21,10 +21,10 @@ import scala.tools.nsc.reporters.ConsoleReporter
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
- object Test extends ScaladocModelTest {
+ object Test extends ScaladocModelTest {
- def code = """ ... """
- def scaladocSettings = ""
+ override def code = """ ... """ // or override def resourceFile = "<file>.scala" (from test/scaladoc/resources)
+ def scaladocSettings = " ... "
def testModel(rootPackage: Package) = {
// get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
import access._
@@ -39,10 +39,22 @@ abstract class ScaladocModelTest extends DirectTest {
/** Override this to give scaladoc command line parameters */
def scaladocSettings: String
-
+
/** Override this to test the model */
def testModel(root: Package): Unit
+ /** Override to feed a file in resources to scaladoc*/
+ def resourceFile: String = null
+
+ /** Override to feed code into scaladoc */
+ override def code =
+ if (resourceFile ne null)
+ io.File(resourcePath + "/" + resourceFile).slurp()
+ else
+ sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!")
+
+ def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources")
+
// Implementation follows:
override def extraSettings: String = "-usejavacp"
@@ -50,15 +62,15 @@ abstract class ScaladocModelTest extends DirectTest {
// redirect err to out, for logging
val prevErr = System.err
System.setErr(System.out)
-
+
try {
// 1 - compile with scaladoc and get the model out
- val args = scaladocSettings.split(" ")
- val universe = model(args:_*).getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
+ val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
// 2 - check the model generated
testModel(universe.rootPackage)
+ println("Done.")
} catch {
- case e =>
+ case e =>
println(e)
e.printStackTrace
}
@@ -66,51 +78,46 @@ abstract class ScaladocModelTest extends DirectTest {
System.setErr(prevErr)
}
+ private[this] var settings: Settings = null
+
// create a new scaladoc compiler
- def newDocFactory(args: String*): DocFactory = {
- val settings = new Settings(_ => ())
- val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ def newDocFactory: DocFactory = {
+ settings = new Settings(_ => ())
+ settings.reportModel = false // yaay, no more "model contains X documentable templates"!
+ val args = extraSettings + " " + scaladocSettings
+ val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
val docFact = new DocFactory(new ConsoleReporter(settings), settings)
docFact
}
// compile with scaladoc and output the result
- def model(args: String*): Option[Universe] = newDocFactory(args: _*).makeUniverse(Right(code))
+ def model: Option[Universe] = newDocFactory.makeUniverse(Right(code))
// so we don't get the newSettings warning
- override def isDebug = false
+ override def isDebug = false
// finally, enable easy navigation inside the entities
object access {
- // Make it easy to access things
class TemplateAccess(tpl: DocTemplateEntity) {
-
def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")")
- def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case c: Class => List(c)})
+ def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: Class => c})
def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")")
- def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case t: Trait => List(t)})
+ def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: Trait => t})
def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")")
- def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).flatMap({ case o: Object => List(o)})
+ def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: Object => o})
def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")")
def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name)
-
+
def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")")
def _values(name: String): List[Val] = tpl.values.filter(_.name == name)
- def getTheFirst[T](list: List[T], expl: String): T = {
- if (list.length == 1)
- list.head
- else if (list.length == 0)
- sys.error("Error getting " + expl + ": No such element. All elements in list: [" + list.mkString(", ") + "]")
- else
- sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
- "All elements in list: [" + list.mkString(", ") + "]")
- }
+ def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")")
+ def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name)
}
class PackageAccess(pack: Package) extends TemplateAccess(pack) {
@@ -118,7 +125,22 @@ abstract class ScaladocModelTest extends DirectTest {
def _packages(name: String): List[Package] = pack.packages.filter(_.name == name)
}
+ class MemberAccess(mbrs: WithMembers) {
+ def _member(name: String): MemberEntity = getTheFirst(_members(name), mbrs.toString + ".member(" + name + ")")
+ def _members(name: String): List[MemberEntity] = mbrs.members.filter(_.name == name)
+ }
+
+ type WithMembers = { def members: List[MemberEntity]; def toString: String } /* DocTemplates and ImplicitConversions */
+
implicit def templateAccess(tpl: DocTemplateEntity) = new TemplateAccess(tpl)
implicit def packageAccess(pack: Package) = new PackageAccess(pack)
+ implicit def membersAccess(mbrs: WithMembers) = new MemberAccess(mbrs)
+
+ def getTheFirst[T](list: List[T], expl: String): T = list.length match {
+ case 1 => list.head
+ case 0 => sys.error("Error getting " + expl + ": No such element.")
+ case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
+ "All elements in list: [" + list.mkString(", ") + "]")
+ }
}
}
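
After this change a model test names its input either by overriding code or by pointing resourceFile at test/scaladoc/resources, and the access helpers now also expose implicit conversions via _conversion(s) and their members via _member(s). A sketch of a test against the implicits-base-res.scala resource added further down (the -implicits flag and the navigation path are assumptions based on that resource, not copied from an actual test in the patch):

  import scala.tools.nsc.doc.model._
  import scala.tools.partest.ScaladocModelTest

  object Test extends ScaladocModelTest {
    override def resourceFile = "implicits-base-res.scala"  // from test/scaladoc/resources
    def scaladocSettings = "-implicits"                     // assumed scaladoc flag

    def testModel(rootPackage: Package) = {
      import access._
      val base = rootPackage._package("scala")._package("test")._package("scaladoc")
                            ._package("implicits")._package("base")
      val A = base._class("A")
      A._method("convToPimpedA")   // defined directly on class A in the resource file
    }
  }
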
diff --git a/test/disabled/jvm/concurrent-future.check b/test/disabled/jvm/concurrent-future.check
index c55e824818..715ac90ce7 100644
--- a/test/disabled/jvm/concurrent-future.check
+++ b/test/disabled/jvm/concurrent-future.check
@@ -12,5 +12,3 @@ test6: hai world
test6: kthxbye
test7: hai world
test7: kthxbye
-test8: hai world
-test8: im in yr loop
diff --git a/test/disabled/jvm/concurrent-future.scala b/test/disabled/jvm/concurrent-future.scala
index b44d054219..eda05428c8 100644
--- a/test/disabled/jvm/concurrent-future.scala
+++ b/test/disabled/jvm/concurrent-future.scala
@@ -90,25 +90,25 @@ object Test extends App {
}
}
- def testOnFailureWhenFutureTimeoutException(): Unit = once {
- done =>
- val f = future[Unit] {
- output(8, "hai world")
- throw new FutureTimeoutException(null)
- }
- f onSuccess { case _ =>
- output(8, "onoes")
- done()
- }
- f onFailure {
- case e: FutureTimeoutException =>
- output(8, "im in yr loop")
- done()
- case other =>
- output(8, "onoes: " + other)
- done()
- }
- }
+ // def testOnFailureWhenFutureTimeoutException(): Unit = once {
+ // done =>
+ // val f = future[Unit] {
+ // output(8, "hai world")
+ // throw new FutureTimeoutException(null)
+ // }
+ // f onSuccess { case _ =>
+ // output(8, "onoes")
+ // done()
+ // }
+ // f onFailure {
+ // case e: FutureTimeoutException =>
+ // output(8, "im in yr loop")
+ // done()
+ // case other =>
+ // output(8, "onoes: " + other)
+ // done()
+ // }
+ // }
testOnSuccess()
testOnSuccessWhenCompleted()
@@ -117,6 +117,6 @@ object Test extends App {
testOnFailureWhenSpecialThrowable(5, new Error)
testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
testOnFailureWhenSpecialThrowable(7, new InterruptedException)
- testOnFailureWhenFutureTimeoutException()
+ // testOnFailureWhenFutureTimeoutException()
}
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents.check b/test/files/neg/macro-deprecate-dont-touch-backquotedidents.check
index c97be5d9f6..25df9a6a4a 100644
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents.check
+++ b/test/files/neg/macro-deprecate-dont-touch-backquotedidents.check
@@ -1,14 +1,10 @@
-Macros_Package_10.scala:1: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-package `macro`
+macro-deprecate-dont-touch-backquotedidents.scala:37: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+package `macro` {
^
-Macros_Package_10.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-package `macro`.bar
- ^
-Macros_Package_11.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-package `macro`.foo
- ^
-Main.scala:2: error: Unmatched closing brace '}' ignored here
-}
-^
-three warnings found
-one error found
+macro-deprecate-dont-touch-backquotedidents.scala:38: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ package `macro`.bar {
+ ^
+macro-deprecate-dont-touch-backquotedidents.scala:43: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ package `macro`.foo {
+ ^
+three errors found
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents.flags b/test/files/neg/macro-deprecate-dont-touch-backquotedidents.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/macro-deprecate-dont-touch-backquotedidents.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents.scala
new file mode 100644
index 0000000000..dee2f1de3b
--- /dev/null
+++ b/test/files/neg/macro-deprecate-dont-touch-backquotedidents.scala
@@ -0,0 +1,56 @@
+object Test1 {
+ val `macro` = ???
+}
+
+object Test2 {
+ var `macro` = ???
+}
+
+object Test3 {
+ type `macro` = Int
+}
+
+package test4 {
+ class `macro`
+}
+
+object Test5 {
+ class `macro`
+}
+
+package test6 {
+ object `macro`
+}
+
+object Test7 {
+ object `macro`
+}
+
+package test8 {
+ trait `macro`
+}
+
+object Test9 {
+ trait `macro`
+}
+
+package `macro` {
+ package `macro`.bar {
+ }
+}
+
+package foo {
+ package `macro`.foo {
+ }
+}
+
+object Test12 {
+ val Some(`macro`) = Some(42)
+ `macro` match {
+ case `macro` => println(`macro`)
+ }
+}
+
+object Test13 {
+ def `macro` = 2
+} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Bind_12.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Bind_12.scala
deleted file mode 100644
index 97c07b04a0..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Bind_12.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test12 {
- val Some(`macro`) = Some(42)
- `macro` match {
- case `macro` => println(`macro`)
- }
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_4.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_4.scala
deleted file mode 100644
index f0037b5f82..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_4.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package test4
-
-class `macro`
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_5.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_5.scala
deleted file mode 100644
index a6d0903cbb..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Class_5.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test5 {
- class `macro`
-}
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Def_13.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Def_13.scala
deleted file mode 100644
index 6af8e1d65e..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Def_13.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test13 {
- def `macro` = 2
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_6.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_6.scala
deleted file mode 100644
index 29dab017d2..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_6.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package test6
-
-object `macro`
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_7.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_7.scala
deleted file mode 100644
index 6cbcac55ca..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Object_7.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test7 {
- object `macro`
-}
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_10.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_10.scala
deleted file mode 100644
index 4985d6691e..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_10.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package `macro`
-
-package `macro`.bar \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_11.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_11.scala
deleted file mode 100644
index 35ed610637..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Package_11.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package foo
-
-package `macro`.foo
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_8.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_8.scala
deleted file mode 100644
index 7895cf9a43..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_8.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package test8
-
-trait `macro`
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_9.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_9.scala
deleted file mode 100644
index 90ba2207b7..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Trait_9.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test9 {
- trait `macro`
-}
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Type_3.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Type_3.scala
deleted file mode 100644
index 7a2196c9cd..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Type_3.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test3 {
- type `macro` = Int
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Val_1.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Val_1.scala
deleted file mode 100644
index 9ad08b8ba0..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Val_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test1 {
- val `macro` = ???
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Var_2.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Var_2.scala
deleted file mode 100644
index 4fbe152e76..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Macros_Var_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test2 {
- var `macro` = ???
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Main.scala b/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Main.scala
deleted file mode 100644
index f5278d9e7e..0000000000
--- a/test/files/neg/macro-deprecate-dont-touch-backquotedidents/Main.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-object Test extends App
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check
index 5fa1dc84d0..bd685fc7b9 100644
--- a/test/files/neg/macro-deprecate-idents.check
+++ b/test/files/neg/macro-deprecate-idents.check
@@ -1,50 +1,46 @@
-Macros_Bind_12.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- val Some(macro) = Some(42)
- ^
-Macros_Bind_12.scala:4: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- case macro => println(macro)
- ^
-Macros_Class_4.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-class macro
+macro-deprecate-idents.scala:2: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ val macro = ???
^
-Macros_Class_5.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- class macro
- ^
-Macros_Def_13.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- def macro = 2
+macro-deprecate-idents.scala:6: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ var macro = ???
^
-Macros_Object_6.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-object macro
+macro-deprecate-idents.scala:10: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ type macro = Int
^
-Macros_Object_7.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- object macro
- ^
-Macros_Package_10.scala:1: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-package macro
+macro-deprecate-idents.scala:14: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ class macro
^
-Macros_Package_10.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-package macro.bar
+macro-deprecate-idents.scala:18: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ class macro
^
-Macros_Package_11.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-package macro.foo
+macro-deprecate-idents.scala:22: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ object macro
+ ^
+macro-deprecate-idents.scala:26: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ object macro
+ ^
+macro-deprecate-idents.scala:30: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ trait macro
^
-Macros_Trait_8.scala:3: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
-trait macro
- ^
-Macros_Trait_9.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
+macro-deprecate-idents.scala:34: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
trait macro
^
-Macros_Type_3.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- type macro = Int
- ^
-Macros_Val_1.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- val macro = ???
- ^
-Macros_Var_2.scala:2: warning: in future versions of Scala "macro" will be a keyword. consider using a different name.
- var macro = ???
+macro-deprecate-idents.scala:37: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+package macro {
+ ^
+macro-deprecate-idents.scala:38: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ package macro.bar {
+ ^
+macro-deprecate-idents.scala:43: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ package macro.foo {
+ ^
+macro-deprecate-idents.scala:48: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ val Some(macro) = Some(42)
+ ^
+macro-deprecate-idents.scala:50: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ case macro => println(macro)
+ ^
+macro-deprecate-idents.scala:55: error: in future versions of Scala "macro" will be a keyword. consider using a different name.
+ def macro = 2
^
-Main.scala:2: error: Unmatched closing brace '}' ignored here
-}
-^
-15 warnings found
-one error found
+15 errors found
diff --git a/test/files/neg/macro-deprecate-idents.flags b/test/files/neg/macro-deprecate-idents.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/macro-deprecate-idents.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents.scala b/test/files/neg/macro-deprecate-idents.scala
new file mode 100644
index 0000000000..23c398e341
--- /dev/null
+++ b/test/files/neg/macro-deprecate-idents.scala
@@ -0,0 +1,56 @@
+object Test1 {
+ val macro = ???
+}
+
+object Test2 {
+ var macro = ???
+}
+
+object Test3 {
+ type macro = Int
+}
+
+package test4 {
+ class macro
+}
+
+object Test5 {
+ class macro
+}
+
+package test6 {
+ object macro
+}
+
+object Test7 {
+ object macro
+}
+
+package test8 {
+ trait macro
+}
+
+object Test9 {
+ trait macro
+}
+
+package macro {
+ package macro.bar {
+ }
+}
+
+package foo {
+ package macro.foo {
+ }
+}
+
+object Test12 {
+ val Some(macro) = Some(42)
+ macro match {
+ case macro => println(macro)
+ }
+}
+
+object Test13 {
+ def macro = 2
+} \ No newline at end of file
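
The deleted per-file sources further down are folded into two single-file variants of the same shapes: this one, where bare `macro` identifiers only trip the deprecation warning and the new .flags file promotes those warnings to errors via -Xfatal-warnings, and the macro-keyword-* tests, where -Xmacros makes `macro` a real keyword and the parser rejects it outright. The same declaration therefore yields different diagnostics depending on the flags, e.g. (diagnostics quoted from the .check files in this patch):

  object Test13 {
    def macro = 2
  }
  // default flags:     warning: in future versions of Scala "macro" will be a keyword. ...
  // -Xfatal-warnings:  the same message, reported as an error (macro-deprecate-idents.check)
  // -Xmacros:          error: identifier expected but 'macro' found.  (macro-keyword.check)
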
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Def_13.scala b/test/files/neg/macro-deprecate-idents/Macros_Def_13.scala
deleted file mode 100644
index f4e25bfdfc..0000000000
--- a/test/files/neg/macro-deprecate-idents/Macros_Def_13.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test13 {
- def macro = 2
-} \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Main.scala b/test/files/neg/macro-deprecate-idents/Main.scala
deleted file mode 100644
index f5278d9e7e..0000000000
--- a/test/files/neg/macro-deprecate-idents/Main.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-object Test extends App
-} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
index cbd6232073..fa50ac4f73 100644
--- a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
+++ b/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
@@ -1,4 +1,4 @@
object Test extends App {
import Macros._
- foo(42)
+ foo(42, 100)
} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
index cbd6232073..fa50ac4f73 100644
--- a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
+++ b/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
@@ -1,4 +1,4 @@
object Test extends App {
import Macros._
- foo(42)
+ foo(42, 100)
} \ No newline at end of file
diff --git a/test/files/neg/macro-keyword-bind.check b/test/files/neg/macro-keyword-bind.check
new file mode 100644
index 0000000000..1f74cfe5cd
--- /dev/null
+++ b/test/files/neg/macro-keyword-bind.check
@@ -0,0 +1,7 @@
+macro-keyword-bind.scala:2: error: illegal start of simple pattern
+ val Some(macro) = Some(42)
+ ^
+macro-keyword-bind.scala:6: error: ')' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/files/neg/macro-keyword-bind.flags b/test/files/neg/macro-keyword-bind.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-bind.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Bind_12.scala b/test/files/neg/macro-keyword-bind.scala
index a3b1553348..a3b1553348 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Bind_12.scala
+++ b/test/files/neg/macro-keyword-bind.scala
diff --git a/test/files/neg/macro-keyword-class1.check b/test/files/neg/macro-keyword-class1.check
new file mode 100644
index 0000000000..d8983180ef
--- /dev/null
+++ b/test/files/neg/macro-keyword-class1.check
@@ -0,0 +1,4 @@
+macro-keyword-class1.scala:3: error: identifier expected but 'macro' found.
+class macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-class1.flags b/test/files/neg/macro-keyword-class1.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-class1.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Class_4.scala b/test/files/neg/macro-keyword-class1.scala
index 8635d1f4f6..8635d1f4f6 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Class_4.scala
+++ b/test/files/neg/macro-keyword-class1.scala
diff --git a/test/files/neg/macro-keyword-class2.check b/test/files/neg/macro-keyword-class2.check
new file mode 100644
index 0000000000..0e4d11bcc4
--- /dev/null
+++ b/test/files/neg/macro-keyword-class2.check
@@ -0,0 +1,4 @@
+macro-keyword-class2.scala:2: error: identifier expected but 'macro' found.
+ class macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-class2.flags b/test/files/neg/macro-keyword-class2.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-class2.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Class_5.scala b/test/files/neg/macro-keyword-class2.scala
index af24a489d0..af24a489d0 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Class_5.scala
+++ b/test/files/neg/macro-keyword-class2.scala
diff --git a/test/files/neg/macro-keyword-object1.check b/test/files/neg/macro-keyword-object1.check
new file mode 100644
index 0000000000..cfbd06ffd6
--- /dev/null
+++ b/test/files/neg/macro-keyword-object1.check
@@ -0,0 +1,4 @@
+macro-keyword-object1.scala:3: error: identifier expected but 'macro' found.
+object macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-object1.flags b/test/files/neg/macro-keyword-object1.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-object1.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Object_6.scala b/test/files/neg/macro-keyword-object1.scala
index 66eb494e6b..66eb494e6b 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Object_6.scala
+++ b/test/files/neg/macro-keyword-object1.scala
diff --git a/test/files/neg/macro-keyword-object2.check b/test/files/neg/macro-keyword-object2.check
new file mode 100644
index 0000000000..ede31f13e5
--- /dev/null
+++ b/test/files/neg/macro-keyword-object2.check
@@ -0,0 +1,4 @@
+macro-keyword-object2.scala:2: error: identifier expected but 'macro' found.
+ object macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-object2.flags b/test/files/neg/macro-keyword-object2.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-object2.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Object_7.scala b/test/files/neg/macro-keyword-object2.scala
index 6f5b9ceacd..6f5b9ceacd 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Object_7.scala
+++ b/test/files/neg/macro-keyword-object2.scala
diff --git a/test/files/neg/macro-keyword-package1.check b/test/files/neg/macro-keyword-package1.check
new file mode 100644
index 0000000000..22c1e11ded
--- /dev/null
+++ b/test/files/neg/macro-keyword-package1.check
@@ -0,0 +1,4 @@
+macro-keyword-package1.scala:1: error: identifier expected but 'macro' found.
+package macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-package1.flags b/test/files/neg/macro-keyword-package1.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-package1.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Package_10.scala b/test/files/neg/macro-keyword-package1.scala
index 52d3fbabf6..52d3fbabf6 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Package_10.scala
+++ b/test/files/neg/macro-keyword-package1.scala
diff --git a/test/files/neg/macro-keyword-package2.check b/test/files/neg/macro-keyword-package2.check
new file mode 100644
index 0000000000..0cb542a85d
--- /dev/null
+++ b/test/files/neg/macro-keyword-package2.check
@@ -0,0 +1,4 @@
+macro-keyword-package2.scala:3: error: identifier expected but 'macro' found.
+package macro.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-package2.flags b/test/files/neg/macro-keyword-package2.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-package2.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Package_11.scala b/test/files/neg/macro-keyword-package2.scala
index a68ebd935f..a68ebd935f 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Package_11.scala
+++ b/test/files/neg/macro-keyword-package2.scala
diff --git a/test/files/neg/macro-keyword-trait1.check b/test/files/neg/macro-keyword-trait1.check
new file mode 100644
index 0000000000..9586a62e08
--- /dev/null
+++ b/test/files/neg/macro-keyword-trait1.check
@@ -0,0 +1,4 @@
+macro-keyword-trait1.scala:3: error: identifier expected but 'macro' found.
+trait macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-trait1.flags b/test/files/neg/macro-keyword-trait1.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-trait1.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Trait_8.scala b/test/files/neg/macro-keyword-trait1.scala
index e32d4c1385..e32d4c1385 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Trait_8.scala
+++ b/test/files/neg/macro-keyword-trait1.scala
diff --git a/test/files/neg/macro-keyword-trait2.check b/test/files/neg/macro-keyword-trait2.check
new file mode 100644
index 0000000000..40aa764378
--- /dev/null
+++ b/test/files/neg/macro-keyword-trait2.check
@@ -0,0 +1,4 @@
+macro-keyword-trait2.scala:2: error: identifier expected but 'macro' found.
+ trait macro
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-trait2.flags b/test/files/neg/macro-keyword-trait2.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-trait2.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Trait_9.scala b/test/files/neg/macro-keyword-trait2.scala
index 243a54abe6..243a54abe6 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Trait_9.scala
+++ b/test/files/neg/macro-keyword-trait2.scala
diff --git a/test/files/neg/macro-keyword-type.check b/test/files/neg/macro-keyword-type.check
new file mode 100644
index 0000000000..4a7481114c
--- /dev/null
+++ b/test/files/neg/macro-keyword-type.check
@@ -0,0 +1,4 @@
+macro-keyword-type.scala:2: error: identifier expected but 'macro' found.
+ type macro = Int
+ ^
+one error found
diff --git a/test/files/neg/macro-keyword-type.flags b/test/files/neg/macro-keyword-type.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-type.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Type_3.scala b/test/files/neg/macro-keyword-type.scala
index 30e523bcaf..30e523bcaf 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Type_3.scala
+++ b/test/files/neg/macro-keyword-type.scala
diff --git a/test/files/neg/macro-keyword-val.check b/test/files/neg/macro-keyword-val.check
new file mode 100644
index 0000000000..0dc4c030a9
--- /dev/null
+++ b/test/files/neg/macro-keyword-val.check
@@ -0,0 +1,7 @@
+macro-keyword-val.scala:2: error: illegal start of simple pattern
+ val macro = ???
+ ^
+macro-keyword-val.scala:3: error: '=' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/files/neg/macro-keyword-val.flags b/test/files/neg/macro-keyword-val.flags
new file mode 100644
index 0000000000..7fea2ff901
--- /dev/null
+++ b/test/files/neg/macro-keyword-val.flags
@@ -0,0 +1 @@
+-Xmacros \ No newline at end of file
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Val_1.scala b/test/files/neg/macro-keyword-val.scala
index 96f57acb30..96f57acb30 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Val_1.scala
+++ b/test/files/neg/macro-keyword-val.scala
diff --git a/test/files/neg/macro-keyword-var.check b/test/files/neg/macro-keyword-var.check
new file mode 100644
index 0000000000..96d02e0052
--- /dev/null
+++ b/test/files/neg/macro-keyword-var.check
@@ -0,0 +1,7 @@
+macro-keyword-var.scala:2: error: illegal start of simple pattern
+ var macro = ???
+ ^
+macro-keyword-var.scala:3: error: '=' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/files/neg/macro-keyword.flags b/test/files/neg/macro-keyword-var.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/macro-keyword.flags
+++ b/test/files/neg/macro-keyword-var.flags
diff --git a/test/files/neg/macro-deprecate-idents/Macros_Var_2.scala b/test/files/neg/macro-keyword-var.scala
index a79dda6dc2..a79dda6dc2 100644
--- a/test/files/neg/macro-deprecate-idents/Macros_Var_2.scala
+++ b/test/files/neg/macro-keyword-var.scala
diff --git a/test/files/neg/macro-keyword.check b/test/files/neg/macro-keyword.check
deleted file mode 100644
index fd63db951c..0000000000
--- a/test/files/neg/macro-keyword.check
+++ /dev/null
@@ -1,49 +0,0 @@
-Macros_Bind_12.scala:2: error: illegal start of simple pattern
- val Some(macro) = Some(42)
- ^
-Macros_Bind_12.scala:6: error: ')' expected but '}' found.
-}
-^
-Macros_Class_4.scala:3: error: identifier expected but 'macro' found.
-class macro
- ^
-Macros_Class_5.scala:2: error: identifier expected but 'macro' found.
- class macro
- ^
-Macros_Def_13.scala:2: error: identifier expected but 'macro' found.
- def macro = 2
- ^
-Macros_Object_6.scala:3: error: identifier expected but 'macro' found.
-object macro
- ^
-Macros_Object_7.scala:2: error: identifier expected but 'macro' found.
- object macro
- ^
-Macros_Package_10.scala:1: error: identifier expected but 'macro' found.
-package macro
- ^
-Macros_Package_11.scala:3: error: identifier expected but 'macro' found.
-package macro.foo
- ^
-Macros_Trait_8.scala:3: error: identifier expected but 'macro' found.
-trait macro
- ^
-Macros_Trait_9.scala:2: error: identifier expected but 'macro' found.
- trait macro
- ^
-Macros_Type_3.scala:2: error: identifier expected but 'macro' found.
- type macro = Int
- ^
-Macros_Val_1.scala:2: error: illegal start of simple pattern
- val macro = ???
- ^
-Macros_Val_1.scala:3: error: '=' expected but '}' found.
-}
-^
-Macros_Var_2.scala:2: error: illegal start of simple pattern
- var macro = ???
- ^
-Macros_Var_2.scala:3: error: '=' expected but '}' found.
-}
-^
-16 errors found
diff --git a/test/files/neg/macro-keyword/Macros_Bind_12.scala b/test/files/neg/macro-keyword/Macros_Bind_12.scala
deleted file mode 100644
index a3b1553348..0000000000
--- a/test/files/neg/macro-keyword/Macros_Bind_12.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test12 {
- val Some(macro) = Some(42)
- macro match {
- case macro => println(macro)
- }
-} \ No newline at end of file
diff --git a/test/files/neg/macro-keyword/Macros_Class_4.scala b/test/files/neg/macro-keyword/Macros_Class_4.scala
deleted file mode 100644
index 8635d1f4f6..0000000000
--- a/test/files/neg/macro-keyword/Macros_Class_4.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package test4
-
-class macro
diff --git a/test/files/neg/macro-keyword/Macros_Class_5.scala b/test/files/neg/macro-keyword/Macros_Class_5.scala
deleted file mode 100644
index af24a489d0..0000000000
--- a/test/files/neg/macro-keyword/Macros_Class_5.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test5 {
- class macro
-}
diff --git a/test/files/neg/macro-keyword/Macros_Def_13.scala b/test/files/neg/macro-keyword/Macros_Def_13.scala
deleted file mode 100644
index f4e25bfdfc..0000000000
--- a/test/files/neg/macro-keyword/Macros_Def_13.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test13 {
- def macro = 2
-} \ No newline at end of file
diff --git a/test/files/neg/macro-keyword/Macros_Object_6.scala b/test/files/neg/macro-keyword/Macros_Object_6.scala
deleted file mode 100644
index 66eb494e6b..0000000000
--- a/test/files/neg/macro-keyword/Macros_Object_6.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package test6
-
-object macro
diff --git a/test/files/neg/macro-keyword/Macros_Object_7.scala b/test/files/neg/macro-keyword/Macros_Object_7.scala
deleted file mode 100644
index 6f5b9ceacd..0000000000
--- a/test/files/neg/macro-keyword/Macros_Object_7.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test7 {
- object macro
-}
diff --git a/test/files/neg/macro-keyword/Macros_Package_10.scala b/test/files/neg/macro-keyword/Macros_Package_10.scala
deleted file mode 100644
index 52d3fbabf6..0000000000
--- a/test/files/neg/macro-keyword/Macros_Package_10.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package macro
-
-package macro.bar \ No newline at end of file
diff --git a/test/files/neg/macro-keyword/Macros_Package_11.scala b/test/files/neg/macro-keyword/Macros_Package_11.scala
deleted file mode 100644
index a68ebd935f..0000000000
--- a/test/files/neg/macro-keyword/Macros_Package_11.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package foo
-
-package macro.foo
diff --git a/test/files/neg/macro-keyword/Macros_Trait_8.scala b/test/files/neg/macro-keyword/Macros_Trait_8.scala
deleted file mode 100644
index e32d4c1385..0000000000
--- a/test/files/neg/macro-keyword/Macros_Trait_8.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package test8
-
-trait macro
diff --git a/test/files/neg/macro-keyword/Macros_Trait_9.scala b/test/files/neg/macro-keyword/Macros_Trait_9.scala
deleted file mode 100644
index 243a54abe6..0000000000
--- a/test/files/neg/macro-keyword/Macros_Trait_9.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test9 {
- trait macro
-}
diff --git a/test/files/neg/macro-keyword/Macros_Type_3.scala b/test/files/neg/macro-keyword/Macros_Type_3.scala
deleted file mode 100644
index 30e523bcaf..0000000000
--- a/test/files/neg/macro-keyword/Macros_Type_3.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test3 {
- type macro = Int
-} \ No newline at end of file
diff --git a/test/files/neg/macro-keyword/Macros_Val_1.scala b/test/files/neg/macro-keyword/Macros_Val_1.scala
deleted file mode 100644
index 96f57acb30..0000000000
--- a/test/files/neg/macro-keyword/Macros_Val_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test1 {
- val macro = ???
-} \ No newline at end of file
diff --git a/test/files/neg/macro-keyword/Macros_Var_2.scala b/test/files/neg/macro-keyword/Macros_Var_2.scala
deleted file mode 100644
index a79dda6dc2..0000000000
--- a/test/files/neg/macro-keyword/Macros_Var_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test2 {
- var macro = ???
-} \ No newline at end of file
diff --git a/test/files/neg/t5510.check b/test/files/neg/t5510.check
new file mode 100644
index 0000000000..60da3bed40
--- /dev/null
+++ b/test/files/neg/t5510.check
@@ -0,0 +1,19 @@
+t5510.scala:2: error: unclosed string literal
+ val s1 = s"xxx
+ ^
+t5510.scala:3: error: unclosed string literal
+ val s2 = s"xxx $x
+ ^
+t5510.scala:4: error: unclosed string literal
+ val s3 = s"xxx $$
+ ^
+t5510.scala:5: error: unclosed string literal
+ val s4 = ""s"
+ ^
+t5510.scala:6: error: unclosed multi-line string literal
+ val s5 = ""s""" $s1 $s2 s"
+ ^
+t5510.scala:7: error: '}' expected but eof found.
+}
+ ^
+6 errors found
diff --git a/test/files/neg/t5510.scala b/test/files/neg/t5510.scala
new file mode 100644
index 0000000000..12630eb2cd
--- /dev/null
+++ b/test/files/neg/t5510.scala
@@ -0,0 +1,7 @@
+object Test {
+ val s1 = s"xxx
+ val s2 = s"xxx $x
+ val s3 = s"xxx $$
+ val s4 = ""s"
+ val s5 = ""s""" $s1 $s2 s"
+}
diff --git a/test/files/run/t5535.check b/test/files/run/t5535.check
new file mode 100644
index 0000000000..8da9829b78
--- /dev/null
+++ b/test/files/run/t5535.check
@@ -0,0 +1,20 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def h()(i: Int) = 1 + i
+h: ()(i: Int)Int
+
+scala> println(h()(5))
+6
+
+scala> val f = h() _
+f: Int => Int = <function1>
+
+scala> println(f(10))
+11
+
+scala>
+
+scala>
diff --git a/test/files/run/t5535.scala b/test/files/run/t5535.scala
new file mode 100644
index 0000000000..7bc12f3470
--- /dev/null
+++ b/test/files/run/t5535.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+def h()(i: Int) = 1 + i
+println(h()(5))
+val f = h() _
+println(f(10))
+ """
+}
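
The REPL transcript above pins down partial application of a method whose first parameter list is empty: h() _ applies the empty list and eta-expands the remaining (i: Int) list into an Int => Int. The same steps as a plain program, for reference:

  object EtaDemo extends App {
    def h()(i: Int) = 1 + i
    val f: Int => Int = h() _   // eta-expansion of the remaining parameter list
    println(f(10))              // prints 11
  }
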
diff --git a/test/files/run/t5583.check b/test/files/run/t5583.check
new file mode 100644
index 0000000000..39b969fbe7
--- /dev/null
+++ b/test/files/run/t5583.check
@@ -0,0 +1,20 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> var s = 0
+s: Int = 0
+
+scala> for (i <- 1 to 10) {s += i}
+
+scala> for (i <- 1 to 10) {s += i}
+
+scala> for (i <- 1 to 10) {s += i}
+
+scala> println(s)
+165
+
+scala>
+
+scala>
diff --git a/test/files/run/t5583.scala b/test/files/run/t5583.scala
new file mode 100644
index 0000000000..8561a5946f
--- /dev/null
+++ b/test/files/run/t5583.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+var s = 0
+for (i <- 1 to 10) {s += i}
+for (i <- 1 to 10) {s += i}
+for (i <- 1 to 10) {s += i}
+println(s)
+ """
+}
diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala
new file mode 100644
index 0000000000..db7ca4fa51
--- /dev/null
+++ b/test/scaladoc/resources/implicits-base-res.scala
@@ -0,0 +1,143 @@
+/**
+ * Test scaladoc implicits - the bread and butter of the testsuite :)
+ */
+package scala.test.scaladoc.implicits.base
+
+class Foo[T]
+class Bar[T]
+trait MyNumeric[R]
+
+/** Class A
+ * - tests the complete type inference
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToGtColonDoubleA: Double // pimpA3: with a constraint that T <: Double
+ * def convToIntA: Int // pimpA2: with a constraint that T = Int
+ * def convToManifestA: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ * def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ * def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
+ * def convToPimpedA: Bar[Foo[T]] // pimpA5: no constraints
+ * def convToPimpedA: S // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
+ * def convToTraversableOps: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ * // should not be abstract!
+ * }}}
+ */
+class A[T] {
+ /** This should prevent the implicitly inherited `def convToPimpedA: T` from `pimpA0` from showing up */
+ def convToPimpedA: T = sys.error("Let's check it out!")
+}
+/** Companion object with implicit transformations */
+object A {
+ implicit def pimpA0[V](a: A[V]) = new PimpedA(a)
+ implicit def pimpA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a)
+ implicit def pimpA2(a: A[Int]) = new IntA(a)
+ implicit def pimpA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a)
+ implicit def pimpA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): PimpedA[S] = sys.error("not implemented")
+ implicit def pimpA5[Z](a: A[Z]): PimpedA[Bar[Foo[Z]]] = sys.error("not implemented")
+ implicit def pimpA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a)
+ // TODO: Add H <: Double and see why it crashes for C and D -- context bounds, need to check!
+ implicit def pimpA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps: H = sys.error("no") }
+}
+
+
+/** Class B
+ * - tests the existential type solving
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToGtColonDoubleA: Double // pimpA3: no constraints
+ * def convToManifestA: Double // pimpA7: no constraints
+ * def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ * def convToNumericA: Double // pimpA1: no constraints
+ * def convToPimpedA: Bar[Foo[Double]] // pimpA5: no constraints
+ * def convToTraversableOps: Double // pimpA7: no constraints
+ * // should not be abstract!
+ * }}}
+ */
+class B extends A[Double]
+object B extends A
+
+
+/** Class C
+ * - tests asSeenFrom
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToIntA: Int // pimpA2: no constraints
+ * def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ * def convToNumericA: Int // pimpA1: no constraints
+ * def convToPimpedA: Bar[Foo[Int]] // pimpA5: no constraints
+ * }}}
+ */
+class C extends A[Int]
+object C extends A
+
+
+/** Class D
+ * - tests implicit elimination
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ * def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ * def convToPimpedA: Bar[Foo[String]] // pimpA5: no constraints
+ * }}}
+ */
+class D extends A[String]
+/** Companion object with implicit transformations */
+object D extends A
+
+
+/** PimpedA class <br/>
+ * - tests simple inheritance and asSeenFrom
+ * - A, B and C should be implicitly converted to this */
+class PimpedA[V](a: A[V]) {
+ /** The convToPimpedA: V documentation... */
+ def convToPimpedA: V = sys.error("Not implemented")
+}
+
+/** NumericA class <br/>
+ * - tests the implicit conversion between parametric and fixed types
+ * - A, B and C should be implicitly converted to this */
+class NumericA[U: Numeric](a: A[U]) {
+ /** The convToNumericA: U documentation... */
+ def convToNumericA: U = implicitly[Numeric[U]].zero
+}
+
+/** IntA class <br/>
+ * - tests the interaction between implicit conversion and specific types
+ * - A and C should be implicitly converted to this */
+class IntA(a: A[Int]) {
+ /** The convToIntA: Int documentation... */
+ def convToIntA: Int = 0
+}
+
+/** GtColonDoubleA class <br/>
+ * - tests the interaction between implicit conversion and existential types
+ * - A and B should be implicitly converted to this */
+class GtColonDoubleA(a: A[T] forSome { type T <: Double }) {
+ /** The convToGtColonDoubleA: Double documentation... */
+ def convToGtColonDoubleA: Double = 0
+}
+
+/** MyNumericA class <br/>
+ * - tests the implicit conversion between parametric and fixed types
+ * - A should be implicitly converted to this */
+class MyNumericA[U: MyNumeric](a: A[U]) {
+ /** The convToMyNumericA: U documentation... */
+ def convToMyNumericA: U = sys.error("dunno")
+}
+
+/** ManifestA class <br/>
+ * - tests the manifest recognition
+ * - A, B, C, D should be implicitly converted to this */
+class ManifestA[W: Manifest](a: A[W]) {
+ /** The convToManifestA: W documentation... */
+ def convToManifestA: W = sys.error("dunno")
+}
+
+/** MyTraversableOps class <br/>
+ * - checks if any abstract members are added - should not happen!
+ */
+trait MyTraversableOps[S] {
+ /** The convToTraversableOps: S documentation... */
+ def convToTraversableOps: S
+}
+
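The resource above drives the scaladoc -implicits tests: each conversion in object A is expected to contribute the documented members to A, B, C and D. For reference, a minimal self-contained sketch of the enrichment pattern being exercised (Cell, RichCell and twice are illustrative names, not part of the test resources):

class Cell[T](val value: T)
object Cell {
  // found in Cell's implicit scope, so scaladoc -implicits can list `twice` on Cell's page
  implicit def enrichNumeric[T: Numeric](c: Cell[T]): RichCell[T] = new RichCell(c)
}
class RichCell[T](c: Cell[T])(implicit num: Numeric[T]) {
  /** Doubles the stored value; made available on Cell via the conversion above. */
  def twice: T = num.plus(c.value, c.value)
}
// usage: (new Cell(3)).twice == 6, resolved through Cell.enrichNumeric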
diff --git a/test/scaladoc/resources/implicits-chaining-res.scala b/test/scaladoc/resources/implicits-chaining-res.scala
new file mode 100644
index 0000000000..b20c8f846c
--- /dev/null
+++ b/test/scaladoc/resources/implicits-chaining-res.scala
@@ -0,0 +1,48 @@
+/**
+ * Testing scaladoc implicits chaining
+ */
+package scala.test.scaladoc.implicits {
+
+ // the classes involved
+ case class Z[U](a: U)
+ case class Intermediate[T, U](t: T, u: U)
+ class Implicit1[T](b: Implicit2[T])
+ class Implicit2[T](c: Implicit3[T])
+ class Implicit3[T](/* and so on */)
+
+ object chaining {
+
+ // the base conversion
+ implicit def convertToZ[T](a: A[T])(implicit b: Implicit1[T]): Z[A[T]] = Z(a)
+
+ // and the implicit chaining, don't you just love it? :D
+ // implicit1, with one alternative
+ implicit def implicit1[T <: Intermediate[_, _]](implicit b: Implicit2[T]) = new Implicit1[T](b)
+ // implicit2, with two alternatives
+ implicit def implicit2alt1[T <: Intermediate[_ <: String, _]](implicit c: Implicit3[T]) = new Implicit2[T](c)
+ implicit def implicit2alt2[T <: Intermediate[_ <: Double, _]](implicit c: Implicit3[T]) = new Implicit2[T](c)
+ // implicit3, with two alternatives
+ implicit def implicit3alt1[T <: Intermediate[_, _ <: Int]] = new Implicit3[T]()
+ implicit def implicit3alt2[T <: Intermediate[_ <: Double, _ <: AnyRef],X] = new Implicit3[T]()
+
+ // and our targets
+ /** conversion here, with constraints */
+ class A[T]()
+ /** conversion here, no constraints */
+ class B extends A[Intermediate[String, Int]]
+ /** no conversion */
+ class C extends A[Intermediate[String, String]]
+ /** conversion here, no constraints */
+ class D extends A[Intermediate[Double, Int]]
+ /** conversion here, no constraints */
+ class E extends A[Intermediate[Double, String]]
+ /** no conversion */
+ class F extends A[Intermediate[String, Double]]
+
+ object scalacTest {
+ (new B).a
+ (new D).a
+ (new E).a
+ }
+ }
+}
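The chaining resource checks that scaladoc follows the implicit arguments of a conversion through further implicits, just as the compiler does. A reduced sketch of the same mechanism with a single chained link (Box, Evidence and Shown are hypothetical names):

class Box[T]
class Shown[T](b: Box[T]) { def shown: String = "shown" }
trait Evidence[T]

object chainingSketch {
  implicit def evidenceForInt: Evidence[Int] = new Evidence[Int] {}
  // applying `enrich` requires an Evidence[T], which is itself resolved implicitly
  implicit def enrich[T](b: Box[T])(implicit ev: Evidence[T]): Shown[T] = new Shown[T](b)

  val ok = (new Box[Int]).shown       // compiles: evidenceForInt closes the chain
  // (new Box[String]).shown          // would not compile: no Evidence[String] in scope
}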
diff --git a/test/scaladoc/resources/implicits-elimination-res.scala b/test/scaladoc/resources/implicits-elimination-res.scala
new file mode 100644
index 0000000000..68743aee06
--- /dev/null
+++ b/test/scaladoc/resources/implicits-elimination-res.scala
@@ -0,0 +1,9 @@
+/**
+ * Testing scaladoc implicits elimination
+ */
+package scala.test.scaladoc.implicits.elimination {
+ /** No conversion, as B doesn't bring any member */
+ class A
+ class B { class C; trait V; type T; }
+ object A { implicit def toB(a: A): B = null }
+}
diff --git a/test/scaladoc/resources/implicits-scopes-res.scala b/test/scaladoc/resources/implicits-scopes-res.scala
new file mode 100644
index 0000000000..4e55c3e388
--- /dev/null
+++ b/test/scaladoc/resources/implicits-scopes-res.scala
@@ -0,0 +1,51 @@
+/**
+ * Testing scaladoc implicit scopes - looking for implicits in the right places
+ */
+package scala.test.scaladoc.implicits.scopes
+
+// TEST1 - In package object
+package object test1 {
+ implicit def toB(a: A): B = null
+}
+package test1 {
+ class A
+ class B { def b = "" }
+}
+
+// TEST2 - In enclosing package - doesn't seem to work even in scalac
+package object test2 {
+ import classes._
+ implicit def toB(a: A): B = null
+}
+package test2 {
+ package classes {
+ class A
+ class B { def b = "" }
+ object test { /* (new A).b won't compile */ }
+ }
+}
+
+// TEST3 - In companion object
+package test3 {
+ class A
+ object A { implicit def toB(a: A): B = null }
+ class B { def b = "" }
+}
+
+// TEST4 - Nested type's companion object
+package test4 {
+ class U[V]
+ class S
+ object S { implicit def toB(a: A): B = null }
+ class A extends U[S]
+ class B { def b = "" }
+}
+
+// TEST5 - In scope
+package test5 {
+ object scope {
+ class A
+ class B { def b = "" }
+ implicit def toB(a: A): B = null
+ }
+}
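TEST3 above relies on the companion-object part of the implicit search: no import is needed because the compiler (and scaladoc) also looks in the companion of the type being converted. A minimal sketch of that lookup (Wrapped and Rich are hypothetical names):

class Wrapped
object Wrapped { implicit def toRich(w: Wrapped): Rich = new Rich }
class Rich { def b: String = "" }

object usageSketch {
  val s: String = (new Wrapped).b   // resolves via Wrapped.toRich, found in the companion object
}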
diff --git a/test/scaladoc/run/SI-5373.check b/test/scaladoc/run/SI-5373.check
index c55eb001cf..619c56180b 100644
--- a/test/scaladoc/run/SI-5373.check
+++ b/test/scaladoc/run/SI-5373.check
@@ -1 +1 @@
-model contains 6 documentable templates
+Done.
diff --git a/test/scaladoc/run/SI-5373.scala b/test/scaladoc/run/SI-5373.scala
index af433a1844..0062abbb2a 100644
--- a/test/scaladoc/run/SI-5373.scala
+++ b/test/scaladoc/run/SI-5373.scala
@@ -1,9 +1,9 @@
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
-object Test extends ScaladocModelTest {
+object Test extends ScaladocModelTest {
- def code = """
+ override def code = """
import scala.annotation.bridge
package scala.test {
@@ -23,7 +23,7 @@ object Test extends ScaladocModelTest {
// no need for special settings
def scaladocSettings = ""
-
+
def testModel(rootPackage: Package) = {
// get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
import access._
diff --git a/test/scaladoc/run/implicits-base.check b/test/scaladoc/run/implicits-base.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/implicits-base.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala
new file mode 100644
index 0000000000..a0dd2071d7
--- /dev/null
+++ b/test/scaladoc/run/implicits-base.scala
@@ -0,0 +1,179 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-base-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits -implicits-show-all"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // SEE THE test/scaladoc/resources/implicits-base-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("base")
+ var conv: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ // the method pimped on by pimpA0 should be shadowed by the method in class A
+ assert(A._conversions(A.qualifiedName + ".pimpA0").isEmpty)
+
+ // def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
+ conv = A._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToNumericA").resultType.name == "T")
+
+ // def convToIntA: Int // pimpA2: with a constraint that T = Int
+ conv = A._conversion(A.qualifiedName + ".pimpA2")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToIntA").resultType.name == "Int")
+
+ // def convToGtColonDoubleA: Double // pimpA3: with a constraint that T <: Double
+ conv = A._conversion(A.qualifiedName + ".pimpA3")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
+
+ // def convToPimpedA: S // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
+ conv = A._conversion(A.qualifiedName + ".pimpA4")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 3)
+ assert(conv._member("convToPimpedA").resultType.name == "S")
+
+ // def convToPimpedA: Bar[Foo[T]] // pimpA5: no constraints
+ conv = A._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]")
+
+ // def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ conv = A._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "T")
+
+ // def convToManifestA: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ // def convToTraversableOps: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ // should not be abstract!
+ conv = A._conversion(A.qualifiedName + ".pimpA7")
+ assert(conv.members.length == 2)
+ assert(conv.constraints.length == 2)
+ assert(conv._member("convToManifestA").resultType.name == "T")
+ assert(conv._member("convToTraversableOps").resultType.name == "T")
+ assert(conv._member("convToTraversableOps").flags.toString.indexOf("abstract") == -1)
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ // these conversions should not affect B
+ assert(B._conversions(A.qualifiedName + ".pimpA0").isEmpty)
+ assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty)
+ assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+
+ // def convToNumericA: Double // pimpA1: no constraints
+ conv = B._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToNumericA").resultType.name == "Double")
+
+ // def convToGtColonDoubleA: Double // pimpA3: no constraints
+ conv = B._conversion(A.qualifiedName + ".pimpA3")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
+
+ // def convToPimpedA: Bar[Foo[Double]] // pimpA5: no constraints
+ conv = B._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]")
+
+ // def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ conv = B._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "Double")
+
+ // def convToManifestA: Double // pimpA7: no constraints
+ // def convToTraversableOps: Double // pimpA7: no constraints
+ // // should not be abstract!
+ conv = B._conversion(A.qualifiedName + ".pimpA7")
+ assert(conv.members.length == 2)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToManifestA").resultType.name == "Double")
+ assert(conv._member("convToTraversableOps").resultType.name == "Double")
+ assert(conv._member("convToTraversableOps").flags.toString.indexOf("abstract") == -1)
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ // these conversions should not affect C
+ assert(C._conversions(A.qualifiedName + ".pimpA0").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+
+ // def convToNumericA: Int // pimpA1: no constraints
+ conv = C._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToNumericA").resultType.name == "Int")
+
+ // def convToIntA: Int // pimpA2: no constraints
+ conv = C._conversion(A.qualifiedName + ".pimpA2")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToIntA").resultType.name == "Int")
+
+ // def convToPimpedA: Bar[Foo[Int]] // pimpA5: no constraints
+ conv = C._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]")
+
+ // def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ conv = C._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "Int")
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ // these conversions should not affect D
+ assert(D._conversions(A.qualifiedName + ".pimpA0").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+
+ // def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ conv = D._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToNumericA").resultType.name == "String")
+
+ // def convToPimpedA: Bar[Foo[String]] // pimpA5: no constraints
+ conv = D._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]")
+
+ // def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ conv = D._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "String")
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/implicits-chaining.check b/test/scaladoc/run/implicits-chaining.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/implicits-chaining.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-chaining.scala b/test/scaladoc/run/implicits-chaining.scala
new file mode 100644
index 0000000000..96e288b204
--- /dev/null
+++ b/test/scaladoc/run/implicits-chaining.scala
@@ -0,0 +1,64 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-chaining-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // SEE THE test/scaladoc/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._object("chaining")
+ var conv: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ conv = A._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ conv = B._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ assert(C._conversions(base.qualifiedName + ".convertToZ").isEmpty)
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ conv = D._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+
+//// class E ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val E = base._class("E")
+
+ conv = E._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+
+//// class F ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val F = base._class("F")
+
+ assert(F._conversions(base.qualifiedName + ".convertToZ").isEmpty)
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/implicits-elimination.check b/test/scaladoc/run/implicits-elimination.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/implicits-elimination.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-elimination.scala b/test/scaladoc/run/implicits-elimination.scala
new file mode 100644
index 0000000000..71319f9f47
--- /dev/null
+++ b/test/scaladoc/run/implicits-elimination.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-elimination-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // SEE THE test/scaladoc/resources/implicits-elimination-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("elimination")
+ val A = base._class("A")
+
+ assert(A._conversions(A.qualifiedName + ".toB").isEmpty)
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/run/implicits-scopes.check b/test/scaladoc/run/implicits-scopes.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/implicits-scopes.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-scopes.scala b/test/scaladoc/run/implicits-scopes.scala
new file mode 100644
index 0000000000..7fb41e1ae8
--- /dev/null
+++ b/test/scaladoc/run/implicits-scopes.scala
@@ -0,0 +1,76 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-scopes-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+ var conv: ImplicitConversion = null
+
+ // SEE THE test/scaladoc/resources/implicits-scopes-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("scopes")
+
+//// test1 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest1 = {
+ val test1 = base._package("test1")
+ val A = test1._class("A")
+
+ conv = A._conversion(test1.qualifiedName + ".package.toB") // the .package means it's the package object
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test2 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest2 = {
+ val test2 = base._package("test2")
+ val classes = test2._package("classes")
+ val A = classes._class("A")
+
+ assert(A._conversions(test2.qualifiedName + ".toB").isEmpty)
+ }
+
+//// test3 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest3 = {
+ val test3 = base._package("test3")
+ val A = test3._class("A")
+
+ conv = A._conversion(A.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test4 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest4 = {
+ val test4 = base._package("test4")
+ val A = test4._class("A")
+ val S = test4._object("S")
+
+ conv = A._conversion(S.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test5 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest5 = {
+ val test5 = base._package("test5")
+ val scope = test5._object("scope")
+ val A = scope._class("A")
+
+ conv = A._conversion(scope.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+ }
+} \ No newline at end of file
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
index 69c314a64c..68ca68efdd 100644
--- a/test/scaladoc/scalacheck/CommentFactoryTest.scala
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -3,11 +3,12 @@ import org.scalacheck.Prop._
import scala.tools.nsc.Global
import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.comment._
class Factory(val g: Global, val s: doc.Settings)
extends doc.model.ModelFactory(g, s) {
- thisFactory: Factory with CommentFactory with doc.model.TreeFactory =>
+ thisFactory: Factory with ModelFactoryImplicitSupport with CommentFactory with doc.model.TreeFactory =>
def strip(c: Comment): Option[Inline] = {
c.body match {
@@ -28,7 +29,7 @@ object Test extends Properties("CommentFactory") {
val settings = new doc.Settings((str: String) => {})
val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
val g = new Global(settings, reporter)
- (new Factory(g, settings) with CommentFactory with doc.model.TreeFactory)
+ (new Factory(g, settings) with ModelFactoryImplicitSupport with CommentFactory with doc.model.TreeFactory)
}
def parse(src: String, dst: Inline) = {
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 4c5497e803..a22747520c 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -88,12 +88,28 @@ pushJarFile() {
# rm $jar
}
+getJarSha() {
+ local jar=$1
+ if [[ ! -f "$jar" ]]; then
+ echo ""
+ elif which sha1sum 2>/dev/null >/dev/null; then
+ shastring=$(sha1sum "$jar")
+ echo "$shastring" | sed 's/ .*//'
+ elif which shasum 2>/dev/null >/dev/null; then
+ shastring=$(shasum "$jar")
+ echo "$shastring" | sed 's/ .*//'
+ else
+ shastring=$(openssl sha1 "$jar")
+ echo "$shastring" | sed 's/^.*= //'
+ fi
+}
+
# Tests whether or not the .desired.sha1 hash matches a given file.
# Argument 1 - The jar file to test for validity.
# Returns: Empty string on failure, "OK" on success.
isJarFileValid() {
local jar=$1
- if [[ ! -f $jar ]]; then
+ if [[ ! -f "$jar" ]]; then
echo ""
else
local jar_dir=$(dirname $jar)
@@ -131,6 +147,27 @@ pushJarFiles() {
fi
}
+
+checkJarSha() {
+ local jar=$1
+ local sha=$2
+ local testsha=$(getJarSha "$jar")
+ if test "$sha" == "$testsha"; then
+ echo "OK"
+ fi
+}
+
+makeCacheLocation() {
+ local uri=$1
+ local sha=$2
+ local cache_loc="$cache_dir/$uri"
+ local cdir=$(dirname $cache_loc)
+ if [[ ! -d "$cdir" ]]; then
+ mkdir -p "$cdir"
+ fi
+ echo "$cache_loc"
+}
+
# Pulls a single binary artifact from a remote repository.
# Argument 1 - The uri to the file that should be downloaded.
# Argument 2 - SHA of the file...
@@ -138,16 +175,19 @@ pushJarFiles() {
pullJarFileToCache() {
local uri=$1
local sha=$2
- local cache_loc=$cache_dir/$uri
- local cdir=$(dirname $cache_loc)
- if [[ ! -d $cdir ]]; then
- mkdir -p $cdir
- fi
+ local cache_loc="$(makeCacheLocation $uri)"
# TODO - Check SHA of local cache is accurate.
- if [[ ! -f $cache_loc ]]; then
+ if test -f "$cache_loc" && test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
+ echo "Found bad cached file: $cache_loc"
+ rm -f "$cache_loc"
+ fi
+ if [[ ! -f "$cache_loc" ]]; then
curlDownload $cache_loc ${remote_urlbase}/${uri}
+ if test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
+ echo "Trouble downloading $uri. Please try pull-binary-libs again when your internet connection is stable."
+ exit 2
+ fi
fi
- echo "$cache_loc"
}
# Pulls a single binary artifact from a remote repository.
@@ -162,7 +202,8 @@ pullJarFile() {
local version=${sha1% ?$jar_name}
local remote_uri=${version}/${jar#$basedir/}
echo "Resolving [${remote_uri}]"
- local cached_file=$(pullJarFileToCache $remote_uri $version)
+ pullJarFileToCache $remote_uri $version
+ local cached_file=$(makeCacheLocation $remote_uri)
cp $cached_file $jar
}
diff --git a/tools/cleanup-commit b/tools/cleanup-commit
new file mode 100755
index 0000000000..400d434359
--- /dev/null
+++ b/tools/cleanup-commit
@@ -0,0 +1,130 @@
+#!/bin/bash
+
+##
+## The cleanup-commit script
+## -------------------------
+## This little script will clean up your commit before you send it. You need to add the files to the staging area and
+## run this script. It will automatically clean up tabs and trailing spaces in the files you added and then add the
+## clean versions to the staging area.
+##
+## Use at your own risk: I spent some time making the script error-proof, so it will abort if it sees any inconsistency,
+## but of course playing around with your commit might break things. By the way, it saves the original file to file.bak.
+##
+## Happy hacking!
+##
+
+ABORT="Ab0rT0p3r4+|0n"
+
+#
+# Cleanup function
+#
+function cleanup {
+ echo Cleaning up $1...
+ # prepare the ground
+ rm -rf $1.bak
+ # compress <TAB> into double <BLANK> and eliminate trailing <BLANK>s
+ sed -i.bak -e 's/\t/ /g' -e 's/ *$//' $1
+}
+
+
+#
+# Get the git status for the current staged commit
+#
+FULLSTATUS=`git status --porcelain`
+
+if [ $? -ne 0 ]
+then
+ echo "Unable to run git. Check if:"
+ echo " -- git is installed (you can run git in the command line)"
+ echo " -- the current directory is a valid git repository"
+ exit 1
+fi
+
+echo
+
+#
+# Based on the status decide what files will get cleaned up
+#
+CLEANUP_FILES=`echo "$FULLSTATUS" | while read LINE
+do
+
+ STATUS=$(echo $LINE | sed 's/^\(..\).*$/\1/')
+ if [ $? -ne 0 ]
+ then
+ echo "Could not get the status for line: $LINE"
+ echo " -- you have the basic unix tools installed (grep, cut, sed)"
+ echo $ABORT # This goes to CLEANUP_FILES
+ exit 1
+ fi
+
+ FILES=$(echo $LINE | sed 's/^..//')
+ FILE1=$(echo $FILES | cut -d ' ' -f 1)
+ FILE2=$(echo $FILES | cut -d ' ' -f 3)
+
+ case "$STATUS" in
+ [AMRDC]" ")
+ case "$STATUS" in
+ "A "|"M ")
+ echo $FILE1
+ ;;
+ "R ")
+ echo $FILE2
+ ;;
+ "D ")
+ #nothing to do
+ ;;
+ "C ")
+ echo $FILE1
+ echo $FILE2
+ ;;
+ esac
+ ;;
+ "??")
+ # File is not tracked, no need to do anything about it
+ # echo Untracked: $FILE1
+ ;;
+ *)
+ echo "Unstable status of file $FILE1 (\"$STATUS\")" >&2
+ echo "Aborting cleanup!" >&2
+ echo $ABORT # This goes to CLEANUP_FILES
+ exit 1
+ esac
+done; echo $CLEANUP_FILES`
+
+
+#
+# Perform actual cleanup
+#
+case $CLEANUP_FILES in
+*"$ABORT")
+ echo
+ exit 1
+ ;;
+"")
+ echo Nothing to do!
+ ;;
+*)
+ cd $(git rev-parse --show-toplevel)
+
+ if [ $? -ne 0 ]
+ then
+ echo Unexpected error: cannot cd to the repository root
+ echo Aborting cleanup!
+ exit 1
+ fi
+
+ echo "$CLEANUP_FILES" | while read FILE
+ do
+ cleanup $FILE
+ done
+
+ cd - &>/dev/null
+
+ echo
+ echo "Cleanup done: "
+ echo " - original files saved as .bak"
+ echo " - you can do \"git diff\" to see the changes the script did"
+ echo " - you can do \"git commit -a\" to commit the cleaned up files"
+ echo
+ ;;
+esac
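The cleanup function in the script delegates the actual whitespace fix to sed: back the file up, expand each tab and drop trailing blanks. The same transformation, sketched in Scala for reference (names are hypothetical; tabs are assumed to become two spaces, as the script's comment states):

import java.nio.file.{Files, Paths, StandardCopyOption}
import scala.io.Source

object CleanupSketch {
  def cleanup(path: String): Unit = {
    val file = Paths.get(path)
    // keep the original, as the script does with its .bak copies
    Files.copy(file, Paths.get(path + ".bak"), StandardCopyOption.REPLACE_EXISTING)
    val source  = Source.fromFile(path)
    val cleaned =
      try source.getLines().map(_.replace("\t", "  ").replaceAll("\\s+$", "")).mkString("\n")
      finally source.close()
    Files.write(file, (cleaned + "\n").getBytes("UTF-8"))
  }
}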