From 5d2c92936ffdab259b71f1adb447602d98222ab2 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 7 Oct 2015 14:32:17 -0400 Subject: misc upgrades to README * link to new scala/contributors room on Gitter * remove link to old disused CI * replace list of mailing lists with single link to community page * link to 2.12 nightlies too, not just 2.11 * update required JDK version info --- README.md | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 1b4dbb03c2..c166873514 100644 --- a/README.md +++ b/README.md @@ -31,16 +31,12 @@ P.S.: If you have some spare time to help out around here, we would be delighted # Handy Links - [A wealth of documentation](http://docs.scala-lang.org) + - [mailing lists](http://www.scala-lang.org/community/) + - [Gitter room for Scala contributors](https://gitter.im/scala/contributors) - [Scala CI](https://scala-ci.typesafe.com/) - - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/) - - [(Deprecated) Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins/) - - Scala mailing lists: - - [Compiler and standard library development](https://groups.google.com/group/scala-internals) - - [Users of Scala](https://groups.google.com/group/scala-user) - - [Scala language discussion](https://groups.google.com/group/scala-language) - - [Scala Improvement Process](https://groups.google.com/group/scala-sips) - - [Debate](https://groups.google.com/group/scala-debate) - - [Announcements](https://groups.google.com/group/scala-announce) + - download the latest nightlies: + - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/) + - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/) # Repository structure @@ -65,7 +61,7 @@ scala/ ## Requirements -You'll need a Java SDK (6 or newer), Apache Ant (version 1.9.0 or above), and curl (for `./pull-binary-libs.sh`). 
+You'll need a Java SDK (6 for 2.11.x, 8 for 2.12.x), Apache Ant (version 1.9.0 or above), and curl (for `./pull-binary-libs.sh`). ## Git Hygiene -- cgit v1.2.3 From 129c9d2ca3fa244d9605e291264bf6dc10332186 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 8 Oct 2015 10:25:52 -0400 Subject: fix t7634 to work on Cygwin this was failing because the expected output was: res1: List[String] = List(shello, world.) but the actual output was: res1: List[String] = List(shello, world., Picked up _JAVA_OPTIONS: -Duser.home=y:\jenkins) but the "Picked up..." part caused partest's filters feature to ignore the entire line (it doesn't anchor matches to start of line.) This was a tough one to track down. --- test/files/run/t7634.check | 4 ++-- test/files/run/t7634.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/files/run/t7634.check b/test/files/run/t7634.check index 879aea67a2..43128cad95 100644 --- a/test/files/run/t7634.check +++ b/test/files/run/t7634.check @@ -1,6 +1,6 @@ -scala> .lines -res1: List[String] = List(shello, world.) +scala> .lines.foreach(println) +shello, world. 
scala> :quit diff --git a/test/files/run/t7634.scala b/test/files/run/t7634.scala index aeb6a5e671..9520931941 100644 --- a/test/files/run/t7634.scala +++ b/test/files/run/t7634.scala @@ -9,7 +9,7 @@ import scala.util.Properties.propOrElse object Test extends ReplTest { def java = propOrElse("javacmd", "java") def code = s""":sh $java -classpath $testOutput hello.Hello - |.lines""".stripMargin + |.lines.foreach(println)""".stripMargin } package hello { -- cgit v1.2.3 From 213ecd511255572d219adc78f48a10bbab76563a Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 8 Oct 2015 10:32:13 -0400 Subject: Windows: make get-scala-commit-sha/date scripts work on Cygwin bash's -x flag was flowing down into these scripts, resulting in extra stuff was getting printed to stderr, resulting in a corrupted build\pack\META-INF\MANIFEST.MF --- tools/get-scala-commit-date.bat | 4 ++-- tools/get-scala-commit-sha.bat | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/get-scala-commit-date.bat b/tools/get-scala-commit-date.bat index e169de1b04..735a80b927 100644 --- a/tools/get-scala-commit-date.bat +++ b/tools/get-scala-commit-date.bat @@ -1,9 +1,9 @@ @echo off for %%X in (bash.exe) do (set FOUND=%%~$PATH:X) if defined FOUND ( - bash "%~dp0\get-scala-commit-date" + bash "%~dp0\get-scala-commit-date" 2>NUL ) else ( rem echo this script does not work with cmd.exe. please, install bash echo unknown exit 1 -) \ No newline at end of file +) diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat index 1eaffc0a15..6559a19120 100644 --- a/tools/get-scala-commit-sha.bat +++ b/tools/get-scala-commit-sha.bat @@ -1,9 +1,9 @@ @echo off for %%X in (bash.exe) do (set FOUND=%%~$PATH:X) if defined FOUND ( - bash "%~dp0\get-scala-commit-sha" + bash "%~dp0\get-scala-commit-sha" 2>NUL ) else ( rem echo this script does not work with cmd.exe. 
please, install bash echo unknown exit 1 -) \ No newline at end of file +) -- cgit v1.2.3 From 37f571355d8516b6391a6f92d644f3753f8b25c3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 8 Oct 2015 11:10:50 -0400 Subject: more readme improvements * be clearer about required JDK versions * eliminate redundancy with existing src/eclipse and src/intellij readmes * give IntelliJ readme a .md extension --- README.md | 25 +++++++------------------ src/eclipse/README.md | 17 +++++++++++------ src/intellij/README | 12 ------------ src/intellij/README.md | 13 +++++++++++++ 4 files changed, 31 insertions(+), 36 deletions(-) delete mode 100644 src/intellij/README create mode 100644 src/intellij/README.md diff --git a/README.md b/README.md index c166873514..c015169322 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,11 @@ scala/ ## Requirements -You'll need a Java SDK (6 for 2.11.x, 8 for 2.12.x), Apache Ant (version 1.9.0 or above), and curl (for `./pull-binary-libs.sh`). +You'll need a Java SDK. The baseline version is 6 for 2.11.x, 8 for +2.12.x. (It's also possible to use a later SDK for local development, +but the CI will verify against the baseline version.) + +You'll also need Apache Ant (version 1.9.0 or above) and curl (for `./pull-binary-libs.sh`). ## Git Hygiene @@ -116,25 +120,10 @@ Here, `` is the milestone targeted by the PR (e.g., 2.11.6), and ` Date: Thu, 8 Oct 2015 12:51:38 -0400 Subject: add experimental Windows script for testing experimental for now. 
destined to be automated via scala-jenkins-infra --- scripts/jobs/integrate/windows | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100755 scripts/jobs/integrate/windows diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows new file mode 100755 index 0000000000..cca6de1347 --- /dev/null +++ b/scripts/jobs/integrate/windows @@ -0,0 +1,11 @@ +#!/bin/bash -x + +./pull-binary-libs.sh + +export ANT_OPTS="-Dfile.encoding=UTF-8 -server -XX:+AggressiveOpts -XX:+UseParNewGC -Xmx2G -Xss1M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=128M" + +/home/tisue/apache-ant-1.9.6/bin/ant.bat \ + -Dstarr.version=2.11.7 \ + -Dscalac.args.optimise=-optimise \ + -Dlocker.skip=1 \ + test -- cgit v1.2.3 From 52cf99af83e75b8e15c93542dca8bd24a214ce0d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 8 Oct 2015 13:04:05 -0400 Subject: add comment about Cygwin trouble to test/partest --- test/partest | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/partest b/test/partest index 8b827f276f..cb07c00e04 100755 --- a/test/partest +++ b/test/partest @@ -22,6 +22,14 @@ findScalaHome () { } # Use tput to detect color-capable terminal. +# (note: I have found that on Cygwin, the script sometimes dies here. +# it doesn't happen from the Cygwin prompt when ssh'ing in to +# jenkins-worker-windows-publish, only when I make a Jenkins job +# that runs this script. I don't know why. it may have to do with +# which bash flags are set (-e? -x?) and with bash flags propagating +# from one script to another? not sure. anyway, normally in a CI +# context we run partest through ant, not through this script, so I'm +# not investigating further for now.) term_colors=$(tput colors 2>/dev/null) if [[ $? 
== 0 ]] && [[ $term_colors -gt 2 ]]; then git_diff_options="--color=always --word-diff" -- cgit v1.2.3 From 543e739bf9ff0f512e71f6d971c87187f7a6f4d3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 8 Oct 2015 13:10:14 -0400 Subject: add note to readme about Windows build --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index c8f3f206c4..d5d85cbaa3 100644 --- a/README.md +++ b/README.md @@ -76,6 +76,8 @@ scala/ You'll need a Java SDK (6 or newer), Apache Ant (version 1.9.0 or above), and curl (for `./pull-binary-libs.sh`). +Mac OS X and Linux work. Windows may work if you use Cygwin. (Community help with keeping the build working on Windows is appreciated.) + ## Git Hygiene As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 72 or fewer characters for the first line, wrapping subsequent ones at 80 (at most). 
-- cgit v1.2.3 From b3bffef543e5cbd4e493b7630c8aba00f09ab795 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 9 Oct 2015 18:04:18 -0400 Subject: Windows CI: don't hardcode Ant path quite so hard --- scripts/jobs/integrate/windows | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows index cca6de1347..be68a826f7 100755 --- a/scripts/jobs/integrate/windows +++ b/scripts/jobs/integrate/windows @@ -4,7 +4,11 @@ export ANT_OPTS="-Dfile.encoding=UTF-8 -server -XX:+AggressiveOpts -XX:+UseParNewGC -Xmx2G -Xss1M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=128M" -/home/tisue/apache-ant-1.9.6/bin/ant.bat \ +# TODO: don't hardcode this path, which is just where we currently have +# ant manually installed on jenkins-worker-windows-publish. +PATH=/cygdrive/c/apache-ant-1.9.6/bin:$PATH + +ant \ -Dstarr.version=2.11.7 \ -Dscalac.args.optimise=-optimise \ -Dlocker.skip=1 \ -- cgit v1.2.3 From e01bff82169d79985796bdb6233abc28b9c2ff17 Mon Sep 17 00:00:00 2001 From: Sébastien Doeraene Date: Sun, 11 Oct 2015 18:27:32 +0200 Subject: Remove two lingering `println`s in non-debug methods of the library. 
--- src/library/scala/collection/GenMapLike.scala | 3 +-- src/library/scala/collection/immutable/Vector.scala | 2 -- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala index bce9740522..2b39fa2289 100644 --- a/src/library/scala/collection/GenMapLike.scala +++ b/src/library/scala/collection/GenMapLike.scala @@ -124,8 +124,7 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals } } } catch { - case ex: ClassCastException => - println("class cast "); false + case ex: ClassCastException => false }} case _ => false diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 8bb581d44c..cd2d3f843b 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -951,8 +951,6 @@ private[immutable] trait VectorPointer[T] { // STUFF BELOW USED BY APPEND / UPDATE private[immutable] final def copyOf(a: Array[AnyRef]) = { - //println("copy") - if (a eq null) println ("NULL") val b = new Array[AnyRef](a.length) Platform.arraycopy(a, 0, b, 0, a.length) b -- cgit v1.2.3 From e7079ca36aef1b74696f50fbdfe11d99273274d7 Mon Sep 17 00:00:00 2001 From: Rui Gonçalves Date: Wed, 21 Oct 2015 21:22:18 +0100 Subject: SI-9497 Fix SetLike#clear() default implementation When dealing with mutable collections, it is not safe to assume iterators will remain consistent when the collection is modified mid-traversal. The bug reported in SI-9497 is very similar to SI-7269, "ConcurrentModificationException when filtering converted Java HashMap". Then, only the `retain` method was fixed. This commit fixes `clear`, which had the same problem. 
--- src/library/scala/collection/mutable/SetLike.scala | 4 +++- .../scala/collection/mutable/SetLikeTest.scala | 26 ++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/collection/mutable/SetLikeTest.scala diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index 81a71adc91..01075a2633 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -129,7 +129,9 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]] /** Removes all elements from the set. After this operation is completed, * the set will be empty. */ - def clear() { foreach(-=) } + def clear(): Unit = + for (elem <- this.toList) + this -= elem override def clone(): This = empty ++= repr.seq diff --git a/test/junit/scala/collection/mutable/SetLikeTest.scala b/test/junit/scala/collection/mutable/SetLikeTest.scala new file mode 100644 index 0000000000..c819024558 --- /dev/null +++ b/test/junit/scala/collection/mutable/SetLikeTest.scala @@ -0,0 +1,26 @@ +package scala.collection.mutable + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class SetLikeTest { + + class MySet(self: Set[String]) extends Set[String] with SetLike[String, MySet] { + override def -=(elem: String) = { self -= elem; this } + override def +=(elem: String) = { self += elem; this } + + override def empty = new MySet(self.empty) + override def iterator = self.iterator + override def contains(elem: String) = self.contains(elem) + } + + @Test + def hasCorrectClear() { + val s = new MySet(Set("EXPOSEDNODE", "CONNECTABLE")) + s.clear() + assertEquals(new MySet(Set()), s) + } +} -- cgit v1.2.3 From 6ed701004550fa04b8ac2d3419f2ea4141c834ad Mon Sep 17 00:00:00 2001 From: vsalvis Date: Mon, 29 Jun 2015 18:56:06 +0200 Subject: Conform foreach tparam to majority naming 
convention 'U' is the common choice for the foreach function result tparam. This command summarises the naming diversity before and after this change. $ fgrep -r 'def foreach[' *|cut -f2 -d:|cut -f1 -d'('|tr -s ' '|sed 's/override //g'|sort|uniq -c|sort -nr Before, 80 def foreach[U] 6 def foreach[C] 6 def foreach[B] 4 final def foreach[U] 3 def foreach[S] 2 inline final def foreach[U] 2 def foreach[A] 1 inline final def foreach[specialized 1 final def foreach[B] 1 * def foreach[U] 1 def foreach[Q] 1 def foreach[D] 1 def foreach[A,B,U] After, 98 def foreach[U] 5 final def foreach[U] 2 inline final def foreach[U] 1 inline final def foreach[specialized 1 * def foreach[U] 1 def foreach[A,B,U] (@ symbols removed.) --- src/library/scala/collection/BitSetLike.scala | 2 +- .../scala/collection/GenTraversableLike.scala | 8 +- src/library/scala/collection/Iterator.scala | 2 +- .../scala/collection/LinearSeqOptimized.scala | 2 +- src/library/scala/collection/MapLike.scala | 8 +- src/library/scala/collection/Traversable.scala | 2 +- src/library/scala/collection/TraversableOnce.scala | 13 ++- .../scala/collection/TraversableProxyLike.scala | 2 +- .../collection/generic/TraversableForwarder.scala | 2 +- .../scala/collection/immutable/HashMap.scala | 4 +- .../scala/collection/immutable/HashSet.scala | 6 +- .../scala/collection/immutable/IntMap.scala | 2 +- .../scala/collection/immutable/LongMap.scala | 2 +- src/library/scala/collection/immutable/Map.scala | 8 +- .../scala/collection/immutable/Stream.scala | 8 +- .../scala/collection/immutable/TreeMap.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 2 +- .../scala/collection/mutable/AnyRefMap.scala | 116 ++++++++++---------- .../scala/collection/mutable/ArraySeq.scala | 2 +- .../scala/collection/mutable/ArrayStack.scala | 2 +- src/library/scala/collection/mutable/HashMap.scala | 6 +- src/library/scala/collection/mutable/HashSet.scala | 2 +- .../collection/mutable/ImmutableSetAdaptor.scala | 2 +- 
.../scala/collection/mutable/LinkedListLike.scala | 2 +- src/library/scala/collection/mutable/LongMap.scala | 122 ++++++++++----------- .../scala/collection/mutable/SynchronizedSet.scala | 2 +- .../scala/collection/parallel/ParMapLike.scala | 8 +- 27 files changed, 170 insertions(+), 169 deletions(-) diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index 8a8af79151..29369447d1 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -115,7 +115,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe else Iterator.empty.next() } - override def foreach[B](f: Int => B) { + override def foreach[U](f: Int => U) { /* NOTE: while loops are significantly faster as of 2.11 and one major use case of bitsets is performance. Also, there is nothing to do when all bits are clear, so use that as diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index 8b9d3e7a17..479a8b5b1b 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -269,16 +269,16 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with * {{{ * scala> val a = List(1) * a: List[Int] = List(1) - * + * * scala> val b = List(2) * b: List[Int] = List(2) - * + * * scala> val c = a ++ b * c: List[Int] = List(1, 2) - * + * * scala> val d = List('a') * d: List[Char] = List(a) - * + * * scala> val e = c ++ d * e: List[AnyVal] = List(1, 2, a) * }}} diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index b69e51fdf5..ed536f10a8 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -851,7 +851,7 @@ trait Iterator[+A] extends TraversableOnce[A] { * @usecase def foreach(f: A => Unit): Unit * @inheritdoc */ - def 
foreach[U](f: A => U) { while (hasNext) f(next()) } + def foreach[U](f: A => U) { while (hasNext) f(next()) } /** Tests whether a predicate holds for all values produced by this iterator. * $mayNotTerminateInf diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index b426061537..b7af8840a9 100644 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -67,7 +67,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } override /*IterableLike*/ - def foreach[B](f: A => B) { + def foreach[U](f: A => U) { var these = this while (!these.isEmpty) { f(these.head) diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index d133400570..99ed67325c 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -171,7 +171,7 @@ self => def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem override def size = self.size - override def foreach[C](f: A => C) = self.keysIterator foreach f + override def foreach[U](f: A => U) = self.keysIterator foreach f } /** Creates an iterator for all keys. @@ -203,7 +203,7 @@ self => protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] with Serializable { def iterator = valuesIterator override def size = self.size - override def foreach[C](f: B => C) = self.valuesIterator foreach f + override def foreach[U](f: B => U) = self.valuesIterator foreach f } /** Creates an iterator for all values in this map. 
@@ -228,7 +228,7 @@ self => throw new NoSuchElementException("key not found: " + key) protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] { - override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv) + override def foreach[U](f: ((A, B)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) def iterator = self.iterator.filter(kv => p(kv._1)) override def contains(key: A) = self.contains(key) && p(key) def get(key: A) = if (!p(key)) None else self.get(key) @@ -242,7 +242,7 @@ self => def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] { - override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v))) + override def foreach[U](g: ((A, C)) => U): Unit = for ((k, v) <- self) g((k, f(v))) def iterator = for ((k, v) <- self.iterator) yield (k, f(v)) override def size = self.size override def contains(key: A) = self.contains(key) diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala index a35750a35f..8145eaa204 100644 --- a/src/library/scala/collection/Traversable.scala +++ b/src/library/scala/collection/Traversable.scala @@ -38,7 +38,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]] override def remove(p: A => Boolean): Traversable[A] override def partition(p: A => Boolean): (Traversable[A], Traversable[A]) override def groupBy[K](f: A => K): Map[K, Traversable[A]] - override def foreach[U](f: A => U): Unit + override def foreach[U](f: A => U): Unit override def forall(p: A => Boolean): Boolean override def exists(p: A => Boolean): Boolean override def count(p: A => Boolean): Int diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index c5b0d0f085..910c59b179 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ 
b/src/library/scala/collection/TraversableOnce.scala @@ -61,7 +61,8 @@ import scala.reflect.ClassTag trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { self => - /** Self-documenting abstract methods. */ + /* Self-documenting abstract methods. */ + def foreach[U](f: A => U): Unit def isEmpty: Boolean def hasDefiniteSize: Boolean @@ -334,10 +335,10 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * {{{ * scala> val a = List(1,2,3,4) * a: List[Int] = List(1, 2, 3, 4) - * + * * scala> val b = new StringBuilder() - * b: StringBuilder = - * + * b: StringBuilder = + * * scala> a.addString(b , "List(" , ", " , ")") * res5: StringBuilder = List(1, 2, 3, 4) * }}} @@ -376,7 +377,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * {{{ * scala> val a = List(1,2,3,4) * a: List[Int] = List(1, 2, 3, 4) - * + * * scala> val b = new StringBuilder() * b: StringBuilder = * @@ -399,7 +400,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * {{{ * scala> val a = List(1,2,3,4) * a: List[Int] = List(1, 2, 3, 4) - * + * * scala> val b = new StringBuilder() * b: StringBuilder = * diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala index 4399dbc289..fa470ea238 100644 --- a/src/library/scala/collection/TraversableProxyLike.scala +++ b/src/library/scala/collection/TraversableProxyLike.scala @@ -28,7 +28,7 @@ import scala.reflect.ClassTag trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy { def self: Repr - override def foreach[B](f: A => B): Unit = self.foreach(f) + override def foreach[U](f: A => U): Unit = self.foreach(f) override def isEmpty: Boolean = self.isEmpty override def nonEmpty: Boolean = self.nonEmpty override def size: Int = self.size diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala 
b/src/library/scala/collection/generic/TraversableForwarder.scala index 1d7974f7a4..359ea402b6 100644 --- a/src/library/scala/collection/generic/TraversableForwarder.scala +++ b/src/library/scala/collection/generic/TraversableForwarder.scala @@ -32,7 +32,7 @@ trait TraversableForwarder[+A] extends Traversable[A] { /** The traversable object to which calls are forwarded. */ protected def underlying: Traversable[A] - override def foreach[B](f: A => B): Unit = underlying foreach f + override def foreach[U](f: A => U): Unit = underlying foreach f override def isEmpty: Boolean = underlying.isEmpty override def nonEmpty: Boolean = underlying.nonEmpty override def size: Int = underlying.size diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 3b3e65ea61..92d915fe8b 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -48,7 +48,7 @@ class HashMap[A, +B] extends AbstractMap[A, B] def iterator: Iterator[(A,B)] = Iterator.empty - override def foreach[U](f: ((A, B)) => U): Unit = { } + override def foreach[U](f: ((A, B)) => U): Unit = () def get(key: A): Option[B] = get0(key, computeHash(key), 0) @@ -422,7 +422,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { final override def getElem(cc: AnyRef): (A, B) = cc.asInstanceOf[HashMap1[A, B]].ensurePair } - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((A, B)) => U): Unit = { var i = 0 while (i < elems.length) { elems(i).foreach(f) diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 27b2bfdde7..050e90b49b 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -53,7 +53,7 @@ class HashSet[A] extends AbstractSet[A] def iterator: Iterator[A] = Iterator.empty - override def 
foreach[U](f: A => U): Unit = { } + override def foreach[U](f: A => U): Unit = () def contains(e: A): Boolean = get0(e, computeHash(e), 0) @@ -215,7 +215,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { private object EmptyHashSet extends HashSet[Any] { } private[collection] def emptyInstance: HashSet[Any] = EmptyHashSet - + // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code) private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = { val index0 = (hash0 >>> level) & 0x1f @@ -966,7 +966,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { final override def getElem(cc: AnyRef): A = cc.asInstanceOf[HashSet1[A]].key } - override def foreach[U](f: A => U): Unit = { + override def foreach[U](f: A => U): Unit = { var i = 0 while (i < elems.length) { elems(i).foreach(f) diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index cb6196e130..c6bf6a77e8 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -184,7 +184,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] /** * Loops over the key, value pairs of the map in unsigned order of the keys. 
*/ - override final def foreach[U](f: ((Int, T)) => U): Unit = this match { + override final def foreach[U](f: ((Int, T)) => U): Unit = this match { case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } case IntMap.Tip(key, value) => f((key, value)) case IntMap.Nil => diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 868c0c0f47..173d912fe5 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -176,7 +176,7 @@ extends AbstractMap[Long, T] /** * Loops over the key, value pairs of the map in unsigned order of the keys. */ - override final def foreach[U](f: ((Long, T)) => U): Unit = this match { + override final def foreach[U](f: ((Long, T)) => U): Unit = this match { case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } case LongMap.Tip(key, value) => f((key, value)) case LongMap.Nil => diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index 5178d5a862..2c5b444c70 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -112,7 +112,7 @@ object Map extends ImmutableMapFactory[Map] { def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) def - (key: A): Map[A, B] = if (key == key1) Map.empty else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((A, B)) => U): Unit = { f((key1, value1)) } } @@ -133,7 +133,7 @@ object Map extends ImmutableMapFactory[Map] { if (key == key1) new Map1(key2, value2) else if (key == key2) new Map1(key1, value1) else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((A, B)) => U): Unit = { f((key1, value1)); f((key2, value2)) } } @@ -157,7 +157,7 @@ object Map extends ImmutableMapFactory[Map] { else if (key == key2) new Map2(key1, value1, 
key3, value3) else if (key == key3) new Map2(key1, value1, key2, value2) else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((A, B)) => U): Unit = { f((key1, value1)); f((key2, value2)); f((key3, value3)) } } @@ -184,7 +184,7 @@ object Map extends ImmutableMapFactory[Map] { else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((A, B)) => U): Unit = { f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) } } diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index a6c55f8828..d3be809255 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -176,9 +176,9 @@ import scala.language.implicitConversions * loop(1, 1) * } * }}} - * + * * Note that `mkString` forces evaluation of a `Stream`, but `addString` does - * not. In both cases, a `Stream` that is or ends in a cycle + * not. In both cases, a `Stream` that is or ends in a cycle * (e.g. `lazy val s: Stream[Int] = 0 #:: s`) will convert additional trips * through the cycle to `...`. Additionally, `addString` will display an * un-memoized tail as `?`. @@ -566,7 +566,7 @@ self => else super.flatMap(f)(bf) } - override def foreach[B](f: A => B) = + override def foreach[U](f: A => U) = for (x <- self) if (p(x)) f(x) @@ -589,7 +589,7 @@ self => * unless the `f` throws an exception. 
*/ @tailrec - override final def foreach[B](f: A => B) { + override final def foreach[U](f: A => U) { if (!this.isEmpty) { f(head) tail.foreach(f) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 662075cd93..b845b76026 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -200,5 +200,5 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi override def contains(key: A): Boolean = RB.contains(tree, key) override def isDefinedAt(key: A): Boolean = RB.contains(tree, key) - override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f) + override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f) } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 7378211db0..2800030d67 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -151,7 +151,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin def iterator: Iterator[A] = RB.keysIterator(tree) override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - override def foreach[U](f: A => U) = RB.foreachKey(tree, f) + override def foreach[U](f: A => U) = RB.foreachKey(tree, f) override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSet(RB.rangeImpl(tree, from, until)) override def range(from: A, until: A): TreeSet[A] = newSet(RB.range(tree, from, until)) diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index fccc9d83e6..369d596ec3 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -5,23 +5,23 @@ package mutable import generic.CanBuildFrom /** This class implements mutable 
maps with `AnyRef` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, + * + * Basic map operations on single entries, including `contains` and `get`, * are typically significantly faster with `AnyRefMap` than [[HashMap]]. * Note that numbers and characters are not handled specially in AnyRefMap; * only plain `equals` and `hashCode` are used in comparisons. - * + * * Methods that traverse or regenerate the map, including `foreach` and `map`, * are not in general faster than with `HashMap`. The methods `foreachKey`, * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster * than alternative ways to achieve the same functionality. - * + * * Maps with open addressing may become less efficient at lookup after * repeated addition/removal of elements. Although `AnyRefMap` makes a * decent attempt to remain efficient regardless, calling `repack` * on a map that will no longer have elements removed but will be * used heavily may save both time and storage space. - * + * * This map is not intended to contain more than 2^29^ entries (approximately * 500 million). The maximum capacity is 2^30^, but performance will degrade * rapidly as 2^30^ is approached. @@ -34,50 +34,50 @@ extends AbstractMap[K, V] { import AnyRefMap._ def this() = this(AnyRefMap.exceptionDefault, 16, true) - + /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ def this(defaultEntry: K => V) = this(defaultEntry, 16, true) /** Creates a new `AnyRefMap` with an initial buffer of specified size. - * + * * An `AnyRefMap` can typically contain half as many elements as its buffer size * before it requires resizing. */ def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) - + /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - + private[this] var mask = 0 private[this] var _size = 0 private[this] var _vacant = 0 private[this] var _hashes: Array[Int] = null private[this] var _keys: Array[AnyRef] = null private[this] var _values: Array[AnyRef] = null - + if (initBlank) defaultInitialize(initialBufferSize) - + private[this] def defaultInitialize(n: Int) { - mask = + mask = if (n<0) 0x7 else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 _hashes = new Array[Int](mask+1) _keys = new Array[AnyRef](mask+1) _values = new Array[AnyRef](mask+1) } - + private[collection] def initializeTo( m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] ) { mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz } - + override def size: Int = _size override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) - - private def imbalanced: Boolean = + + private def imbalanced: Boolean = (_size + _vacant) > 0.5*mask || _vacant > _size - + private def hashOf(key: K): Int = { if (key eq null) 0x41081989 else { @@ -88,7 +88,7 @@ extends AbstractMap[K, V] if (j==0) 0x41081989 else j & 0x7FFFFFFF } } - + private def seekEntry(h: Int, k: AnyRef): Int = { var e = h & mask var x = 0 @@ -100,7 +100,7 @@ extends AbstractMap[K, V] } e | MissingBit } - + private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { var e = h & mask var x = 0 @@ -114,19 +114,19 @@ extends AbstractMap[K, V] } if (o >= 0) o | MissVacant else e | MissingBit } - + override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 - + override def get(key: K): Option[V] = { val i = seekEntry(hashOf(key), key) if (i < 0) None else Some(_values(i).asInstanceOf[V]) } - + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { val i = seekEntry(hashOf(key), key) if (i < 0) default else _values(i).asInstanceOf[V] } - + override def getOrElseUpdate(key: K, 
defaultValue: => V): V = { val h = hashOf(key) var i = seekEntryOrOpen(h, key) @@ -154,10 +154,10 @@ extends AbstractMap[K, V] } else _values(i).asInstanceOf[V] } - + /** Retrieves the value associated with a key, or the default for that type if none exists * (null for AnyRef, 0 for floats and integers). - * + * * Note: this is the fastest way to retrieve a value that may or * may not exist, if the default null/zero is acceptable. For key/value * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. @@ -166,22 +166,22 @@ extends AbstractMap[K, V] val i = seekEntry(hashOf(key), key) (if (i < 0) null else _values(i)).asInstanceOf[V] } - - /** Retrieves the value associated with a key. + + /** Retrieves the value associated with a key. * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead; an exception will be thrown if no + * will be returned instead; an exception will be thrown if no * `defaultEntry` was supplied. */ override def apply(key: K): V = { val i = seekEntry(hashOf(key), key) if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] } - + /** Defers to defaultEntry to find a default value for the key. Throws an * exception if no other default behavior was specified. */ override def default(key: K) = defaultEntry(key) - + private def repack(newMask: Int) { val oh = _hashes val ok = _keys @@ -205,9 +205,9 @@ extends AbstractMap[K, V] i += 1 } } - + /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. 
- * + * * For maps that undergo a complex creation process with both addition and * removal of keys, and then are used heavily with no further removal of * elements, calling `repack` after the end of the creation can result in @@ -220,7 +220,7 @@ extends AbstractMap[K, V] while (m > 8 && 8*_size < m) m = m >>> 1 repack(m) } - + override def put(key: K, value: V): Option[V] = { val h = hashOf(key) val k = key @@ -243,9 +243,9 @@ extends AbstractMap[K, V] ans } } - + /** Updates the map to include a new key-value pair. - * + * * This is the fastest way to add an entry to an `AnyRefMap`. */ override def update(key: K, value: V): Unit = { @@ -267,12 +267,12 @@ extends AbstractMap[K, V] _values(i) = value.asInstanceOf[AnyRef] } } - + /** Adds a new key/value pair to this map and returns the map. */ def +=(key: K, value: V): this.type = { update(key, value); this } def +=(kv: (K, V)): this.type = { update(kv._1, kv._2); this } - + def -=(key: K): this.type = { val i = seekEntry(hashOf(key), key) if (i >= 0) { @@ -284,14 +284,14 @@ extends AbstractMap[K, V] } this } - + def iterator: Iterator[(K, V)] = new Iterator[(K, V)] { private[this] val hz = _hashes private[this] val kz = _keys private[this] val vz = _values - + private[this] var index = 0 - + def hasNext: Boolean = index A) { + + override def foreach[U](f: ((K,V)) => U) { var i = 0 var e = _size while (e > 0) { @@ -325,7 +325,7 @@ extends AbstractMap[K, V] else return } } - + override def clone(): AnyRefMap[K, V] = { val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) val kz = java.util.Arrays.copyOf(_keys, _keys.length) @@ -334,7 +334,7 @@ extends AbstractMap[K, V] arm.initializeTo(mask, _size, _vacant, hz, kz, vz) arm } - + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B) { var i,j = 0 while (i < _hashes.length & j < _size) { @@ -346,13 +346,13 @@ extends AbstractMap[K, V] i += 1 } } - + /** Applies a function to all keys of this map. 
*/ def foreachKey[A](f: K => A) { foreachElement[K,A](_keys, f) } /** Applies a function to all values of this map. */ def foreachValue[A](f: V => A) { foreachElement[V,A](_values, f) } - + /** Creates a new `AnyRefMap` with different values. * Unlike `mapValues`, this method generates a new * collection immediately. @@ -374,8 +374,8 @@ extends AbstractMap[K, V] arm.initializeTo(mask, _size, _vacant, hz, kz, vz) arm } - - /** Applies a transformation function to all values stored in this map. + + /** Applies a transformation function to all values stored in this map. * Note: the default, if any, is not transformed. */ def transformValues(f: V => V): this.type = { @@ -398,15 +398,15 @@ object AnyRefMap { private final val MissingBit = 0x80000000 private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - + private val exceptionDefault = (k: Any) => throw new NoSuchElementException(if (k == null) "(null)" else k.toString) - + implicit def canBuildFrom[K <: AnyRef, V, J <: AnyRef, U]: CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] = new CanBuildFrom[AnyRefMap[K,V], (J, U), AnyRefMap[J,U]] { def apply(from: AnyRefMap[K,V]): AnyRefMapBuilder[J, U] = apply() def apply(): AnyRefMapBuilder[J, U] = new AnyRefMapBuilder[J, U] } - + final class AnyRefMapBuilder[K <: AnyRef, V] extends Builder[(K, V), AnyRefMap[K, V]] { private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] def +=(entry: (K, V)): this.type = { @@ -425,14 +425,14 @@ object AnyRefMap { if (arm.size < (sz>>3)) arm.repack() arm } - + /** Creates a new empty `AnyRefMap`. */ def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] - + /** Creates a new empty `AnyRefMap` with the supplied default */ def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) - - /** Creates a new `AnyRefMap` from arrays of keys and values. + + /** Creates a new `AnyRefMap` from arrays of keys and values. 
* Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. */ def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { @@ -443,8 +443,8 @@ object AnyRefMap { if (arm.size < (sz>>3)) arm.repack() arm } - - /** Creates a new `AnyRefMap` from keys and values. + + /** Creates a new `AnyRefMap` from keys and values. * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. */ def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = { diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 577a838315..ddb48627af 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -68,7 +68,7 @@ extends AbstractSeq[A] array(idx) = elem.asInstanceOf[AnyRef] } - override def foreach[U](f: A => U) { + override def foreach[U](f: A => U) { var i = 0 while (i < length) { f(array(i).asInstanceOf[A]) diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala index fec2da8839..8ff128c026 100644 --- a/src/library/scala/collection/mutable/ArrayStack.scala +++ b/src/library/scala/collection/mutable/ArrayStack.scala @@ -233,7 +233,7 @@ extends AbstractSeq[T] } } - override def foreach[U](f: T => U) { + override def foreach[U](f: T => U) { var currentIndex = index while (currentIndex > 0) { currentIndex -= 1 diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index 6fca75ffea..eab4202353 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -96,16 +96,16 @@ extends AbstractMap[A, B] def iterator = entriesIterator map (e => ((e.key, e.value))) - override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value))) + override def foreach[U](f: ((A, B)) => U): 
Unit = foreachEntry(e => f((e.key, e.value))) /* Override to avoid tuple allocation in foreach */ override def keySet: scala.collection.Set[A] = new DefaultKeySet { - override def foreach[C](f: A => C) = foreachEntry(e => f(e.key)) + override def foreach[U](f: A => U) = foreachEntry(e => f(e.key)) } /* Override to avoid tuple allocation in foreach */ override def values: scala.collection.Iterable[B] = new DefaultValuesIterable { - override def foreach[C](f: B => C) = foreachEntry(e => f(e.value)) + override def foreach[U](f: B => U) = foreachEntry(e => f(e.value)) } /* Override to avoid tuple allocation */ diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index 886fee5a59..3a16e4efa5 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -70,7 +70,7 @@ extends AbstractSet[A] override def iterator: Iterator[A] = super[FlatHashTable].iterator - override def foreach[U](f: A => U) { + override def foreach[U](f: A => U) { var i = 0 val len = table.length while (i < len) { diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala index 730b22227d..d7eec70b15 100644 --- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala @@ -32,7 +32,7 @@ extends AbstractSet[A] def contains(elem: A): Boolean = set.contains(elem) - override def foreach[U](f: A => U): Unit = set.foreach(f) + override def foreach[U](f: A => U): Unit = set.foreach(f) override def exists(p: A => Boolean): Boolean = set.exists(p) diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index a9d385bc5b..d0748b8a9f 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ 
b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -172,7 +172,7 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq } } - override def foreach[B](f: A => B) { + override def foreach[U](f: A => U) { var these = this while (these.nonEmpty) { f(these.elem) diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala index c124f35cd7..198e34bd29 100644 --- a/src/library/scala/collection/mutable/LongMap.scala +++ b/src/library/scala/collection/mutable/LongMap.scala @@ -5,20 +5,20 @@ package mutable import generic.CanBuildFrom /** This class implements mutable maps with `Long` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, + * + * Basic map operations on single entries, including `contains` and `get`, * are typically substantially faster with `LongMap` than [[HashMap]]. Methods * that act on the whole map, including `foreach` and `map` are not in * general expected to be faster than with a generic map, save for those * that take particular advantage of the internal structure of the map: * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. - * + * * Maps with open addressing may become less efficient at lookup after * repeated addition/removal of elements. Although `LongMap` makes a * decent attempt to remain efficient regardless, calling `repack` * on a map that will no longer have elements removed but will be * used heavily may save both time and storage space. - * + * * This map is not intended to contain more than 2^29 entries (approximately * 500 million). The maximum capacity is 2^30, but performance will degrade * rapidly as 2^30 is approached. @@ -33,20 +33,20 @@ extends AbstractMap[Long, V] import LongMap._ def this() = this(LongMap.exceptionDefault, 16, true) - + /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. 
*/ def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) - + /** Creates a new `LongMap` with an initial buffer of specified size. - * + * * A LongMap can typically contain half as many elements as its buffer size * before it requires resizing. */ def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) - + /** Creates a new `LongMap` with specified default values and initial buffer size. */ def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - + private[this] var mask = 0 private[this] var extraKeys: Int = 0 private[this] var zeroValue: AnyRef = null @@ -55,43 +55,43 @@ extends AbstractMap[Long, V] private[this] var _vacant = 0 private[this] var _keys: Array[Long] = null private[this] var _values: Array[AnyRef] = null - + if (initBlank) defaultInitialize(initialBufferSize) - + private[this] def defaultInitialize(n: Int) = { - mask = + mask = if (n<0) 0x7 else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 _keys = new Array[Long](mask+1) _values = new Array[AnyRef](mask+1) } - + private[collection] def initializeTo( m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] ) { mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz } - + override def size: Int = _size + (extraKeys+1)/2 override def empty: LongMap[V] = new LongMap() - - private def imbalanced: Boolean = + + private def imbalanced: Boolean = (_size + _vacant) > 0.5*mask || _vacant > _size - + private def toIndex(k: Long): Int = { // Part of the MurmurHash3 32 bit finalizer val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt val x = (h ^ (h >>> 16)) * 0x85EBCA6B (x ^ (x >>> 13)) & mask } - + private def seekEmpty(k: Long): Int = { var e = toIndex(k) var x = 0 while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } e } - + private def seekEntry(k: Long): Int = { var e = 
toIndex(k) var x = 0 @@ -99,7 +99,7 @@ extends AbstractMap[Long, V] while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } e | MissingBit } - + private def seekEntryOrOpen(k: Long): Int = { var e = toIndex(k) var x = 0 @@ -116,12 +116,12 @@ extends AbstractMap[Long, V] } o } - + override def contains(key: Long): Boolean = { if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 else seekEntry(key) >= 0 } - + override def get(key: Long): Option[V] = { if (key == -key) { if ((((key>>>63).toInt+1) & extraKeys) == 0) None @@ -133,7 +133,7 @@ extends AbstractMap[Long, V] if (i < 0) None else Some(_values(i).asInstanceOf[V]) } } - + override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { if (key == -key) { if ((((key>>>63).toInt+1) & extraKeys) == 0) default @@ -145,7 +145,7 @@ extends AbstractMap[Long, V] if (i < 0) default else _values(i).asInstanceOf[V1] } } - + override def getOrElseUpdate(key: Long, defaultValue: => V): V = { if (key == -key) { val kbits = (key>>>63).toInt + 1 @@ -185,10 +185,10 @@ extends AbstractMap[Long, V] else _values(i).asInstanceOf[V] } } - + /** Retrieves the value associated with a key, or the default for that type if none exists * (null for AnyRef, 0 for floats and integers). - * + * * Note: this is the fastest way to retrieve a value that may or * may not exist, if the default null/zero is acceptable. For key/value * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. @@ -204,8 +204,8 @@ extends AbstractMap[Long, V] if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V] } } - - /** Retrieves the value associated with a key. + + /** Retrieves the value associated with a key. * If the key does not exist in the map, the `defaultEntry` for that key * will be returned instead. */ @@ -220,12 +220,12 @@ extends AbstractMap[Long, V] if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] } } - + /** The user-supplied default value for the key. 
Throws an exception * if no other default behavior was specified. */ override def default(key: Long) = defaultEntry(key) - + private def repack(newMask: Int) { val ok = _keys val ov = _values @@ -244,9 +244,9 @@ extends AbstractMap[Long, V] i += 1 } } - + /** Repacks the contents of this `LongMap` for maximum efficiency of lookup. - * + * * For maps that undergo a complex creation process with both addition and * removal of keys, and then are used heavily with no further removal of * elements, calling `repack` after the end of the creation can result in @@ -259,7 +259,7 @@ extends AbstractMap[Long, V] while (m > 8 && 8*_size < m) m = m >>> 1 repack(m) } - + override def put(key: Long, value: V): Option[V] = { if (key == -key) { if (key == 0) { @@ -294,9 +294,9 @@ extends AbstractMap[Long, V] } } } - + /** Updates the map to include a new key-value pair. - * + * * This is the fastest way to add an entry to a `LongMap`. */ override def update(key: Long, value: V): Unit = { @@ -326,12 +326,12 @@ extends AbstractMap[Long, V] } } } - + /** Adds a new key/value pair to this map and returns the map. 
*/ def +=(key: Long, value: V): this.type = { update(key, value); this } - + def +=(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } - + def -=(key: Long): this.type = { if (key == -key) { if (key == 0L) { @@ -354,22 +354,22 @@ extends AbstractMap[Long, V] } this } - + def iterator: Iterator[(Long, V)] = new Iterator[(Long, V)] { private[this] val kz = _keys private[this] val vz = _values - - private[this] var nextPair: (Long, V) = + + private[this] var nextPair: (Long, V) = if (extraKeys==0) null else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) else (Long.MinValue, minValue.asInstanceOf[V]) - private[this] var anotherPair: (Long, V) = + private[this] var anotherPair: (Long, V) = if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) else null - + private[this] var index = 0 - + def hasNext: Boolean = nextPair != null || (index < kz.length && { var q = kz(index) while (q == -q) { @@ -392,8 +392,8 @@ extends AbstractMap[Long, V] ans } } - - override def foreach[A](f: ((Long,V)) => A) { + + override def foreach[U](f: ((Long,V)) => U) { if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) var i,j = 0 @@ -406,7 +406,7 @@ extends AbstractMap[Long, V] i += 1 } } - + override def clone(): LongMap[V] = { val kz = java.util.Arrays.copyOf(_keys, _keys.length) val vz = java.util.Arrays.copyOf(_values, _values.length) @@ -414,7 +414,7 @@ extends AbstractMap[Long, V] lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) lm } - + /** Applies a function to all keys of this map. */ def foreachKey[A](f: Long => A) { if ((extraKeys & 1) == 1) f(0L) @@ -444,7 +444,7 @@ extends AbstractMap[Long, V] i += 1 } } - + /** Creates a new `LongMap` with different values. * Unlike `mapValues`, this method generates a new * collection immediately. 
@@ -467,8 +467,8 @@ extends AbstractMap[Long, V] lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) lm } - - /** Applies a transformation function to all values stored in this map. + + /** Applies a transformation function to all values stored in this map. * Note: the default, if any, is not transformed. */ def transformValues(f: V => V): this.type = { @@ -492,15 +492,15 @@ object LongMap { private final val MissingBit = 0x80000000 private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - + private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) - - implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] = + + implicit def canBuildFrom[V, U]: CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] = new CanBuildFrom[LongMap[V], (Long, U), LongMap[U]] { def apply(from: LongMap[V]): LongMapBuilder[U] = apply() def apply(): LongMapBuilder[U] = new LongMapBuilder[U] } - + final class LongMapBuilder[V] extends Builder[(Long, V), LongMap[V]] { private[collection] var elems: LongMap[V] = new LongMap[V] def +=(entry: (Long, V)): this.type = { @@ -519,14 +519,14 @@ object LongMap { if (lm.size < (sz>>3)) lm.repack() lm } - + /** Creates a new empty `LongMap`. */ def empty[V]: LongMap[V] = new LongMap[V] - + /** Creates a new empty `LongMap` with the supplied default */ def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) - - /** Creates a new `LongMap` from arrays of keys and values. + + /** Creates a new `LongMap` from arrays of keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. */ def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { @@ -537,8 +537,8 @@ object LongMap { if (lm.size < (sz>>3)) lm.repack() lm } - - /** Creates a new `LongMap` from keys and values. + + /** Creates a new `LongMap` from keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. 
*/ def fromZip[V](keys: Iterable[Long], values: Iterable[V]): LongMap[V] = { diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index 60e2e79d3f..dd842f26ce 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -78,7 +78,7 @@ trait SynchronizedSet[A] extends Set[A] { super.subsetOf(that) } - override def foreach[U](f: A => U) = synchronized { + override def foreach[U](f: A => U) = synchronized { super.foreach(f) } diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala index ee1334ba55..0a671fb085 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -99,14 +99,14 @@ self => def - (elem: K): ParSet[K] = (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem override def size = self.size - override def foreach[S](f: K => S) = for ((k, v) <- self) f(k) + override def foreach[U](f: K => U) = for ((k, v) <- self) f(k) override def seq = self.seq.keySet } protected class DefaultValuesIterable extends ParIterable[V] { def splitter = valuesIterator(self.splitter) override def size = self.size - override def foreach[S](f: V => S) = for ((k, v) <- self) f(v) + override def foreach[U](f: V => U) = for ((k, v) <- self) f(v) def seq = self.seq.values } @@ -118,7 +118,7 @@ self => def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] { lazy val filtered = self.filter(kv => p(kv._1)) - override def foreach[S](f: ((K, V)) => S): Unit = for (kv <- self) if (p(kv._1)) f(kv) + override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) def splitter = filtered.splitter override def contains(key: K) = self.contains(key) && p(key) def get(key: K) = if (!p(key)) None else self.get(key) @@ -129,7 
+129,7 @@ self => } def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] { - override def foreach[Q](g: ((K, S)) => Q): Unit = for ((k, v) <- self) g((k, f(v))) + override def foreach[U](g: ((K, S)) => U): Unit = for ((k, v) <- self) g((k, f(v))) def splitter = self.splitter.map(kv => (kv._1, f(kv._2))) override def size = self.size override def contains(key: K) = self.contains(key) -- cgit v1.2.3 From 212da3d5be061e235c7ee179a96ffa7b41b7bead Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 21 Jan 2015 23:38:34 -0800 Subject: SI-4950 Test reduction A session test with extra filtering best expresses the intentions. No check file is required. --- test/files/run/t4950.check | 7 ------- test/files/run/t4950.scala | 24 ++++++++++++++++++------ 2 files changed, 18 insertions(+), 13 deletions(-) delete mode 100644 test/files/run/t4950.check diff --git a/test/files/run/t4950.check b/test/files/run/t4950.check deleted file mode 100644 index 8994441163..0000000000 --- a/test/files/run/t4950.check +++ /dev/null @@ -1,7 +0,0 @@ - -scala> val 1 = 2 -scala.MatchError: 2 (of class java.lang.Integer) - -scala> val List(1) = List(1) - -scala> :quit diff --git a/test/files/run/t4950.scala b/test/files/run/t4950.scala index cef06027bf..e34b2cf3f2 100644 --- a/test/files/run/t4950.scala +++ b/test/files/run/t4950.scala @@ -1,12 +1,24 @@ -import scala.tools.partest.ReplTest +import scala.tools.partest.SessionTest +import scala.PartialFunction.{ cond => when } + +object Elision { + val elideMsg = """ ... \d+ elided""".r +} + +object Test extends SessionTest { + import Elision._ -object Test extends ReplTest { // Filter out the abbreviated stacktrace "... X elided" // because the number seems to differ between versions/platforms/... 
- override def show = eval() filterNot (_ contains "elided") foreach println - def code = + def elided(s: String) = when(s) { case elideMsg() => true } + override def eval() = super.eval() filterNot elided + def session = """ -val 1 = 2 -val List(1) = List(1) +scala> val 1 = 2 +scala.MatchError: 2 (of class java.lang.Integer) + +scala> val List(1) = List(1) + +scala> :quit """ } -- cgit v1.2.3 From e3cbcd5298108b2a0ba66bf70fd9b78664cbf483 Mon Sep 17 00:00:00 2001 From: vsalvis Date: Sat, 6 Jun 2015 10:56:06 +0200 Subject: Improve collections documentation and prefer () to {} - Remove some duplicate method documentation that is now inherited - Whitespace edits - Rewording of method docs - Clearer usage examples - tparam alignment for some usecase tags - Prefer () to { } for do nothing bodies --- .../scala/collection/GenTraversableLike.scala | 12 --- .../scala/collection/GenTraversableOnce.scala | 86 +++++++++++++++------- src/library/scala/collection/TraversableLike.scala | 36 +-------- src/library/scala/collection/package.scala | 38 ++++++---- .../collection/parallel/ParIterableLike.scala | 1 - 5 files changed, 85 insertions(+), 88 deletions(-) diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index 479a8b5b1b..d730996be2 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -158,18 +158,6 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with @migration("The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.", "2.9.0") def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That - /** Applies a function `f` to all elements of this $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. 
- * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - * - * @usecase def foreach(f: A => Unit): Unit - * @inheritdoc - */ def foreach[U](f: A => U): Unit /** Builds a new collection by applying a function to all elements of this $coll. diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index a45ec965f5..244ff26397 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -49,6 +49,22 @@ import scala.language.higherKinds */ trait GenTraversableOnce[+A] extends Any { + /** Applies a function `f` to all elements of this $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + * + * @usecase def foreach(f: A => Unit): Unit + * @inheritdoc + * + * Note: this method underlies the implementation of most other bulk operations. + * It's important to implement this method in an efficient way. + * + */ def foreach[U](f: A => U): Unit def hasDefiniteSize: Boolean @@ -110,13 +126,14 @@ trait GenTraversableOnce[+A] extends Any { * binary operator. * * $undefinedorder + * $willNotTerminateInf * * @tparam A1 a type parameter for the binary operator, a supertype of `A`. * @param z a neutral element for the fold operation; may be added to the result * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication.) 
- * @param op a binary operator that must be associative - * @return the result of applying fold operator `op` between all the elements and `z` + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements and `z`, or `z` if this $coll is empty. */ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 @@ -205,6 +222,7 @@ trait GenTraversableOnce[+A] extends Any { * op(...op(z, x_1), x_2, ..., x_n) * }}} * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. + * Returns `z` if this $coll is empty. */ def foldLeft[B](z: B)(op: (B, A) => B): B @@ -222,30 +240,32 @@ trait GenTraversableOnce[+A] extends Any { * op(x_1, op(x_2, ... op(x_n, z)...)) * }}} * where `x,,1,,, ..., x,,n,,` are the elements of this $coll. + * Returns `z` if this $coll is empty. */ def foldRight[B](z: B)(op: (A, B) => B): B /** Aggregates the results of applying an operator to subsequent elements. * - * This is a more general form of `fold` and `reduce`. It has similar - * semantics, but does not require the result to be a supertype of the - * element type. It traverses the elements in different partitions - * sequentially, using `seqop` to update the result, and then applies - * `combop` to results from different partitions. The implementation of - * this operation may operate on an arbitrary number of collection - * partitions, so `combop` may be invoked an arbitrary number of times. - * - * For example, one might want to process some elements and then produce - * a `Set`. In this case, `seqop` would process an element and append it - * to the list, while `combop` would concatenate two lists from different - * partitions together. The initial value `z` would be an empty set. + * This is a more general form of `fold` and `reduce`. It is similar to + * `foldLeft` in that it doesn't require the result to be a supertype of the + * element type. 
In addition, it allows parallel collections to be processed + * in chunks, and then combines the intermediate results. + * + * `aggregate` splits the $coll into partitions and processes each + * partition by sequentially applying `seqop`, starting with `z` (like + * `foldLeft`). Those intermediate results are then combined by using + * `combop` (like `fold`). The implementation of this operation may operate + * on an arbitrary number of collection partitions (even 1), so `combop` may + * be invoked an arbitrary number of times (even 0). + * + * As an example, consider summing up the integer values of a list of chars. + * The initial value for the sum is 0. First, `seqop` transforms each input + * character to an Int and adds it to the sum (of the partition). Then, + * `combop` just needs to sum up the intermediate results of the partitions: * {{{ - * pc.aggregate(Set[Int]())(_ += process(_), _ ++ _) + * List('a', 'b', 'c').aggregate(0)({ (sum, ch) => sum + ch.toInt }, { (p1, p2) => p1 + p2 }) * }}} * - * Another example is calculating geometric mean from a collection of doubles - * (one would typically require big doubles for this). - * * @tparam B the type of accumulated results * @param z the initial value for the accumulated result of the partition - this * will typically be the neutral element for the `seqop` operator (e.g. @@ -423,13 +443,13 @@ trait GenTraversableOnce[+A] extends Any { */ def find(@deprecatedName('pred) p: A => Boolean): Option[A] - /** Copies values of this $coll to an array. + /** Copies the elements of this $coll to an array. * Fills the given array `xs` with values of this $coll. * Copying will stop once either the end of the current $coll is reached, - * or the end of the array is reached. + * or the end of the target array is reached. * * @param xs the array to fill. - * @tparam B the type of the elements of the array. + * @tparam B the type of the elements of the target array. 
* * @usecase def copyToArray(xs: Array[A]): Unit * @inheritdoc @@ -438,14 +458,14 @@ trait GenTraversableOnce[+A] extends Any { */ def copyToArray[B >: A](xs: Array[B]): Unit - /** Copies values of this $coll to an array. + /** Copies the elements of this $coll to an array. * Fills the given array `xs` with values of this $coll, beginning at index `start`. * Copying will stop once either the end of the current $coll is reached, - * or the end of the array is reached. + * or the end of the target array is reached. * * @param xs the array to fill. * @param start the starting index. - * @tparam B the type of the elements of the array. + * @tparam B the type of the elements of the target array. * * @usecase def copyToArray(xs: Array[A], start: Int): Unit * @inheritdoc @@ -454,6 +474,22 @@ trait GenTraversableOnce[+A] extends Any { */ def copyToArray[B >: A](xs: Array[B], start: Int): Unit + /** Copies the elements of this $coll to an array. + * Fills the given array `xs` with at most `len` elements of + * this $coll, starting at position `start`. + * Copying will stop once either the end of the current $coll is reached, + * or the end of the target array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the target array. 
+ * + * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit + * @inheritdoc + * + * $willNotTerminateInf + */ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit /** Displays all elements of this $coll in a string using start, end, and diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 04ae8c8aff..bd1be84e97 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -340,14 +340,6 @@ trait TraversableLike[+A, +Repr] extends Any b.result } - /** Tests whether a predicate holds for all elements of this $coll. - * - * $mayNotTerminateInf - * - * @param p the predicate used to test elements. - * @return `true` if this $coll is empty, otherwise `true` if the given predicate `p` - * holds for all elements of this $coll, otherwise `false`. - */ def forall(p: A => Boolean): Boolean = { var result = true breakable { @@ -374,15 +366,6 @@ trait TraversableLike[+A, +Repr] extends Any result } - /** Finds the first element of the $coll satisfying a predicate, if any. - * - * $mayNotTerminateInf - * $orderDependent - * - * @param p the predicate used to test elements. - * @return an option value containing the first element in the $coll - * that satisfies `p`, or `None` if none exists. - */ def find(p: A => Boolean): Option[A] = { var result: Option[A] = None breakable { @@ -594,23 +577,6 @@ trait TraversableLike[+A, +Repr] extends Any */ def inits: Iterator[Repr] = iterateUntilEmpty(_.init) - /** Copies elements of this $coll to an array. - * Fills the given array `xs` with at most `len` elements of - * this $coll, starting at position `start`. - * Copying will stop once either the end of the current $coll is reached, - * or the end of the array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index. 
- * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the array. - * - * - * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit - * @inheritdoc - * - * $willNotTerminateInf - */ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { var i = start val end = (start + len) min xs.length @@ -625,7 +591,7 @@ trait TraversableLike[+A, +Repr] extends Any @deprecatedOverriding("Enforce contract of toTraversable that if it is Traversable it returns itself.", "2.11.0") def toTraversable: Traversable[A] = thisCollection - + def toIterator: Iterator[A] = toStream.iterator def toStream: Stream[A] = toBuffer.toStream // Override to provide size hint. diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala index 13fe7a79c4..856f901b77 100644 --- a/src/library/scala/collection/package.scala +++ b/src/library/scala/collection/package.scala @@ -13,8 +13,11 @@ package scala * * == Guide == * - * A detailed guide for the collections library is available + * A detailed guide for using the collections library is available * at [[http://docs.scala-lang.org/overviews/collections/introduction.html]]. + * Developers looking to extend the collections library can find a description + * of its architecture at + * [[http://docs.scala-lang.org/overviews/core/architecture-of-scala-collections.html]]. 
* * == Using Collections == * @@ -31,24 +34,25 @@ package scala * array: Array[Int] = Array(1, 2, 3, 4, 5, 6) * * scala> array map { _.toString } - * res0: Array[java.lang.String] = Array(1, 2, 3, 4, 5, 6) + * res0: Array[String] = Array(1, 2, 3, 4, 5, 6) * * scala> val list = List(1,2,3,4,5,6) * list: List[Int] = List(1, 2, 3, 4, 5, 6) * * scala> list map { _.toString } - * res1: List[java.lang.String] = List(1, 2, 3, 4, 5, 6) + * res1: List[String] = List(1, 2, 3, 4, 5, 6) * * }}} * * == Creating Collections == * - * The most common way to create a collection is to use the companion objects as factories. - * Of these, the three most common - * are [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their - * companion objects are all available - * as type aliases the either the [[scala]] package or in `scala.Predef`, and can be used - * like so: + * The most common way to create a collection is to use its companion object as + * a factory. The three most commonly used collections are + * [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and + * [[scala.collection.immutable.Map]]. + * They can be used directly as shown below since their companion objects are + * all available as type aliases in either the [[scala]] package or in + * `scala.Predef`. 
New collections are created like this: * {{{ * scala> val seq = Seq(1,2,3,4,1) * seq: Seq[Int] = List(1, 2, 3, 4, 1) @@ -56,12 +60,12 @@ package scala * scala> val set = Set(1,2,3,4,1) * set: scala.collection.immutable.Set[Int] = Set(1, 2, 3, 4) * - * scala> val map = Map(1 -> "one",2 -> "two", 3 -> "three",2 -> "too") - * map: scala.collection.immutable.Map[Int,java.lang.String] = Map((1,one), (2,too), (3,three)) + * scala> val map = Map(1 -> "one", 2 -> "two", 3 -> "three", 2 -> "too") + * map: scala.collection.immutable.Map[Int,String] = Map(1 -> one, 2 -> too, 3 -> three) * }}} * - * It is also typical to use the [[scala.collection.immutable]] collections over those - * in [[scala.collection.mutable]]; The types aliased in + * It is also typical to prefer the [[scala.collection.immutable]] collections + * over those in [[scala.collection.mutable]]; the types aliased in * the `scala.Predef` object are the immutable versions. * * Also note that the collections library was carefully designed to include several implementations of @@ -74,9 +78,13 @@ package scala * * === Converting between Java Collections === * - * The `JavaConversions` object provides implicit defs that will allow mostly seamless integration - * between Java Collections-based APIs and the Scala collections library. + * The [[scala.collection.JavaConversions]] object provides implicit defs that + * will allow mostly seamless integration between APIs using Java Collections + * and the Scala collections library. * + * Alternatively the [[scala.collection.JavaConverters]] object provides a collection + * of decorators that allow converting between Scala and Java collections using `asScala` + * and `asJava` methods. 
*/ package object collection { import scala.collection.generic.CanBuildFrom diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index e7b022b895..8c9b959569 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -1499,5 +1499,4 @@ self: ParIterableLike[T, Repr, Sequential] => append(s) } }) - } -- cgit v1.2.3 From 5e080eb204dab36dd4ae1e42adc63737fe8a9e6d Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 27 Oct 2015 16:17:41 +0100 Subject: Fix the dist/mkBin target in the sbt build MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Set the correct unmanagedResourceDirectories for the compiler and library projects. These projects already had includeFilter definitions to copy the correct resource files to the target dirs but they were meaningless without a source dir. - Set a target path for ‘dist’ to make stream files go to the proper .gitignored dir. - Set permissions ugo+rx on all generated script and batch files. 
--- build.sbt | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 8e79e1019f..5dc8cad01d 100644 --- a/build.sbt +++ b/build.sbt @@ -154,6 +154,7 @@ lazy val library = configureAsSubproject(project) val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux" Seq("-doc-no-compile", libraryAuxDir.toString) }, + unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, includeFilter in unmanagedResources in Compile := libIncludes) .dependsOn (forkjoin) @@ -184,6 +185,7 @@ lazy val compiler = configureAsSubproject(project) (mappings in Compile in packageBin in LocalProject("interactive")).value ++ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++ (mappings in Compile in packageBin in LocalProject("repl")).value, + unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, includeFilter in unmanagedResources in Compile := compilerIncludes) .dependsOn(library, reflect) @@ -299,7 +301,8 @@ lazy val root = (project in file(".")). 
) lazy val dist = (project in file("dist")).settings( - mkBin := mkBinImpl.value + mkBin := mkBinImpl.value, + target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id ) /** @@ -446,6 +449,7 @@ def clearSourceAndResourceDirectories = Seq(Compile, Test).flatMap(config => inC ))) lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { + import java.io.IOException def mkScalaTool(mainCls: String, classpath: Seq[Attributed[File]]): ScalaTool = ScalaTool(mainClass = mainCls, classpath = classpath.toList.map(_.data.getAbsolutePath), @@ -453,11 +457,18 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { javaOpts = "-Xmx256M -Xms32M", toolFlags = "") val rootDir = (classDirectory in Compile in compiler).value - def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] = - Seq( + def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] = { + val res = Seq( scalaTool.writeScript(file, "unix", rootDir, outDir), scalaTool.writeScript(file, "windows", rootDir, outDir) ) + res.foreach { f => + //TODO 2.12: Use Files.setPosixFilePermissions() (Java 7+) instead of calling out to chmod + if(Process(List("chmod", "ugo+rx", f.getAbsolutePath())).! > 0) + throw new IOException("chmod failed") + } + res + } def mkQuickBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = { val scalaTool = mkScalaTool(mainCls, classpath) val outDir = buildDirectory.value / "quick/bin" -- cgit v1.2.3 From 9debc84dcd57c331c184a3cf58b627045db632e0 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Wed, 28 Oct 2015 20:28:52 +0100 Subject: Create shaded JLine in sbt build Reusing parts of #4593, this commit adds two additional subprojects to the sbt build: - repl-jline, which is already used by the ant build, builds the part of the REPL that depends on JLine. The actual JLine dependency is moved to this project.
- repl-jline-shaded uses JarJar to create a shaded version of repl-jline and jline.jar. Unlike the ant build, this setup does not use any circular dependencies. dist/mkBin puts all of quick/repl, quick/repl-jline and quick/repl-jline-shaded onto the classpath of build-sbt/quick/bin/scala. A future addition to the sbt build for building build-sbt/pack will have to put the generated classfiles into the correct JARs, mirroring the old structure. --- build.sbt | 53 +++++++++++++-- project/JarJar.scala | 83 ++++++++++++++++++++++++ project/plugins.sbt | 4 +- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 2 +- 4 files changed, 133 insertions(+), 9 deletions(-) create mode 100644 project/JarJar.scala diff --git a/build.sbt b/build.sbt index 5dc8cad01d..1bb6962977 100644 --- a/build.sbt +++ b/build.sbt @@ -184,7 +184,7 @@ lazy val compiler = configureAsSubproject(project) streams.value.cacheDirectory) ++ (mappings in Compile in packageBin in LocalProject("interactive")).value ++ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++ - (mappings in Compile in packageBin in LocalProject("repl")).value, + (mappings in Compile in packageBin in LocalProject("repl-jline-shaded")).value, unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, includeFilter in unmanagedResources in Compile := compilerIncludes) .dependsOn(library, reflect) @@ -193,10 +193,8 @@ lazy val interactive = configureAsSubproject(project) .settings(disableDocsAndPublishingTasks: _*) .dependsOn(compiler) -// TODO: SI-9339 embed shaded copy of jline & its interface (see #4563) lazy val repl = configureAsSubproject(project) .settings( - libraryDependencies += jlineDep, connectInput in run := true, outputStrategy in run := Some(StdoutOutput), run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments. 
@@ -204,6 +202,47 @@ lazy val repl = configureAsSubproject(project) .settings(disableDocsAndPublishingTasks: _*) .dependsOn(compiler, interactive) +lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "src" / "repl-jline")) + .settings( + libraryDependencies += jlineDep + ) + .settings(disableDocsAndPublishingTasks: _*) + .dependsOn(repl) + +lazy val replJlineShaded = Project("repl-jline-shaded", file(".") / "target" / "repl-jline-shaded-src-dummy") + .settings(scalaSubprojectSettings: _*) + .settings(disableDocsAndPublishingTasks: _*) + .settings( + // There is nothing to compile for this project. Instead we use the compile task to create + // shaded versions of repl-jline and jline.jar. dist/mkBin puts all of quick/repl, + // quick/repl-jline and quick/repl-jline-shaded on the classpath for quick/bin scripts. + // This is different from the ant build where all parts are combined into quick/repl, but + // it is cleaner because it avoids circular dependencies. + compile in Compile <<= (compile in Compile).dependsOn(Def.task { + import java.util.jar._ + import collection.JavaConverters._ + val inputs: Iterator[JarJar.Entry] = { + val repljlineClasses = (products in Compile in replJline).value.flatMap(base => Path.allSubpaths(base).map(x => (base, x._1))) + val jlineJAR = (dependencyClasspath in Compile).value.find(_.get(moduleID.key) == Some(jlineDep)).get.data + val jarFile = new JarFile(jlineJAR) + val jarEntries = jarFile.entries.asScala.filterNot(_.isDirectory).map(entry => JarJar.JarEntryInput(jarFile, entry)) + def compiledClasses = repljlineClasses.iterator.map { case (base, file) => JarJar.FileInput(base, file) } + (jarEntries ++ compiledClasses).filter(x => x.name.endsWith(".class") || x.name.endsWith(".properties") || x.name.startsWith("META-INF/native")) + } + //println(inputs.map(_.name).mkString("\n")) + import JarJar.JarJarConfig._ + val config: Seq[JarJar.JarJarConfig] = Seq( + Rule("org.fusesource.**", 
"scala.tools.fusesource_embedded.@1"), + Rule("jline.**", "scala.tools.jline_embedded.@1"), + Rule("scala.tools.nsc.interpreter.jline.**", "scala.tools.nsc.interpreter.jline_embedded.@1"), + Keep("scala.tools.**") + ) + val outdir = (classDirectory in Compile).value + JarJar(inputs, outdir, config) + }) + ) + .dependsOn(replJline) + lazy val scaladoc = configureAsSubproject(project) .settings( libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep) @@ -224,7 +263,7 @@ lazy val actors = configureAsSubproject(project) lazy val forkjoin = configureAsForkOfJavaProject(project) lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".") / "src" / "partest-extras")) - .dependsOn(repl) + .dependsOn(replJlineShaded) .settings(clearSourceAndResourceDirectories: _*) .settings( libraryDependencies += partestDep, @@ -261,7 +300,7 @@ lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent") ) lazy val test = project. - dependsOn(compiler, interactive, actors, repl, scalap, partestExtras, partestJavaAgent, scaladoc). + dependsOn(compiler, interactive, actors, replJlineShaded, scalap, partestExtras, partestJavaAgent, scaladoc). configs(IntegrationTest). settings(disableDocsAndPublishingTasks: _*). settings(commonSettings: _*). @@ -291,7 +330,7 @@ lazy val test = project. ) lazy val root = (project in file(".")). - aggregate(library, forkjoin, reflect, compiler, interactive, repl, + aggregate(library, forkjoin, reflect, compiler, interactive, repl, replJline, replJlineShaded, scaladoc, scalap, actors, partestExtras, junit).settings( sources in Compile := Seq.empty, onLoadMessage := """|*** Welcome to the sbt build definition for Scala! 
*** @@ -484,7 +523,7 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { def mkBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = mkQuickBin(file, mainCls, classpath) ++ mkPackBin(file, mainCls) - mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in repl).value) ++ + mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in replJlineShaded).value) ++ mkBin("scalac" , "scala.tools.nsc.Main", (fullClasspath in Compile in compiler).value) ++ mkBin("fsc" , "scala.tools.nsc.CompileClient", (fullClasspath in Compile in compiler).value) ++ mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (fullClasspath in Compile in scaladoc).value) ++ diff --git a/project/JarJar.scala b/project/JarJar.scala new file mode 100644 index 0000000000..64281f23c1 --- /dev/null +++ b/project/JarJar.scala @@ -0,0 +1,83 @@ +import org.pantsbuild.jarjar +import org.pantsbuild.jarjar._ +import org.pantsbuild.jarjar.util._ +import scala.collection.JavaConverters._ +import java.util.jar._ +import java.io._ +import sbt._ + +object JarJar { + sealed abstract class JarJarConfig { + def toPatternElement: PatternElement + } + object JarJarConfig { + case class Rule(pattern: String, result: String) extends JarJarConfig { + def toPatternElement: PatternElement = { + val rule = new jarjar.Rule + rule.setPattern(pattern) + rule.setResult(result) + rule + } + } + case class Keep(pattern: String) extends JarJarConfig { + def toPatternElement: PatternElement = { + val keep = new jarjar.Keep + keep.setPattern(pattern) + keep + } + } + } + + sealed abstract class Entry { + def name: String + def time: Long + def data: Array[Byte] + } + + case class JarEntryInput(jarFile: JarFile, entry: JarEntry) extends Entry { + def name = entry.getName + def time = entry.getTime + def data = sbt.IO.readBytes(jarFile.getInputStream(entry)) + } + case class FileInput(base: File, file: File) extends Entry { + def name = 
file.relativeTo(base).get.getPath + def time = file.lastModified + def data = sbt.IO.readBytes(file) + } + + private def newMainProcessor(patterns: java.util.List[PatternElement], verbose: Boolean, skipManifest: Boolean): JarProcessor = { + val cls = Class.forName("org.pantsbuild.jarjar.MainProcessor") + val constructor = cls.getConstructor(classOf[java.util.List[_]], java.lang.Boolean.TYPE, java.lang.Boolean.TYPE) + constructor.setAccessible(true) + constructor.newInstance(patterns, Boolean.box(verbose), Boolean.box(skipManifest)).asInstanceOf[JarProcessor] + } + + def apply(in: Iterator[Entry], outdir: File, + config: Seq[JarJarConfig], verbose: Boolean = false): Seq[File] = { + val patterns = config.map(_.toPatternElement).asJava + val processor: JarProcessor = newMainProcessor(patterns, verbose, false) + def process(e: Entry): Option[File] = { + val struct = new EntryStruct() + struct.name = e.name + struct.time = e.time + struct.data = e.data + if (processor.process(struct)) { + if (struct.name.endsWith("/")) None + else { + val f = outdir / struct.name + try { + f.getParentFile.mkdirs() + sbt.IO.write(f, struct.data) + } catch { + case ex: Exception => + throw new IOException(s"Failed to write ${e.name} / ${f.getParentFile} / ${f.getParentFile.exists}", ex) + } + Some(f) + } + } + else None + } + in.flatMap(entry => process(entry)).toList + + } +} diff --git a/project/plugins.sbt b/project/plugins.sbt index dc266a8db1..862887d57f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1 +1,3 @@ -libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" \ No newline at end of file +libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" + +libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.0" diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index bf7c8551e5..adac438b37 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ 
b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -876,7 +876,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) if (settings.debug) { val readerDiags = (readerClasses, readers).zipped map { - case (cls, Failure(e)) => s" - $cls --> " + e.getStackTrace.mkString(e.toString+"\n\t", "\n\t","\n") + case (cls, Failure(e)) => s" - $cls --> \n\t" + scala.tools.nsc.util.stackTraceString(e) + "\n" case (cls, Success(_)) => s" - $cls OK" } Console.println(s"All InteractiveReaders tried: ${readerDiags.mkString("\n","\n","\n")}") -- cgit v1.2.3 From 69db6301598f1271584e3bf56dd248936e504539 Mon Sep 17 00:00:00 2001 From: kenji yoshida <6b656e6a69@gmail.com> Date: Mon, 2 Nov 2015 14:57:04 +0900 Subject: "macro" is a reserved word since Scala 2.11 --- spec/01-lexical-syntax.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index b26e5b2328..53c8caf745 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -90,11 +90,11 @@ syntactic class `id` of lexical identifiers. 
abstract case catch class def do else extends false final finally for forSome if implicit -import lazy match new null -object override package private protected -return sealed super this throw -trait try true type val -var while with yield +import lazy macro match new +null object override package private +protected return sealed super this +throw trait try true type +val var while with yield _ : = => <- <: <% >: # @ ``` -- cgit v1.2.3 From f79ef27c30dc5438f026d59a905df63b579a0f15 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Mon, 2 Nov 2015 16:09:30 +0100 Subject: Create usable “quick” and “pack” builds from sbt MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add dependencies for scala-swing, scala-continuations-library and scala-continuations-plugin - Build all JARs for “pack” in dist/packageBin - Add “dist/mkQuick” task for building all required projects for “quick” and creating the launcher scripts - Add “dist/mkPack” task for packaging all required projects for “pack” and creating the launcher scripts - Include classes from “forkjoin” project in scala-library.jar --- build.sbt | 96 ++++++++++++++++++++++++++++++++++++++++++--------------------- 1 file changed, 64 insertions(+), 32 deletions(-) diff --git a/build.sbt b/build.sbt index 1bb6962977..a4dc472dac 100644 --- a/build.sbt +++ b/build.sbt @@ -1,10 +1,11 @@ /* * The new, sbt-based build definition for Scala. * - * What you see below is very much work-in-progress. Basics like compiling and packaging jars - * (into right location) work. Everything else is missing: - * building docs, placing shell scripts in right locations (so you can run compiler easily), - * running partest test, compiling and running JUnit test, and many, many other things. + * What you see below is very much work-in-progress. 
The following features are implemented: + * - Compiling all classes for the compiler and library ("compile" in the respective subprojects) + * - Running JUnit tests ("test") and partest ("test/it:test") + * - Creating build-sbt/quick with all compiled classes and launcher scripts ("dist/mkQuick") + * - Creating build-sbt/pack with all JARs and launcher scripts ("dist/mkPack") * * You'll notice that this build definition is much more complicated than your typical sbt build. * The main reason is that we are not benefiting from sbt's conventions when it comes project @@ -55,7 +56,10 @@ val bootstrapScalaVersion = versionProps("starr.version") def withoutScalaLang(moduleId: ModuleID): ModuleID = moduleId exclude("org.scala-lang", "*") // exclusion of the scala-library transitive dependency avoids eviction warnings during `update`. +val scalaContinuationsLibraryDep = withoutScalaLang("org.scala-lang.plugins" %% "scala-continuations-library" % versionNumber("scala-continuations-library")) +val scalaContinuationsPluginDep = withoutScalaLang("org.scala-lang.plugins" % ("scala-continuations-plugin_" + versionProps("scala.full.version")) % versionNumber("scala-continuations-plugin")) val scalaParserCombinatorsDep = withoutScalaLang("org.scala-lang.modules" %% "scala-parser-combinators" % versionNumber("scala-parser-combinators")) +val scalaSwingDep = withoutScalaLang("org.scala-lang.modules" %% "scala-swing" % versionNumber("scala-swing")) val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % versionNumber("scala-xml")) val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest")) val partestInterfaceDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest-interface" % "0.5.0") @@ -155,7 +159,11 @@ lazy val library = configureAsSubproject(project) Seq("-doc-no-compile", libraryAuxDir.toString) }, unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, -
includeFilter in unmanagedResources in Compile := libIncludes) + includeFilter in unmanagedResources in Compile := libIncludes, + // Include forkjoin classes in scala-library.jar + mappings in Compile in packageBin ++= + (mappings in Compile in packageBin in LocalProject("forkjoin")).value + ) .dependsOn (forkjoin) lazy val reflect = configureAsSubproject(project) @@ -184,7 +192,7 @@ lazy val compiler = configureAsSubproject(project) streams.value.cacheDirectory) ++ (mappings in Compile in packageBin in LocalProject("interactive")).value ++ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++ - (mappings in Compile in packageBin in LocalProject("repl-jline-shaded")).value, + (mappings in Compile in packageBin in LocalProject("repl")).value, unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, includeFilter in unmanagedResources in Compile := compilerIncludes) .dependsOn(library, reflect) @@ -194,25 +202,25 @@ lazy val interactive = configureAsSubproject(project) .dependsOn(compiler) lazy val repl = configureAsSubproject(project) + .settings(disableDocsAndPublishingTasks: _*) .settings( connectInput in run := true, outputStrategy in run := Some(StdoutOutput), run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments. 
) - .settings(disableDocsAndPublishingTasks: _*) .dependsOn(compiler, interactive) lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "src" / "repl-jline")) .settings( - libraryDependencies += jlineDep + libraryDependencies += jlineDep, + name := "scala-repl-jline" ) - .settings(disableDocsAndPublishingTasks: _*) .dependsOn(repl) -lazy val replJlineShaded = Project("repl-jline-shaded", file(".") / "target" / "repl-jline-shaded-src-dummy") +lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" / "repl-jline-embedded-src-dummy") .settings(scalaSubprojectSettings: _*) - .settings(disableDocsAndPublishingTasks: _*) .settings( + name := "scala-repl-jline-embedded", // There is nothing to compile for this project. Instead we use the compile task to create // shaded versions of repl-jline and jline.jar. dist/mkBin puts all of quick/repl, // quick/repl-jline and quick/repl-jline-shaded on the classpath for quick/bin scripts. @@ -227,7 +235,9 @@ lazy val replJlineShaded = Project("repl-jline-shaded", file(".") / "target" / " val jarFile = new JarFile(jlineJAR) val jarEntries = jarFile.entries.asScala.filterNot(_.isDirectory).map(entry => JarJar.JarEntryInput(jarFile, entry)) def compiledClasses = repljlineClasses.iterator.map { case (base, file) => JarJar.FileInput(base, file) } - (jarEntries ++ compiledClasses).filter(x => x.name.endsWith(".class") || x.name.endsWith(".properties") || x.name.startsWith("META-INF/native")) + (jarEntries ++ compiledClasses).filter(x => + x.name.endsWith(".class") || x.name.endsWith(".properties") || x.name.startsWith("META-INF/native") || x.name.startsWith("META-INF/maven") + ) } //println(inputs.map(_.name).mkString("\n")) import JarJar.JarJarConfig._ @@ -263,9 +273,10 @@ lazy val actors = configureAsSubproject(project) lazy val forkjoin = configureAsForkOfJavaProject(project) lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(".") / "src" / "partest-extras")) - 
.dependsOn(replJlineShaded) + .dependsOn(replJlineEmbedded) .settings(clearSourceAndResourceDirectories: _*) .settings( + name := "scala-partest-extras", libraryDependencies += partestDep, unmanagedSourceDirectories in Compile := List(baseDirectory.value) ) @@ -300,7 +311,7 @@ lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent") ) lazy val test = project. - dependsOn(compiler, interactive, actors, replJlineShaded, scalap, partestExtras, partestJavaAgent, scaladoc). + dependsOn(compiler, interactive, actors, replJlineEmbedded, scalap, partestExtras, partestJavaAgent, scaladoc). configs(IntegrationTest). settings(disableDocsAndPublishingTasks: _*). settings(commonSettings: _*). @@ -330,7 +341,7 @@ lazy val test = project. ) lazy val root = (project in file(".")). - aggregate(library, forkjoin, reflect, compiler, interactive, repl, replJline, replJlineShaded, + aggregate(library, forkjoin, reflect, compiler, interactive, repl, replJline, replJlineEmbedded, scaladoc, scalap, actors, partestExtras, junit).settings( sources in Compile := Seq.empty, onLoadMessage := """|*** Welcome to the sbt build definition for Scala! *** @@ -339,10 +350,35 @@ lazy val root = (project in file(".")). |the Ant build definition for now. 
Check README.md for more information.""".stripMargin ) -lazy val dist = (project in file("dist")).settings( - mkBin := mkBinImpl.value, - target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id -) +// The following subprojects' binaries are required for building "pack": +lazy val distDependencies = Seq(replJline, replJlineEmbedded, compiler, library, partestExtras, partestJavaAgent, reflect, scalap, actors, scaladoc) + +lazy val dist = (project in file("dist")) + .settings(commonSettings) + .settings( + libraryDependencies ++= Seq(scalaContinuationsLibraryDep, scalaContinuationsPluginDep, scalaSwingDep, jlineDep), + mkBin := mkBinImpl.value, + mkQuick <<= Def.task {} dependsOn ((distDependencies.map(compile in Compile in _) :+ mkBin): _*), + mkPack <<= Def.task {} dependsOn (packageBin in Compile, mkBin), + target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, + packageBin in Compile := { + val extraDeps = Set(scalaContinuationsLibraryDep, scalaContinuationsPluginDep, scalaSwingDep, scalaParserCombinatorsDep, scalaXmlDep) + val targetDir = (buildDirectory in ThisBuild).value / "pack" / "lib" + def uniqueModule(m: ModuleID) = (m.organization, m.name.replaceFirst("_.*", "")) + val extraModules = extraDeps.map(uniqueModule) + val extraJars = (externalDependencyClasspath in Compile).value.map(a => (a.get(moduleID.key), a.data)).collect { + case (Some(m), f) if extraModules contains uniqueModule(m) => f + } + val jlineJAR = (dependencyClasspath in Compile).value.find(_.get(moduleID.key) == Some(jlineDep)).get.data + val mappings = extraJars.map(f => (f, targetDir / f.getName)) :+ (jlineJAR, targetDir / "jline.jar") + IO.copy(mappings, overwrite = true) + targetDir + }, + cleanFiles += (buildDirectory in ThisBuild).value / "quick", + cleanFiles += (buildDirectory in ThisBuild).value / "pack", + packageBin in Compile <<= (packageBin in Compile).dependsOn(distDependencies.map(packageBin in Compile in _): _*) + ) + 
.dependsOn(distDependencies.map(p => p: ClasspathDep[ProjectReference]): _*) /** * Configures passed project as a subproject (e.g. compiler or repl) @@ -386,6 +422,8 @@ lazy val buildDirectory = settingKey[File]("The directory where all build produc lazy val copyrightString = settingKey[String]("Copyright string.") lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.") lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") +lazy val mkQuick = taskKey[Unit]("Generate a full build, including scripts, in build-sbt/quick") +lazy val mkPack = taskKey[Unit]("Generate a full build, including scripts, in build-sbt/pack") lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task { val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties" @@ -496,6 +534,8 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { javaOpts = "-Xmx256M -Xms32M", toolFlags = "") val rootDir = (classDirectory in Compile in compiler).value + val quickOutDir = buildDirectory.value / "quick/bin" + val packOutDir = buildDirectory.value / "pack/bin" def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] = { val res = Seq( scalaTool.writeScript(file, "unix", rootDir, outDir), @@ -508,22 +548,14 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { } res } - def mkQuickBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = { - val scalaTool = mkScalaTool(mainCls, classpath) - val outDir = buildDirectory.value / "quick/bin" - writeScripts(scalaTool, file, outDir) - } - - def mkPackBin(file: String, mainCls: String): Seq[File] = { - val scalaTool = mkScalaTool(mainCls, classpath = Nil) - val outDir = buildDirectory.value / "pack/bin" - writeScripts(scalaTool, file, outDir) - } def mkBin(file: String, mainCls: String, classpath: Seq[Attributed[File]]): Seq[File] = - mkQuickBin(file, mainCls, classpath) ++ 
mkPackBin(file, mainCls) + writeScripts(mkScalaTool(mainCls, classpath), file, quickOutDir) ++ + writeScripts(mkScalaTool(mainCls, Nil ), file, packOutDir) + + streams.value.log.info(s"Creating scripts in $quickOutDir and $packOutDir") - mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in replJlineShaded).value) ++ + mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in replJlineEmbedded).value) ++ mkBin("scalac" , "scala.tools.nsc.Main", (fullClasspath in Compile in compiler).value) ++ mkBin("fsc" , "scala.tools.nsc.CompileClient", (fullClasspath in Compile in compiler).value) ++ mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (fullClasspath in Compile in scaladoc).value) ++ -- cgit v1.2.3 From 153c70b5b64344db5a97a3de23e91e49f57ac337 Mon Sep 17 00:00:00 2001 From: wpopielarski Date: Wed, 21 Oct 2015 16:58:20 +0200 Subject: Multi output problem with delambdafied compilation User code compilation with -Ybackend:GenBCode -Ydelambdafy:method fails for projects with multiple output directories. The problem has its root in a fact that some `lambdaClass` symbols the `associatedFile` field is not set. 
It can be done in Delambdafy.scala (`makeAnonymousClass` method) and works for the following lambda examples: {{{ package acme object Delambdafy { type -->[D, I] = PartialFunction[D, I] def main(args: Array[String]): Unit = { val result = List(1, 2, 4).map { a => val list = List("1", "2", "3").map { _ + "test" } list.find { _ == a.toString + "test" } } lazy val _foo = foo(result) { case x::xs if x isDefined => x.get.length case _ => 0 } lazy val bar: Int => Int = { case 2 => 13 case _ => val v = List(1).map(_ + 42).head v + 1 } } def foo(b: List[Option[String]])(a: List[Option[String]] => Int): Int = a(b) } }}} but does NOT work for the following lambda: {{{ package acme object Delambdafy { type -->[D, I] = PartialFunction[D, I] def main(args: Array[String]): Unit = { lazy val _baz = baz { case 1 => val local = List(1).map(_ + 1) local.head } } def baz[T](f: Any --> Any): Any => Any = f } }}} That is why the source of the compilation unit is used to determine the output directory when the source file for a symbol cannot be found. 
--- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 29 +++++++++++++++------- .../tools/nsc/backend/jvm/BytecodeWriters.scala | 3 +++ 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 65a6b82570..813180a8c7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -35,15 +35,26 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { /* * must-single-thread */ - def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = { - try { - outputDirectory(csym) - } catch { - case ex: Throwable => - reporter.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}") - null - } - } + def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = Option { + try { + outputDirectory(csym) + } catch { + case ex: Throwable => + reporter.warning(cunit.body.pos, s"Couldn't find output folder for symbol source ${csym.name}. 
Dropping to compliation unit source.\n${ex.getMessage}") + null + } + }.orElse { Option { + try { + outputDirectory(cunit.source) + } catch { + case ex: Throwable => + reporter.warning(cunit.body.pos, s"Couldn't find output folder for compilation unit $cName\n${ex.getMessage}") + null + } + }}.orElse { + reporter.error(cunit.body.pos, s"Couldn't create file for class $cName") + None + }.orNull var pickledBytes = 0 // statistics diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index 1d29fdee10..e4fcb729a2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -25,6 +25,9 @@ trait BytecodeWriters { def outputDirectory(sym: Symbol): AbstractFile = settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) + def outputDirectory(cunitSource: scala.reflect.internal.util.SourceFile): AbstractFile = + settings.outputDirs outputDirFor cunitSource.file + /** * @param clsName cls.getName */ -- cgit v1.2.3 From d6e759165e57a9d920e7008774a81e190be7edde Mon Sep 17 00:00:00 2001 From: jvican Date: Sun, 18 Oct 2015 21:09:02 +0200 Subject: [SI-9503] Deprecate scala.collection.immutable.PagedSeq --- src/library/scala/collection/immutable/PagedSeq.scala | 3 ++- test/files/run/t3647.check | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t3647.check diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index a86d4b6746..982c10687c 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -23,6 +23,7 @@ import scala.reflect.ClassTag * `fromIterator` and `fromIterable` provide generalised instances of `PagedSeq` * @since 2.7 */ +@deprecated("This object will be moved to the scala-parser-combinators module", "2.11.8") object 
PagedSeq { final val UndeterminedEnd = Int.MaxValue @@ -126,7 +127,7 @@ import PagedSeq._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@deprecatedInheritance("The implementation details of paged sequences make inheriting from them unwise.", "2.11.0") +@deprecated("This class will be moved to the scala-parser-combinators module", "2.11.8") class PagedSeq[T: ClassTag] protected( more: (Array[T], Int, Int) => Int, first1: Page[T], diff --git a/test/files/run/t3647.check b/test/files/run/t3647.check new file mode 100644 index 0000000000..e5c1ee1701 --- /dev/null +++ b/test/files/run/t3647.check @@ -0,0 +1 @@ +warning: there were three deprecation warnings; re-run with -deprecation for details -- cgit v1.2.3 From 65b60c8e9ed9c1b9ee343ddd52ef70f89e59889f Mon Sep 17 00:00:00 2001 From: wpopielarski Date: Thu, 5 Nov 2015 10:31:02 +0100 Subject: Allows to propagate fatal errors when output folder not found. --- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 28 +++++++--------------- .../tools/nsc/backend/jvm/BytecodeWriters.scala | 3 --- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 813180a8c7..1b97681743 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -35,26 +35,14 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { /* * must-single-thread */ - def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = Option { - try { - outputDirectory(csym) - } catch { - case ex: Throwable => - reporter.warning(cunit.body.pos, s"Couldn't find output folder for symbol source ${csym.name}. 
Dropping to compliation unit source.\n${ex.getMessage}") - null - } - }.orElse { Option { - try { - outputDirectory(cunit.source) - } catch { - case ex: Throwable => - reporter.warning(cunit.body.pos, s"Couldn't find output folder for compilation unit $cName\n${ex.getMessage}") - null - } - }}.orElse { - reporter.error(cunit.body.pos, s"Couldn't create file for class $cName") - None - }.orNull + def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = + _root_.scala.util.Try { + outputDirectory(csym) + }.recover { + case ex: Throwable => + reporter.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}") + null + }.get var pickledBytes = 0 // statistics diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index e4fcb729a2..1d29fdee10 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -25,9 +25,6 @@ trait BytecodeWriters { def outputDirectory(sym: Symbol): AbstractFile = settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) - def outputDirectory(cunitSource: scala.reflect.internal.util.SourceFile): AbstractFile = - settings.outputDirs outputDirFor cunitSource.file - /** * @param clsName cls.getName */ -- cgit v1.2.3 From 9688625afddfcbbfae121a6a27c0b44edd95efa0 Mon Sep 17 00:00:00 2001 From: wpopielarski Date: Thu, 5 Nov 2015 10:37:31 +0100 Subject: Sets source for newly created lambda class This source is then used to figure out output folder for compilation product. 
--- src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index ea8c1cbaf6..8e323de623 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -294,6 +294,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val name = unit.freshTypeName(s"$oldClassPart$suffix".replace("$anon", "$nestedInAnon")) val lambdaClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation + lambdaClass.associatedFile = unit.source.file // make sure currentRun.compiles(lambdaClass) is true (AddInterfaces does the same for trait impl classes) currentRun.symSource(lambdaClass) = funOwner.sourceFile lambdaClass setInfo ClassInfoType(parents, newScope, lambdaClass) -- cgit v1.2.3 From 3cddb7fa3e044734ef6999b4b61f1246b0e37657 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 3 Nov 2015 17:36:01 +0100 Subject: Build scaladoc sets from sbt - Docs for actors, compiler, library, reflect and scalap are built by default. Generated artifacts are removed when cleaning the respective subproject. 
- Fix some exclude patterns for scaladoc sources in the ANT build --- build.sbt | 49 ++++++++++++++++++++++++++++++++++--------------- build.xml | 11 +++-------- 2 files changed, 37 insertions(+), 23 deletions(-) diff --git a/build.sbt b/build.sbt index a4dc472dac..d1136b2eb2 100644 --- a/build.sbt +++ b/build.sbt @@ -6,6 +6,7 @@ * - Running JUnit tests ("test") and partest ("test/it:test") * - Creating build-sbt/quick with all compiled classes and launcher scripts ("dist/mkQuick") * - Creating build-sbt/pack with all JARs and launcher scripts ("dist/mkPack") + * - Building all scaladoc sets ("doc") * * You'll notice that this build definition is much more complicated than your typical sbt build. * The main reason is that we are not benefiting from sbt's conventions when it comes project @@ -101,11 +102,12 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ Seq[Setting[_]]( // each subproject has to ask specifically for files they want to include includeFilter in unmanagedResources in Compile := NothingFilter, target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, - target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id, classDirectory in Compile := buildDirectory.value / "quick/classes" / thisProject.value.id, - // given that classDirectory is overriden to be _outside_ of target directory, we have - // to make sure its being cleaned properly + target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id, + // given that classDirectory and doc target are overriden to be _outside_ of target directory, we have + // to make sure they are being cleaned properly cleanFiles += (classDirectory in Compile).value, + cleanFiles += (target in Compile in doc).value, fork in run := true ) @@ -140,6 +142,24 @@ lazy val generatePropertiesFileSettings = Seq[Setting[_]]( generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value ) +def filterDocSources(ff: FileFilter): 
Seq[Setting[_]] = Seq( + sources in (Compile, doc) ~= (_.filter(ff.accept _)), + // Excluded sources may still be referenced by the included sources, so we add the compiler + // output to the scaladoc classpath to resolve them. For the `library` project this is + // always required because otherwise the compiler cannot even initialize Definitions without + // binaries of the library on the classpath. Specifically, we get this error: + // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int + // Ant build does the same thing always: it puts binaries for documented classes on the classpath + // sbt never does this by default (which seems like a good default) + dependencyClasspath in (Compile, doc) += (classDirectory in Compile).value, + doc in Compile <<= doc in Compile dependsOn (compile in Compile) +) + +def regexFileFilter(s: String): FileFilter = new FileFilter { + val pat = s.r.pattern + def accept(f: File) = pat.matcher(f.getAbsolutePath.replace('\\', '/')).matches() +} + val libIncludes: FileFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt" lazy val library = configureAsSubproject(project) @@ -147,13 +167,6 @@ lazy val library = configureAsSubproject(project) .settings( name := "scala-library", scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString), - // Workaround for a bug in `scaladoc` that it seems to not respect the `-sourcepath` option - // as a result of this bug, the compiler cannot even initialize Definitions without - // binaries of the library on the classpath. 
Specifically, we get this error: - // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int - // Ant build does the same thing always: it puts binaries for documented classes on the classpath - // sbt never does this by default (which seems like a good default) - dependencyClasspath in Compile in doc += (classDirectory in Compile).value, scalacOptions in Compile in doc ++= { val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux" Seq("-doc-no-compile", libraryAuxDir.toString) @@ -164,7 +177,10 @@ lazy val library = configureAsSubproject(project) mappings in Compile in packageBin ++= (mappings in Compile in packageBin in LocalProject("forkjoin")).value ) - .dependsOn (forkjoin) + .settings(filterDocSources("*.scala" -- (regexFileFilter(".*/runtime/.*\\$\\.scala") || + regexFileFilter(".*/runtime/ScalaRunTime\\.scala") || + regexFileFilter(".*/runtime/StringAdd\\.scala"))): _*) + .dependsOn(forkjoin) lazy val reflect = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) @@ -213,7 +229,8 @@ lazy val repl = configureAsSubproject(project) lazy val replJline = configureAsSubproject(Project("repl-jline", file(".") / "src" / "repl-jline")) .settings( libraryDependencies += jlineDep, - name := "scala-repl-jline" + name := "scala-repl-jline", + doc := file("!!! NO DOCS !!!") ) .dependsOn(repl) @@ -264,10 +281,10 @@ lazy val scalap = configureAsSubproject(project). dependsOn(compiler) // deprecated Scala Actors project -// TODO: it packages into actors.jar but it should be scala-actors.jar lazy val actors = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) .settings(name := "scala-actors") + .settings(filterDocSources("*.scala"): _*) .dependsOn(library) lazy val forkjoin = configureAsForkOfJavaProject(project) @@ -278,7 +295,8 @@ lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(". 
.settings( name := "scala-partest-extras", libraryDependencies += partestDep, - unmanagedSourceDirectories in Compile := List(baseDirectory.value) + unmanagedSourceDirectories in Compile := List(baseDirectory.value), + doc := file("!!! NO DOCS !!!") ) lazy val junit = project.in(file("test") / "junit") @@ -289,7 +307,8 @@ lazy val junit = project.in(file("test") / "junit") fork in Test := true, libraryDependencies ++= Seq(junitDep, junitIntefaceDep), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), - unmanagedSourceDirectories in Test := List(baseDirectory.value) + unmanagedSourceDirectories in Test := List(baseDirectory.value), + doc := file("!!! NO DOCS !!!") ) lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent"). diff --git a/build.xml b/build.xml index 7f8c91b47b..8583292ea7 100644 --- a/build.xml +++ b/build.xml @@ -1684,20 +1684,15 @@ TODO: - - - + + + - - - - - -- cgit v1.2.3 From b237fb30554dbd3846193aae2c3ffe8bdb359c79 Mon Sep 17 00:00:00 2001 From: wpopielarski Date: Fri, 6 Nov 2015 18:15:12 +0100 Subject: Test added --- .../nsc/transform/delambdafy/DelambdafyTest.scala | 73 ++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala diff --git a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala new file mode 100644 index 0000000000..010078e28a --- /dev/null +++ b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala @@ -0,0 +1,73 @@ +package scala.tools.nsc.transform.delambdafy + +import scala.reflect.io.Path.jfile2path +import scala.tools.nsc.backend.jvm.CodeGenTools.getGeneratedClassfiles +import scala.tools.nsc.backend.jvm.CodeGenTools.makeSourceFile +import scala.tools.nsc.backend.jvm.CodeGenTools.newCompilerWithoutVirtualOutdir +import scala.tools.nsc.io.AbstractFile +import scala.tools.testing.TempDir + +import 
org.junit.Assert.assertTrue +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class DelambdafyTest { + def compileToMultipleOutputWithDelamdbafyMethod(): List[(String, Array[Byte])] = { + val codeForMultiOutput = """ +object Delambdafy { + type -->[D, I] = PartialFunction[D, I] + + def main(args: Array[String]): Unit = { + val result = List(1, 2, 4).map { a => + val list = List("1", "2", "3").map { _ + "test" } + list.find { _ == a.toString + "test" } + } + println(result) + lazy val _foo = foo(result) { + case x :: xs if x isDefined => x.get.length + case _ => 0 + } + println(_foo) + lazy val bar: Int => Int = { + case 2 => 23 + case _ => + val v = List(1).map { _ + 42 }.head + v + 31 + } + bar(3) + lazy val _baz = baz { + case 1 => + val local = List(1).map(_ + 1) + local.head + } + } + + def baz[T](f: Any --> Any): Any => Any = f + + def foo(b: List[Option[String]])(a: List[Option[String]] => Int): Int = a(b) +} +""" + val srcFile = makeSourceFile(codeForMultiOutput, "delambdafyTest.scala") + val outDir = AbstractFile.getDirectory(TempDir.createTempDir()) + val outDirPath = outDir.canonicalPath + val extraArgs = "-Ybackend:GenBCode -Ydelambdafy:method" + val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath" + val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir) + compiler.settings.outputDirs.add(srcFile.file, outDir) + + new compiler.Run().compileSources(List(srcFile)) + + val classfiles = getGeneratedClassfiles(outDir) + outDir.delete() + classfiles + } + + @Test + def shouldFindOutputFoldersForAllPromotedLambdasAsMethod(): Unit = { + val actual = compileToMultipleOutputWithDelamdbafyMethod() + + assertTrue(actual.length > 0) + } +} -- cgit v1.2.3 From 7248894154180291f18def808ed992fb495b257e Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Thu, 5 Nov 2015 18:37:44 +0100 Subject: Improve version handling and scaladoc generation in sbt build - Move 
version-related code into a separate VersionUtil object - Fix the canonical version. The sbt build previously took the full version including the suffix (plus timestamp and hash) whereas the ant build uses the version without the suffix - Include the version number in the generated scaladocs - Add project descriptions and include them in the scaladocs (like the ant build does) - Add other missing scaladoc options to the sbt build - Copy resources in all subprojects when building dist/mkQuick and fix `includeFilter` settings to include all required files --- build.sbt | 155 +++++++++++++++++++--------------------------- project/VersionUtil.scala | 103 ++++++++++++++++++++++++++++++ 2 files changed, 167 insertions(+), 91 deletions(-) create mode 100644 project/VersionUtil.scala diff --git a/build.sbt b/build.sbt index d1136b2eb2..6540d0b9c0 100644 --- a/build.sbt +++ b/build.sbt @@ -52,6 +52,8 @@ * https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion */ +import VersionUtil.{versionProps, versionNumber, generatePropertiesFileSettings, versionProperties, versionPropertiesSettings} + val bootstrapScalaVersion = versionProps("starr.version") def withoutScalaLang(moduleId: ModuleID): ModuleID = moduleId exclude("org.scala-lang", "*") @@ -71,8 +73,9 @@ val jlineDep = "jline" % "jline" % versionProps("jline.version") val antDep = "org.apache.ant" % "ant" % "1.9.4" val scalacheckDep = withoutScalaLang("org.scalacheck" %% "scalacheck" % versionNumber("scalacheck") % "it") -lazy val commonSettings = clearSourceAndResourceDirectories ++ Seq[Setting[_]]( +lazy val commonSettings = clearSourceAndResourceDirectories ++ versionPropertiesSettings ++ Seq[Setting[_]]( organization := "org.scala-lang", + // The ANT build uses the file "build.number" and the property "build.release" to compute the version version := "2.11.8-SNAPSHOT", scalaVersion := bootstrapScalaVersion, // we don't cross build Scala itself @@ -95,6 +98,7 @@ lazy val commonSettings = 
clearSourceAndResourceDirectories ++ Seq[Setting[_]]( unmanagedJars in Compile := Seq.empty, sourceDirectory in Compile := baseDirectory.value, unmanagedSourceDirectories in Compile := List(baseDirectory.value), + unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / thisProject.value.id, scalaSource in Compile := (sourceDirectory in Compile).value, javaSource in Compile := (sourceDirectory in Compile).value, // resources are stored along source files in our current layout @@ -108,7 +112,17 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ Seq[Setting[_]]( // to make sure they are being cleaned properly cleanFiles += (classDirectory in Compile).value, cleanFiles += (target in Compile in doc).value, - fork in run := true + fork in run := true, + scalacOptions in Compile in doc ++= Seq( + "-doc-footer", "epfl", + "-diagrams", + "-implicits", + "-groups", + "-doc-version", versionProperties.value.canonicalVersion, + "-doc-title", description.value, + "-sourcepath", (baseDirectory in ThisBuild).value.toString, + "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH}.scala#L1" + ) ) // disable various tasks that are not needed for projects that are used @@ -134,13 +148,7 @@ lazy val setJarLocation: Setting[_] = val resolvedArtifactName = s"${resolvedArtifact.name}.${resolvedArtifact.extension}" buildDirectory.value / "pack/lib" / resolvedArtifactName } -lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation - -lazy val generatePropertiesFileSettings = Seq[Setting[_]]( - copyrightString := "Copyright 2002-2015, LAMP/EPFL", - resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, - generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value -) +lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings ++ generatePropertiesFileSettings :+ setJarLocation def filterDocSources(ff: 
FileFilter): Seq[Setting[_]] = Seq( sources in (Compile, doc) ~= (_.filter(ff.accept _)), @@ -160,19 +168,21 @@ def regexFileFilter(s: String): FileFilter = new FileFilter { def accept(f: File) = pat.matcher(f.getAbsolutePath.replace('\\', '/')).matches() } -val libIncludes: FileFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt" - lazy val library = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) .settings( name := "scala-library", + description := "Scala Standard Library", scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString), scalacOptions in Compile in doc ++= { val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux" - Seq("-doc-no-compile", libraryAuxDir.toString) + Seq( + "-doc-no-compile", libraryAuxDir.toString, + "-skip-packages", "scala.concurrent.impl", + "-doc-root-content", (sourceDirectory in Compile).value + "/rootdoc.txt" + ) }, - unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, - includeFilter in unmanagedResources in Compile := libIncludes, + includeFilter in unmanagedResources in Compile := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt", // Include forkjoin classes in scala-library.jar mappings in Compile in packageBin ++= (mappings in Compile in packageBin in LocalProject("forkjoin")).value @@ -184,17 +194,20 @@ lazy val library = configureAsSubproject(project) lazy val reflect = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) - .settings(name := "scala-reflect") + .settings( + name := "scala-reflect", + description := "Scala Reflection Library", + scalacOptions in Compile in doc ++= Seq( + "-skip-packages", "scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io" + ) + ) .dependsOn(library) -val compilerIncludes: FileFilter = - "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" | - "*.png" | "*.gif" | "*.gif" | "*.txt" 
- lazy val compiler = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) .settings( name := "scala-compiler", + description := "Scala Compiler", libraryDependencies ++= Seq(antDep, asmDep), // this a way to make sure that classes from interactive and scaladoc projects // end up in compiler jar (that's what Ant build does) @@ -209,12 +222,21 @@ lazy val compiler = configureAsSubproject(project) (mappings in Compile in packageBin in LocalProject("interactive")).value ++ (mappings in Compile in packageBin in LocalProject("scaladoc")).value ++ (mappings in Compile in packageBin in LocalProject("repl")).value, - unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / project.id, - includeFilter in unmanagedResources in Compile := compilerIncludes) + includeFilter in unmanagedResources in Compile := + "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" | + "*.png" | "*.gif" | "*.gif" | "*.txt", + scalacOptions in Compile in doc ++= Seq( + "-doc-root-content", (sourceDirectory in Compile).value + "/rootdoc.txt" + ) + ) .dependsOn(library, reflect) lazy val interactive = configureAsSubproject(project) .settings(disableDocsAndPublishingTasks: _*) + .settings( + name := "scala-compiler-interactive", + description := "Scala Interactive Compiler" + ) .dependsOn(compiler) lazy val repl = configureAsSubproject(project) @@ -272,18 +294,29 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" lazy val scaladoc = configureAsSubproject(project) .settings( - libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep) + name := "scala-compiler-doc", + description := "Scala Documentation Generator", + libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep), + includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" ) .settings(disableDocsAndPublishingTasks: _*) .dependsOn(compiler) 
lazy val scalap = configureAsSubproject(project). - dependsOn(compiler) + settings( + description := "Scala Bytecode Parser", + // Include decoder.properties + includeFilter in unmanagedResources in Compile := "*.properties" + ) + .dependsOn(compiler) // deprecated Scala Actors project lazy val actors = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) - .settings(name := "scala-actors") + .settings( + name := "scala-actors", + description := "Scala Actors Library" + ) .settings(filterDocSources("*.scala"): _*) .dependsOn(library) @@ -294,6 +327,7 @@ lazy val partestExtras = configureAsSubproject(Project("partest-extras", file(". .settings(clearSourceAndResourceDirectories: _*) .settings( name := "scala-partest-extras", + description := "Scala Compiler Testing Tool (compiler-specific extras)", libraryDependencies += partestDep, unmanagedSourceDirectories in Compile := List(baseDirectory.value), doc := file("!!! NO DOCS !!!") @@ -311,15 +345,17 @@ lazy val junit = project.in(file("test") / "junit") doc := file("!!! NO DOCS !!!") ) -lazy val partestJavaAgent = (project in file(".") / "src" / "partest-javaagent"). - settings(commonSettings: _*). - settings( +lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "partest-javaagent") + .settings(commonSettings: _*) + .settings(generatePropertiesFileSettings: _*) + .settings( libraryDependencies += asmDep, doc := file("!!! 
NO DOCS !!!"), publishLocal := {}, publish := {}, // Setting name to "scala-partest-javaagent" so that the jar file gets that name, which the Runner relies on name := "scala-partest-javaagent", + description := "Scala Compiler Testing Tool (compiler-specific java agent)", // writing jar file to $buildDirectory/pack/lib because that's where it's expected to be found setJarLocation, // add required manifest entry - previously included from file @@ -377,7 +413,7 @@ lazy val dist = (project in file("dist")) .settings( libraryDependencies ++= Seq(scalaContinuationsLibraryDep, scalaContinuationsPluginDep, scalaSwingDep, jlineDep), mkBin := mkBinImpl.value, - mkQuick <<= Def.task {} dependsOn ((distDependencies.map(compile in Compile in _) :+ mkBin): _*), + mkQuick <<= Def.task {} dependsOn ((distDependencies.map(products in Runtime in _) :+ mkBin): _*), mkPack <<= Def.task {} dependsOn (packageBin in Compile, mkBin), target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, packageBin in Compile := { @@ -438,59 +474,10 @@ def configureAsForkOfJavaProject(project: Project): Project = { } lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build") -lazy val copyrightString = settingKey[String]("Copyright string.") -lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.") lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") lazy val mkQuick = taskKey[Unit]("Generate a full build, including scripts, in build-sbt/quick") lazy val mkPack = taskKey[Unit]("Generate a full build, including scripts, in build-sbt/pack") -lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task { - val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties" - val props = new java.util.Properties - - /** - * Regexp that splits version number split into two parts: version and suffix. 
- * Examples of how the split is performed: - * - * "2.11.5": ("2.11.5", null) - * "2.11.5-acda7a": ("2.11.5", "-acda7a") - * "2.11.5-SNAPSHOT": ("2.11.5", "-SNAPSHOT") - * - */ - val versionSplitted = """([\w+\.]+)(-[\w+\.]+)??""".r - - val versionSplitted(ver, suffixOrNull) = version.value - val osgiSuffix = suffixOrNull match { - case null => "-VFINAL" - case "-SNAPSHOT" => "" - case suffixStr => suffixStr - } - - def executeTool(tool: String) = { - val cmd = - if (System.getProperty("os.name").toLowerCase.contains("windows")) - s"cmd.exe /c tools\\$tool.bat -p" - else s"tools/$tool" - Process(cmd).lines.head - } - - val commitDate = executeTool("get-scala-commit-date") - val commitSha = executeTool("get-scala-commit-sha") - - props.put("version.number", s"${version.value}-$commitDate-$commitSha") - props.put("maven.version.number", s"${version.value}") - props.put("osgi.version.number", s"$ver.v$commitDate$osgiSuffix-$commitSha") - props.put("copyright.string", copyrightString.value) - - // unfortunately, this will write properties in arbitrary order - // this makes it harder to test for stability of generated artifacts - // consider using https://github.com/etiennestuder/java-ordered-properties - // instead of java.util.Properties - IO.write(props, null, propFile) - - propFile -} - /** * Extract selected dependencies to the `cacheDirectory` and return a mapping for the content. 
* Heavily inspired by sbt-assembly (https://github.com/sbt/sbt-assembly/blob/0.13.0/src/main/scala/sbtassembly/Assembly.scala#L157) @@ -582,17 +569,3 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { } buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build-sbt" - -lazy val versionProps: Map[String, String] = { - import java.io.FileInputStream - import java.util.Properties - val props = new Properties() - val in = new FileInputStream(file("versions.properties")) - try props.load(in) - finally in.close() - import scala.collection.JavaConverters._ - props.asScala.toMap -} - -def versionNumber(name: String): String = - versionProps(s"$name.version.number") diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala new file mode 100644 index 0000000000..71de772b08 --- /dev/null +++ b/project/VersionUtil.scala @@ -0,0 +1,103 @@ +import sbt._ +import Keys._ +import java.util.Properties +import java.io.FileInputStream +import scala.collection.JavaConverters._ + +object VersionUtil { + lazy val copyrightString = settingKey[String]("Copyright string.") + lazy val versionProperties = settingKey[Versions]("Version properties.") + lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.") + + lazy val versionPropertiesSettings = Seq[Setting[_]]( + versionProperties := versionPropertiesImpl.value + ) + + lazy val generatePropertiesFileSettings = Seq[Setting[_]]( + copyrightString := "Copyright 2002-2015, LAMP/EPFL", + resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, + versionProperties := versionPropertiesImpl.value, + generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value + ) + + case class Versions(canonicalVersion: String, mavenVersion: String, osgiVersion: String, commitSha: String, commitDate: String, isRelease: Boolean) { + val githubTree = + if(isRelease) "v" + mavenVersion + else if(commitSha != "unknown") commitSha + else 
"master" + + override def toString = s"Canonical: $canonicalVersion, Maven: $mavenVersion, OSGi: $osgiVersion, github: $githubTree" + + def toProperties: Properties = { + val props = new Properties + props.put("version.number", canonicalVersion) + props.put("maven.version.number", mavenVersion) + props.put("osgi.version.number", osgiVersion) + props + } + } + + lazy val versionPropertiesImpl: Def.Initialize[Versions] = Def.setting { + /** Regexp that splits version number split into two parts: version and suffix. + * Examples of how the split is performed: + * + * "2.11.5": ("2.11.5", null) + * "2.11.5-acda7a": ("2.11.5", "-acda7a") + * "2.11.5-SNAPSHOT": ("2.11.5", "-SNAPSHOT") */ + val versionSplitted = """([\w+\.]+)(-[\w+\.]+)??""".r + + val versionSplitted(ver, suffixOrNull) = version.value + + val osgiSuffix = suffixOrNull match { + case null => "-VFINAL" + case "-SNAPSHOT" => "" + case suffixStr => suffixStr + } + + def executeTool(tool: String) = { + val cmd = + if (System.getProperty("os.name").toLowerCase.contains("windows")) + s"cmd.exe /c tools\\$tool.bat -p" + else s"tools/$tool" + Process(cmd).lines.head + } + + val commitDate = executeTool("get-scala-commit-date") + val commitSha = executeTool("get-scala-commit-sha") + + Versions( + canonicalVersion = s"$ver-$commitDate-$commitSha", + mavenVersion = s"${version.value}", + osgiVersion = s"$ver.v$commitDate$osgiSuffix-$commitSha", + commitSha = commitSha, + commitDate = commitDate, + isRelease = !osgiSuffix.isEmpty + ) + } + + lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task { + val props = versionProperties.value.toProperties + val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties" + props.put("copyright.string", copyrightString.value) + + // unfortunately, this will write properties in arbitrary order + // this makes it harder to test for stability of generated artifacts + // consider using 
https://github.com/etiennestuder/java-ordered-properties + // instead of java.util.Properties + IO.write(props, null, propFile) + propFile + } + + /** The global versions.properties data */ + lazy val versionProps: Map[String, String] = { + val props = new Properties() + val in = new FileInputStream(file("versions.properties")) + try props.load(in) + finally in.close() + props.asScala.toMap + } + + /** Get a subproject version number from `versionProps` */ + def versionNumber(name: String): String = + versionProps(s"$name.version.number") +} -- cgit v1.2.3 From 167f79ca1ee300860a4dfc570a03590496764f88 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Sat, 7 Nov 2015 17:30:26 -0500 Subject: less confusing wording for a dependent method type error note to reviewers: the error messages in this file are over the place about whether they're called "parameter sections", or "argument lists", or what, so there's no point in being picky about that here for context see SI-823 --- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2 +- test/files/neg/depmet_1.check | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index b0bd9977a8..727f09290a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1190,7 +1190,7 @@ trait ContextErrors { def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = { val errorAddendum = - ": parameter appears in the type of another parameter in the same section or an earlier one" + ": parameter may only be referenced in a subsequent parameter section" issueSymbolTypeError(sym, "illegal dependent method type" + errorAddendum)(context) } diff --git a/test/files/neg/depmet_1.check b/test/files/neg/depmet_1.check index 7a4f845fd5..15498568c5 100644 --- a/test/files/neg/depmet_1.check +++ 
b/test/files/neg/depmet_1.check @@ -1,7 +1,7 @@ -depmet_1.scala:2: error: illegal dependent method type: parameter appears in the type of another parameter in the same section or an earlier one +depmet_1.scala:2: error: illegal dependent method type: parameter may only be referenced in a subsequent parameter section def precise0(y: x.type)(x: String): Unit = {} ^ -depmet_1.scala:3: error: illegal dependent method type: parameter appears in the type of another parameter in the same section or an earlier one +depmet_1.scala:3: error: illegal dependent method type: parameter may only be referenced in a subsequent parameter section def precise1(x: String, y: x.type): Unit = {} ^ depmet_1.scala:4: error: not found: value y -- cgit v1.2.3 From 4c3e766ee17afdb44ceeeb764adc660e2a501e9f Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 11 Nov 2015 19:51:43 -0500 Subject: it's Scaladoc, not "ScalaDoc" or "Scala doc" renaming the existing ScalaDoc and ScalaDocReporter classes might break stuff, sadly, but at least we can fix the rest --- src/compiler/scala/tools/nsc/Global.scala | 2 +- src/compiler/scala/tools/nsc/Reporting.scala | 2 +- src/compiler/scala/tools/nsc/settings/Warnings.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 6 +++--- src/interactive/scala/tools/nsc/interactive/Global.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala | 4 ++-- src/scaladoc/scala/tools/nsc/doc/Settings.scala | 2 +- test/disabled/presentation/akka/src/akka/actor/Supervisor.scala | 2 +- test/files/neg/macro-without-xmacros-a.check | 2 +- test/files/neg/macro-without-xmacros-b.check | 2 +- test/files/neg/t6040.check | 2 +- test/files/neg/t6120.check | 2 +- test/files/neg/t6952.check | 2 +- test/files/neg/t8736-c.check | 2 +- test/scaladoc/resources/links.scala | 2 +- test/scaladoc/run/links.scala | 2 +- 16 files changed, 19 insertions(+), 19 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala 
b/src/compiler/scala/tools/nsc/Global.scala index 936bed7c8f..a618b080c8 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -231,7 +231,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Called from parser, which signals hereby that a method definition has been parsed. */ def signalParseProgress(pos: Position) {} - /** Called by ScalaDocAnalyzer when a doc comment has been parsed. */ + /** Called by ScaladocAnalyzer when a doc comment has been parsed. */ def signalParsedDocComment(comment: String, pos: Position) = { // TODO: this is all very broken (only works for scaladoc comments, not regular ones) // --> add hooks to parser and refactor Interactive global to handle comments directly diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 4e7a527a5a..e01c536ad1 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -78,7 +78,7 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w s"""| |This can be achieved by adding the import clause 'import $fqname' |or by setting the compiler option -language:$featureName. 
- |See the Scala docs for value $fqname for a discussion + |See the Scaladoc for value $fqname for a discussion |why the feature $req be explicitly enabled.""".stripMargin ) reportedFeature += featureTrait diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 59cc13c64e..f570037760 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -49,7 +49,7 @@ trait Warnings { val NullaryOverride = LintWarning("nullary-override", "Warn when non-nullary `def f()' overrides nullary `def f'.", true) val InferAny = LintWarning("infer-any", "Warn when a type argument is inferred to be `Any`.", true) val MissingInterpolator = LintWarning("missing-interpolator", "A string literal appears to be missing an interpolator id.") - val DocDetached = LintWarning("doc-detached", "A ScalaDoc comment appears to be detached from its element.") + val DocDetached = LintWarning("doc-detached", "A Scaladoc comment appears to be detached from its element.") val PrivateShadow = LintWarning("private-shadow", "A private field (or class parameter) shadows a superclass field.") val TypeParameterShadow = LintWarning("type-parameter-shadow", "A local type parameter shadows a type already in scope.") val PolyImplicitOverload = LintWarning("poly-implicit-overload", "Parameterized overloaded implicit methods are not visible as view bounds.") diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 494e1e49b7..509ce59104 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -588,10 +588,10 @@ trait Implicits { if (Statistics.canEnable) Statistics.incCounter(matchingImplicits) // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints - val isScalaDoc = context.tree == 
EmptyTree + val isScaladoc = context.tree == EmptyTree val itree0 = atPos(pos.focus) { - if (isLocalToCallsite && !isScalaDoc) { + if (isLocalToCallsite && !isScaladoc) { // SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope, // rather than an attributed Select, to detect shadowing. Ident(info.name) @@ -628,7 +628,7 @@ trait Implicits { // for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???` // then Scaladoc will give us something of type `C[T]`, and it would like to know // that `conv` is potentially available under such and such conditions - case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc => + case tree if isImplicitMethodType(tree.tpe) && !isScaladoc => applyImplicitArgs(tree) case tree => tree } diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 6600fea2d8..27a02c46a2 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -80,7 +80,7 @@ trait InteractiveAnalyzer extends Analyzer { val existingDerivedSym = owningInfo.decl(sym.name.toTermName).filter(sym => sym.isSynthetic && sym.isMethod) existingDerivedSym.alternatives foreach (owningInfo.decls.unlink) val defTree = tree match { - case dd: DocDef => dd.definition // See SI-9011, Scala IDE's presentation compiler incorporates ScalaDocGlobal with InterativeGlobal, so we have to unwrap DocDefs. + case dd: DocDef => dd.definition // See SI-9011, Scala IDE's presentation compiler incorporates ScaladocGlobal with InteractiveGlobal, so we have to unwrap DocDefs. 
case _ => tree } enterImplicitWrapper(defTree.asInstanceOf[ClassDef]) diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index cbf8ff22ba..8ea8c4deff 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -125,9 +125,9 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax } else if (in.ch == '*') { docBuffer = null in.next - val scalaDoc = ("/**", "*/") + val scaladoc = ("/**", "*/") if (in.ch == '*') - docBuffer = new StringBuilder(scalaDoc._1) + docBuffer = new StringBuilder(scaladoc._1) do { do { if (in.ch != '*' && in.ch != SU) { diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 067b2b2c29..90efa4e595 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -45,7 +45,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) val docfooter = StringSetting ( "-doc-footer", "footer", - "A footer on every ScalaDoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.", + "A footer on every Scaladoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.", "" ) diff --git a/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala b/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala index 4a1309faef..bec3c83f1a 100644 --- a/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala +++ b/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala @@ -95,7 +95,7 @@ case class SupervisorFactory(val config: SupervisorConfig) { * wire the children together using 'link', 'spawnLink' etc. and set the 'trapExit' flag in the * children that should trap error signals and trigger restart. *

- * See the ScalaDoc for the SupervisorFactory for an example on how to declaratively wire up children. + * See the Scaladoc for the SupervisorFactory for an example on how to declaratively wire up children. * * @author Jonas Bonér */ diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check index ec194be3a9..65445d80dd 100644 --- a/test/files/neg/macro-without-xmacros-a.check +++ b/test/files/neg/macro-without-xmacros-a.check @@ -2,7 +2,7 @@ Macros_2.scala:5: error: macro definition needs to be enabled by making the implicit value scala.language.experimental.macros visible. This can be achieved by adding the import clause 'import scala.language.experimental.macros' or by setting the compiler option -language:experimental.macros. -See the Scala docs for value scala.language.experimental.macros for a discussion +See the Scaladoc for value scala.language.experimental.macros for a discussion why the feature needs to be explicitly enabled. def foo(x: Int): Int = macro foo_impl ^ diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check index c97850f0a9..e3c1010d50 100644 --- a/test/files/neg/macro-without-xmacros-b.check +++ b/test/files/neg/macro-without-xmacros-b.check @@ -2,7 +2,7 @@ Macros_2.scala:3: error: macro definition needs to be enabled by making the implicit value scala.language.experimental.macros visible. This can be achieved by adding the import clause 'import scala.language.experimental.macros' or by setting the compiler option -language:experimental.macros. -See the Scala docs for value scala.language.experimental.macros for a discussion +See the Scaladoc for value scala.language.experimental.macros for a discussion why the feature needs to be explicitly enabled. 
def foo(x: Int): Int = macro Impls.foo_impl ^ diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check index 16c90ede7e..350f796d18 100644 --- a/test/files/neg/t6040.check +++ b/test/files/neg/t6040.check @@ -2,7 +2,7 @@ t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled by making the implicit value scala.language.dynamics visible. This can be achieved by adding the import clause 'import scala.language.dynamics' or by setting the compiler option -language:dynamics. -See the Scala docs for value scala.language.dynamics for a discussion +See the Scaladoc for value scala.language.dynamics for a discussion why the feature needs to be explicitly enabled. class X extends Dynamic ^ diff --git a/test/files/neg/t6120.check b/test/files/neg/t6120.check index a7d17e29cf..f432fde32f 100644 --- a/test/files/neg/t6120.check +++ b/test/files/neg/t6120.check @@ -2,7 +2,7 @@ t6120.scala:5: warning: postfix operator bippy should be enabled by making the implicit value scala.language.postfixOps visible. This can be achieved by adding the import clause 'import scala.language.postfixOps' or by setting the compiler option -language:postfixOps. -See the Scala docs for value scala.language.postfixOps for a discussion +See the Scaladoc for value scala.language.postfixOps for a discussion why the feature should be explicitly enabled. def f = null == null bippy ^ diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check index 1a591d02c6..acee0e7d60 100644 --- a/test/files/neg/t6952.check +++ b/test/files/neg/t6952.check @@ -2,7 +2,7 @@ t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled by making the implicit value scala.language.dynamics visible. This can be achieved by adding the import clause 'import scala.language.dynamics' or by setting the compiler option -language:dynamics. 
-See the Scala docs for value scala.language.dynamics for a discussion +See the Scaladoc for value scala.language.dynamics for a discussion why the feature needs to be explicitly enabled. trait B extends Dynamic ^ diff --git a/test/files/neg/t8736-c.check b/test/files/neg/t8736-c.check index 06b2228543..7debb6d515 100644 --- a/test/files/neg/t8736-c.check +++ b/test/files/neg/t8736-c.check @@ -2,7 +2,7 @@ t8736-c.scala:4: warning: higher-kinded type should be enabled by making the implicit value scala.language.higherKinds visible. This can be achieved by adding the import clause 'import scala.language.higherKinds' or by setting the compiler option -language:higherKinds. -See the Scala docs for value scala.language.higherKinds for a discussion +See the Scaladoc for value scala.language.higherKinds for a discussion why the feature should be explicitly enabled. def hk[M[_]] = ??? ^ diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala index ecac9c63cf..8e000ab979 100644 --- a/test/scaladoc/resources/links.scala +++ b/test/scaladoc/resources/links.scala @@ -1,6 +1,6 @@ // that would be: // SI-5079 "Scaladoc can't link to an object (only a class or trait)" -// SI-4497 "Links in ScalaDoc - Spec and implementation unsufficient" +// SI-4497 "Links in Scaladoc - Spec and implementation unsufficient" // SI-4224 "Wiki-links should support method targets" // SI-3695 "support non-fully-qualified type links in scaladoc comments" // SI-6487 "Scaladoc can't link to inner classes" diff --git a/test/scaladoc/run/links.scala b/test/scaladoc/run/links.scala index 64441c2d95..01db66aec3 100644 --- a/test/scaladoc/run/links.scala +++ b/test/scaladoc/run/links.scala @@ -3,7 +3,7 @@ import scala.tools.nsc.doc.model._ import scala.tools.partest.ScaladocModelTest // SI-5079 "Scaladoc can't link to an object (only a class or trait)" -// SI-4497 "Links in ScalaDoc - Spec and implementation unsufficient" +// SI-4497 "Links in Scaladoc - Spec and implementation 
unsufficient" // SI-4224 "Wiki-links should support method targets" // SI-3695 "support non-fully-qualified type links in scaladoc comments" // SI-6487 "Scaladoc can't link to inner classes" -- cgit v1.2.3 From 238b1fba3d5085457d05817c646d436542def5ea Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Oct 2015 13:01:07 +1000 Subject: Attacking exponential complexity in TypeMaps - Don't normalize existentials during the `contain`-s type map; `ExistentialType#normalize' calls contains internally and an exponential blowup ensues. - Ensure that the type map used in variance validation never returns modified types in order to avoid needless cloning of symbols. The enclosed test case still gets stuck in Uncurry, thanks to the way that `TypeMap#mapOver(List[Symbol])` recurses through the type first to check whether the type map would be an no-op or not. If not, it repeats the type map with cloned symbols. Doing the work twice at each level of recursion blows up the complexity. Removing that "fast path" allows the enclosed test to compile completely. As at this commit, it gets stuck in uncurry, which dealiases `s.List` to `s.c.i.List` within the type. 
Some more background on the troublesome part of `TypeMap`: http://lrytz.github.io/scala-aladdin-bugtracker/displayItem.do%3Fid=1210.html https://github.com/scala/scala/commit/f8b2b21050e7a2ca0f537ef70e3e0c8eead43abc --- src/reflect/scala/reflect/internal/Variances.scala | 24 ++++++++++++++-------- .../scala/reflect/internal/tpe/TypeMaps.scala | 18 ++++++++++++---- test/files/pos/existental-slow-compile2.scala | 7 +++++++ test/files/pos/existential-slow-compile1.flags | 1 + test/files/pos/existential-slow-compile1.scala | 7 +++++++ 5 files changed, 44 insertions(+), 13 deletions(-) create mode 100644 test/files/pos/existental-slow-compile2.scala create mode 100644 test/files/pos/existential-slow-compile1.flags create mode 100644 test/files/pos/existential-slow-compile1.scala diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index ef22df3f2e..af04f47e0e 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -122,15 +122,21 @@ trait Variances { * same is true of the parameters (ValDefs) unless we are inside a * refinement, in which case they are checked from here. 
*/ - def apply(tp: Type): Type = tp match { - case _ if isUncheckedVariance(tp) => tp - case _ if resultTypeOnly(tp) => this(tp.resultType) - case TypeRef(_, sym, _) if sym.isAliasType => this(tp.normalize) - case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp) - case RefinedType(_, _) => withinRefinement(mapOver(tp)) - case ClassInfoType(parents, _, _) => parents foreach this ; tp - case mt @ MethodType(_, result) => flipped(mt.paramTypes foreach this) ; this(result) - case _ => mapOver(tp) + def apply(tp: Type): Type = { + tp match { + case _ if isUncheckedVariance(tp) => + case _ if resultTypeOnly(tp) => this(tp.resultType) + case TypeRef(_, sym, _) if sym.isAliasType => this(tp.normalize) + case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp) + case RefinedType(_, _) => withinRefinement(mapOver(tp)) + case ClassInfoType(parents, _, _) => parents foreach this + case mt @ MethodType(_, result) => flipped(mt.paramTypes foreach this) ; this(result) + case _ => mapOver(tp) + } + // We're using TypeMap here for type traversal only. To avoid wasteful symbol + // cloning during the recursion, it is important to return the input `tp`, rather + // than the result of the pattern match above, which normalizes types. 
+ tp } def validateDefinition(base: Symbol) { val saved = this.base diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index b8d4050d7d..804360b677 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -998,10 +998,20 @@ private[internal] trait TypeMaps { class ContainsCollector(sym: Symbol) extends TypeCollector(false) { def traverse(tp: Type) { if (!result) { - tp.normalize match { - case TypeRef(_, sym1, _) if (sym == sym1) => result = true - case SingleType(_, sym1) if (sym == sym1) => result = true - case _ => mapOver(tp) + tp match { + case _: ExistentialType => + // ExistentialType#normalize internally calls contains, which leads to exponential performance + // for types like: `A[_ <: B[_ <: ... ]]`. Example: pos/existential-contains.scala. + // + // We can just map over the components and wait until we see the underlying type before we call + // normalize. + mapOver(tp) + case _ => + tp.normalize match { + case TypeRef(_, sym1, _) if (sym == sym1) => result = true + case SingleType(_, sym1) if (sym == sym1) => result = true + case _ => mapOver(tp) + } } } } diff --git a/test/files/pos/existental-slow-compile2.scala b/test/files/pos/existental-slow-compile2.scala new file mode 100644 index 0000000000..907344982c --- /dev/null +++ b/test/files/pos/existental-slow-compile2.scala @@ -0,0 +1,7 @@ +class C { + class L[+A] + def test = { + val foo: + L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_]]]]]]]]]]]]]]]]]]]]]]]] + = ??? 
} } + diff --git a/test/files/pos/existential-slow-compile1.flags b/test/files/pos/existential-slow-compile1.flags new file mode 100644 index 0000000000..7f7581974d --- /dev/null +++ b/test/files/pos/existential-slow-compile1.flags @@ -0,0 +1 @@ +-Ystop-after:refchecks diff --git a/test/files/pos/existential-slow-compile1.scala b/test/files/pos/existential-slow-compile1.scala new file mode 100644 index 0000000000..8602afd9db --- /dev/null +++ b/test/files/pos/existential-slow-compile1.scala @@ -0,0 +1,7 @@ +class C { + type L[+A] = scala.collection.immutable.List[A] + def test = { + val foo: + L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: _ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_ <: L[_]]]]]]]]]]]]]]]]]]]]]]]] + = ??? } } + -- cgit v1.2.3 From d9068b3358e8549ba7c36e083b39fa47bdfd4a6c Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Tue, 17 Nov 2015 23:20:10 +0000 Subject: Remove notes about -doc-title and -doc-version not being used Confirmed these options are used by editing build-ant-macros.xml and viewing the output, Unit, val printMsg: String => Unit = println(_)) ) /** A setting that defines the overall title of the documentation, typically the name of the library being - * documented. ''Note:'' This setting is currently not used. */ + * documented. */ val doctitle = StringSetting ( "-doc-title", "title", @@ -34,7 +34,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) ) /** A setting that defines the overall version number of the documentation, typically the version of the library being - * documented. ''Note:'' This setting is currently not used. */ + * documented. 
*/ val docversion = StringSetting ( "-doc-version", "version", -- cgit v1.2.3 From 6fceb3952b0f58095e1ac8727d9f368adc1fcae1 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 18 Nov 2015 11:44:02 +0000 Subject: Add two missed scaladoc commands into scaladoc specific help section Prior to this change these scaladoc options were buried in the scalac section of the help text, -doc-external-doc -implicits-sound-shadowing With this change the options are listed in the scaladoc section. This will make the commands easier to discover. --- src/scaladoc/scala/tools/nsc/doc/Settings.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 90efa4e595..86ceea0b7d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -212,9 +212,10 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) // For improved help output. def scaladocSpecific = Set[Settings#Setting]( docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes, + docExternalDoc, docAuthor, docDiagrams, docDiagramsDebug, docDiagramsDotPath, docDiagramsDotTimeout, docDiagramsDotRestart, - docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide, + docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide, docImplicitsSoundShadowing, docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses, docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages, docExpandAllTypes, docGroups -- cgit v1.2.3 From 9e174c2d434362a1cb71fd071b170d1397b0effd Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 19 Nov 2015 20:30:05 -0500 Subject: upgrade to MiMa 0.1.8 just for general dogfooding purposes. 
also because the new version understands 2.12 bytecode better, and this commit will get merged onto 2.12 --- build.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.xml b/build.xml index 7f8c91b47b..af3d304856 100644 --- a/build.xml +++ b/build.xml @@ -1642,7 +1642,7 @@ TODO: - + -- cgit v1.2.3 From f0f7dcdb124d212d82ae7e0ea32f88175f6a664f Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Fri, 20 Nov 2015 22:33:43 +0000 Subject: Refactor excluded qname test to be more data like This refactoring extracts data from code into a form which is closer to configuration data. This is a step change toward making this configurable. --- src/scaladoc/scala/tools/nsc/doc/Settings.scala | 34 +++++++++++++++++-------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 90efa4e595..0dd308eb8c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -275,24 +275,36 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) + ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) + ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) + - ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) + + ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by a 
WeakTypeTag, which is a runtime representation of its type that survives erasure")) + ("scala.reflect.api.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure")) + private val excludedClassnamePatterns = Set( + """^scala.Tuple.*""", + """^scala.Product.*""", + """^scala.Function.*""", + """^scala.runtime.AbstractFunction.*""" + ) map (_.r) + + private val notExcludedClasses = Set( + "scala.Tuple1", + "scala.Tuple2", + "scala.Product", + "scala.Product1", + "scala.Product2", + "scala.Function", + "scala.Function1", + "scala.Function2", + "scala.runtime.AbstractFunction0", + "scala.runtime.AbstractFunction1", + "scala.runtime.AbstractFunction2" + ) + /** * Set of classes to exclude from index and diagrams * TODO: Should be configurable */ def isExcluded(qname: String) = { - ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") || - qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction") - ) && !( - qname == "scala.Tuple1" || qname == "scala.Tuple2" || - qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" || - qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" || - qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" || - qname == "scala.runtime.AbstractFunction2" - ) - ) + excludedClassnamePatterns.exists(_.findFirstMatchIn(qname).isDefined) && !notExcludedClasses(qname) } /** Common conversion targets that affect any class in Scala */ -- cgit v1.2.3 From a7924629ca1e37e46016a17f333cd7cc8f8772b6 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Sun, 22 Nov 2015 21:08:20 +0000 Subject: Fix List Scaladoc time & space formatting The Performance section got sucked into a wormhole and popped up in the example tag. 
The laws of physics differ in the attributes block resulting in the loss of the line break between the Time and Space paragraphs. Fixed by moving the section out of the example tag. --- src/library/scala/collection/immutable/List.scala | 26 +++++++++++------------ 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 53146bd66d..75ddded6d2 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -25,6 +25,19 @@ import java.io._ * This class is optimal for last-in-first-out (LIFO), stack-like access patterns. If you need another access * pattern, for example, random access or FIFO, consider using a collection more suited to this than `List`. * + * ==Performance== + * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list. + * This includes the index-based lookup of elements, `length`, `append` and `reverse`. + * + * '''Space:''' `List` implements '''structural sharing''' of the tail list. This means that many operations are either + * zero- or constant-memory cost. + * {{{ + * val mainList = List(3, 2, 1) + * val with4 = 4 :: mainList // re-uses mainList, costs one :: instance + * val with42 = 42 :: mainList // also re-uses mainList, cost one :: instance + * val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList + * }}} + * * @example {{{ * // Make a list via the companion object factory * val days = List("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday") @@ -41,19 +54,6 @@ import java.io._ * } * }}} * - * ==Performance== - * '''Time:''' `List` has `O(1)` prepend and head/tail access. Most other operations are `O(n)` on the number of elements in the list. - * This includes the index-based lookup of elements, `length`, `append` and `reverse`. 
- * - * '''Space:''' `List` implements '''structural sharing''' of the tail list. This means that many operations are either - * zero- or constant-memory cost. - * {{{ - * val mainList = List(3, 2, 1) - * val with4 = 4 :: mainList // re-uses mainList, costs one :: instance - * val with42 = 42 :: mainList // also re-uses mainList, cost one :: instance - * val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList - * }}} - * * @note The functional list is characterized by persistence and structural sharing, thus offering considerable * performance and space consumption benefits in some scenarios if used correctly. * However, note that objects having multiple references into the same functional list (that is, -- cgit v1.2.3 From 35dd1a9a07aa5e993212cef5fd401f5534a4725a Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Mon, 23 Nov 2015 20:34:24 +0000 Subject: Align DotRunner dot restart count with command option description This is the command option description, val docDiagramsDotRestart = IntSetting( "-diagrams-dot-restart", "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)", 5, None, _ => None ) Prior to this change dot was restarted four times instead of five. Maybe the intention of the option was to allow a total number of attempts to be specified but with 5 restarts we need 6 attempts. The local var was renamed to reflect this. 
--- .../scala/tools/nsc/doc/html/page/diagram/DotRunner.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala index 9381cf3a35..9287bfbc2b 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala @@ -18,16 +18,16 @@ import model._ /** This class takes care of running the graphviz dot utility */ class DotRunner(settings: doc.Settings) { - private[this] var dotRestarts = 0 + private[this] var dotAttempts = 0 private[this] var dotProcess: DotProcess = null def feedToDot(dotInput: String, template: DocTemplateEntity): String = { if (dotProcess == null) { - if (dotRestarts < settings.docDiagramsDotRestart.value) { - if (dotRestarts != 0) + if (dotAttempts < settings.docDiagramsDotRestart.value + 1) { + if (dotAttempts > 0) settings.printMsg("Graphviz will be restarted...\n") - dotRestarts += 1 + dotAttempts += 1 dotProcess = new DotProcess(settings) } else return null @@ -41,7 +41,7 @@ class DotRunner(settings: doc.Settings) { if (result == null) { dotProcess.cleanup() dotProcess = null - if (dotRestarts == settings.docDiagramsDotRestart.value) { + if (dotAttempts == 1 + settings.docDiagramsDotRestart.value) { settings.printMsg("\n") settings.printMsg("**********************************************************************") settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool") -- cgit v1.2.3 From 57ad0b421ac507c1f4417dc12181113f44e093cf Mon Sep 17 00:00:00 2001 From: "Frank S. 
Thomas" Date: Mon, 23 Nov 2015 18:16:43 +0100 Subject: Update number of subsections in Value Conversions --- spec/06-expressions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 9cd58ea346..c24ca01c3b 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1341,7 +1341,7 @@ to $U$ after applying [eta-expansion](#eta-expansion) and ### Value Conversions -The following five implicit conversions can be applied to an +The following seven implicit conversions can be applied to an expression $e$ which has some value type $T$ and which is type-checked with some expected type $\mathit{pt}$. -- cgit v1.2.3