-rw-r--r--  .gitignore                                                  |   3
-rw-r--r--  CONTRIBUTING.md                                             |   2
-rw-r--r--  README.md                                                   |   4
-rw-r--r--  build.sbt                                                   |  71
-rw-r--r--  build.xml                                                   |   2
-rw-r--r--  doc/LICENSE.md                                              |   4
-rw-r--r--  doc/License.rtf                                             |   4
-rw-r--r--  project/ParserUtil.scala                                    |  54
-rw-r--r--  project/PartestUtil.scala                                   |  92
-rw-r--r--  project/ScalaOptionParser.scala                             | 128
-rw-r--r--  project/ScriptCommands.scala                                |  19
-rw-r--r--  project/VersionUtil.scala                                   | 101
-rw-r--r--  scripts/common                                              |   3
-rwxr-xr-x  scripts/jobs/integrate/bootstrap                            |  20
-rwxr-xr-x  scripts/jobs/validate/publish-core                          |  13
-rw-r--r--  src/compiler/scala/reflect/quasiquotes/Parsers.scala        |   4
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala       |  57
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala   |  32
-rw-r--r--  src/intellij/scala-build.iml.SAMPLE                         | 109
-rw-r--r--  src/intellij/scala.ipr.SAMPLE                               |   3
-rw-r--r--  src/library/scala/collection/Iterator.scala                 |  51
-rw-r--r--  src/library/scala/reflect/Manifest.scala                    |   3
-rw-r--r--  src/library/scala/util/Properties.scala                     |   2
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala   |   2
-rw-r--r--  src/scalap/decoder.properties                               |   2
-rw-r--r--  test/files/neg/t9572.check                                  |   7
-rw-r--r--  test/files/neg/t9572.scala                                  |   6
-rw-r--r--  test/junit/scala/collection/IteratorTest.scala              |  28
-rw-r--r--  test/junit/scala/reflect/ClassTag.scala                     |  12
29 files changed, 712 insertions, 126 deletions
diff --git a/.gitignore b/.gitignore
index d6571a377f..061cd274ac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,6 +26,9 @@
/build.properties
/buildcharacter.properties
+# might get generated when testing Jenkins scripts locally
+/jenkins.properties
+
# target directories for ant build
/build/
/dists/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d01a71b9bd..462b5404b2 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,7 +14,7 @@ By the way, the team at Typesafe is: @adriaanm, @lrytz, @retronym, and @SethTisu
## What kind of PR are you submitting?
-Regardless of the nature of your Pull Request, we have to ask you to digitally sign the [Scala CLA](http://typesafe.com/contribute/cla/scala), to protect the OSS nature of the code base.
+Regardless of the nature of your Pull Request, we have to ask you to digitally sign the [Scala CLA](http://www.typesafe.com/contribute/cla/scala), to protect the OSS nature of the code base.
You don't need to submit separate PRs for 2.11.x, 2.12.x, and 2.13.x. Any changes accepted on one of these branches will, in time, be merged into the later branches.
diff --git a/README.md b/README.md
index 4856d67d99..89454ee6fd 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@ This is the official repository for the [Scala Programming Language](http://www.
# How to contribute
-To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://typesafe.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature.
+To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://www.typesafe.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature.
For more information on building and developing the core of Scala, read on!
@@ -23,7 +23,7 @@ If you need some help with your PR at any time, please feel free to @-mention an
| username | talk to me about... |
--------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------|
<img src="https://avatars.githubusercontent.com/adriaanm" height="50px" title="Adriaan Moors"/> | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec |
- <img src="https://avatars.githubusercontent.com/SethTisue" height="50px" title="Seth Tisue"/> | [`@SethTisue`](https://github.com/SethTisue) | back-end, library, the welcome-to-Scala experience, build |
+ <img src="https://avatars.githubusercontent.com/SethTisue" height="50px" title="Seth Tisue"/> | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience |
<img src="https://avatars.githubusercontent.com/retronym" height="50px" title="Jason Zaugg"/> | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL |
<img src="https://avatars.githubusercontent.com/Ichoran" height="50px" title="Rex Kerr"/> | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance |
<img src="https://avatars.githubusercontent.com/lrytz" height="50px" title="Lukas Rytz"/> | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments |
diff --git a/build.sbt b/build.sbt
index ad9d5c8866..f9538d0b60 100644
--- a/build.sbt
+++ b/build.sbt
@@ -53,7 +53,7 @@
* https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion
*/
-import VersionUtil.{versionProps, versionNumber, generatePropertiesFileSettings, versionProperties, versionPropertiesSettings}
+import VersionUtil._
val bootstrapScalaVersion = versionProps("starr.version")
@@ -98,13 +98,27 @@ lazy val publishSettings : Seq[Setting[_]] = Seq(
(f, to)
}
IO.copy(mappings)
- }
+ },
+ credentials ++= {
+ val file = Path.userHome / ".credentials"
+ if (file.exists) List(Credentials(file))
+ else Nil
+ },
+ publishMavenStyle := true
)
-lazy val commonSettings = clearSourceAndResourceDirectories ++ versionPropertiesSettings ++ publishSettings ++ Seq[Setting[_]](
+// Set the version number: The ANT build uses the file "build.number" to get the base version. Overriding versions or
+// suffixes for certain builds is done by directly setting variables from the shell scripts. For example, in
+// publish-core this requires computing the commit SHA first and then passing it to ANT. In the sbt build we use
+// the two settings `baseVersion` and `baseVersionSuffix` to compute all versions (canonical, Maven, OSGi). See
+// VersionUtil.versionPropertiesImpl for details. The standard sbt `version` setting should not be set directly. It
+// is the same as the Maven version and derived automatically from `baseVersion` and `baseVersionSuffix`.
+globalVersionSettings
+baseVersion in Global := "2.11.8"
+baseVersionSuffix in Global := "SNAPSHOT"
+
+lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]](
organization := "org.scala-lang",
- // The ANT build uses the file "build.number" and the property "build.release" to compute the version
- version := "2.11.8-SNAPSHOT",
scalaVersion := bootstrapScalaVersion,
// we don't cross build Scala itself
crossPaths := false,
@@ -186,7 +200,18 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ versionProperties
},
// Remove auto-generated manifest attributes
packageOptions in Compile in packageBin := Seq.empty,
- packageOptions in Compile in packageSrc := Seq.empty
+ packageOptions in Compile in packageSrc := Seq.empty,
+
+ // Lets us CTRL-C partest without exiting SBT entirely
+ cancelable in Global := true,
+ // When we fork subprocesses, use the base directory as the working directory.
+ // This enables `sbt> partest test/files/run/t1.scala` or `sbt> scalac sandbox/test.scala`
+ baseDirectory in Compile := (baseDirectory in ThisBuild).value,
+ baseDirectory in Test := (baseDirectory in ThisBuild).value,
+
+ // Don't log process output (e.g. of forked `compiler/runMain ...Main`), just pass it
+ // directly to stdout
+ outputStrategy in run := Some(StdoutOutput)
)
/** Extra post-processing for the published POM files. These are needed to create POMs that
@@ -404,7 +429,6 @@ lazy val repl = configureAsSubproject(project)
.settings(
connectInput in run := true,
publishArtifact := false,
- outputStrategy in run := Some(StdoutOutput),
run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments.
)
.dependsOn(compiler, interactive)
@@ -451,7 +475,8 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target"
val outdir = (classDirectory in Compile).value
JarJar(inputs, outdir, config)
}),
- publishArtifact := false
+ publishArtifact := false,
+ connectInput in run := true
)
.dependsOn(replJline)
@@ -649,7 +674,13 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di
lazy val root = (project in file("."))
.settings(disableDocs: _*)
- .settings(publishArtifact := false)
+ .settings(generateBuildCharacterFileSettings: _*)
+ .settings(
+ publishArtifact := false,
+ publish := {},
+ publishLocal := {},
+ commands ++= ScriptCommands.all
+ )
.aggregate(library, forkjoin, reflect, compiler, interactive, repl, replJline, replJlineEmbedded,
scaladoc, scalap, actors, partestExtras, junit, libraryAll, scalaDist).settings(
sources in Compile := Seq.empty,
@@ -799,3 +830,25 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] =
}.taskValue
buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build-sbt"
+
+// Add tab completion to partest
+commands += Command("partest")(_ => PartestUtil.partestParser((baseDirectory in ThisBuild).value, (baseDirectory in ThisBuild).value / "test")) { (state, parsed) =>
+ ("test/it:testOnly -- " + parsed) :: state
+}
+
+// Add tab completion to scalac et al.
+commands ++= {
+ val commands =
+ List(("scalac", "compiler", "scala.tools.nsc.Main"),
+ ("scala", "repl-jline-embedded", "scala.tools.nsc.MainGenericRunner"),
+ ("scaladoc", "scaladoc", "scala.tools.nsc.ScalaDoc"))
+
+ commands.map {
+ case (entryPoint, projectRef, mainClassName) =>
+ Command(entryPoint)(_ => ScalaOptionParser.scalaParser(entryPoint, (baseDirectory in ThisBuild).value)) { (state, parsedOptions) =>
+ (projectRef + "/runMain " + mainClassName + " -usejavacp " + parsedOptions) :: state
+ }
+ }
+}
+
+addCommandAlias("scalap", "scalap/compile:runMain scala.tools.scalap.Main -usejavacp")
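Taken together, the build.sbt changes above move version computation into sbt (`baseVersion` / `baseVersionSuffix`) and add tab-completed runner commands. A rough sbt-shell usage sketch, reusing the example invocations from the build comments (the paths and the suffix override are illustrative, not prescribed by this commit):

    sbt> scalac sandbox/test.scala
    sbt> partest test/files/run/t1.scala
    sbt> set baseVersionSuffix in Global := ""   // e.g. to try a release-style version string locally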
diff --git a/build.xml b/build.xml
index 129d5982d9..8cf68b668c 100644
--- a/build.xml
+++ b/build.xml
@@ -184,7 +184,7 @@ TODO:
<property name="dists.dir" value="${basedir}/dists"/>
- <property name="copyright.string" value="Copyright 2002-2015, LAMP/EPFL"/>
+ <property name="copyright.string" value="Copyright 2002-2016, LAMP/EPFL"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
diff --git a/doc/LICENSE.md b/doc/LICENSE.md
index 55e82f64ba..b16711896c 100644
--- a/doc/LICENSE.md
+++ b/doc/LICENSE.md
@@ -2,9 +2,9 @@ Scala is licensed under the [BSD 3-Clause License](http://opensource.org/license
## Scala License
-Copyright (c) 2002-2015 EPFL
+Copyright (c) 2002-2016 EPFL
-Copyright (c) 2011-2015 Typesafe, Inc.
+Copyright (c) 2011-2016 Typesafe, Inc.
All rights reserved.
diff --git a/doc/License.rtf b/doc/License.rtf
index c475bda3ef..21beba0e9f 100644
--- a/doc/License.rtf
+++ b/doc/License.rtf
@@ -10,8 +10,8 @@
\fs48 Scala License
\fs40 \
-\fs26 Copyright (c) 2002-2015 EPFL\
-Copyright (c) 2011-2015 Typesafe, Inc.\
+\fs26 Copyright (c) 2002-2016 EPFL\
+Copyright (c) 2011-2016 Typesafe, Inc.\
All rights reserved.\
\
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\
diff --git a/project/ParserUtil.scala b/project/ParserUtil.scala
new file mode 100644
index 0000000000..f6658b146b
--- /dev/null
+++ b/project/ParserUtil.scala
@@ -0,0 +1,54 @@
+import sbt._
+import sbt.complete.Parser._
+import sbt.complete.Parsers._
+import sbt.complete._
+
+object ParserUtil {
+ def notStartingWith(parser: Parser[String], c: Char): Parser[String] = parser & not(c ~> any.*, "value cannot start with " + c + ".")
+ def concat(p: Parser[(String, String)]): Parser[String] = {
+ p.map(x => x._1 + x._2)
+ }
+
+ def EitherOr(a: Parser[String], b: Parser[String]): Parser[String] = {
+ a.flatMap[String] {
+ case "" => b
+ case x: String =>
+ concat(Space.string ~ b).map[String]((s: String) => x + s)
+ }
+ }
+ def Opt(a: Parser[String]) = a.?.map(_.getOrElse(""))
+
+ val StringBasicNotStartingWithDash = notStartingWith(StringBasic, '-')
+ val IsDirectoryFilter = new SimpleFileFilter(_.isDirectory)
+ val JarOrDirectoryParser = FileParser(GlobFilter("*.jar") || IsDirectoryFilter)
+ def FileParser(filter: FileFilter, dirFilter: FileFilter = AllPassFilter, base: File = file(".")) = {
+ def matching(prefix: String): List[String] = {
+ val preFile = file(prefix)
+ val cwd = base
+ val parent = Option(preFile.getParentFile).getOrElse(cwd)
+ if (preFile.exists) {
+ if (preFile.isDirectory) {
+ preFile.*(IsDirectoryFilter.&&(dirFilter) || filter).get.map(_.getPath).toList
+ } else {
+ List(preFile).filter(filter.accept).map(_.getPath)
+ }
+ }
+ else if (parent != null) {
+ def ensureSuffix(s: String, suffix: String) = if (s.endsWith(suffix)) s else s + suffix
+ def pathOf(f: File): String = if (f.isDirectory && !filter.accept(f)) ensureSuffix(f.getPath, "/") else f.getPath
+ parent.*(GlobFilter(preFile.name + "*") && ((IsDirectoryFilter && dirFilter) || filter)).get.map(x => pathOf(if (parent == cwd) x.relativeTo(cwd).get else x)).toList
+ } else Nil
+ }
+ def displayPath = Completions.single(Completion.displayOnly("<path>"))
+ token(StringBasic, TokenCompletions.fixed((seen, level) => if (seen.isEmpty) displayPath else matching(seen) match {
+ case Nil => displayPath
+ case x :: Nil =>
+ if (filter.accept(file(x)))
+ Completions.strict(Set(Completion.tokenDisplay(x.stripPrefix(seen), x)))
+ else
+ Completions.strict(Set(Completion.suggestion(x.stripPrefix(seen))))
+ case xs =>
+ Completions.strict(xs.map(x => Completion.tokenDisplay(x.stripPrefix(seen), x)).toSet)
+ })).filter(!_.startsWith("-"), x => x)
+ }
+}
\ No newline at end of file
diff --git a/project/PartestUtil.scala b/project/PartestUtil.scala
new file mode 100644
index 0000000000..0c0c677a6f
--- /dev/null
+++ b/project/PartestUtil.scala
@@ -0,0 +1,92 @@
+import sbt._
+import sbt.complete._, Parser._, Parsers._
+
+object PartestUtil {
+ private case class TestFiles(srcPath: String, globalBase: File, testBase: File) {
+ private val testCaseDir = new SimpleFileFilter(f => f.isDirectory && f.listFiles.nonEmpty && !(f.getParentFile / (f.name + ".res")).exists)
+ private val testCaseFilter = GlobFilter("*.scala") | GlobFilter("*.java") | GlobFilter("*.res") || testCaseDir
+ private def testCaseFinder = (testBase / srcPath).*(AllPassFilter).*(testCaseFilter)
+ private val basePaths = allTestCases.map(_._2.split('/').take(3).mkString("/") + "/").distinct
+
+ def allTestCases = testCaseFinder.pair(relativeTo(globalBase))
+ def basePathExamples = new FixedSetExamples(basePaths)
+ private def equiv(f1: File, f2: File) = f1.getCanonicalFile == f2.getCanonicalFile
+ def parentChain(f: File): Iterator[File] =
+ if (f == null || !f.exists) Iterator()
+ else Iterator(f) ++ (if (f.getParentFile == null) Nil else parentChain(f.getParentFile))
+ def isParentOf(parent: File, f2: File, maxDepth: Int) =
+ parentChain(f2).take(maxDepth).exists(p1 => equiv(p1, parent))
+ def isTestCase(f: File) = {
+ val grandParent = if (f != null && f.getParentFile != null) f.getParentFile.getParentFile else null
+ grandParent != null && equiv(grandParent, testBase / srcPath) && testCaseFilter.accept(f)
+ }
+ def mayContainTestCase(f: File) = {
+ isParentOf(testBase / srcPath, f, 2) || isParentOf(f, testBase / srcPath, Int.MaxValue)
+ }
+ }
+ /** A parser for the custom `partest` command */
+ def partestParser(globalBase: File, testBase: File): Parser[String] = {
+ val knownUnaryOptions = List(
+ "--pos", "--neg", "--run", "--jvm", "--res", "--ant", "--scalap", "--specialized",
+ "--scalacheck", "--instrumented", "--presentation", "--failed", "--update-check",
+ "--show-diff", "--verbose", "--terse", "--debug", "--version", "--self-test", "--help")
+ val srcPathOption = "--srcpath"
+ val grepOption = "--grep"
+
+ // HACK: if we parse `--srcpath scaladoc`, we overwrite this var. The parser for test file paths
+ // then lazily creates the examples based on the current value.
+ // TODO is there a cleaner way to do this with SBT's parser infrastructure?
+ var srcPath = "files"
+ var _testFiles: TestFiles = null
+ def testFiles = {
+ if (_testFiles == null || _testFiles.srcPath != srcPath) _testFiles = new TestFiles(srcPath, globalBase, testBase)
+ _testFiles
+ }
+ val TestPathParser = ParserUtil.FileParser(
+ new SimpleFileFilter(f => testFiles.isTestCase(f)),
+ new SimpleFileFilter(f => testFiles.mayContainTestCase(f)), globalBase)
+
+ // allow `--grep "is unchecked"` or `--grep *t123*`, in the spirit of ./bin/partest-ack;
+ // a superset of the --grep built into partest itself.
+ val Grep = {
+ def expandGrep(x: String): Seq[String] = {
+ val matchingFileContent = try {
+ val Pattern = ("(?i)" + x).r
+ testFiles.allTestCases.filter {
+ case (testFile, testPath) =>
+ val assocFiles = List(".check", ".flags").map(testFile.getParentFile / _)
+ val sourceFiles = if (testFile.isFile) List(testFile) else testFile.**(AllPassFilter).get.toList
+ val allFiles = testFile :: assocFiles ::: sourceFiles
+ allFiles.exists { f => f.exists && f.isFile && Pattern.findFirstIn(IO.read(f)).isDefined }
+ }
+ } catch {
+ case _: Throwable => Nil
+ }
+ val matchingFileName = try {
+ val filter = GlobFilter("*" + x + "*")
+ testFiles.allTestCases.filter(x => filter.accept(x._1.name))
+ } catch {
+ case t: Throwable => Nil
+ }
+ (matchingFileContent ++ matchingFileName).map(_._2).distinct.sorted
+ }
+
+ val completion = Completions.strict(Set("<filename glob>", "<regex> (for source, flags or checkfile contents)").map(s => Completion.displayOnly(s)))
+ val tokenCompletion = TokenCompletions.fixed((seen, level) => completion)
+
+ val globOrPattern = StringBasic.map(expandGrep).flatMap {
+ case Seq() => failure("no tests match pattern / glob")
+ case x => success(x.mkString(" "))
+ }
+ token(grepOption <~ Space) ~> token(globOrPattern, tokenCompletion)
+ }
+
+ val SrcPath = ((token(srcPathOption) <~ Space) ~ token(StringBasic.examples(Set("files", "pending", "scaladoc")))) map {
+ case opt ~ path =>
+ srcPath = path
+ opt + " " + path
+ }
+ val P = oneOf(knownUnaryOptions.map(x => token(x))) | SrcPath | TestPathParser | Grep
+ (Space ~> repsep(P, oneOrMore(Space))).map(_.mkString(" ")).?.map(_.getOrElse(""))
+ }
+}
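This parser backs the `partest` command registered in build.sbt above; a sketch of the invocations it is meant to complete, using the glob and regex examples from the comment (the test selections are illustrative):

    sbt> partest --grep "is unchecked"
    sbt> partest --grep *t123*
    sbt> partest --srcpath scaladoc --show-diff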
diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala
new file mode 100644
index 0000000000..a11bd93d82
--- /dev/null
+++ b/project/ScalaOptionParser.scala
@@ -0,0 +1,128 @@
+import ParserUtil._
+import sbt._
+import sbt.complete.Parser._
+import sbt.complete.Parsers._
+import sbt.complete._
+
+object ScalaOptionParser {
+ /** An sbt parser for the Scala command-line runners (scala, scalac, etc.) */
+ def scalaParser(entryPoint: String, globalBase: File): Parser[String] = {
+ def BooleanSetting(name: String): Parser[String] =
+ token(name)
+ def StringSetting(name: String): Parser[String] = {
+ val valueParser = name match {
+ case "-d" => JarOrDirectoryParser
+ case _ => token(StringBasic, TokenCompletions.displayOnly("<value>"))
+ }
+ concat(concat(token(name ~ Space.string)) ~ valueParser)
+ }
+ def MultiStringSetting(name: String): Parser[String] =
+ concat(concat(token(name ~ ":")) ~ repsep(token(StringBasicNotStartingWithDash, TokenCompletions.displayOnly("<value>")), token(",")).map(_.mkString))
+ def IntSetting(name: String): Parser[String] =
+ concat(concat(token(name ~ ":")) ~ token(IntBasic.map(_.toString), TokenCompletions.displayOnly("<integer>")))
+ def ChoiceSetting(name: String, choices: List[String]): Parser[String] =
+ concat(token(concat(name ~ ":")) ~ token(StringBasic.examples(choices: _*)).map(_.mkString))
+ def MultiChoiceSetting(name: String, choices: List[String]): Parser[String] =
+ concat(token(concat(name ~ ":")) ~ rep1sep(token(StringBasic.examples(choices: _*)), token(",")).map(_.mkString))
+ def PathSetting(name: String): Parser[String] = {
+ concat(concat(token(name) ~ Space.string) ~ rep1sep(JarOrDirectoryParser.filter(!_.contains(":"), x => x), token(java.io.File.pathSeparator)).map(_.mkString))
+ }
+ def FileSetting(name: String): Parser[String] = {
+ concat(concat(token(name) ~ Space.string) ~ rep1sep(JarOrDirectoryParser.filter(!_.contains(":"), x => x), token(java.io.File.pathSeparator)).map(_.mkString))
+ }
+ val Phase = token(NotSpace.examples(phases: _*))
+ def PhaseSettingParser(name: String): Parser[String] = {
+ MultiChoiceSetting(name, phases)
+ }
+ def ScalaVersionSetting(name: String): Parser[String] = {
+ concat(concat(token(name ~ Space.string)) ~ token(StringBasic, TokenCompletions.displayOnly("<scala version>")))
+ }
+ val Property: Parser[String] = {
+ val PropName = concat(token("-D" ~ oneOrMore(NotSpaceClass & not('=', "not =")).string, TokenCompletions.displayOnly("-D<property name>")))
+ val EqualsValue = concat("=" ~ token(OptNotSpace, TokenCompletions.displayOnly("<property value>")))
+ concat(PropName ~ EqualsValue.?.map(_.getOrElse("")))
+ }
+
+ val sourceFile = FileParser(GlobFilter("*.scala") | GlobFilter("*.java"))
+
+ // TODO Allow JVM settings via -J-... and temporarily add them to the ForkOptions
+ val UniversalOpt = Property | oneOf(pathSettingNames.map(PathSetting) ++ phaseSettings.map(PhaseSettingParser) ++ booleanSettingNames.map(BooleanSetting) ++ stringSettingNames.map(StringSetting) ++ multiStringSettingNames.map(MultiStringSetting) ++ intSettingNames.map(IntSetting) ++ choiceSettingNames.map { case (k, v) => ChoiceSetting(k, v) } ++ multiChoiceSettingNames.map { case (k, v) => MultiChoiceSetting(k, v) } ++ scalaVersionSettings.map(ScalaVersionSetting))
+ val ScalacOpt = sourceFile | UniversalOpt
+
+ val ScalaExtraSettings = oneOf(
+ scalaChoiceSettingNames.map { case (k, v) => ChoiceSetting(k,v)}.toList
+ ++ scalaStringSettingNames.map(StringSetting)
+ ++ scalaBooleanSettingNames.map(BooleanSetting))
+ val ScalaOpt = UniversalOpt | ScalaExtraSettings
+
+ val ScalaDocExtraSettings = oneOf(
+ scalaDocBooleanSettingNames.map(BooleanSetting)
+ ++ scalaDocIntSettingNames.map(IntSetting)
+ ++ scalaDocChoiceSettingNames.map { case (k, v) => ChoiceSetting(k, v)}
+ ++ scaladocStringSettingNames.map(StringSetting)
+ ++ scaladocPathSettingNames.map(PathSetting)
+ ++ scaladocMultiStringSettingNames.map(MultiStringSetting)
+ )
+ val ScalaDocOpt = sourceFile | ScalaOpt | ScalaDocExtraSettings
+
+ entryPoint match {
+ case "scala" =>
+ val runnable = token(StringBasicNotStartingWithDash, TokenCompletions.displayOnly("<script|class|object|jar>")).filter(!_.startsWith("-"), x => x)
+ val runnableAndArgs = concat(runnable ~ Opt(concat(Space.string ~ repsep(token(StringBasic, TokenCompletions.displayOnly("<arg>")), Space).map(_.mkString(" ")))))
+ val options = repsep(ScalaOpt, Space).map(_.mkString(" "))
+ Opt(Space ~> EitherOr(options, runnableAndArgs))
+ case "scaladoc" =>
+ Opt(Space ~> Opt(repsep(ScalaDocOpt, Space).map(_.mkString(" "))))
+ case "scalac" =>
+ Opt(Space ~> repsep(ScalacOpt, Space).map(_.mkString(" ")))
+ }
+ }
+
+ // TODO retrieve this data programmatically, à la https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala
+ private def booleanSettingNames = List("-X", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xfull-lubs", "-Xfuture", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-implicits", "-Xlog-reflective-calls",
+ "-Xno-forwarders", "-Xno-patmat-analysis", "-Xno-uescape", "-Xnojline", "-Xprint-pos", "-Xprint-types", "-Xprompt", "-Xresident", "-Xshow-phases", "-Xstrict-inference", "-Xverify", "-Y",
+ "-Ybreak-cycles", "-Yclosure-elim", "-Yconst-opt", "-Ydead-code", "-Ydebug", "-Ycompact-trees", "-Ydisable-unreachable-prevention", "-YdisableFlatCpCaching", "-Ydoc-debug",
+ "-Yeta-expand-keeps-star", "-Yide-debug", "-Yinfer-argument-types", "-Yinfer-by-name", "-Yinfer-debug", "-Yinline", "-Yinline-handlers",
+ "-Yinline-warnings", "-Yissue-debug", "-Ylog-classpath", "-Ymacro-debug-lite", "-Ymacro-debug-verbose", "-Ymacro-no-expand",
+ "-Yno-completion", "-Yno-generic-signatures", "-Yno-imports", "-Yno-load-impl-class", "-Yno-predef", "-Ynooptimise",
+ "-Yoverride-objects", "-Yoverride-vars", "-Ypatmat-debug", "-Yno-adapted-args", "-Ypos-debug", "-Ypresentation-debug",
+ "-Ypresentation-strict", "-Ypresentation-verbose", "-Yquasiquote-debug", "-Yrangepos", "-Yreify-copypaste", "-Yreify-debug", "-Yrepl-class-based",
+ "-Yrepl-sync", "-Yshow-member-pos", "-Yshow-symkinds", "-Yshow-symowners", "-Yshow-syms", "-Yshow-trees", "-Yshow-trees-compact", "-Yshow-trees-stringified", "-Ytyper-debug",
+ "-Ywarn-adapted-args", "-Ywarn-dead-code", "-Ywarn-inaccessible", "-Ywarn-infer-any", "-Ywarn-nullary-override", "-Ywarn-nullary-unit", "-Ywarn-numeric-widen", "-Ywarn-unused", "-Ywarn-unused-import", "-Ywarn-value-discard",
+ "-deprecation", "-explaintypes", "-feature", "-help", "-no-specialization", "-nobootcp", "-nowarn", "-optimise", "-print", "-unchecked", "-uniqid", "-usejavacp", "-usemanifestcp", "-verbose", "-version")
+ private def stringSettingNames = List("-Xgenerate-phase-graph", "-Xmain-class", "-Xpluginsdir", "-Xshow-class", "-Xshow-object", "-Xsource-reader", "-Ydump-classes", "-Ygen-asmp",
+ "-Ygen-javap", "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript")
+ private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp")
+ private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "lazyvals", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "icode", "jvm", "terminal")
+ private val phaseSettings = List("-Xprint-icode", "-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint")
+ private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require")
+ private def intSettingNames = List("-Xmax-classfile-name", "-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion")
+ private def choiceSettingNames = Map[String, List[String]](
+ "-Ybackend" -> List("GenASM", "GenBCode"),
+ "-YclasspathImpl" -> List("flat", "recursive"),
+ "-Ydelambdafy" -> List("inline", "method"),
+ "-Ylinearizer" -> List("dfs", "dump", "normal", "rpo"),
+ "-Ymacro-expand" -> List("discard", "none"),
+ "-Yresolve-term-conflict" -> List("error", "object", "package"),
+ "-g" -> List("line", "none", "notailcails", "source", "vars"),
+ "-target" -> List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"))
+ private def multiChoiceSettingNames = Map[String, List[String]](
+ "-Xlint" -> List("adapted-args", "nullary-unit", "inaccessible", "nullary-override", "infer-any", "missing-interpolator", "doc-detached", "private-shadow", "type-parameter-shadow", "poly-implicit-overload", "option-implicit", "delayedinit-select", "by-name-right-associative", "package-object-classes", "unsound-match", "stars-align"),
+ "-language" -> List("help", "_", "dynamics", "postfixOps", "reflectiveCalls", "implicitConversions", "higherKinds", "existentials", "experimental.macros"),
+ "-Yopt" -> List("l:none", "l:default", "l:method", "l:project", "l:classpath", "unreachable-code", "simplify-jumps", "empty-line-numbers", "empty-labels", "compact-locals", "nullness-tracking", "closure-elimination", "inline-project", "inline-global"),
+ "-Ystatistics" -> List("parser", "typer", "patmat", "erasure", "cleanup", "jvm")
+ )
+ private def scalaVersionSettings = List("-Xmigration", "-Xsource")
+
+ private def scalaChoiceSettingNames = Map("-howtorun" -> List("object", "script", "jar", "guess"))
+ private def scalaStringSettingNames = List("-i", "-e")
+ private def scalaBooleanSettingNames = List("-nc", "-save")
+
+ private def scalaDocBooleanSettingNames = List("-Yuse-stupid-types", "-implicits", "-implicits-debug", "-implicits-show-all", "-implicits-sound-shadowing", "-implicits-hide", "-author", "-diagrams", "-diagrams-debug", "-raw-output", "-no-prefixes", "-no-link-warnings", "-expand-all-types", "-groups")
+ private def scalaDocIntSettingNames = List("-diagrams-max-classes", "-diagrams-max-implicits", "-diagrams-dot-timeout", "-diagrams-dot-restart")
+ private def scalaDocChoiceSettingNames = Map("-doc-format" -> List("html"))
+ private def scaladocStringSettingNames = List("-doc-title", "-doc-version", "-doc-footer", "-doc-no-compile", "-doc-source-url", "-doc-generator", "-skip-packages")
+ private def scaladocPathSettingNames = List("-doc-root-content", "-diagrams-dot-path")
+ private def scaladocMultiStringSettingNames = List("-doc-external-doc")
+
+}
diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala
new file mode 100644
index 0000000000..537990d985
--- /dev/null
+++ b/project/ScriptCommands.scala
@@ -0,0 +1,19 @@
+import sbt._
+import Keys._
+import complete.DefaultParsers._
+
+/** Custom commands for use by the Jenkins scripts. This keeps the surface area and call syntax small. */
+object ScriptCommands {
+ def all = Seq(setupPublishCore)
+
+ /** Set up the environment for `validate/publish-core`. The argument is the Artifactory snapshot repository URL. */
+ def setupPublishCore = Command.single("setupPublishCore") { case (state, url) =>
+ Project.extract(state).append(Seq(
+ VersionUtil.baseVersionSuffix in Global := "SHA-SNAPSHOT",
+ // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088):
+ publishTo in Global := Some("scala-pr" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis),
+ publishArtifact in (Compile, packageDoc) in ThisBuild := false,
+ scalacOptions in Compile in ThisBuild += "-optimise"
+ ), state)
+ }
+}
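For local testing, the same command can be run from the sbt shell, mirroring what scripts/jobs/validate/publish-core does further below (the repository URL here is a placeholder):

    sbt> setupPublishCore https://example.org/artifactory/scala-pr-snapshots
    sbt> generateBuildCharacterPropertiesFile
    sbt> publish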
diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala
index 71de772b08..fab22e66d4 100644
--- a/project/VersionUtil.scala
+++ b/project/VersionUtil.scala
@@ -5,21 +5,29 @@ import java.io.FileInputStream
import scala.collection.JavaConverters._
object VersionUtil {
+ lazy val baseVersion = settingKey[String]("The base version number from which all others are derived")
+ lazy val baseVersionSuffix = settingKey[String]("Identifies the kind of version to build")
lazy val copyrightString = settingKey[String]("Copyright string.")
lazy val versionProperties = settingKey[Versions]("Version properties.")
lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.")
+ lazy val generateBuildCharacterPropertiesFile = taskKey[File]("Generating buildcharacter.properties file.")
- lazy val versionPropertiesSettings = Seq[Setting[_]](
- versionProperties := versionPropertiesImpl.value
+ lazy val globalVersionSettings = Seq[Setting[_]](
+ // Set the version properties globally (they are the same for all projects)
+ versionProperties in Global := versionPropertiesImpl.value,
+ version in Global := versionProperties.value.mavenVersion
)
lazy val generatePropertiesFileSettings = Seq[Setting[_]](
- copyrightString := "Copyright 2002-2015, LAMP/EPFL",
+ copyrightString := "Copyright 2002-2016, LAMP/EPFL",
resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue,
- versionProperties := versionPropertiesImpl.value,
generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value
)
+ lazy val generateBuildCharacterFileSettings = Seq[Setting[_]](
+ generateBuildCharacterPropertiesFile := generateBuildCharacterPropertiesFileImpl.value
+ )
+
case class Versions(canonicalVersion: String, mavenVersion: String, osgiVersion: String, commitSha: String, commitDate: String, isRelease: Boolean) {
val githubTree =
if(isRelease) "v" + mavenVersion
@@ -28,30 +36,36 @@ object VersionUtil {
override def toString = s"Canonical: $canonicalVersion, Maven: $mavenVersion, OSGi: $osgiVersion, github: $githubTree"
- def toProperties: Properties = {
- val props = new Properties
- props.put("version.number", canonicalVersion)
- props.put("maven.version.number", mavenVersion)
- props.put("osgi.version.number", osgiVersion)
- props
- }
+ def toMap: Map[String, String] = Map(
+ "version.number" -> canonicalVersion,
+ "maven.version.number" -> mavenVersion,
+ "osgi.version.number" -> osgiVersion
+ )
}
- lazy val versionPropertiesImpl: Def.Initialize[Versions] = Def.setting {
- /** Regexp that splits version number split into two parts: version and suffix.
- * Examples of how the split is performed:
- *
- * "2.11.5": ("2.11.5", null)
- * "2.11.5-acda7a": ("2.11.5", "-acda7a")
- * "2.11.5-SNAPSHOT": ("2.11.5", "-SNAPSHOT") */
- val versionSplitted = """([\w+\.]+)(-[\w+\.]+)??""".r
-
- val versionSplitted(ver, suffixOrNull) = version.value
-
- val osgiSuffix = suffixOrNull match {
- case null => "-VFINAL"
- case "-SNAPSHOT" => ""
- case suffixStr => suffixStr
+ /** Compute the canonical, Maven and OSGi version number from `baseVersion` and `baseVersionSuffix`.
+ * Examples of the generated versions:
+ *
+ * ("2.11.8", "SNAPSHOT" ) -> ("2.11.8-20151215-133023-7559aed3c5", "2.11.8-SNAPSHOT", "2.11.8.v20151215-133023-7559aed3c5")
+ * ("2.11.8", "SHA-SNAPSHOT") -> ("2.11.8-20151215-133023-7559aed3c5", "2.11.8-7559aed3c5-SNAPSHOT", "2.11.8.v20151215-133023-7559aed3c5")
+ * ("2.11.8", "" ) -> ("2.11.8", "2.11.8", "2.11.8.v20151215-133023-VFINAL-7559aed3c5")
+ * ("2.11.8", "M3" ) -> ("2.11.8-M3", "2.11.8-M3", "2.11.8.v20151215-133023-M3-7559aed3c5")
+ * ("2.11.8", "RC4" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed3c5")
+ * ("2.11.8-RC4", "SPLIT" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed3c5")
+ *
+ * A `baseVersionSuffix` of "SNAPSHOT" is the default, which is used for local snapshot builds. The PR validation
+ * job uses "SHA-SNAPSHOT". An empty suffix is used for releases. All other suffix values are treated as RC /
+ * milestone builds. The special suffix value "SPLIT" is used to split the real suffix off from `baseVersion`
+ * instead and then apply the usual logic. */
+ private lazy val versionPropertiesImpl: Def.Initialize[Versions] = Def.setting {
+
+ val (base, suffix) = {
+ val (b, s) = (baseVersion.value, baseVersionSuffix.value)
+ if(s == "SPLIT") {
+ val split = """([\w+\.]+)(-[\w+\.]+)??""".r
+ val split(b2, sOrNull) = b
+ (b2, Option(sOrNull).map(_.drop(1)).getOrElse(""))
+ } else (b, s)
}
def executeTool(tool: String) = {
@@ -62,24 +76,31 @@ object VersionUtil {
Process(cmd).lines.head
}
- val commitDate = executeTool("get-scala-commit-date")
- val commitSha = executeTool("get-scala-commit-sha")
+ val date = executeTool("get-scala-commit-date")
+ val sha = executeTool("get-scala-commit-sha").substring(0, 7) // The script produces 10 digits at the moment
- Versions(
- canonicalVersion = s"$ver-$commitDate-$commitSha",
- mavenVersion = s"${version.value}",
- osgiVersion = s"$ver.v$commitDate$osgiSuffix-$commitSha",
- commitSha = commitSha,
- commitDate = commitDate,
- isRelease = !osgiSuffix.isEmpty
- )
+ val (canonicalV, mavenV, osgiV, release) = suffix match {
+ case "SNAPSHOT" => (s"$base-$date-$sha", s"$base-SNAPSHOT", s"$base.v$date-$sha", false)
+ case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"$base-$sha-SNAPSHOT", s"$base.v$date-$sha", false)
+ case "" => (s"$base", s"$base", s"$base.v$date-VFINAL-$sha", true)
+ case suffix => (s"$base-$suffix", s"$base-$suffix", s"$base.v$date-$suffix-$sha", true)
+ }
+
+ Versions(canonicalV, mavenV, osgiV, sha, date, release)
}
- lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
- val props = versionProperties.value.toProperties
- val propFile = (resourceManaged in Compile).value / s"${thisProject.value.id}.properties"
- props.put("copyright.string", copyrightString.value)
+ private lazy val generateVersionPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
+ writeProps(versionProperties.value.toMap + ("copyright.string" -> copyrightString.value),
+ (resourceManaged in Compile).value / s"${thisProject.value.id}.properties")
+ }
+
+ private lazy val generateBuildCharacterPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task {
+ writeProps(versionProperties.value.toMap, (baseDirectory in ThisBuild).value / "buildcharacter.properties")
+ }
+ private def writeProps(m: Map[String, String], propFile: File): File = {
+ val props = new Properties
+ m.foreach { case (k, v) => props.put(k, v) }
// unfortunately, this will write properties in arbitrary order
// this makes it harder to test for stability of generated artifacts
// consider using https://github.com/etiennestuder/java-ordered-properties
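The comment above notes that java.util.Properties writes entries in arbitrary order. A minimal sketch, not part of this commit, of writing the map sorted by key instead (`writeSortedProps` is a hypothetical helper; .properties escaping of special characters is omitted for brevity):

    import java.io.{File, PrintWriter}

    def writeSortedProps(m: Map[String, String], propFile: File): File = {
      // Sort by key so that repeated builds produce byte-identical output.
      Option(propFile.getParentFile).foreach(_.mkdirs())
      val out = new PrintWriter(propFile, "ISO-8859-1")
      try m.toSeq.sortBy(_._1).foreach { case (k, v) => out.println(s"$k=$v") }
      finally out.close()
      propFile
    }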
diff --git a/scripts/common b/scripts/common
index b075469379..35199992bc 100644
--- a/scripts/common
+++ b/scripts/common
@@ -18,6 +18,9 @@ IVY_CACHE="$WORKSPACE/.ivy2"
mkdir -p $IVY_CACHE
rm -rf $IVY_CACHE/cache/org.scala-lang
+SBT_CMD=${sbtCmd-sbt}
+SBT_CMD="$SBT_CMD -sbt-version 0.13.9"
+
# temp dir where all 'non-build' operation are performed
TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX)
TMP_DIR="${TMP_ROOT_DIR}/tmp"
diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap
index 8d04e7fc79..cbb8e1026b 100755
--- a/scripts/jobs/integrate/bootstrap
+++ b/scripts/jobs/integrate/bootstrap
@@ -22,7 +22,7 @@
# - To prevent staging on sonatype (for testing), set publishToSonatype to anything but "yes"
# - Note: After building a release, the jenkins job provides an updated versions.properties file as artifact.
# Put this file in the Scala repo and create a pull request, and also update the file build.number.
-#
+#
# - Otherwise, a nightly release is built:
# - version number is read from the build.number file, extended with -$sha-nightly
@@ -73,7 +73,7 @@
# Requirements
-# - sbtCmd must point to sbt from sbt-extras
+# - SBT_CMD must point to sbt from sbt-extras
# - ~/.sonatype-curl, ~/.m2/settings.xml, ~/.credentials, ~/.credentials-sonatype, ~/.credentials-private-repo
# as defined by https://github.com/scala/scala-jenkins-infra/tree/master/templates/default
# - ~/.sbt/0.13/plugins/gpg.sbt with:
@@ -89,10 +89,6 @@ publishSonatypeTaskCore=${publishSonatypeTaskCore-"publish-signed"}
publishSonatypeTaskModules=${publishSonatypeTaskModules-"publish-signed"}
publishLockerPrivateTask=${publishLockerPrivateTask-$publishPrivateTask} # set to "init" to speed up testing of the script (if you already built locker before)
-sbtCmd=${sbtCmd-sbt} # TESTING (this is a marker for defaults to change when testing locally: should be sbtx on my mac)
-
-sbtCmd="$sbtCmd -sbt-version 0.13.8"
-
forceRebuild=${forceRebuild-no}
antBuildTask="${antBuildTask-nightly}" # TESTING leave empty to avoid the sanity check (don't set it to "init" because ant will croak)
@@ -180,8 +176,8 @@ function st_stagingRepoClose() {
sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13"
sbtBuild() {
- echo "### sbtBuild: "$sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
- $sbtCmd $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1
+ echo "### sbtBuild: "$SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@"
+ $SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1
}
sbtResolve() {
@@ -189,8 +185,8 @@ sbtResolve() {
touch build.sbt
# Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin.
cross=${4-binary}
- echo "### sbtResolve: $sbtCmd $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross"
- $sbtCmd $sbtArgs "${scalaVersionTasks[@]}" \
+ echo "### sbtResolve: $SBT_CMD $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross"
+ $SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" \
"set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \
'show update' >> $baseDir/logs/resolution 2>&1
}
@@ -257,7 +253,7 @@ buildContinuations() {
else
update scala scala-continuations $CONTINUATIONS_REF && gfxd
- $sbtCmd $sbtArgs 'project plugin' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
+ $SBT_CMD $sbtArgs 'project plugin' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
'set version := "'$CONTINUATIONS_VER'"' $clean "compile:package" test "${buildTasks[@]}" # https://github.com/scala/scala-continuations/pull/4
CONT_PLUG_BUILT="yes"
fi
@@ -266,7 +262,7 @@ buildContinuations() {
then echo "Found scala-continuations-library $CONTINUATIONS_VER; not building."
else
update scala scala-continuations $CONTINUATIONS_REF && gfxd
- $sbtCmd $sbtArgs 'project library' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
+ $SBT_CMD $sbtArgs 'project library' "${scalaVersionTasks[@]}" "${publishTasks[@]}" \
'set version := "'$CONTINUATIONS_VER'"' $clean test "${buildTasks[@]}"
CONT_LIB_BUILT="yes"
fi
diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core
index 9dff5a34b0..bb0056722d 100755
--- a/scripts/jobs/validate/publish-core
+++ b/scripts/jobs/validate/publish-core
@@ -15,17 +15,8 @@ case $prDryRun in
mkdir -p build/pack ; mkdir -p dists/maven/latest
;;
*)
- sha=$(git rev-parse HEAD) # TODO: warn if $repo_ref != $sha (we shouldn't do PR validation using symbolic gitrefs)
- echo "sha/repo_ref == $sha/$repo_ref ?"
-
- parseScalaProperties build.number
-
- ./pull-binary-libs.sh
- # "noyoudont" is there juuuust in case
- antDeployArgs="-Dmaven.version.suffix=\"-${sha:0:7}-SNAPSHOT\" -Dremote.snapshot.repository=$prRepoUrl -Drepository.credentials.id=pr-scala -Dremote.release.repository=noyoudont"
-
echo ">>> Getting Scala version number."
- ant -q $antDeployArgs init
+ $SBT_CMD "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile
parseScalaProperties buildcharacter.properties # produce maven_version_number
echo ">>> Checking availability of Scala ${maven_version_number} in $prRepoUrl."
@@ -36,7 +27,7 @@ case $prDryRun in
if $libraryAvailable && $reflectAvailable && $compilerAvailable; then
echo "Scala core already built!"
else
- ant $antDeployArgs $antBuildArgs publish-opt-nodocs
+ $SBT_CMD "setupPublishCore $prRepoUrl" $antBuildArgs publish
fi
mv buildcharacter.properties jenkins.properties # parsed by the jenkins job
diff --git a/src/compiler/scala/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
index 97ec7dbfc3..108ad0bc2e 100644
--- a/src/compiler/scala/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala
@@ -59,6 +59,10 @@ trait Parsers { self: Quasiquotes =>
override implicit lazy val fresh: FreshNameCreator = new FreshNameCreator(nme.QUASIQUOTE_PREFIX)
+ // Do not check for tuple arity. The placeholders can support arbitrary tuple sizes.
+ override def makeSafeTupleTerm(trees: List[Tree], offset: Offset): Tree = treeBuilder.makeTupleTerm(trees)
+ override def makeSafeTupleType(trees: List[Tree], offset: Offset): Tree = treeBuilder.makeTupleType(trees)
+
override val treeBuilder = new ParserTreeBuilder {
override implicit def fresh: FreshNameCreator = parser.fresh
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 4494a8ac8d..c04d305f9e 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -766,7 +766,58 @@ self =>
@inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
def readAnnots(part: => Tree): List[Tree] = tokenSeparated(AT, sepFirst = true, part)
-/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
+ /** Create a tuple type Tree. If the arity is not supported, a syntax error is emitted. */
+ def makeSafeTupleType(elems: List[Tree], offset: Offset) = {
+ if (checkTupleSize(elems, offset)) makeTupleType(elems)
+ else makeTupleType(Nil) // create a dummy node; makeTupleType(elems) would fail
+ }
+
+ /** Create a tuple term Tree. If the arity is not supported, a syntax error is emitted. */
+ def makeSafeTupleTerm(elems: List[Tree], offset: Offset) = {
+ checkTupleSize(elems, offset)
+ makeTupleTerm(elems)
+ }
+
+ private[this] def checkTupleSize(elems: List[Tree], offset: Offset): Boolean =
+ if (elems.lengthCompare(definitions.MaxTupleArity) > 0) {
+ syntaxError(offset, "too many elements for tuple: "+elems.length+", allowed: "+definitions.MaxTupleArity, skipIt = false)
+ false
+ } else true
+
+ /** Strip the artificial `Parens` node to create a tuple term Tree. */
+ def stripParens(t: Tree) = t match {
+ case Parens(ts) => atPos(t.pos) { makeSafeTupleTerm(ts, t.pos.point) }
+ case _ => t
+ }
+
+ /** Create tree representing (unencoded) binary operation expression or pattern. */
+ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
+ require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Incompatible args to makeBinop: !isExpr but targs=$targs")
+
+ def mkSelection(t: Tree) = {
+ def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
+ if (targs.isEmpty) sel else atPos(left.pos)(TypeApply(sel, targs))
+ }
+ def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
+ val arguments = right match {
+ case Parens(args) => mkNamed(args)
+ case _ => List(right)
+ }
+ if (isExpr) {
+ if (treeInfo.isLeftAssoc(op)) {
+ Apply(mkSelection(left), arguments)
+ } else {
+ val x = freshTermName()
+ Block(
+ List(ValDef(Modifiers(symtab.Flags.SYNTHETIC | symtab.Flags.ARTIFACT), x, TypeTree(), stripParens(left))),
+ Apply(mkSelection(right), List(Ident(x))))
+ }
+ } else {
+ Apply(Ident(op.encode), stripParens(left) :: arguments)
+ }
+ }
+
+ /* --------- OPERAND/OPERATOR STACK --------------------------------------- */
/** Modes for infix types. */
object InfixMode extends Enumeration {
@@ -870,7 +921,7 @@ self =>
atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) }
else {
ts foreach checkNotByNameOrVarargs
- val tuple = atPos(start) { makeTupleType(ts) }
+ val tuple = atPos(start) { makeSafeTupleType(ts, start) }
infixTypeRest(
compoundTypeRest(
annotTypeRest(
@@ -937,7 +988,7 @@ self =>
def simpleType(): Tree = {
val start = in.offset
simpleTypeRest(in.token match {
- case LPAREN => atPos(start)(makeTupleType(inParens(types())))
+ case LPAREN => atPos(start)(makeSafeTupleType(inParens(types()), start))
case USCORE => wildcardType(in.skipToken())
case _ =>
path(thisOK = false, typeOK = true) match {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 6e5a3f6ef7..cc9e39f430 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -43,44 +43,12 @@ abstract class TreeBuilder {
def makeTupleType(elems: List[Tree]) = gen.mkTupleType(elems)
- def stripParens(t: Tree) = t match {
- case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts) }
- case _ => t
- }
-
def makeAnnotated(t: Tree, annot: Tree): Tree =
atPos(annot.pos union t.pos)(Annotated(annot, t))
def makeSelfDef(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree)
- /** Create tree representing (unencoded) binary operation expression or pattern. */
- def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position, targs: List[Tree] = Nil): Tree = {
- require(isExpr || targs.isEmpty || targs.exists(_.isErroneous), s"Incompatible args to makeBinop: !isExpr but targs=$targs")
-
- def mkSelection(t: Tree) = {
- def sel = atPos(opPos union t.pos)(Select(stripParens(t), op.encode))
- if (targs.isEmpty) sel else atPos(left.pos)(TypeApply(sel, targs))
- }
- def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
- val arguments = right match {
- case Parens(args) => mkNamed(args)
- case _ => List(right)
- }
- if (isExpr) {
- if (treeInfo.isLeftAssoc(op)) {
- Apply(mkSelection(left), arguments)
- } else {
- val x = freshTermName()
- Block(
- List(ValDef(Modifiers(SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))),
- Apply(mkSelection(right), List(Ident(x))))
- }
- } else {
- Apply(Ident(op.encode), stripParens(left) :: arguments)
- }
- }
-
/** Tree for `od op`, start is start0 if od.pos is borked. */
def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
val start = if (od.pos.isDefined) od.pos.start else start0
diff --git a/src/intellij/scala-build.iml.SAMPLE b/src/intellij/scala-build.iml.SAMPLE
new file mode 100644
index 0000000000..bf722e464f
--- /dev/null
+++ b/src/intellij/scala-build.iml.SAMPLE
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module external.linked.project.id="scalaz-build" external.linked.project.path="$MODULE_DIR$/../../project" external.root.project.path="$MODULE_DIR$/../.." external.system.id="SBT" sbt.imports="sbt._, Keys._, dsl._, _root_.com.typesafe.sbt.SbtPgp.autoImport._, _root_.sbt.plugins.IvyPlugin, _root_.sbt.plugins.JvmPlugin, _root_.sbt.plugins.CorePlugin, _root_.sbt.plugins.JUnitXmlReportPlugin, _root_.com.typesafe.sbt.SbtPgp" sbt.resolvers="https://repo1.maven.org/maven2/|maven|public, /Users/jason/.ivy2/cache|ivy|Local cache" type="SBT_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="false">
+ <output url="file://$MODULE_DIR$/../../project/target/idea-classes" />
+ <output-test url="file://$MODULE_DIR$/../../project/target/idea-test-classes" />
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../project">
+ <sourceFolder url="file://$MODULE_DIR$/../../project" isTestSource="false" />
+ <excludeFolder url="file://$MODULE_DIR$/../../project/project/target" />
+ <excludeFolder url="file://$MODULE_DIR$/../../project/target" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module-library">
+ <library name="SBT: sbt-and-plugins">
+ <CLASSES>
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.commons/commons-lang3/jars/commons-lang3-3.3.2.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.pantsbuild/jarjar/jars/jarjar-1.6.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant/jars/ant-1.9.6.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.ant/ant-launcher/jars/ant-launcher-1.9.6.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.ow2.asm/asm/jars/asm-5.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.ow2.asm/asm-commons/jars/asm-commons-5.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.ow2.asm/asm-tree/jars/asm-tree-5.0.4.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.maven/maven-plugin-api/jars/maven-plugin-api-3.3.3.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.maven/maven-model/jars/maven-model-3.3.3.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.codehaus.plexus/plexus-utils/jars/plexus-utils-3.0.20.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.maven/maven-artifact/jars/maven-artifact-3.3.3.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.sisu/org.eclipse.sisu.plexus/eclipse-plugins/org.eclipse.sisu.plexus-0.3.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/javax.enterprise/cdi-api/jars/cdi-api-1.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/javax.annotation/jsr250-api/jars/jsr250-api-1.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/javax.inject/javax.inject/jars/javax.inject-1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.eclipse.sisu/org.eclipse.sisu.inject/eclipse-plugins/org.eclipse.sisu.inject-0.3.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.codehaus.plexus/plexus-component-annotations/jars/plexus-component-annotations-1.5.5.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.codehaus.plexus/plexus-classworlds/bundles/plexus-classworlds-2.5.2.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/biz.aQute.bnd/biz.aQute.bnd/jars/biz.aQute.bnd-2.4.1.jar!/" />
+ <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.5/lib/scala-library.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/sbt/jars/sbt-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main/jars/main-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/actions/jars/actions-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/classpath/jars/classpath-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.5/lib/scala-compiler.jar!/" />
+ <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.5/lib/scala-reflect.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/interface/jars/interface-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/io/jars/io-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/control/jars/control-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/launcher-interface/jars/launcher-interface-1.0.0-M1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/completion/jars/completion-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/collections/jars/collections-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/jline/jline/jars/jline-2.11.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/api/jars/api-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-integration/jars/compiler-integration-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/incremental-compiler/jars/incremental-compiler-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/logging/jars/logging-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/process/jars/process-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/relation/jars/relation-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compile/jars/compile-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/classfile/jars/classfile-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/persist/jars/persist-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-tools.sbinary/sbinary_2.10/jars/sbinary_2.10-0.4.2.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-ivy-integration/jars/compiler-ivy-integration-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/ivy/jars/ivy-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/cross/jars/cross-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt.ivy/ivy/jars/ivy-2.3.0-sbt-c5d1b95fdcc1e1007740ffbecf4eb07abc51ec93.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/com.jcraft/jsch/jars/jsch-0.1.46.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/serialization_2.10/jars/serialization_2.10-0.1.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang.modules/scala-pickling_2.10/jars/scala-pickling_2.10-0.10.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scalamacros/quasiquotes_2.10/jars/quasiquotes_2.10-2.0.1.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.json4s/json4s-core_2.10/jars/json4s-core_2.10-3.2.10.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.json4s/json4s-ast_2.10/jars/json4s-ast_2.10-3.2.10.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/com.thoughtworks.paranamer/paranamer/jars/paranamer-2.6.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.spire-math/jawn-parser_2.10/jars/jawn-parser_2.10-0.6.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.spire-math/json4s-support_2.10/jars/json4s-support_2.10-0.6.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/run/jars/run-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/task-system/jars/task-system-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tasks/jars/tasks-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tracking/jars/tracking-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/cache/jars/cache-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/testing/jars/testing-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-agent/jars/test-agent-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/jars/test-interface-1.0.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main-settings/jars/main-settings-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/apply-macro/jars/apply-macro-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/command/jars/command-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/logic/jars/logic-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-bin-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/compiler-interface/jars/compiler-interface-src-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/precompiled-2_8_2/jars/compiler-interface-bin-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/precompiled-2_9_2/jars/compiler-interface-bin-0.13.9.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/precompiled-2_9_3/jars/compiler-interface-bin-0.13.9.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES>
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/main/srcs/main-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-library/srcs/scala-library-2.10.5-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-reflect/srcs/scala-reflect-2.10.5-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-compiler/srcs/scala-compiler-2.10.5-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/actions/srcs/actions-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/sbt/srcs/sbt-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/run/srcs/run-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/test-interface/srcs/test-interface-1.0-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/testing/srcs/testing-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/tasks/srcs/tasks-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/ivy/srcs/ivy-0.13.9-sources.jar!/" />
+ <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-sbt/completion/srcs/completion-0.13.9-sources.jar!/" />
+ </SOURCES>
+ </library>
+ </orderEntry>
+ </component>
+</module> \ No newline at end of file
diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE
index 47ac2be188..420f559097 100644
--- a/src/intellij/scala.ipr.SAMPLE
+++ b/src/intellij/scala.ipr.SAMPLE
@@ -46,6 +46,7 @@
<module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
<module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
<module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scala-build.iml" filepath="$PROJECT_DIR$/scala-build.iml" />
<module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
@@ -125,4 +126,4 @@
<SOURCES />
</library>
</component>
-</project> \ No newline at end of file
+</project>
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index ed536f10a8..8d88b1c6b1 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -10,7 +10,7 @@ package scala
package collection
import mutable.ArrayBuffer
-import scala.annotation.migration
+import scala.annotation.{tailrec, migration}
import immutable.Stream
import scala.collection.generic.CanBuildFrom
import scala.annotation.unchecked.{ uncheckedVariance => uV }
@@ -168,8 +168,10 @@ object Iterator {
private[scala] final class ConcatIterator[+A](private[this] var current: Iterator[A], initial: Vector[() => Iterator[A]]) extends Iterator[A] {
@deprecated def this(initial: Vector[() => Iterator[A]]) = this(Iterator.empty, initial) // for binary compatibility
private[this] var queue: Vector[() => Iterator[A]] = initial
+ private[this] var currentHasNextChecked = false
// Advance current to the next non-empty iterator
// current is set to null when all iterators are exhausted
+ @tailrec
private[this] def advance(): Boolean = {
if (queue.isEmpty) {
current = null
@@ -178,20 +180,57 @@ object Iterator {
else {
current = queue.head()
queue = queue.tail
- current.hasNext || advance()
+ if (current.hasNext) {
+ currentHasNextChecked = true
+ true
+ } else advance()
}
}
- def hasNext = (current ne null) && (current.hasNext || advance())
- def next() = if (hasNext) current.next else Iterator.empty.next
+ def hasNext =
+ if (currentHasNextChecked) true
+ else if (current eq null) false
+ else if (current.hasNext) {
+ currentHasNextChecked = true
+ true
+ } else advance()
+ def next() =
+ if (hasNext) {
+ currentHasNextChecked = false
+ current.next()
+ } else Iterator.empty.next()
override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] =
new ConcatIterator(current, queue :+ (() => that.toIterator))
}
private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] {
+ private[this] var state = 0 // 0: lhs not checked, 1: lhs has next, 2: switched to rhs
private[this] lazy val rhs: Iterator[A] = that.toIterator
- def hasNext = lhs.hasNext || rhs.hasNext
- def next = if (lhs.hasNext) lhs.next else rhs.next
+ def hasNext = state match {
+ case 0 =>
+ if (lhs.hasNext) {
+ state = 1
+ true
+ } else {
+ state = 2
+ rhs.hasNext
+ }
+ case 1 => true
+ case _ => rhs.hasNext
+ }
+ def next() = state match {
+ case 0 =>
+ if (lhs.hasNext) lhs.next()
+ else {
+ state = 2
+ rhs.next()
+ }
+ case 1 =>
+ state = 0
+ lhs.next()
+ case _ =>
+ rhs.next()
+ }
override def ++[B >: A](that: => GenTraversableOnce[B]) =
new ConcatIterator(this, Vector(() => that.toIterator))
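The rewritten ConcatIterator and JoinIterator above cache the outcome of hasNext so that the underlying iterator's hasNext, which may be expensive or side-effecting, is consulted at most once per element (see the SI-9623 test added to IteratorTest later in this change). A minimal sketch of the caching pattern, with the hypothetical name CachedHasNext standing in for the real classes and not part of the patch:

final class CachedHasNext[A](underlying: Iterator[A]) extends Iterator[A] {
  private[this] var knownHasNext = false            // true once underlying.hasNext has returned true
  def hasNext: Boolean =
    knownHasNext || { knownHasNext = underlying.hasNext; knownHasNext }
  def next(): A =
    if (hasNext) { knownHasNext = false; underlying.next() }  // the cached answer is consumed with the element
    else Iterator.empty.next()                      // throws NoSuchElementException
}

With a wrapper like this, fully traversing n elements calls underlying.hasNext at most n + 1 times, which is the invariant the new counter-based test asserts.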
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 4ff49c44d0..e099853463 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -157,6 +157,9 @@ object ManifestFactory {
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
+ override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] =
+ if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]]
+ else super.arrayClass(tp)
private def readResolve(): Any = Manifest.Unit
}
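The Unit manifest needs this override because Unit values are represented as scala.runtime.BoxedUnit on the JVM, so the array class for Unit elements has to be Array[BoxedUnit] rather than an array of the primitive void type, which cannot exist at runtime. That is what lets wrap and the deprecated newArray2-style helpers on ClassTag[Unit] work, as exercised by the t9534 test at the end of this change. A hedged sketch of the behaviour the override enables, not part of the patch:

import scala.reflect.ClassTag

val ct = implicitly[ClassTag[Unit]]
val a: Array[Unit] = ct.newArray(1)               // a one-element Unit array
a(0) = ()
val aa: Array[Array[Unit]] = ct.wrap.newArray(1)  // wrap derives its element class via arrayClass(runtimeClass)
aa(0) = a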
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index d4a5e2f0e8..7ea597eac9 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -105,7 +105,7 @@ private[scala] trait PropertiesTrait {
* or "version (unknown)" if it cannot be determined.
*/
val versionString = "version " + scalaPropOrElse("version.number", "(unknown)")
- val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2015, LAMP/EPFL")
+ val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2016, LAMP/EPFL")
/** This is the encoding to use reading in source files, overridden with -encoding.
* Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 1b3cfa236f..9daca10e63 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -280,7 +280,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
{
if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value))
- <div id="footer">Scala programming documentation. Copyright (c) 2003-2015 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
+ <div id="footer">Scala programming documentation. Copyright (c) 2003-2016 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
else
<div id="footer"> { tpl.universe.settings.docfooter.value } </div>
}
diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties
index 333f6ce715..9bb8d130ea 100644
--- a/src/scalap/decoder.properties
+++ b/src/scalap/decoder.properties
@@ -1,2 +1,2 @@
version.number=2.0.1
-copyright.string=(c) 2002-2015 LAMP/EPFL
+copyright.string=(c) 2002-2016 LAMP/EPFL
diff --git a/test/files/neg/t9572.check b/test/files/neg/t9572.check
new file mode 100644
index 0000000000..b95bd015cf
--- /dev/null
+++ b/test/files/neg/t9572.check
@@ -0,0 +1,7 @@
+t9572.scala:3: error: too many elements for tuple: 23, allowed: 22
+ val term23 = (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
+ ^
+t9572.scala:5: error: too many elements for tuple: 23, allowed: 22
+ val type23: (Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int) = null
+ ^
+two errors found
diff --git a/test/files/neg/t9572.scala b/test/files/neg/t9572.scala
new file mode 100644
index 0000000000..32b2db320e
--- /dev/null
+++ b/test/files/neg/t9572.scala
@@ -0,0 +1,6 @@
+class T9572 {
+ val term22 = (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22)
+ val term23 = (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23)
+ val type22: (Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int) = null
+ val type23: (Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int) = null
+}
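The new neg test pins down the diagnostic for tuple literals and tuple types with more than 22 elements, since the standard library stops at Tuple22, and the checked message reports both the attempted and the allowed arity. When more than 22 values really have to travel together, the usual workarounds are nesting tuples or defining a class; a hedged sketch, not part of this change:

// Nest so that no single tuple exceeds 22 elements:
val nested: ((Int, Int, Int), (Int, Int)) = ((1, 2, 3), (4, 5))

// Or give the data a name; case classes may have more than 22 fields in 2.11+,
// though some tuple-based conveniences are then unavailable.
case class Wide(f1: Int, f2: Int, f3: Int, f4: Int)  // extend with as many fields as needed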
diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala
index 1c1e50aed9..329c85127a 100644
--- a/test/junit/scala/collection/IteratorTest.scala
+++ b/test/junit/scala/collection/IteratorTest.scala
@@ -164,4 +164,32 @@ class IteratorTest {
assertEquals(1, y.next)
assertFalse(x.hasNext) // was true, after advancing underlying iterator
}
+ // SI-9623
+ @Test def noExcessiveHasNextInJoinIterator: Unit = {
+ var counter = 0
+ val exp = List(1,2,3,1,2,3)
+ def it: Iterator[Int] = new Iterator[Int] {
+ val parent = List(1,2,3).iterator
+ def next(): Int = parent.next
+ def hasNext: Boolean = { counter += 1; parent.hasNext }
+ }
+ // Iterate separately
+ val res = new mutable.ArrayBuffer[Int]
+ it.foreach(res += _)
+ it.foreach(res += _)
+ assertSameElements(exp, res)
+ assertEquals(8, counter)
+ // JoinIterator
+ counter = 0
+ res.clear
+ (it ++ it).foreach(res += _)
+ assertSameElements(exp, res)
+ assertEquals(8, counter) // was 17
+ // ConcatIterator
+ counter = 0
+ res.clear
+ (Iterator.empty ++ it ++ it).foreach(res += _)
+ assertSameElements(exp, res)
+ assertEquals(8, counter) // was 14
+ }
}
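The expected counts in this test follow from the iterator's shape: one full pass over the three-element it checks hasNext four times (once per element plus the final check that returns false), so two passes give 8 whether they run separately, through JoinIterator (it ++ it), or through ConcatIterator (Iterator.empty ++ it ++ it); the old implementations re-checked hasNext and reached 17 and 14. A hedged, self-contained version of the same counting idea outside the JUnit harness:

object HasNextCount {
  def main(args: Array[String]): Unit = {
    var counter = 0
    def it: Iterator[Int] = new Iterator[Int] {
      private[this] val parent = List(1, 2, 3).iterator
      def next(): Int = parent.next()
      def hasNext: Boolean = { counter += 1; parent.hasNext }
    }
    (it ++ it).foreach(_ => ())   // a single concatenated traversal of six elements
    println(counter)              // 8 with the hasNext caching introduced in this change
  }
}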
diff --git a/test/junit/scala/reflect/ClassTag.scala b/test/junit/scala/reflect/ClassTag.scala
index 90cc981fc1..49022dccda 100644
--- a/test/junit/scala/reflect/ClassTag.scala
+++ b/test/junit/scala/reflect/ClassTag.scala
@@ -26,4 +26,14 @@ class ClassTagTest {
@Test def checkDouble = assertTrue(checkNotInt[Double] (0.toDouble))
@Test def checkBoolean = assertTrue(checkNotInt[Boolean](false))
@Test def checkUnit = assertTrue(checkNotInt[Unit] ({}))
-} \ No newline at end of file
+
+ @Test def t9534: Unit = {
+ val ct = implicitly[scala.reflect.ClassTag[Unit]]
+ val a1 = ct.newArray(1)
+ a1(0) = ()
+ val a2 = ct.wrap.newArray(1)
+ a2(0) = a1
+ val a3 = ct.newArray2(1)
+ a3(0) = a1
+ }
+}